ob-metaflow-stubs 6.0.3.183rc1__py2.py3-none-any.whl → 6.0.3.184__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +780 -780
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +96 -96
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +1 -1
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +1 -1
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +1 -1
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +1 -1
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +1 -1
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +1 -1
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +1 -1
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  88. metaflow-stubs/multicore_utils.pyi +1 -1
  89. metaflow-stubs/ob_internal.pyi +1 -2
  90. metaflow-stubs/parameters.pyi +2 -2
  91. metaflow-stubs/plugins/__init__.pyi +11 -11
  92. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  93. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  94. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  95. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  96. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  99. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  101. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  102. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  103. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  105. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  106. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  107. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  108. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  109. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  110. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  111. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  113. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +1 -1
  115. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  116. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  117. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  118. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  119. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  121. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +1 -1
  122. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  123. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  124. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  125. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +1 -1
  126. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  127. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  128. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  129. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  130. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  131. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  132. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  133. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  138. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  142. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  143. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  144. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  145. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  146. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  147. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  148. metaflow-stubs/plugins/datatools/s3/s3.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  151. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  152. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  153. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  154. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  155. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  156. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  157. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  158. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +1 -1
  159. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  160. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  163. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  164. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  165. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  166. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  169. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  170. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  171. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/perimeters.pyi +1 -1
  173. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  174. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  175. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  177. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  181. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  182. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  184. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  185. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  187. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  188. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  189. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  192. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  193. metaflow-stubs/profilers/__init__.pyi +1 -1
  194. metaflow-stubs/pylint_wrapper.pyi +1 -1
  195. metaflow-stubs/runner/__init__.pyi +1 -1
  196. metaflow-stubs/runner/deployer.pyi +3 -3
  197. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  198. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  199. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  200. metaflow-stubs/runner/nbrun.pyi +1 -1
  201. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  202. metaflow-stubs/runner/utils.pyi +3 -3
  203. metaflow-stubs/system/__init__.pyi +1 -1
  204. metaflow-stubs/system/system_logger.pyi +2 -2
  205. metaflow-stubs/system/system_monitor.pyi +1 -1
  206. metaflow-stubs/tagging_util.pyi +1 -1
  207. metaflow-stubs/tuple_util.pyi +1 -1
  208. metaflow-stubs/user_configs/__init__.pyi +1 -1
  209. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  210. metaflow-stubs/user_configs/config_options.pyi +2 -2
  211. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  212. {ob_metaflow_stubs-6.0.3.183rc1.dist-info → ob_metaflow_stubs-6.0.3.184.dist-info}/METADATA +1 -1
  213. ob_metaflow_stubs-6.0.3.184.dist-info/RECORD +216 -0
  214. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +0 -6
  215. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +0 -51
  216. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +0 -65
  217. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +0 -74
  218. ob_metaflow_stubs-6.0.3.183rc1.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.183rc1.dist-info → ob_metaflow_stubs-6.0.3.184.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.183rc1.dist-info → ob_metaflow_stubs-6.0.3.184.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.18.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-23T21:15:44.995494 #
+ # Generated on 2025-06-25T20:19:31.361984 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import events as events
+ from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import events as events
  from . import metaflow_git as metaflow_git
- from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -155,38 +155,19 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies that this step should execute on DGX cloud.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...
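For reference, a minimal usage sketch of the `@nvidia` step decorator documented in the hunk above. The flow, step, and argument values (`gpu_type`, `queue_timeout`) are illustrative assumptions; only the parameters named in the stub are used, and the decorator is imported at the top level as the stub declares.

```python
# Illustrative sketch only -- not part of the stub diff.
# Assumes the Outerbounds Metaflow distribution these stubs describe, where
# `nvidia` is exposed as a top-level decorator (as the stub above declares).
from metaflow import FlowSpec, step, nvidia


class GpuFlow(FlowSpec):  # hypothetical flow name

    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)  # argument values are assumptions
    @step
    def start(self):
        # Runs on DGX Cloud with the requested GPU, per the docstring above.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GpuFlow()
```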
 
@@ -249,291 +230,248 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

- def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- This decorator is used to run vllm APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @vllm(
- model="...",
- ...
- )
+ Specifies that the step will success under all circumstances.

- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
-
- Valid model options
- -------------------
- Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
-
- NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
- If you need multiple models, you must create multiple @vllm decorators.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- model: str
- HuggingFace model identifier to be served by vLLM.
- backend: str
- Determines where and how to run the vLLM process.
- debug: bool
- Whether to turn on verbose debugging logs.
- kwargs : Any
- Any other keyword arguments are passed directly to the vLLM engine.
- This allows for flexible configuration of vLLM server settings.
- For example, `tensor_parallel_size=2`.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Enables loading / saving of models within a step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Specifies that the step will success under all circumstances.
+ Enables loading / saving of models within a step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Internal decorator to support Fast bakery
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

@@ -588,235 +526,91 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
588
526
  """
589
527
  ...
590
528
 
591
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
529
+ @typing.overload
530
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
592
531
  """
593
- Specifies that this step should execute on DGX cloud.
532
+ Enables checkpointing for a step.
533
+
594
534
 
595
535
 
596
536
  Parameters
597
537
  ----------
598
- gpu : int
599
- Number of GPUs to use.
600
- gpu_type : str
601
- Type of Nvidia GPU to use.
602
- queue_timeout : int
603
- Time to keep the job in NVCF's queue.
538
+ load_policy : str, default: "fresh"
539
+ The policy for loading the checkpoint. The following policies are supported:
540
+ - "eager": Loads the the latest available checkpoint within the namespace.
541
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
542
+ will be loaded at the start of the task.
543
+ - "none": Do not load any checkpoint
544
+ - "fresh": Loads the lastest checkpoint created within the running Task.
545
+ This mode helps loading checkpoints across various retry attempts of the same task.
546
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
547
+ created within the task will be loaded when the task is retries execution on failure.
548
+
549
+ temp_dir_root : str, default: None
550
+ The root directory under which `current.checkpoint.directory` will be created.
604
551
  """
605
552
  ...
606
553
 
607
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
608
- """
609
- Specifies that this step is used to deploy an instance of the app.
610
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
611
-
612
-
613
- Parameters
614
- ----------
615
- app_port : int
616
- Number of GPUs to use.
617
- app_name : str
618
- Name of the app to deploy.
619
- """
554
+ @typing.overload
555
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
620
556
  ...
621
557
 
622
558
  @typing.overload
623
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
559
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
560
+ ...
561
+
562
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
624
563
  """
625
- Enables loading / saving of models within a step.
564
+ Enables checkpointing for a step.
626
565
 
627
566
 
628
567
 
629
568
  Parameters
630
569
  ----------
631
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
632
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
633
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
634
- - `current.checkpoint`
635
- - `current.model`
636
- - `current.huggingface_hub`
637
-
638
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
639
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
640
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
570
+ load_policy : str, default: "fresh"
571
+ The policy for loading the checkpoint. The following policies are supported:
572
+ - "eager": Loads the the latest available checkpoint within the namespace.
573
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
574
+ will be loaded at the start of the task.
575
+ - "none": Do not load any checkpoint
576
+ - "fresh": Loads the lastest checkpoint created within the running Task.
577
+ This mode helps loading checkpoints across various retry attempts of the same task.
578
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
579
+ created within the task will be loaded when the task is retries execution on failure.
641
580
 
642
581
  temp_dir_root : str, default: None
643
- The root directory under which `current.model.loaded` will store loaded models
582
+ The root directory under which `current.checkpoint.directory` will be created.
644
583
  """
645
584
  ...
646
585
 
647
- @typing.overload
648
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
649
- ...
650
-
651
- @typing.overload
652
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
653
- ...
654
-
655
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
586
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
656
587
  """
657
- Enables loading / saving of models within a step.
658
-
588
+ Specifies that this step should execute on DGX cloud.
659
589
 
660
590
 
661
591
  Parameters
662
592
  ----------
663
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
664
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
665
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
666
- - `current.checkpoint`
667
- - `current.model`
668
- - `current.huggingface_hub`
669
-
670
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
671
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
672
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
673
-
674
- temp_dir_root : str, default: None
675
- The root directory under which `current.model.loaded` will store loaded models
593
+ gpu : int
594
+ Number of GPUs to use.
595
+ gpu_type : str
596
+ Type of Nvidia GPU to use.
676
597
  """
677
598
  ...
678
599
 
679
600
  @typing.overload
680
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
601
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
681
602
  """
682
- Decorator prototype for all step decorators. This function gets specialized
683
- and imported for all decorators types by _import_plugin_decorators().
603
+ Internal decorator to support Fast bakery
684
604
  """
685
605
  ...
686
606
 
687
607
  @typing.overload
688
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
608
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
689
609
  ...
690
610
 
691
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
611
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
692
612
  """
693
- Decorator prototype for all step decorators. This function gets specialized
694
- and imported for all decorators types by _import_plugin_decorators().
695
- """
696
- ...
697
-
698
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
699
- """
700
- Decorator that helps cache, version and store models/datasets from huggingface hub.
701
-
702
-
703
- Parameters
704
- ----------
705
- temp_dir_root : str, optional
706
- The root directory that will hold the temporary directory where objects will be downloaded.
707
-
708
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
709
- The list of repos (models/datasets) to load.
710
-
711
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
712
-
713
- - If repo (model/dataset) is not found in the datastore:
714
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
715
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
716
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
717
-
718
- - If repo is found in the datastore:
719
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
720
- """
721
- ...
722
-
723
- @typing.overload
724
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
725
- """
726
- Enables checkpointing for a step.
727
-
728
-
729
-
730
- Parameters
731
- ----------
732
- load_policy : str, default: "fresh"
733
- The policy for loading the checkpoint. The following policies are supported:
734
- - "eager": Loads the the latest available checkpoint within the namespace.
735
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
736
- will be loaded at the start of the task.
737
- - "none": Do not load any checkpoint
738
- - "fresh": Loads the lastest checkpoint created within the running Task.
739
- This mode helps loading checkpoints across various retry attempts of the same task.
740
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
741
- created within the task will be loaded when the task retries execution after a failure.
742
-
743
- temp_dir_root : str, default: None
744
- The root directory under which `current.checkpoint.directory` will be created.
745
- """
746
- ...
747
-
748
- @typing.overload
749
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
750
- ...
751
-
752
- @typing.overload
753
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
754
- ...
755
-
756
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
757
- """
758
- Enables checkpointing for a step.
759
-
760
-
761
-
762
- Parameters
763
- ----------
764
- load_policy : str, default: "fresh"
765
- The policy for loading the checkpoint. The following policies are supported:
766
- - "eager": Loads the the latest available checkpoint within the namespace.
767
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
768
- will be loaded at the start of the task.
769
- - "none": Do not load any checkpoint
770
- - "fresh": Loads the lastest checkpoint created within the running Task.
771
- This mode helps loading checkpoints across various retry attempts of the same task.
772
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
773
- created within the task will be loaded when the task retries execution after a failure.
774
-
775
- temp_dir_root : str, default: None
776
- The root directory under which `current.checkpoint.directory` will be created.
777
- """
778
- ...
779
-
780
- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
781
- """
782
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
783
-
784
- User code call
785
- --------------
786
- @ollama(
787
- models=[...],
788
- ...
789
- )
790
-
791
- Valid backend options
792
- ---------------------
793
- - 'local': Run as a separate process on the local task machine.
794
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
795
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
796
-
797
- Valid model options
798
- -------------------
799
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
800
-
801
-
802
- Parameters
803
- ----------
804
- models: list[str]
805
- List of Ollama containers running models in sidecars.
806
- backend: str
807
- Determines where and how to run the Ollama process.
808
- force_pull: bool
809
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
810
- cache_update_policy: str
811
- Cache update policy: "auto", "force", or "never".
812
- force_cache_update: bool
813
- Simple override for "force" cache update policy.
814
- debug: bool
815
- Whether to turn on verbose debugging logs.
816
- circuit_breaker_config: dict
817
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
818
- timeout_config: dict
819
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
613
+ Internal decorator to support Fast bakery
820
614
  """
821
615
  ...
822
616
 
@@ -879,88 +673,6 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
879
673
  """
880
674
  ...
881
675
 
882
- @typing.overload
883
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
884
- """
885
- Specifies environment variables to be set prior to the execution of a step.
886
-
887
-
888
- Parameters
889
- ----------
890
- vars : Dict[str, str], default {}
891
- Dictionary of environment variables to set.
892
- """
893
- ...
894
-
895
- @typing.overload
896
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
897
- ...
898
-
899
- @typing.overload
900
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
901
- ...
902
-
903
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
904
- """
905
- Specifies environment variables to be set prior to the execution of a step.
906
-
907
-
908
- Parameters
909
- ----------
910
- vars : Dict[str, str], default {}
911
- Dictionary of environment variables to set.
912
- """
913
- ...
914
-
915
- @typing.overload
916
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
917
- """
918
- Creates a human-readable report, a Metaflow Card, after this step completes.
919
-
920
- Note that you may add multiple `@card` decorators in a step with different parameters.
921
-
922
-
923
- Parameters
924
- ----------
925
- type : str, default 'default'
926
- Card type.
927
- id : str, optional, default None
928
- If multiple cards are present, use this id to identify this card.
929
- options : Dict[str, Any], default {}
930
- Options passed to the card. The contents depend on the card type.
931
- timeout : int, default 45
932
- Interrupt reporting if it takes more than this many seconds.
933
- """
934
- ...
935
-
936
- @typing.overload
937
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
938
- ...
939
-
940
- @typing.overload
941
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
942
- ...
943
-
944
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
945
- """
946
- Creates a human-readable report, a Metaflow Card, after this step completes.
947
-
948
- Note that you may add multiple `@card` decorators in a step with different parameters.
949
-
950
-
951
- Parameters
952
- ----------
953
- type : str, default 'default'
954
- Card type.
955
- id : str, optional, default None
956
- If multiple cards are present, use this id to identify this card.
957
- options : Dict[str, Any], default {}
958
- Options passed to the card. The contents depend on the card type.
959
- timeout : int, default 45
960
- Interrupt reporting if it takes more than this many seconds.
961
- """
962
- ...
963
-
964
676
  @typing.overload
965
677
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
966
678
  """
@@ -1016,240 +728,291 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1016
728
  """
1017
729
  ...
1018
730
 
1019
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
731
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1020
732
  """
1021
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1022
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1023
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1024
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1025
- starts only after all sensors finish.
733
+ Specifies that this step is used to deploy an instance of the app.
734
+ Requires that self.app_name, self.app_port, self.entrypoint, and self.deployDir are set.
1026
735
 
1027
736
 
1028
737
  Parameters
1029
738
  ----------
1030
- timeout : int
1031
- Time, in seconds before the task times out and fails. (Default: 3600)
1032
- poke_interval : int
1033
- Time in seconds that the job should wait in between each try. (Default: 60)
1034
- mode : str
1035
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1036
- exponential_backoff : bool
1037
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1038
- pool : str
1039
- the slot pool this task should run in,
1040
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1041
- soft_fail : bool
1042
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1043
- name : str
1044
- Name of the sensor on Airflow
1045
- description : str
1046
- Description of sensor in the Airflow UI
1047
- bucket_key : Union[str, List[str]]
1048
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1049
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1050
- bucket_name : str
1051
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1052
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1053
- wildcard_match : bool
1054
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1055
- aws_conn_id : str
1056
- a reference to the s3 connection on Airflow. (Default: None)
1057
- verify : bool
1058
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
739
+ app_port : int
740
+ Port number on which the app is served.
741
+ app_name : str
742
+ Name of the app to deploy.
1059
743
  """
1060
744
  ...
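As a usage sketch only: the docstring above says the step must set `self.app_name`, `self.app_port`, `self.entrypoint` and `self.deployDir`, so a deployment step might look roughly like this, assuming `app_deploy` is importable from `metaflow` as these stubs declare; the entrypoint and directory values are hypothetical.

```python
from metaflow import FlowSpec, step, app_deploy

class AppDeploySketchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.deploy)

    # Deploy one instance of the app on the given port.
    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def deploy(self):
        self.app_name = "demo-app"           # matches the decorator argument
        self.app_port = 8080
        self.entrypoint = "python serve.py"  # hypothetical entrypoint
        self.deployDir = "./app"             # hypothetical deploy directory
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    AppDeploySketchFlow()
```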
1061
745
 
1062
746
  @typing.overload
1063
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
747
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1064
748
  """
1065
- Specifies the flow(s) that this flow depends on.
1066
-
1067
- ```
1068
- @trigger_on_finish(flow='FooFlow')
1069
- ```
1070
- or
1071
- ```
1072
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1073
- ```
1074
- This decorator respects the @project decorator and triggers the flow
1075
- when upstream runs within the same namespace complete successfully
1076
-
1077
- Additionally, you can specify project aware upstream flow dependencies
1078
- by specifying the fully qualified project_flow_name.
1079
- ```
1080
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1081
- ```
1082
- or
1083
- ```
1084
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1085
- ```
1086
-
1087
- You can also specify just the project or project branch (other values will be
1088
- inferred from the current project or project branch):
1089
- ```
1090
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1091
- ```
1092
-
1093
- Note that `branch` is typically one of:
1094
- - `prod`
1095
- - `user.bob`
1096
- - `test.my_experiment`
1097
- - `prod.staging`
749
+ Specifies secrets to be retrieved and injected as environment variables prior to
750
+ the execution of a step.
1098
751
 
1099
752
 
1100
753
  Parameters
1101
754
  ----------
1102
- flow : Union[str, Dict[str, str]], optional, default None
1103
- Upstream flow dependency for this flow.
1104
- flows : List[Union[str, Dict[str, str]]], default []
1105
- Upstream flow dependencies for this flow.
1106
- options : Dict[str, Any], default {}
1107
- Backend-specific configuration for tuning eventing behavior.
755
+ sources : List[Union[str, Dict[str, Any]]], default: []
756
+ List of secret specs, defining how the secrets are to be retrieved
1108
757
  """
1109
758
  ...
1110
759
 
1111
760
  @typing.overload
1112
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
761
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1113
762
  ...
1114
763
 
1115
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
764
+ @typing.overload
765
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
766
+ ...
767
+
768
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1116
769
  """
1117
- Specifies the flow(s) that this flow depends on.
770
+ Specifies secrets to be retrieved and injected as environment variables prior to
771
+ the execution of a step.
1118
772
 
1119
- ```
1120
- @trigger_on_finish(flow='FooFlow')
1121
- ```
1122
- or
1123
- ```
1124
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1125
- ```
1126
- This decorator respects the @project decorator and triggers the flow
1127
- when upstream runs within the same namespace complete successfully
1128
773
 
1129
- Additionally, you can specify project aware upstream flow dependencies
1130
- by specifying the fully qualified project_flow_name.
1131
- ```
1132
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1133
- ```
1134
- or
1135
- ```
1136
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1137
- ```
774
+ Parameters
775
+ ----------
776
+ sources : List[Union[str, Dict[str, Any]]], default: []
777
+ List of secret specs, defining how the secrets are to be retrieved
778
+ """
779
+ ...
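A short sketch of the `@secrets` decorator in use; the secret name and the environment variable it is expected to populate are hypothetical.

```python
import os
from metaflow import FlowSpec, step, secrets

class SecretsSketchFlow(FlowSpec):

    # Fetch the named secret and expose its keys as environment variables
    # before the step body runs.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        print("db user:", os.environ.get("DB_USER"))  # key assumed to exist in the secret
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsSketchFlow()
```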
780
+
781
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
782
+ """
783
+ This decorator is used to run vllm APIs as Metaflow task sidecars.
1138
784
 
1139
- You can also specify just the project or project branch (other values will be
1140
- inferred from the current project or project branch):
1141
- ```
1142
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1143
- ```
785
+ User code call
786
+ --------------
787
+ @vllm(
788
+ model="...",
789
+ ...
790
+ )
1144
791
 
1145
- Note that `branch` is typically one of:
1146
- - `prod`
1147
- - `user.bob`
1148
- - `test.my_experiment`
1149
- - `prod.staging`
792
+ Valid backend options
793
+ ---------------------
794
+ - 'local': Run as a separate process on the local task machine.
795
+
796
+ Valid model options
797
+ -------------------
798
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
799
+
800
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
801
+ If you need multiple models, you must create multiple @vllm decorators.
1150
802
 
1151
803
 
1152
804
  Parameters
1153
805
  ----------
1154
- flow : Union[str, Dict[str, str]], optional, default None
1155
- Upstream flow dependency for this flow.
1156
- flows : List[Union[str, Dict[str, str]]], default []
1157
- Upstream flow dependencies for this flow.
1158
- options : Dict[str, Any], default {}
1159
- Backend-specific configuration for tuning eventing behavior.
806
+ model: str
807
+ HuggingFace model identifier to be served by vLLM.
808
+ backend: str
809
+ Determines where and how to run the vLLM process.
810
+ debug: bool
811
+ Whether to turn on verbose debugging logs.
812
+ kwargs : Any
813
+ Any other keyword arguments are passed directly to the vLLM engine.
814
+ This allows for flexible configuration of vLLM server settings.
815
+ For example, `tensor_parallel_size=2`.
1160
816
  """
1161
817
  ...
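A minimal sketch of attaching a vLLM sidecar to a step, assuming `vllm` is importable from `metaflow` as these stubs declare and that the parameters not shown here have defaults; the model identifier follows the docstring's example.

```python
from metaflow import FlowSpec, step, vllm

class VllmSketchFlow(FlowSpec):

    # Serve one HuggingFace model from a local vLLM sidecar for the duration of the step.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local", debug=False)
    @step
    def start(self):
        # The sidecar exposes an OpenAI-compatible server for this single model;
        # how the endpoint is discovered from user code is not covered by this stub.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VllmSketchFlow()
```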
1162
818
 
1163
- @typing.overload
1164
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
819
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1165
820
  """
1166
- Specifies the event(s) that this flow depends on.
821
+ Specifies that this step should execute on Kubernetes.
1167
822
 
1168
- ```
1169
- @trigger(event='foo')
1170
- ```
1171
- or
1172
- ```
1173
- @trigger(events=['foo', 'bar'])
1174
- ```
1175
823
 
1176
- Additionally, you can specify the parameter mappings
1177
- to map event payload to Metaflow parameters for the flow.
1178
- ```
1179
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1180
- ```
1181
- or
1182
- ```
1183
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1184
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1185
- ```
824
+ Parameters
825
+ ----------
826
+ cpu : int, default 1
827
+ Number of CPUs required for this step. If `@resources` is
828
+ also present, the maximum value from all decorators is used.
829
+ memory : int, default 4096
830
+ Memory size (in MB) required for this step. If
831
+ `@resources` is also present, the maximum value from all decorators is
832
+ used.
833
+ disk : int, default 10240
834
+ Disk size (in MB) required for this step. If
835
+ `@resources` is also present, the maximum value from all decorators is
836
+ used.
837
+ image : str, optional, default None
838
+ Docker image to use when launching on Kubernetes. If not specified, and
839
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
840
+ not, a default Docker image mapping to the current version of Python is used.
841
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
842
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
843
+ image_pull_secrets: List[str], default []
844
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
845
+ Kubernetes image pull secrets to use when pulling container images
846
+ in Kubernetes.
847
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
848
+ Kubernetes service account to use when launching pod in Kubernetes.
849
+ secrets : List[str], optional, default None
850
+ Kubernetes secrets to use when launching pod in Kubernetes. These
851
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
852
+ in Metaflow configuration.
853
+ node_selector: Union[Dict[str,str], str], optional, default None
854
+ Kubernetes node selector(s) to apply to the pod running the task.
855
+ Can be passed in as a comma separated string of values e.g.
856
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
857
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
858
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
859
+ Kubernetes namespace to use when launching pod in Kubernetes.
860
+ gpu : int, optional, default None
861
+ Number of GPUs required for this step. A value of zero implies that
862
+ the scheduled node should not have GPUs.
863
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
864
+ The vendor of the GPUs to be used for this step.
865
+ tolerations : List[str], default []
866
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
867
+ Kubernetes tolerations to use when launching pod in Kubernetes.
868
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
869
+ Kubernetes labels to use when launching pod in Kubernetes.
870
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
871
+ Kubernetes annotations to use when launching pod in Kubernetes.
872
+ use_tmpfs : bool, default False
873
+ This enables an explicit tmpfs mount for this step.
874
+ tmpfs_tempdir : bool, default True
875
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
876
+ tmpfs_size : int, optional, default: None
877
+ The value for the size (in MiB) of the tmpfs mount for this step.
878
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
879
+ memory allocated for this step.
880
+ tmpfs_path : str, optional, default /metaflow_temp
881
+ Path to tmpfs mount for this step.
882
+ persistent_volume_claims : Dict[str, str], optional, default None
883
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
884
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
885
+ shared_memory: int, optional
886
+ Shared memory size (in MiB) required for this step
887
+ port: int, optional
888
+ Port number to specify in the Kubernetes job object
889
+ compute_pool : str, optional, default None
890
+ Compute pool to be used for this step.
891
+ If not specified, any accessible compute pool within the perimeter is used.
892
+ hostname_resolution_timeout: int, default 10 * 60
893
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
894
+ Only applicable when @parallel is used.
895
+ qos: str, default: Burstable
896
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
1186
897
 
1187
- 'parameters' can also be a list of strings and tuples like so:
1188
- ```
1189
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1190
- ```
1191
- This is equivalent to:
1192
- ```
1193
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1194
- ```
898
+ security_context: Dict[str, Any], optional, default None
899
+ Container security context. Applies to the task container. Allows the following keys:
900
+ - privileged: bool, optional, default None
901
+ - allow_privilege_escalation: bool, optional, default None
902
+ - run_as_user: int, optional, default None
903
+ - run_as_group: int, optional, default None
904
+ - run_as_non_root: bool, optional, default None
905
+ """
906
+ ...
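For orientation, a sketch of a step scheduled on Kubernetes with explicit resource requests; the values are illustrative and the flow name is made up.

```python
from metaflow import FlowSpec, step, kubernetes, retry

class K8sSketchFlow(FlowSpec):

    # Request 2 CPUs, 8192 MB of memory and one GPU; retry survives transient pod failures.
    @retry(times=2)
    @kubernetes(cpu=2, memory=8192, gpu=1)
    @step
    def start(self):
        self.total = sum(range(1000))  # stand-in for real work
        self.next(self.end)

    @step
    def end(self):
        print(self.total)

if __name__ == "__main__":
    K8sSketchFlow()
```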
907
+
908
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
909
+ """
910
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
911
+
912
+ User code call
913
+ --------------
914
+ @ollama(
915
+ models=[...],
916
+ ...
917
+ )
918
+
919
+ Valid backend options
920
+ ---------------------
921
+ - 'local': Run as a separate process on the local task machine.
922
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
923
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
924
+
925
+ Valid model options
926
+ -------------------
927
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
928
+
929
+
930
+ Parameters
931
+ ----------
932
+ models: list[str]
933
+ List of Ollama containers running models in sidecars.
934
+ backend: str
935
+ Determines where and how to run the Ollama process.
936
+ force_pull: bool
937
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
938
+ cache_update_policy: str
939
+ Cache update policy: "auto", "force", or "never".
940
+ force_cache_update: bool
941
+ Simple override for "force" cache update policy.
942
+ debug: bool
943
+ Whether to turn on verbose debugging logs.
944
+ circuit_breaker_config: dict
945
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
946
+ timeout_config: dict
947
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
948
+ """
949
+ ...
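A usage sketch, assuming `ollama` is importable from `metaflow` as these stubs declare and that the parameters omitted below have sensible defaults; the model tag follows the docstring's examples.

```python
from metaflow import FlowSpec, step, ollama

class OllamaSketchFlow(FlowSpec):

    # Run an Ollama sidecar on the task machine serving llama3.2.
    @ollama(models=["llama3.2"], backend="local")
    @step
    def start(self):
        # The Ollama API is reachable from within the step while the sidecar runs;
        # client code for calling it is omitted here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    OllamaSketchFlow()
```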
950
+
951
+ @typing.overload
952
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
953
+ """
954
+ Creates a human-readable report, a Metaflow Card, after this step completes.
955
+
956
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1195
957
 
1196
958
 
1197
959
  Parameters
1198
960
  ----------
1199
- event : Union[str, Dict[str, Any]], optional, default None
1200
- Event dependency for this flow.
1201
- events : List[Union[str, Dict[str, Any]]], default []
1202
- Events dependency for this flow.
961
+ type : str, default 'default'
962
+ Card type.
963
+ id : str, optional, default None
964
+ If multiple cards are present, use this id to identify this card.
1203
965
  options : Dict[str, Any], default {}
1204
- Backend-specific configuration for tuning eventing behavior.
966
+ Options passed to the card. The contents depend on the card type.
967
+ timeout : int, default 45
968
+ Interrupt reporting if it takes more than this many seconds.
1205
969
  """
1206
970
  ...
1207
971
 
1208
972
  @typing.overload
1209
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
973
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1210
974
  ...
1211
975
 
1212
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
976
+ @typing.overload
977
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
978
+ ...
979
+
980
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1213
981
  """
1214
- Specifies the event(s) that this flow depends on.
1215
-
1216
- ```
1217
- @trigger(event='foo')
1218
- ```
1219
- or
1220
- ```
1221
- @trigger(events=['foo', 'bar'])
1222
- ```
1223
-
1224
- Additionally, you can specify the parameter mappings
1225
- to map event payload to Metaflow parameters for the flow.
1226
- ```
1227
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1228
- ```
1229
- or
1230
- ```
1231
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1232
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1233
- ```
982
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1234
983
 
1235
- 'parameters' can also be a list of strings and tuples like so:
1236
- ```
1237
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1238
- ```
1239
- This is equivalent to:
1240
- ```
1241
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1242
- ```
984
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1243
985
 
1244
986
 
1245
987
  Parameters
1246
988
  ----------
1247
- event : Union[str, Dict[str, Any]], optional, default None
1248
- Event dependency for this flow.
1249
- events : List[Union[str, Dict[str, Any]]], default []
1250
- Events dependency for this flow.
989
+ type : str, default 'default'
990
+ Card type.
991
+ id : str, optional, default None
992
+ If multiple cards are present, use this id to identify this card.
1251
993
  options : Dict[str, Any], default {}
1252
- Backend-specific configuration for tuning eventing behavior.
994
+ Options passed to the card. The contents depend on the card type.
995
+ timeout : int, default 45
996
+ Interrupt reporting if it takes more than this many seconds.
997
+ """
998
+ ...
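Stepping back to the `@card` decorator defined above, a short sketch of producing a card from a step with Metaflow's card API (`current.card` plus a component such as `Markdown`); the metric value is illustrative.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown

class CardSketchFlow(FlowSpec):

    # Render a human-readable report after the step completes.
    @card(type="default", timeout=60)
    @step
    def start(self):
        self.accuracy = 0.93  # illustrative metric
        current.card.append(Markdown(f"## Accuracy: {self.accuracy}"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardSketchFlow()
```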
999
+
1000
+ @typing.overload
1001
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1002
+ """
1003
+ Decorator prototype for all step decorators. This function gets specialized
1004
+ and imported for all decorators types by _import_plugin_decorators().
1005
+ """
1006
+ ...
1007
+
1008
+ @typing.overload
1009
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1010
+ ...
1011
+
1012
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1013
+ """
1014
+ Decorator prototype for all step decorators. This function gets specialized
1015
+ and imported for all decorators types by _import_plugin_decorators().
1253
1016
  """
1254
1017
  ...
1255
1018
 
@@ -1409,55 +1172,250 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1409
1172
  self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1410
1173
  self.next(self.end)
1411
1174
 
1412
- ```
1175
+ ```
1176
+
1177
+ - Accessing objects stored in external datastores after task execution.
1178
+
1179
+ ```python
1180
+ run = Run("CheckpointsTestsFlow/8992")
1181
+ with artifact_store_from(run=run, config={
1182
+ "client_params": {
1183
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1184
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1185
+ },
1186
+ }):
1187
+ with Checkpoint() as cp:
1188
+ latest = cp.list(
1189
+ task=run["start"].task
1190
+ )[0]
1191
+ print(latest)
1192
+ cp.load(
1193
+ latest,
1194
+ "test-checkpoints"
1195
+ )
1196
+
1197
+ task = Task("TorchTuneFlow/8484/train/53673")
1198
+ with artifact_store_from(run=run, config={
1199
+ "client_params": {
1200
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1201
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1202
+ },
1203
+ }):
1204
+ load_model(
1205
+ task.data.model_ref,
1206
+ "test-models"
1207
+ )
1208
+ ```
1209
+ Parameters:
1210
+ ----------
1211
+
1212
+ type: str
1213
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1214
+
1215
+ config: dict or Callable
1216
+ Dictionary of configuration options for the datastore. The following keys are required:
1217
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1218
+ - example: 's3://bucket-name/path/to/root'
1219
+ - example: 'gs://bucket-name/path/to/root'
1220
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1221
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1222
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1223
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1224
+ """
1225
+ ...
1226
+
1227
+ @typing.overload
1228
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1229
+ """
1230
+ Specifies the times when the flow should be run when running on a
1231
+ production scheduler.
1232
+
1233
+
1234
+ Parameters
1235
+ ----------
1236
+ hourly : bool, default False
1237
+ Run the workflow hourly.
1238
+ daily : bool, default True
1239
+ Run the workflow daily.
1240
+ weekly : bool, default False
1241
+ Run the workflow weekly.
1242
+ cron : str, optional, default None
1243
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1244
+ specified by this expression.
1245
+ timezone : str, optional, default None
1246
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1247
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1248
+ """
1249
+ ...
1250
+
1251
+ @typing.overload
1252
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1253
+ ...
1254
+
1255
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1256
+ """
1257
+ Specifies the times when the flow should be run when running on a
1258
+ production scheduler.
1259
+
1260
+
1261
+ Parameters
1262
+ ----------
1263
+ hourly : bool, default False
1264
+ Run the workflow hourly.
1265
+ daily : bool, default True
1266
+ Run the workflow daily.
1267
+ weekly : bool, default False
1268
+ Run the workflow weekly.
1269
+ cron : str, optional, default None
1270
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1271
+ specified by this expression.
1272
+ timezone : str, optional, default None
1273
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1274
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1275
+ """
1276
+ ...
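As a flow-level usage sketch: the decorator goes on the class, and the schedule only takes effect once the flow is deployed to a production scheduler; the flow name is hypothetical.

```python
from metaflow import FlowSpec, step, schedule

# Run this flow once a day after it has been deployed to a production scheduler.
@schedule(daily=True)
class NightlySketchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlySketchFlow()
```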
1277
+
1278
+ @typing.overload
1279
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1280
+ """
1281
+ Specifies the flow(s) that this flow depends on.
1282
+
1283
+ ```
1284
+ @trigger_on_finish(flow='FooFlow')
1285
+ ```
1286
+ or
1287
+ ```
1288
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1289
+ ```
1290
+ This decorator respects the @project decorator and triggers the flow
1291
+ when upstream runs within the same namespace complete successfully
1292
+
1293
+ Additionally, you can specify project aware upstream flow dependencies
1294
+ by specifying the fully qualified project_flow_name.
1295
+ ```
1296
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1297
+ ```
1298
+ or
1299
+ ```
1300
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1301
+ ```
1302
+
1303
+ You can also specify just the project or project branch (other values will be
1304
+ inferred from the current project or project branch):
1305
+ ```
1306
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1307
+ ```
1308
+
1309
+ Note that `branch` is typically one of:
1310
+ - `prod`
1311
+ - `user.bob`
1312
+ - `test.my_experiment`
1313
+ - `prod.staging`
1314
+
1315
+
1316
+ Parameters
1317
+ ----------
1318
+ flow : Union[str, Dict[str, str]], optional, default None
1319
+ Upstream flow dependency for this flow.
1320
+ flows : List[Union[str, Dict[str, str]]], default []
1321
+ Upstream flow dependencies for this flow.
1322
+ options : Dict[str, Any], default {}
1323
+ Backend-specific configuration for tuning eventing behavior.
1324
+ """
1325
+ ...
1326
+
1327
+ @typing.overload
1328
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1329
+ ...
1330
+
1331
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1332
+ """
1333
+ Specifies the flow(s) that this flow depends on.
1334
+
1335
+ ```
1336
+ @trigger_on_finish(flow='FooFlow')
1337
+ ```
1338
+ or
1339
+ ```
1340
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1341
+ ```
1342
+ This decorator respects the @project decorator and triggers the flow
1343
+ when upstream runs within the same namespace complete successfully
1344
+
1345
+ Additionally, you can specify project aware upstream flow dependencies
1346
+ by specifying the fully qualified project_flow_name.
1347
+ ```
1348
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1349
+ ```
1350
+ or
1351
+ ```
1352
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1353
+ ```
1413
1354
 
1414
- - Accessing objects stored in external datastores after task execution.
1355
+ You can also specify just the project or project branch (other values will be
1356
+ inferred from the current project or project branch):
1357
+ ```
1358
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1359
+ ```
1415
1360
 
1416
- ```python
1417
- run = Run("CheckpointsTestsFlow/8992")
1418
- with artifact_store_from(run=run, config={
1419
- "client_params": {
1420
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1421
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1422
- },
1423
- }):
1424
- with Checkpoint() as cp:
1425
- latest = cp.list(
1426
- task=run["start"].task
1427
- )[0]
1428
- print(latest)
1429
- cp.load(
1430
- latest,
1431
- "test-checkpoints"
1432
- )
1361
+ Note that `branch` is typically one of:
1362
+ - `prod`
1363
+ - `user.bob`
1364
+ - `test.my_experiment`
1365
+ - `prod.staging`
1433
1366
 
1434
- task = Task("TorchTuneFlow/8484/train/53673")
1435
- with artifact_store_from(run=run, config={
1436
- "client_params": {
1437
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1438
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1439
- },
1440
- }):
1441
- load_model(
1442
- task.data.model_ref,
1443
- "test-models"
1444
- )
1445
- ```
1446
- Parameters:
1367
+
1368
+ Parameters
1447
1369
  ----------
1370
+ flow : Union[str, Dict[str, str]], optional, default None
1371
+ Upstream flow dependency for this flow.
1372
+ flows : List[Union[str, Dict[str, str]]], default []
1373
+ Upstream flow dependencies for this flow.
1374
+ options : Dict[str, Any], default {}
1375
+ Backend-specific configuration for tuning eventing behavior.
1376
+ """
1377
+ ...
1378
+
1379
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1380
+ """
1381
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1382
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1383
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1384
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1385
+ starts only after all sensors finish.
1448
1386
 
1449
- type: str
1450
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1451
1387
 
1452
- config: dict or Callable
1453
- Dictionary of configuration options for the datastore. The following keys are required:
1454
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1455
- - example: 's3://bucket-name/path/to/root'
1456
- - example: 'gs://bucket-name/path/to/root'
1457
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1458
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1459
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1460
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1388
+ Parameters
1389
+ ----------
1390
+ timeout : int
1391
+ Time, in seconds before the task times out and fails. (Default: 3600)
1392
+ poke_interval : int
1393
+ Time in seconds that the job should wait in between each try. (Default: 60)
1394
+ mode : str
1395
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1396
+ exponential_backoff : bool
1397
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1398
+ pool : str
1399
+ the slot pool this task should run in,
1400
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1401
+ soft_fail : bool
1402
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1403
+ name : str
1404
+ Name of the sensor on Airflow
1405
+ description : str
1406
+ Description of sensor in the Airflow UI
1407
+ bucket_key : Union[str, List[str]]
1408
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1409
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1410
+ bucket_name : str
1411
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1412
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1413
+ wildcard_match : bool
1414
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1415
+ aws_conn_id : str
1416
+ a reference to the s3 connection on Airflow. (Default: None)
1417
+ verify : bool
1418
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1461
1419
  """
1462
1420
  ...
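A minimal sketch of gating a flow on an S3 key when it is compiled with `airflow create`; the bucket and key are hypothetical, and the remaining sensor parameters are assumed to keep the defaults noted in the docstring above.

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

# Wait for the object to appear in S3 before the `start` step runs on Airflow.
@airflow_s3_key_sensor(bucket_key="s3://my-bucket/incoming/data.csv")
class SensorGatedSketchFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedSketchFlow()
```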
1463
1421
 
@@ -1496,57 +1454,6 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1496
1454
  """
1497
1455
  ...
1498
1456
 
1499
- @typing.overload
1500
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1501
- """
1502
- Specifies the times when the flow should be run when running on a
1503
- production scheduler.
1504
-
1505
-
1506
- Parameters
1507
- ----------
1508
- hourly : bool, default False
1509
- Run the workflow hourly.
1510
- daily : bool, default True
1511
- Run the workflow daily.
1512
- weekly : bool, default False
1513
- Run the workflow weekly.
1514
- cron : str, optional, default None
1515
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1516
- specified by this expression.
1517
- timezone : str, optional, default None
1518
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1519
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1520
- """
1521
- ...
1522
-
1523
- @typing.overload
1524
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1525
- ...
1526
-
1527
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1528
- """
1529
- Specifies the times when the flow should be run when running on a
1530
- production scheduler.
1531
-
1532
-
1533
- Parameters
1534
- ----------
1535
- hourly : bool, default False
1536
- Run the workflow hourly.
1537
- daily : bool, default True
1538
- Run the workflow daily.
1539
- weekly : bool, default False
1540
- Run the workflow weekly.
1541
- cron : str, optional, default None
1542
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1543
- specified by this expression.
1544
- timezone : str, optional, default None
1545
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1546
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1547
- """
1548
- ...
1549
-
1550
1457
  @typing.overload
1551
1458
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1552
1459
  """
@@ -1588,5 +1495,98 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1588
1495
  """
1589
1496
  ...
1590
1497
 
1498
+ @typing.overload
1499
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1500
+ """
1501
+ Specifies the event(s) that this flow depends on.
1502
+
1503
+ ```
1504
+ @trigger(event='foo')
1505
+ ```
1506
+ or
1507
+ ```
1508
+ @trigger(events=['foo', 'bar'])
1509
+ ```
1510
+
1511
+ Additionally, you can specify the parameter mappings
1512
+ to map event payload to Metaflow parameters for the flow.
1513
+ ```
1514
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1515
+ ```
1516
+ or
1517
+ ```
1518
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1519
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1520
+ ```
1521
+
1522
+ 'parameters' can also be a list of strings and tuples like so:
1523
+ ```
1524
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1525
+ ```
1526
+ This is equivalent to:
1527
+ ```
1528
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1529
+ ```
1530
+
1531
+
1532
+ Parameters
1533
+ ----------
1534
+ event : Union[str, Dict[str, Any]], optional, default None
1535
+ Event dependency for this flow.
1536
+ events : List[Union[str, Dict[str, Any]]], default []
1537
+ Events dependency for this flow.
1538
+ options : Dict[str, Any], default {}
1539
+ Backend-specific configuration for tuning eventing behavior.
1540
+ """
1541
+ ...
1542
+
1543
+ @typing.overload
1544
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1545
+ ...
1546
+
1547
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1548
+ """
1549
+ Specifies the event(s) that this flow depends on.
1550
+
1551
+ ```
1552
+ @trigger(event='foo')
1553
+ ```
1554
+ or
1555
+ ```
1556
+ @trigger(events=['foo', 'bar'])
1557
+ ```
1558
+
1559
+ Additionally, you can specify the parameter mappings
1560
+ to map event payload to Metaflow parameters for the flow.
1561
+ ```
1562
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1563
+ ```
1564
+ or
1565
+ ```
1566
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1567
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1568
+ ```
1569
+
1570
+ 'parameters' can also be a list of strings and tuples like so:
1571
+ ```
1572
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1573
+ ```
1574
+ This is equivalent to:
1575
+ ```
1576
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1577
+ ```
1578
+
1579
+
1580
+ Parameters
1581
+ ----------
1582
+ event : Union[str, Dict[str, Any]], optional, default None
1583
+ Event dependency for this flow.
1584
+ events : List[Union[str, Dict[str, Any]]], default []
1585
+ Events dependency for this flow.
1586
+ options : Dict[str, Any], default {}
1587
+ Backend-specific configuration for tuning eventing behavior.
1588
+ """
1589
+ ...
1590
+
1591
1591
  pkg_name: str
1592
1592