ob-metaflow-stubs 6.0.3.147__py2.py3-none-any.whl → 6.0.3.148__py2.py3-none-any.whl

This diff covers publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (198)
  1. metaflow-stubs/__init__.pyi +613 -614
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +36 -36
  21. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  22. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +4 -4
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +3 -3
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +3 -3
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +2 -2
  61. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  62. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  63. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  64. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  74. metaflow-stubs/multicore_utils.pyi +2 -2
  75. metaflow-stubs/parameters.pyi +3 -3
  76. metaflow-stubs/plugins/__init__.pyi +16 -12
  77. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  79. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  80. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  82. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  83. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  84. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  86. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  87. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  88. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  90. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  91. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  93. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  94. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  96. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  97. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  100. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  102. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  104. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  105. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  106. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  107. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  108. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  109. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  110. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  111. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  112. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  113. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  114. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  116. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  117. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  118. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  120. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  121. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  122. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  123. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  124. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  125. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  126. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  127. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  128. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  129. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  131. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  132. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  133. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  134. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  135. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  136. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  137. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  138. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  139. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  140. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  141. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  142. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  143. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  144. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  145. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  146. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  147. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  148. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  150. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  151. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  152. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -2
  153. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  154. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  155. metaflow-stubs/plugins/ollama/__init__.pyi +2 -2
  156. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  157. metaflow-stubs/plugins/perimeters.pyi +2 -2
  158. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  159. metaflow-stubs/plugins/pypi/__init__.pyi +3 -2
  160. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  161. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  162. metaflow-stubs/plugins/pypi/parsers.pyi +113 -0
  163. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  164. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  165. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  166. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  167. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  168. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  169. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  170. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  171. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  172. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  173. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  174. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  175. metaflow-stubs/profilers/__init__.pyi +2 -2
  176. metaflow-stubs/pylint_wrapper.pyi +2 -2
  177. metaflow-stubs/runner/__init__.pyi +2 -2
  178. metaflow-stubs/runner/deployer.pyi +5 -5
  179. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  180. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  181. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  182. metaflow-stubs/runner/nbrun.pyi +2 -2
  183. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  184. metaflow-stubs/runner/utils.pyi +3 -3
  185. metaflow-stubs/system/__init__.pyi +2 -2
  186. metaflow-stubs/system/system_logger.pyi +2 -2
  187. metaflow-stubs/system/system_monitor.pyi +2 -2
  188. metaflow-stubs/tagging_util.pyi +2 -2
  189. metaflow-stubs/tuple_util.pyi +2 -2
  190. metaflow-stubs/user_configs/__init__.pyi +2 -2
  191. metaflow-stubs/user_configs/config_decorators.pyi +7 -7
  192. metaflow-stubs/user_configs/config_options.pyi +3 -3
  193. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  194. {ob_metaflow_stubs-6.0.3.147.dist-info → ob_metaflow_stubs-6.0.3.148.dist-info}/METADATA +1 -1
  195. ob_metaflow_stubs-6.0.3.148.dist-info/RECORD +198 -0
  196. ob_metaflow_stubs-6.0.3.147.dist-info/RECORD +0 -197
  197. {ob_metaflow_stubs-6.0.3.147.dist-info → ob_metaflow_stubs-6.0.3.148.dist-info}/WHEEL +0 -0
  198. {ob_metaflow_stubs-6.0.3.147.dist-info → ob_metaflow_stubs-6.0.3.148.dist-info}/top_level.txt +0 -0
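The most substantive change in this release is the new `metaflow-stubs/plugins/pypi/parsers.pyi` stub (+113 lines) and the re-export of its three parsers at the top level of `metaflow-stubs/__init__.pyi` (visible in the first hunk below). As rough context only, the following sketch shows how such parsers are typically exercised; it is not part of the package diff, it assumes the matching `metaflow`/`ob-metaflow` runtime is installed, and the requirements text is invented for illustration.

# Illustrative sketch only -- not taken from this package. Assumes the runtime
# matching these stubs (MF 2.15.3.x) exports requirements_txt_parser at top
# level, as the stub diff below suggests.
from metaflow import requirements_txt_parser

requirements = """\
pandas==2.2.3
requests>=2.32
"""

# The parser takes the text of a requirements.txt-style file and returns a dict
# of decorator attributes (e.g. a "packages" mapping). In practice it is meant
# to be passed as the `parser=` argument of `Config(...)` and unpacked into
# @pypi_base / @conda_base; exact keys should be checked against the Metaflow
# documentation.
print(requirements_txt_parser(requirements))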
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.0.1+obcheckpoint(0.1.9);ob(v1) #
- # Generated on 2025-03-03T17:55:45.268987 #
+ # MF version: 2.15.3.1+obcheckpoint(0.1.9);ob(v1) #
+ # Generated on 2025-03-03T22:55:42.139654 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -35,14 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import cards as cards
+ from . import tuple_util as tuple_util
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -146,225 +149,164 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.

- This decorator is useful if this step may hang indefinitely.
+ User code call
+ -----------
+ @nim(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='managed'
+ )

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Valid backend options
+ ---------------------
+ - 'managed': Outerbounds selects a compute provider based on the model.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Valid model options
+ ----------------
+ - 'meta/llama3-8b-instruct': 8B parameter model
+ - 'meta/llama3-70b-instruct': 70B parameter model
+ - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ models: list[NIM]
+ List of NIM containers running models in sidecars.
+ backend: str
+ Compute provider to run the NIM container.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.

- This decorator is useful if this step may hang indefinitely.
+ User code call
+ -----------
+ @ollama(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='local'
+ )

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Valid model options
+ ----------------
+ - 'llama3.2'
+ - 'llama3.3'
+ - any model here https://ollama.com/search


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ models: list[Ollama]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.
+
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.
+
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
+
+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
+
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies that the step will success under all circumstances.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
+ Specifies that the step will success under all circumstances.

- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

@@ -423,54 +365,155 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that this step should execute on Kubernetes.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...

@@ -524,147 +567,109 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that the step will success under all circumstances.
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Internal decorator to support Fast bakery
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the PyPI packages for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Enables loading / saving of models within a step.
+
+
+
  Parameters
  ----------
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- Parameters
- ----------
- """
+ @typing.overload
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- -----------
- @ollama(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='local'
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- ----------------
- - 'llama3.2'
- - 'llama3.3'
- - any model here https://ollama.com/search
-
-
- Parameters
- ----------
- models: list[Ollama]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- """
- ...
-
- def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
-
- User code call
- -----------
- @nim(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
- )
-
- Valid backend options
- ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
-
- Valid model options
- ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
-
-
- Parameters
- ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
- backend: str
- Compute provider to run the NIM container.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
- """
- ...
-
- @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Enables loading / saving of models within a step.
+ Enables loading / saving of models within a step.



@@ -687,67 +692,96 @@ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[
687
692
  ...
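As an illustration of the `@model` decorator documented above, a minimal sketch of a flow that saves a model in one step and loads it in the next. It assumes `model`, `step`, and `current` are importable from `metaflow` as these stubs declare, and that `current.model.save(...)` returns the reference object that `load` later resolves; only `current.model.loaded` is named in the docstring, so the save call is an assumption.

```
# Hypothetical usage sketch for @model(load=..., temp_dir_root=...).
from metaflow import FlowSpec, current, model, step


class ModelDemoFlow(FlowSpec):

    @model
    @step
    def start(self):
        # Write stand-in "weights" and store the reference returned by
        # current.model.save(...) as a flow artifact (assumed API).
        with open("weights.bin", "wb") as f:
            f.write(b"\x00" * 16)
        self.trained_model = current.model.save("weights.bin")
        self.next(self.score)

    @model(load=["trained_model"], temp_dir_root="/tmp/model-cache")
    @step
    def score(self):
        # current.model.loaded maps the artifact name to the local path
        # where the model was unpacked under temp_dir_root.
        print("model available at", current.model.loaded["trained_model"])
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ModelDemoFlow()
```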
688
693
 
689
694
  @typing.overload
690
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
695
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
696
+ """
697
+ Specifies secrets to be retrieved and injected as environment variables prior to
698
+ the execution of a step.
699
+
700
+
701
+ Parameters
702
+ ----------
703
+ sources : List[Union[str, Dict[str, Any]]], default: []
704
+ List of secret specs, defining how the secrets are to be retrieved
705
+ """
691
706
  ...
692
707
 
693
708
  @typing.overload
694
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
709
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
695
710
  ...
696
711
 
697
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
712
+ @typing.overload
713
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
714
+ ...
715
+
716
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
698
717
  """
699
- Enables loading / saving of models within a step.
700
-
718
+ Specifies secrets to be retrieved and injected as environment variables prior to
719
+ the execution of a step.
701
720
 
702
721
 
703
722
  Parameters
704
723
  ----------
705
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
706
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
707
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
708
- - `current.checkpoint`
709
- - `current.model`
710
- - `current.huggingface_hub`
711
-
712
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
713
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
714
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
715
-
716
- temp_dir_root : str, default: None
717
- The root directory under which `current.model.loaded` will store loaded models
724
+ sources : List[Union[str, Dict[str, Any]]], default: []
725
+ List of secret specs, defining how the secrets are to be retrieved
718
726
  """
719
727
  ...
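A minimal sketch of the `@secrets` decorator described above; the secret source name and the injected DB_PASSWORD variable are hypothetical and depend on the configured secrets backend.

```
# Hypothetical usage sketch for @secrets(sources=[...]).
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys stored in the secret are injected as environment variables
        # before this step body runs.
        print("got password of length", len(os.environ["DB_PASSWORD"]))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```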
720
728
 
721
729
  @typing.overload
722
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
723
731
  """
724
- Specifies environment variables to be set prior to the execution of a step.
732
+ Specifies a timeout for your step.
733
+
734
+ This decorator is useful if this step may hang indefinitely.
735
+
736
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
737
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
738
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
739
+
740
+ Note that all the values specified in parameters are added together so if you specify
741
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
725
742
 
726
743
 
727
744
  Parameters
728
745
  ----------
729
- vars : Dict[str, str], default {}
730
- Dictionary of environment variables to set.
746
+ seconds : int, default 0
747
+ Number of seconds to wait prior to timing out.
748
+ minutes : int, default 0
749
+ Number of minutes to wait prior to timing out.
750
+ hours : int, default 0
751
+ Number of hours to wait prior to timing out.
731
752
  """
732
753
  ...
733
754
 
734
755
  @typing.overload
735
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
756
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
736
757
  ...
737
758
 
738
759
  @typing.overload
739
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
760
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
740
761
  ...
741
762
 
742
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
763
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
743
764
  """
744
- Specifies environment variables to be set prior to the execution of a step.
765
+ Specifies a timeout for your step.
766
+
767
+ This decorator is useful if this step may hang indefinitely.
768
+
769
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
770
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
771
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
772
+
773
+ Note that all the values specified in parameters are added together so if you specify
774
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
745
775
 
746
776
 
747
777
  Parameters
748
778
  ----------
749
- vars : Dict[str, str], default {}
750
- Dictionary of environment variables to set.
779
+ seconds : int, default 0
780
+ Number of seconds to wait prior to timing out.
781
+ minutes : int, default 0
782
+ Number of minutes to wait prior to timing out.
783
+ hours : int, default 0
784
+ Number of hours to wait prior to timing out.
751
785
  """
752
786
  ...
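A sketch of the `@timeout` decorator described above, combined with `@retry` and `@catch` as the docstring suggests; the effective limit here is the sum of the parameters, i.e. one hour and thirty minutes.

```
# Hypothetical usage sketch for @timeout together with @retry and @catch.
import time

from metaflow import FlowSpec, catch, retry, step, timeout


class TimeoutDemoFlow(FlowSpec):

    @catch(var="start_failed")
    @retry(times=2)
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        time.sleep(5)  # stand-in for work that could hang indefinitely
        self.next(self.end)

    @step
    def end(self):
        # If every retry timed out, @catch stores the exception here.
        print("start failed?", getattr(self, "start_failed", None))


if __name__ == "__main__":
    TimeoutDemoFlow()
```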
753
787
 
@@ -808,294 +842,115 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
808
842
  """
809
843
  ...
810
844
 
811
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
812
- """
813
- Specifies that this step should execute on Kubernetes.
814
-
815
-
816
- Parameters
817
- ----------
818
- cpu : int, default 1
819
- Number of CPUs required for this step. If `@resources` is
820
- also present, the maximum value from all decorators is used.
821
- memory : int, default 4096
822
- Memory size (in MB) required for this step. If
823
- `@resources` is also present, the maximum value from all decorators is
824
- used.
825
- disk : int, default 10240
826
- Disk size (in MB) required for this step. If
827
- `@resources` is also present, the maximum value from all decorators is
828
- used.
829
- image : str, optional, default None
830
- Docker image to use when launching on Kubernetes. If not specified, and
831
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
832
- not, a default Docker image mapping to the current version of Python is used.
833
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
834
- If given, the imagePullPolicy to be applied to the Docker image of the step.
835
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
836
- Kubernetes service account to use when launching pod in Kubernetes.
837
- secrets : List[str], optional, default None
838
- Kubernetes secrets to use when launching pod in Kubernetes. These
839
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
840
- in Metaflow configuration.
841
- node_selector: Union[Dict[str,str], str], optional, default None
842
- Kubernetes node selector(s) to apply to the pod running the task.
843
- Can be passed in as a comma separated string of values e.g.
844
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
845
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
846
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
847
- Kubernetes namespace to use when launching pod in Kubernetes.
848
- gpu : int, optional, default None
849
- Number of GPUs required for this step. A value of zero implies that
850
- the scheduled node should not have GPUs.
851
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
852
- The vendor of the GPUs to be used for this step.
853
- tolerations : List[str], default []
854
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
855
- Kubernetes tolerations to use when launching pod in Kubernetes.
856
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
857
- Kubernetes labels to use when launching pod in Kubernetes.
858
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
859
- Kubernetes annotations to use when launching pod in Kubernetes.
860
- use_tmpfs : bool, default False
861
- This enables an explicit tmpfs mount for this step.
862
- tmpfs_tempdir : bool, default True
863
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
864
- tmpfs_size : int, optional, default: None
865
- The value for the size (in MiB) of the tmpfs mount for this step.
866
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
867
- memory allocated for this step.
868
- tmpfs_path : str, optional, default /metaflow_temp
869
- Path to tmpfs mount for this step.
870
- persistent_volume_claims : Dict[str, str], optional, default None
871
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
872
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
873
- shared_memory: int, optional
874
- Shared memory size (in MiB) required for this step
875
- port: int, optional
876
- Port number to specify in the Kubernetes job object
877
- compute_pool : str, optional, default None
878
- Compute pool to be used for for this step.
879
- If not specified, any accessible compute pool within the perimeter is used.
880
- hostname_resolution_timeout: int, default 10 * 60
881
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
882
- Only applicable when @parallel is used.
883
- qos: str, default: Burstable
884
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
885
- """
886
- ...
887
-
888
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
889
- """
890
- Specifies that this step should execute on DGX cloud.
891
-
892
-
893
- Parameters
894
- ----------
895
- gpu : int
896
- Number of GPUs to use.
897
- gpu_type : str
898
- Type of Nvidia GPU to use.
899
- queue_timeout : int
900
- Time to keep the job in NVCF's queue.
901
- """
902
- ...
903
-
904
845
  @typing.overload
905
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
846
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
906
847
  """
907
- Specifies secrets to be retrieved and injected as environment variables prior to
908
- the execution of a step.
848
+ Specifies environment variables to be set prior to the execution of a step.
909
849
 
910
850
 
911
851
  Parameters
912
852
  ----------
913
- sources : List[Union[str, Dict[str, Any]]], default: []
914
- List of secret specs, defining how the secrets are to be retrieved
853
+ vars : Dict[str, str], default {}
854
+ Dictionary of environment variables to set.
915
855
  """
916
856
  ...
917
857
 
918
858
  @typing.overload
919
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
859
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
920
860
  ...
921
861
 
922
862
  @typing.overload
923
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
863
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
924
864
  ...
925
865
 
926
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
866
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
927
867
  """
928
- Specifies secrets to be retrieved and injected as environment variables prior to
929
- the execution of a step.
868
+ Specifies environment variables to be set prior to the execution of a step.
930
869
 
931
870
 
932
871
  Parameters
933
872
  ----------
934
- sources : List[Union[str, Dict[str, Any]]], default: []
935
- List of secret specs, defining how the secrets are to be retrieved
873
+ vars : Dict[str, str], default {}
874
+ Dictionary of environment variables to set.
936
875
  """
937
876
  ...
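A small sketch of the `@environment` decorator described above; the variable names are illustrative only.

```
# Hypothetical usage sketch for @environment(vars={...}).
import os

from metaflow import FlowSpec, environment, step


class EnvironmentDemoFlow(FlowSpec):

    @environment(vars={"APP_MODE": "batch", "TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        # The variables are set before the step body executes.
        print("running in", os.environ["APP_MODE"], "mode")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvironmentDemoFlow()
```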
938
877
 
939
878
  @typing.overload
940
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
941
- """
942
- Parameters
943
- ----------
879
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
944
880
  """
945
- ...
946
-
947
- @typing.overload
948
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
949
- ...
950
-
951
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
952
- """
953
- Parameters
954
- ----------
955
- """
956
- ...
957
-
958
- @typing.overload
959
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
960
- """
961
- Specifies the times when the flow should be run when running on a
962
- production scheduler.
963
-
964
-
965
- Parameters
966
- ----------
967
- hourly : bool, default False
968
- Run the workflow hourly.
969
- daily : bool, default True
970
- Run the workflow daily.
971
- weekly : bool, default False
972
- Run the workflow weekly.
973
- cron : str, optional, default None
974
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
975
- specified by this expression.
976
- timezone : str, optional, default None
977
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
978
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
979
- """
980
- ...
981
-
982
- @typing.overload
983
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
984
- ...
985
-
986
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
987
- """
988
- Specifies the times when the flow should be run when running on a
989
- production scheduler.
990
-
991
-
992
- Parameters
993
- ----------
994
- hourly : bool, default False
995
- Run the workflow hourly.
996
- daily : bool, default True
997
- Run the workflow daily.
998
- weekly : bool, default False
999
- Run the workflow weekly.
1000
- cron : str, optional, default None
1001
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1002
- specified by this expression.
1003
- timezone : str, optional, default None
1004
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1005
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1006
- """
1007
- ...
1008
-
1009
- @typing.overload
1010
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1011
- """
1012
- Specifies the event(s) that this flow depends on.
881
+ Specifies the resources needed when executing this step.
1013
882
 
1014
- ```
1015
- @trigger(event='foo')
1016
- ```
1017
- or
1018
- ```
1019
- @trigger(events=['foo', 'bar'])
1020
- ```
883
+ Use `@resources` to specify the resource requirements
884
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1021
885
 
1022
- Additionally, you can specify the parameter mappings
1023
- to map event payload to Metaflow parameters for the flow.
886
+ You can choose the compute layer on the command line by executing e.g.
1024
887
  ```
1025
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
888
+ python myflow.py run --with batch
1026
889
  ```
1027
890
  or
1028
891
  ```
1029
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1030
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1031
- ```
1032
-
1033
- 'parameters' can also be a list of strings and tuples like so:
1034
- ```
1035
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1036
- ```
1037
- This is equivalent to:
1038
- ```
1039
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
892
+ python myflow.py run --with kubernetes
1040
893
  ```
894
+ which executes the flow on the desired system using the
895
+ requirements specified in `@resources`.
1041
896
 
1042
897
 
1043
898
  Parameters
1044
899
  ----------
1045
- event : Union[str, Dict[str, Any]], optional, default None
1046
- Event dependency for this flow.
1047
- events : List[Union[str, Dict[str, Any]]], default []
1048
- Events dependency for this flow.
1049
- options : Dict[str, Any], default {}
1050
- Backend-specific configuration for tuning eventing behavior.
900
+ cpu : int, default 1
901
+ Number of CPUs required for this step.
902
+ gpu : int, optional, default None
903
+ Number of GPUs required for this step.
904
+ disk : int, optional, default None
905
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
906
+ memory : int, default 4096
907
+ Memory size (in MB) required for this step.
908
+ shared_memory : int, optional, default None
909
+ The value for the size (in MiB) of the /dev/shm volume for this step.
910
+ This parameter maps to the `--shm-size` option in Docker.
1051
911
  """
1052
912
  ...
1053
913
 
1054
914
  @typing.overload
1055
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
915
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1056
916
  ...
1057
917
 
1058
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
918
+ @typing.overload
919
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
920
+ ...
921
+
922
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1059
923
  """
1060
- Specifies the event(s) that this flow depends on.
924
+ Specifies the resources needed when executing this step.
1061
925
 
1062
- ```
1063
- @trigger(event='foo')
1064
- ```
1065
- or
1066
- ```
1067
- @trigger(events=['foo', 'bar'])
1068
- ```
926
+ Use `@resources` to specify the resource requirements
927
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1069
928
 
1070
- Additionally, you can specify the parameter mappings
1071
- to map event payload to Metaflow parameters for the flow.
929
+ You can choose the compute layer on the command line by executing e.g.
1072
930
  ```
1073
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
931
+ python myflow.py run --with batch
1074
932
  ```
1075
933
  or
1076
934
  ```
1077
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1078
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1079
- ```
1080
-
1081
- 'parameters' can also be a list of strings and tuples like so:
1082
- ```
1083
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1084
- ```
1085
- This is equivalent to:
1086
- ```
1087
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
935
+ python myflow.py run --with kubernetes
1088
936
  ```
937
+ which executes the flow on the desired system using the
938
+ requirements specified in `@resources`.
1089
939
 
1090
940
 
1091
941
  Parameters
1092
942
  ----------
1093
- event : Union[str, Dict[str, Any]], optional, default None
1094
- Event dependency for this flow.
1095
- events : List[Union[str, Dict[str, Any]]], default []
1096
- Events dependency for this flow.
1097
- options : Dict[str, Any], default {}
1098
- Backend-specific configuration for tuning eventing behavior.
943
+ cpu : int, default 1
944
+ Number of CPUs required for this step.
945
+ gpu : int, optional, default None
946
+ Number of GPUs required for this step.
947
+ disk : int, optional, default None
948
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
949
+ memory : int, default 4096
950
+ Memory size (in MB) required for this step.
951
+ shared_memory : int, optional, default None
952
+ The value for the size (in MiB) of the /dev/shm volume for this step.
953
+ This parameter maps to the `--shm-size` option in Docker.
1099
954
  """
1100
955
  ...
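A sketch of the `@resources` decorator described above. The requests only take effect when a compute layer is attached, e.g. `python resources_demo.py run --with kubernetes`; the file name is hypothetical.

```
# Hypothetical usage sketch for @resources; pair it with --with batch/kubernetes.
from metaflow import FlowSpec, resources, step


class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16000, gpu=1, shared_memory=2048)
    @step
    def start(self):
        # Declares requirements independently of the compute layer; running
        # locally simply ignores them.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ResourcesDemoFlow()
```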
1101
956
 
@@ -1200,6 +1055,92 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1200
1055
  """
1201
1056
  ...
1202
1057
 
1058
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1059
+ """
1060
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1061
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1062
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1063
+ added as flow decorators. Adding more than one decorator ensures that the `start` step
1064
+ starts only after all sensors finish.
1065
+
1066
+
1067
+ Parameters
1068
+ ----------
1069
+ timeout : int
1070
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1071
+ poke_interval : int
1072
+ Time in seconds that the job should wait in between each try. (Default: 60)
1073
+ mode : str
1074
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1075
+ exponential_backoff : bool
1076
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1077
+ pool : str
1078
+ The slot pool this task should run in;
1079
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1080
+ soft_fail : bool
1081
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1082
+ name : str
1083
+ Name of the sensor on Airflow
1084
+ description : str
1085
+ Description of sensor in the Airflow UI
1086
+ bucket_key : Union[str, List[str]]
1087
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1088
+ When it is specified as a full s3:// URL, leave `bucket_name` as None.
1089
+ bucket_name : str
1090
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1091
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1092
+ wildcard_match : bool
1093
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1094
+ aws_conn_id : str
1095
+ A reference to the S3 connection on Airflow. (Default: None)
1096
+ verify : bool
1097
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1098
+ """
1099
+ ...
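A sketch of the flow-level `@airflow_s3_key_sensor` decorator described above, for a flow compiled with `airflow create`; the bucket, key, and sensor name are hypothetical, and parameters left out are assumed to fall back to the defaults listed in the docstring.

```
# Hypothetical usage sketch for @airflow_s3_key_sensor.
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    name="wait_for_daily_export",
    bucket_key="s3://example-bucket/exports/daily/_SUCCESS",
    poke_interval=120,
    timeout=7200,
)
class S3SensorDemoFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the sensor sees the key (when scheduled on Airflow).
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorDemoFlow()
```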
1100
+
1101
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1102
+ """
1103
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1104
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
1105
+
1106
+
1107
+ Parameters
1108
+ ----------
1109
+ timeout : int
1110
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1111
+ poke_interval : int
1112
+ Time in seconds that the job should wait in between each try. (Default: 60)
1113
+ mode : str
1114
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1115
+ exponential_backoff : bool
1116
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1117
+ pool : str
1118
+ The slot pool this task should run in;
1119
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1120
+ soft_fail : bool
1121
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1122
+ name : str
1123
+ Name of the sensor on Airflow
1124
+ description : str
1125
+ Description of sensor in the Airflow UI
1126
+ external_dag_id : str
1127
+ The dag_id that contains the task you want to wait for.
1128
+ external_task_ids : List[str]
1129
+ The list of task_ids that you want to wait for.
1130
+ If None (default value) the sensor waits for the DAG. (Default: None)
1131
+ allowed_states : List[str]
1132
+ Iterable of allowed states. (Default: ['success'])
1133
+ failed_states : List[str]
1134
+ Iterable of failed or disallowed states. (Default: None)
1135
+ execution_delta : datetime.timedelta
1136
+ Time difference with the previous execution to look at;
1137
+ the default is the same logical date as the current task or DAG. (Default: None)
1138
+ check_existence: bool
1139
+ Set to True to check if the external task exists or check if
1140
+ the DAG to wait for exists. (Default: True)
1141
+ """
1142
+ ...
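Similarly, a sketch of `@airflow_external_task_sensor`; the DAG and task ids are hypothetical, and omitted parameters are assumed to use the documented defaults.

```
# Hypothetical usage sketch for @airflow_external_task_sensor.
from metaflow import FlowSpec, airflow_external_task_sensor, step


@airflow_external_task_sensor(
    name="wait_for_upstream_etl",
    external_dag_id="nightly_etl",
    external_task_ids=["publish_table"],
)
class ExternalTaskSensorDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExternalTaskSensorDemoFlow()
```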
1143
+
1203
1144
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1204
1145
  """
1205
1146
  Specifies what flows belong to the same project.
@@ -1276,89 +1217,147 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1276
1217
  """
1277
1218
  ...
1278
1219
 
1279
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1220
+ @typing.overload
1221
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1280
1222
  """
1281
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1282
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1283
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1284
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1285
- starts only after all sensors finish.
1223
+ Specifies the event(s) that this flow depends on.
1224
+
1225
+ ```
1226
+ @trigger(event='foo')
1227
+ ```
1228
+ or
1229
+ ```
1230
+ @trigger(events=['foo', 'bar'])
1231
+ ```
1232
+
1233
+ Additionally, you can specify the parameter mappings
1234
+ to map event payload to Metaflow parameters for the flow.
1235
+ ```
1236
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1237
+ ```
1238
+ or
1239
+ ```
1240
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1241
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1242
+ ```
1243
+
1244
+ 'parameters' can also be a list of strings and tuples like so:
1245
+ ```
1246
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1247
+ ```
1248
+ This is equivalent to:
1249
+ ```
1250
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1251
+ ```
1286
1252
 
1287
1253
 
1288
1254
  Parameters
1289
1255
  ----------
1290
- timeout : int
1291
- Time, in seconds before the task times out and fails. (Default: 3600)
1292
- poke_interval : int
1293
- Time in seconds that the job should wait in between each try. (Default: 60)
1294
- mode : str
1295
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1296
- exponential_backoff : bool
1297
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1298
- pool : str
1299
- the slot pool this task should run in,
1300
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1301
- soft_fail : bool
1302
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1303
- name : str
1304
- Name of the sensor on Airflow
1305
- description : str
1306
- Description of sensor in the Airflow UI
1307
- bucket_key : Union[str, List[str]]
1308
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1309
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1310
- bucket_name : str
1311
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1312
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1313
- wildcard_match : bool
1314
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1315
- aws_conn_id : str
1316
- a reference to the s3 connection on Airflow. (Default: None)
1317
- verify : bool
1318
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1256
+ event : Union[str, Dict[str, Any]], optional, default None
1257
+ Event dependency for this flow.
1258
+ events : List[Union[str, Dict[str, Any]]], default []
1259
+ Events dependency for this flow.
1260
+ options : Dict[str, Any], default {}
1261
+ Backend-specific configuration for tuning eventing behavior.
1319
1262
  """
1320
1263
  ...
1321
1264
 
1322
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1265
+ @typing.overload
1266
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1267
+ ...
1268
+
1269
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1323
1270
  """
1324
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1325
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1271
+ Specifies the event(s) that this flow depends on.
1272
+
1273
+ ```
1274
+ @trigger(event='foo')
1275
+ ```
1276
+ or
1277
+ ```
1278
+ @trigger(events=['foo', 'bar'])
1279
+ ```
1280
+
1281
+ Additionally, you can specify the parameter mappings
1282
+ to map event payload to Metaflow parameters for the flow.
1283
+ ```
1284
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1285
+ ```
1286
+ or
1287
+ ```
1288
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1289
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1290
+ ```
1291
+
1292
+ 'parameters' can also be a list of strings and tuples like so:
1293
+ ```
1294
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1295
+ ```
1296
+ This is equivalent to:
1297
+ ```
1298
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1299
+ ```
1326
1300
 
1327
1301
 
1328
1302
  Parameters
1329
1303
  ----------
1330
- timeout : int
1331
- Time, in seconds before the task times out and fails. (Default: 3600)
1332
- poke_interval : int
1333
- Time in seconds that the job should wait in between each try. (Default: 60)
1334
- mode : str
1335
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1336
- exponential_backoff : bool
1337
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1338
- pool : str
1339
- the slot pool this task should run in,
1340
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1341
- soft_fail : bool
1342
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1343
- name : str
1344
- Name of the sensor on Airflow
1345
- description : str
1346
- Description of sensor in the Airflow UI
1347
- external_dag_id : str
1348
- The dag_id that contains the task you want to wait for.
1349
- external_task_ids : List[str]
1350
- The list of task_ids that you want to wait for.
1351
- If None (default value) the sensor waits for the DAG. (Default: None)
1352
- allowed_states : List[str]
1353
- Iterable of allowed states, (Default: ['success'])
1354
- failed_states : List[str]
1355
- Iterable of failed or dis-allowed states. (Default: None)
1356
- execution_delta : datetime.timedelta
1357
- time difference with the previous execution to look at,
1358
- the default is the same logical date as the current task or DAG. (Default: None)
1359
- check_existence: bool
1360
- Set to True to check if the external task exists or check if
1361
- the DAG to wait for exists. (Default: True)
1304
+ event : Union[str, Dict[str, Any]], optional, default None
1305
+ Event dependency for this flow.
1306
+ events : List[Union[str, Dict[str, Any]]], default []
1307
+ Events dependency for this flow.
1308
+ options : Dict[str, Any], default {}
1309
+ Backend-specific configuration for tuning eventing behavior.
1310
+ """
1311
+ ...
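A sketch of the `@trigger` decorator with a parameter mapping, mirroring the docstring examples above; the event name `data_ready` and the payload field `table_name` are hypothetical.

```
# Hypothetical usage sketch for @trigger(event={...}) with a parameter mapping.
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={"name": "data_ready", "parameters": {"table": "table_name"}})
class TriggerDemoFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        # When deployed to a production scheduler, publishing a `data_ready`
        # event sets `table` from the event's `table_name` field.
        print("processing table", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggerDemoFlow()
```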
1312
+
1313
+ @typing.overload
1314
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1315
+ """
1316
+ Specifies the times when the flow should be run when running on a
1317
+ production scheduler.
1318
+
1319
+
1320
+ Parameters
1321
+ ----------
1322
+ hourly : bool, default False
1323
+ Run the workflow hourly.
1324
+ daily : bool, default True
1325
+ Run the workflow daily.
1326
+ weekly : bool, default False
1327
+ Run the workflow weekly.
1328
+ cron : str, optional, default None
1329
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1330
+ specified by this expression.
1331
+ timezone : str, optional, default None
1332
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1333
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1334
+ """
1335
+ ...
1336
+
1337
+ @typing.overload
1338
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1339
+ ...
1340
+
1341
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1342
+ """
1343
+ Specifies the times when the flow should be run when running on a
1344
+ production scheduler.
1345
+
1346
+
1347
+ Parameters
1348
+ ----------
1349
+ hourly : bool, default False
1350
+ Run the workflow hourly.
1351
+ daily : bool, default True
1352
+ Run the workflow daily.
1353
+ weekly : bool, default False
1354
+ Run the workflow weekly.
1355
+ cron : str, optional, default None
1356
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1357
+ specified by this expression.
1358
+ timezone : str, optional, default None
1359
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1360
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1362
1361
  """
1363
1362
  ...
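Finally, a sketch of the `@schedule` decorator; the hourly flag is illustrative, and a cron expression could be passed instead as shown in the docstring above.

```
# Hypothetical usage sketch for @schedule on a deployed flow.
from metaflow import FlowSpec, schedule, step


@schedule(hourly=True)
class ScheduleDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScheduleDemoFlow()
```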
1364
1363