ob-metaflow-stubs 6.0.3.165__py2.py3-none-any.whl → 6.0.3.166__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (206)
  1. metaflow-stubs/__init__.pyi +722 -722
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +127 -127
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/mf_extensions/__init__.pyi +2 -2
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +2 -2
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +2 -2
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +2 -2
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +2 -2
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +3 -3
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +2 -2
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +4 -4
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +2 -2
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +2 -2
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +5 -5
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +2 -2
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +5 -5
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +2 -2
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +4 -4
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +5 -5
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +2 -2
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +2 -2
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +2 -2
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +3 -3
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +2 -2
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +2 -2
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +4 -4
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +2 -2
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +3 -3
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +2 -2
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +2 -2
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +2 -2
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +2 -2
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +4 -4
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +2 -2
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +2 -2
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +2 -2
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +3 -3
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +2 -2
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +2 -2
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +3 -3
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +3 -3
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +2 -2
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +2 -2
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +2 -2
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +2 -2
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +2 -2
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +2 -2
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +2 -2
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +2 -2
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +2 -2
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +2 -2
  75. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +2 -2
  78. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +2 -2
  79. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +2 -2
  80. metaflow-stubs/multicore_utils.pyi +2 -2
  81. metaflow-stubs/parameters.pyi +4 -4
  82. metaflow-stubs/plugins/__init__.pyi +11 -11
  83. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  85. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  86. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  88. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  89. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  90. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  92. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  93. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  94. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  95. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  96. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  97. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  99. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  100. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  102. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  103. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  105. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  106. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  108. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  110. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  111. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  112. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  113. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  115. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  116. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  117. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  118. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  119. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  120. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  122. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  123. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  124. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  126. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  127. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  128. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  129. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  130. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  131. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  132. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  133. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  134. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  135. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  138. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  140. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  141. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  142. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  143. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  144. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  145. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  146. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  147. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  148. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  149. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  150. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  151. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  152. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  153. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  154. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  155. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  156. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  157. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  158. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  159. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  160. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  161. metaflow-stubs/plugins/ollama/__init__.pyi +3 -3
  162. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  163. metaflow-stubs/plugins/perimeters.pyi +2 -2
  164. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  165. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  166. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  167. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  168. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  169. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  170. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  171. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  172. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  173. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  174. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  175. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  176. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  177. metaflow-stubs/plugins/snowflake/__init__.pyi +2 -2
  178. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  179. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  180. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  181. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  182. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  183. metaflow-stubs/profilers/__init__.pyi +2 -2
  184. metaflow-stubs/pylint_wrapper.pyi +2 -2
  185. metaflow-stubs/runner/__init__.pyi +2 -2
  186. metaflow-stubs/runner/deployer.pyi +31 -31
  187. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  188. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  189. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  190. metaflow-stubs/runner/nbrun.pyi +2 -2
  191. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  192. metaflow-stubs/runner/utils.pyi +3 -3
  193. metaflow-stubs/system/__init__.pyi +2 -2
  194. metaflow-stubs/system/system_logger.pyi +3 -3
  195. metaflow-stubs/system/system_monitor.pyi +2 -2
  196. metaflow-stubs/tagging_util.pyi +2 -2
  197. metaflow-stubs/tuple_util.pyi +2 -2
  198. metaflow-stubs/user_configs/__init__.pyi +2 -2
  199. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  200. metaflow-stubs/user_configs/config_options.pyi +3 -3
  201. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  202. {ob_metaflow_stubs-6.0.3.165.dist-info → ob_metaflow_stubs-6.0.3.166.dist-info}/METADATA +1 -1
  203. ob_metaflow_stubs-6.0.3.166.dist-info/RECORD +206 -0
  204. ob_metaflow_stubs-6.0.3.165.dist-info/RECORD +0 -206
  205. {ob_metaflow_stubs-6.0.3.165.dist-info → ob_metaflow_stubs-6.0.3.166.dist-info}/WHEEL +0 -0
  206. {ob_metaflow_stubs-6.0.3.165.dist-info → ob_metaflow_stubs-6.0.3.166.dist-info}/top_level.txt +0 -0
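The diff shown below is for metaflow-stubs/__init__.pyi, the largest change in this release: the auto-generated header now records MF version 2.15.11.2 with a new generation timestamp, and several decorator stubs are re-ordered or re-generated. As a quick local sanity check after upgrading, a minimal sketch (assuming Python 3.8+ importlib.metadata and that the distribution is installed under the name "ob-metaflow-stubs"):

    from importlib import metadata

    dist = "ob-metaflow-stubs"  # assumed distribution name on the registry
    print(dist, metadata.version(dist))  # expect 6.0.3.166 after the upgrade

    # generated_for.txt (also changed in this release) records the Metaflow
    # version the stubs were generated against.
    for f in metadata.files(dist) or []:
        if f.name == "generated_for.txt":
            print(f.read_text())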
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.11.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-05-08T08:48:06.309319 #
+ # MF version: 2.15.11.2+obcheckpoint(0.2.1);ob(v1) #
+ # Generated on 2025-05-09T17:10:54.121807 #
  ######################################################################################################
 
  from __future__ import annotations
 
  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
 
@@ -36,17 +36,17 @@ from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import cards as cards
- from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
@@ -154,423 +154,409 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...
 
  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies environment variables to be set prior to the execution of a step.
 
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
 
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
 
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ User code call
+ -----------
+ @nim(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='managed'
+ )
 
+ Valid backend options
+ ---------------------
+ - 'managed': Outerbounds selects a compute provider based on the model.
 
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
+ Valid model options
+ ----------------
+ - 'meta/llama3-8b-instruct': 8B parameter model
+ - 'meta/llama3-70b-instruct': 70B parameter model
+ - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
 
 
  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ models: list[NIM]
+ List of NIM containers running models in sidecars.
+ backend: str
+ Compute provider to run the NIM container.
  queue_timeout : int
  Time to keep the job in NVCF's queue.
  """
  ...
 
  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Enables checkpointing for a step.
 
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
 
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Specifies that the step will success under all circumstances.
+ Enables checkpointing for a step.
 
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
 
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
  """
  ...
 
- def nim(*, models: "list[NIM]", backend: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
 
  User code call
  -----------
- @nim(
+ @ollama(
  models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
+ backend='local'
  )
 
  Valid backend options
  ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
 
  Valid model options
  ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+ - 'llama3.2'
+ - 'llama3.3'
+ - any model here https://ollama.com/search
 
 
  Parameters
  ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
+ models: list[Ollama]
+ List of Ollama containers running models in sidecars.
  backend: str
- Compute provider to run the NIM container.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ Determines where and how to run the Ollama process.
  """
  ...
 
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Specifies that this step should execute on Kubernetes.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...
 
  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Enables loading / saving of models within a step.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Specifies the PyPI packages for the step.
+ Enables loading / saving of models within a step.
 
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
 
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
 
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...
 
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies that this step should execute on DGX cloud.
 
 
  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
  """
  ...
 
- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
470
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
514
471
  """
515
- Specifies that this step is used to deploy an instance of the app.
516
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
472
+ Specifies that this step should execute on DGX cloud.
517
473
 
518
474
 
519
475
  Parameters
520
476
  ----------
521
- app_port : int
477
+ gpu : int
522
478
  Number of GPUs to use.
523
- app_name : str
524
- Name of the app to deploy.
479
+ gpu_type : str
480
+ Type of Nvidia GPU to use.
525
481
  """
526
482
  ...
527
483
 
528
484
  @typing.overload
529
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
485
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
530
486
  """
531
- Creates a human-readable report, a Metaflow Card, after this step completes.
487
+ Specifies the resources needed when executing this step.
532
488
 
533
- Note that you may add multiple `@card` decorators in a step with different parameters.
489
+ Use `@resources` to specify the resource requirements
490
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
491
+
492
+ You can choose the compute layer on the command line by executing e.g.
493
+ ```
494
+ python myflow.py run --with batch
495
+ ```
496
+ or
497
+ ```
498
+ python myflow.py run --with kubernetes
499
+ ```
500
+ which executes the flow on the desired system using the
501
+ requirements specified in `@resources`.
534
502
 
535
503
 
536
504
  Parameters
537
505
  ----------
538
- type : str, default 'default'
539
- Card type.
540
- id : str, optional, default None
541
- If multiple cards are present, use this id to identify this card.
542
- options : Dict[str, Any], default {}
543
- Options passed to the card. The contents depend on the card type.
544
- timeout : int, default 45
545
- Interrupt reporting if it takes more than this many seconds.
506
+ cpu : int, default 1
507
+ Number of CPUs required for this step.
508
+ gpu : int, optional, default None
509
+ Number of GPUs required for this step.
510
+ disk : int, optional, default None
511
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
512
+ memory : int, default 4096
513
+ Memory size (in MB) required for this step.
514
+ shared_memory : int, optional, default None
515
+ The value for the size (in MiB) of the /dev/shm volume for this step.
516
+ This parameter maps to the `--shm-size` option in Docker.
546
517
  """
547
518
  ...
548
519
 
549
520
  @typing.overload
550
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
521
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
551
522
  ...
552
523
 
553
524
  @typing.overload
554
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
525
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
555
526
  ...
556
527
 
557
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
528
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
558
529
  """
559
- Creates a human-readable report, a Metaflow Card, after this step completes.
530
+ Specifies the resources needed when executing this step.
560
531
 
561
- Note that you may add multiple `@card` decorators in a step with different parameters.
532
+ Use `@resources` to specify the resource requirements
533
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
534
+
535
+ You can choose the compute layer on the command line by executing e.g.
536
+ ```
537
+ python myflow.py run --with batch
538
+ ```
539
+ or
540
+ ```
541
+ python myflow.py run --with kubernetes
542
+ ```
543
+ which executes the flow on the desired system using the
544
+ requirements specified in `@resources`.
562
545
 
563
546
 
564
547
  Parameters
565
548
  ----------
566
- type : str, default 'default'
567
- Card type.
568
- id : str, optional, default None
569
- If multiple cards are present, use this id to identify this card.
570
- options : Dict[str, Any], default {}
571
- Options passed to the card. The contents depend on the card type.
572
- timeout : int, default 45
573
- Interrupt reporting if it takes more than this many seconds.
549
+ cpu : int, default 1
550
+ Number of CPUs required for this step.
551
+ gpu : int, optional, default None
552
+ Number of GPUs required for this step.
553
+ disk : int, optional, default None
554
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
555
+ memory : int, default 4096
556
+ Memory size (in MB) required for this step.
557
+ shared_memory : int, optional, default None
558
+ The value for the size (in MiB) of the /dev/shm volume for this step.
559
+ This parameter maps to the `--shm-size` option in Docker.
574
560
  """
575
561
  ...
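For orientation, here is a minimal sketch of how the `@resources` signature above is typically applied in user code. The flow and step names are illustrative; the only assumption is that the decorator is importable from `metaflow`, as this stub package implies.

```python
from metaflow import FlowSpec, step, resources


class TrainFlow(FlowSpec):
    # Resource requests only; they take effect when a compute layer such as
    # @batch or @kubernetes is selected, e.g. `run --with kubernetes`.
    @resources(cpu=2, memory=8192, disk=20480)
    @step
    def start(self):
        self.rows = list(range(10))  # stand-in for memory-hungry preprocessing
        self.next(self.end)

    @step
    def end(self):
        print(f"processed {len(self.rows)} rows")


if __name__ == "__main__":
    TrainFlow()
```

Running `python train_flow.py run --with kubernetes` applies these requests on the chosen compute layer, as the docstring above describes.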
576
562
 
@@ -593,31 +579,6 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
593
579
  """
594
580
  ...
595
581
 
596
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
597
- """
598
- Decorator that helps cache, version and store models/datasets from huggingface hub.
599
-
600
-
601
- Parameters
602
- ----------
603
- temp_dir_root : str, optional
604
- The root directory that will hold the temporary directory where objects will be downloaded.
605
-
606
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
607
- The list of repos (models/datasets) to load.
608
-
609
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
610
-
611
- - If repo (model/dataset) is not found in the datastore:
612
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
613
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
614
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
615
-
616
- - If repo is found in the datastore:
617
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
618
- """
619
- ...
620
-
621
582
  @typing.overload
622
583
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
623
584
  """
@@ -674,59 +635,203 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
674
635
  ...
675
636
 
676
637
  @typing.overload
677
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
638
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
639
+ """
640
+ Specifies the PyPI packages for the step.
641
+
642
+ Information in this decorator will augment any
643
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
644
+ you can use `@pypi_base` to set packages required by all
645
+ steps and use `@pypi` to specify step-specific overrides.
646
+
647
+
648
+ Parameters
649
+ ----------
650
+ packages : Dict[str, str], default: {}
651
+ Packages to use for this step. The key is the name of the package
652
+ and the value is the version to use.
653
+ python : str, optional, default: None
654
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
655
+ that the version used will correspond to the version of the Python interpreter used to start the run.
656
+ """
657
+ ...
658
+
659
+ @typing.overload
660
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
661
+ ...
662
+
663
+ @typing.overload
664
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
665
+ ...
666
+
667
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
668
+ """
669
+ Specifies the PyPI packages for the step.
670
+
671
+ Information in this decorator will augment any
672
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
673
+ you can use `@pypi_base` to set packages required by all
674
+ steps and use `@pypi` to specify step-specific overrides.
675
+
676
+
677
+ Parameters
678
+ ----------
679
+ packages : Dict[str, str], default: {}
680
+ Packages to use for this step. The key is the name of the package
681
+ and the value is the version to use.
682
+ python : str, optional, default: None
683
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
684
+ that the version used will correspond to the version of the Python interpreter used to start the run.
685
+ """
686
+ ...
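A short, hedged example of the step-level `@pypi` override described above; the package pins and flow name are illustrative.

```python
from metaflow import FlowSpec, step, pypi


class PandasFlow(FlowSpec):
    @pypi(packages={"pandas": "2.2.2"}, python="3.11")  # step-specific environment
    @step
    def start(self):
        import pandas as pd  # imported inside the step so it resolves in the pypi env
        self.mean = float(pd.Series([1, 2, 3]).mean())
        self.next(self.end)

    @step
    def end(self):
        print("mean:", self.mean)


if __name__ == "__main__":
    PandasFlow()
```

Such a flow is typically run with the matching `--environment` option (for example `--environment=pypi`) so the per-step environment is actually resolved.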
687
+
688
+ @typing.overload
689
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
690
+ """
691
+ Specifies that the step will succeed under all circumstances.
692
+
693
+ The decorator will create an optional artifact, specified by `var`, which
694
+ contains the exception raised. You can use it to detect the presence
695
+ of errors, indicating that all happy-path artifacts produced by the step
696
+ are missing.
697
+
698
+
699
+ Parameters
700
+ ----------
701
+ var : str, optional, default None
702
+ Name of the artifact in which to store the caught exception.
703
+ If not specified, the exception is not stored.
704
+ print_exception : bool, default True
705
+ Determines whether or not the exception is printed to
706
+ stdout when caught.
707
+ """
708
+ ...
709
+
710
+ @typing.overload
711
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
712
+ ...
713
+
714
+ @typing.overload
715
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
716
+ ...
717
+
718
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
719
+ """
720
+ Specifies that the step will succeed under all circumstances.
721
+
722
+ The decorator will create an optional artifact, specified by `var`, which
723
+ contains the exception raised. You can use it to detect the presence
724
+ of errors, indicating that all happy-path artifacts produced by the step
725
+ are missing.
726
+
727
+
728
+ Parameters
729
+ ----------
730
+ var : str, optional, default None
731
+ Name of the artifact in which to store the caught exception.
732
+ If not specified, the exception is not stored.
733
+ print_exception : bool, default True
734
+ Determines whether or not the exception is printed to
735
+ stdout when caught.
736
+ """
737
+ ...
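To make the `var` semantics above concrete, a sketch of checking the caught exception downstream. The artifact name `compute_failed` is an illustrative choice, and the defensive `getattr` simply hedges against the artifact being absent on a successful run.

```python
from metaflow import FlowSpec, step, catch


class RobustFlow(FlowSpec):
    @catch(var="compute_failed")  # record any exception instead of failing the run
    @step
    def start(self):
        self.result = 1 / 0  # raises ZeroDivisionError; @catch stores it in compute_failed
        self.next(self.end)

    @step
    def end(self):
        failure = getattr(self, "compute_failed", None)
        if failure:
            print("start failed with:", failure)
        else:
            print("result:", self.result)


if __name__ == "__main__":
    RobustFlow()
```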
738
+
739
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
740
+ """
741
+ Decorator that helps cache, version and store models/datasets from huggingface hub.
742
+
743
+
744
+ Parameters
745
+ ----------
746
+ temp_dir_root : str, optional
747
+ The root directory that will hold the temporary directory where objects will be downloaded.
748
+
749
+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
750
+ The list of repos (models/datasets) to load.
751
+
752
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
753
+
754
+ - If repo (model/dataset) is not found in the datastore:
755
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
756
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
757
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
758
+
759
+ - If repo is found in the datastore:
760
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
761
+ """
762
+ ...
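A sketch of the `@huggingface_hub` decorator above, under two stated assumptions: that the decorator is importable from `metaflow` in this distribution, and that `current.huggingface_hub.loaded` can be indexed by repo id to obtain the local path (the stub only says loaded repos are accessible via that attribute). The repo name is illustrative.

```python
from metaflow import FlowSpec, step, current, huggingface_hub


class HFFlow(FlowSpec):
    @huggingface_hub(load=["bert-base-uncased"])  # cached in the datastore after the first download
    @step
    def start(self):
        # Assumption: `loaded` maps each requested repo id to its local path.
        local_path = current.huggingface_hub.loaded["bert-base-uncased"]
        print("model available at", local_path)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    HFFlow()
```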
763
+
764
+ @typing.overload
765
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
678
766
  """
679
- Enables loading / saving of models within a step.
767
+ Specifies a timeout for your step.
680
768
 
769
+ This decorator is useful if this step may hang indefinitely.
681
770
 
771
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
772
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
773
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
682
774
 
683
- Parameters
684
- ----------
685
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
686
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
687
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
688
- - `current.checkpoint`
689
- - `current.model`
690
- - `current.huggingface_hub`
775
+ Note that all the values specified in parameters are added together so if you specify
776
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
691
777
 
692
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
693
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
694
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
695
778
 
696
- temp_dir_root : str, default: None
697
- The root directory under which `current.model.loaded` will store loaded models
779
+ Parameters
780
+ ----------
781
+ seconds : int, default 0
782
+ Number of seconds to wait prior to timing out.
783
+ minutes : int, default 0
784
+ Number of minutes to wait prior to timing out.
785
+ hours : int, default 0
786
+ Number of hours to wait prior to timing out.
698
787
  """
699
788
  ...
700
789
 
701
790
  @typing.overload
702
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
703
792
  ...
704
793
 
705
794
  @typing.overload
706
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
795
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
707
796
  ...
708
797
 
709
- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
798
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
710
799
  """
711
- Enables loading / saving of models within a step.
800
+ Specifies a timeout for your step.
801
+
802
+ This decorator is useful if this step may hang indefinitely.
803
+
804
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
805
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
806
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
712
807
 
808
+ Note that all the values specified in parameters are added together so if you specify
809
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
713
810
 
714
811
 
715
812
  Parameters
716
813
  ----------
717
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
718
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
719
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
720
- - `current.checkpoint`
721
- - `current.model`
722
- - `current.huggingface_hub`
814
+ seconds : int, default 0
815
+ Number of seconds to wait prior to timing out.
816
+ minutes : int, default 0
817
+ Number of minutes to wait prior to timing out.
818
+ hours : int, default 0
819
+ Number of hours to wait prior to timing out.
820
+ """
821
+ ...
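A brief sketch combining `@timeout` with `@retry` and `@catch`, as the docstring above suggests; the limits and artifact name are illustrative.

```python
import time

from metaflow import FlowSpec, step, catch, retry, timeout


class SlowFlow(FlowSpec):
    @catch(var="timed_out")            # record the timeout instead of failing the run
    @retry(times=1)                    # one retry before @catch takes over
    @timeout(minutes=1, seconds=30)    # values add up: effective limit is 90 seconds
    @step
    def start(self):
        time.sleep(5)  # stand-in for work that might hang
        self.next(self.end)

    @step
    def end(self):
        print("timed out?", bool(getattr(self, "timed_out", None)))


if __name__ == "__main__":
    SlowFlow()
```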
822
+
823
+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
+ """
825
+ Specifies that this step is used to deploy an instance of the app.
826
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir are set.
723
827
 
724
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
725
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
726
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
727
828
 
728
- temp_dir_root : str, default: None
729
- The root directory under which `current.model.loaded` will store loaded models
829
+ Parameters
830
+ ----------
831
+ app_port : int
832
+ Port on which the deployed app listens.
833
+ app_name : str
834
+ Name of the app to deploy.
730
835
  """
731
836
  ...
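The `app_deploy` stub is specific to this distribution, so the following is a speculative sketch only: it assumes the decorator is exposed at the top level like the other stubs, and the attribute values are placeholders taken from the list in the docstring above (`app_name`, `app_port`, `entrypoint`, `deployDir`).

```python
from metaflow import FlowSpec, step, app_deploy  # assumption: exported like the other decorators


class DeployFlow(FlowSpec):
    @app_deploy(app_port=8080, app_name="demo-app")
    @step
    def start(self):
        # Attributes the docstring says must be set; values are placeholders.
        self.app_name = "demo-app"
        self.app_port = 8080
        self.entrypoint = "python serve.py"
        self.deployDir = "./app"
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DeployFlow()
```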
732
837
 
@@ -747,337 +852,187 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
747
852
  """
748
853
  ...
749
854
 
750
- def ollama(*, models: "list[Ollama]", backend: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
855
+ @typing.overload
856
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
751
857
  """
752
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
753
-
754
- User code call
755
- -----------
756
- @ollama(
757
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
758
- backend='local'
759
- )
760
-
761
- Valid backend options
762
- ---------------------
763
- - 'local': Run as a separate process on the local task machine.
764
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
765
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
766
-
767
- Valid model options
768
- ----------------
769
- - 'llama3.2'
770
- - 'llama3.3'
771
- - any model here https://ollama.com/search
858
+ Specifies secrets to be retrieved and injected as environment variables prior to
859
+ the execution of a step.
772
860
 
773
861
 
774
862
  Parameters
775
863
  ----------
776
- models: list[Ollama]
777
- List of Ollama containers running models in sidecars.
778
- backend: str
779
- Determines where and how to run the Ollama process.
864
+ sources : List[Union[str, Dict[str, Any]]], default: []
865
+ List of secret specs, defining how the secrets are to be retrieved
780
866
  """
781
867
  ...
782
868
 
783
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
784
- """
785
- Specifies that this step should execute on Kubernetes.
786
-
787
-
788
- Parameters
789
- ----------
790
- cpu : int, default 1
791
- Number of CPUs required for this step. If `@resources` is
792
- also present, the maximum value from all decorators is used.
793
- memory : int, default 4096
794
- Memory size (in MB) required for this step. If
795
- `@resources` is also present, the maximum value from all decorators is
796
- used.
797
- disk : int, default 10240
798
- Disk size (in MB) required for this step. If
799
- `@resources` is also present, the maximum value from all decorators is
800
- used.
801
- image : str, optional, default None
802
- Docker image to use when launching on Kubernetes. If not specified, and
803
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
804
- not, a default Docker image mapping to the current version of Python is used.
805
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
806
- If given, the imagePullPolicy to be applied to the Docker image of the step.
807
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
808
- Kubernetes service account to use when launching pod in Kubernetes.
809
- secrets : List[str], optional, default None
810
- Kubernetes secrets to use when launching pod in Kubernetes. These
811
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
812
- in Metaflow configuration.
813
- node_selector: Union[Dict[str,str], str], optional, default None
814
- Kubernetes node selector(s) to apply to the pod running the task.
815
- Can be passed in as a comma separated string of values e.g.
816
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
817
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
818
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
819
- Kubernetes namespace to use when launching pod in Kubernetes.
820
- gpu : int, optional, default None
821
- Number of GPUs required for this step. A value of zero implies that
822
- the scheduled node should not have GPUs.
823
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
824
- The vendor of the GPUs to be used for this step.
825
- tolerations : List[str], default []
826
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
827
- Kubernetes tolerations to use when launching pod in Kubernetes.
828
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
829
- Kubernetes labels to use when launching pod in Kubernetes.
830
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
831
- Kubernetes annotations to use when launching pod in Kubernetes.
832
- use_tmpfs : bool, default False
833
- This enables an explicit tmpfs mount for this step.
834
- tmpfs_tempdir : bool, default True
835
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
836
- tmpfs_size : int, optional, default: None
837
- The value for the size (in MiB) of the tmpfs mount for this step.
838
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
839
- memory allocated for this step.
840
- tmpfs_path : str, optional, default /metaflow_temp
841
- Path to tmpfs mount for this step.
842
- persistent_volume_claims : Dict[str, str], optional, default None
843
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
844
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
845
- shared_memory: int, optional
846
- Shared memory size (in MiB) required for this step
847
- port: int, optional
848
- Port number to specify in the Kubernetes job object
849
- compute_pool : str, optional, default None
850
- Compute pool to be used for for this step.
851
- If not specified, any accessible compute pool within the perimeter is used.
852
- hostname_resolution_timeout: int, default 10 * 60
853
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
854
- Only applicable when @parallel is used.
855
- qos: str, default: Burstable
856
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
857
-
858
- security_context: Dict[str, Any], optional, default None
859
- Container security context. Applies to the task container. Allows the following keys:
860
- - privileged: bool, optional, default None
861
- - allow_privilege_escalation: bool, optional, default None
862
- - run_as_user: int, optional, default None
863
- - run_as_group: int, optional, default None
864
- - run_as_non_root: bool, optional, default None
865
- """
869
+ @typing.overload
870
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
866
871
  ...
867
872
 
868
873
  @typing.overload
869
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
874
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
875
+ ...
876
+
877
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
870
878
  """
871
- Enables checkpointing for a step.
872
-
879
+ Specifies secrets to be retrieved and injected as environment variables prior to
880
+ the execution of a step.
873
881
 
874
882
 
875
883
  Parameters
876
884
  ----------
877
- load_policy : str, default: "fresh"
878
- The policy for loading the checkpoint. The following policies are supported:
879
- - "eager": Loads the the latest available checkpoint within the namespace.
880
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
881
- will be loaded at the start of the task.
882
- - "none": Do not load any checkpoint
883
- - "fresh": Loads the lastest checkpoint created within the running Task.
884
- This mode helps loading checkpoints across various retry attempts of the same task.
885
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
886
- created within the task will be loaded when the task is retries execution on failure.
887
-
888
- temp_dir_root : str, default: None
889
- The root directory under which `current.checkpoint.directory` will be created.
885
+ sources : List[Union[str, Dict[str, Any]]], default: []
886
+ List of secret specs, defining how the secrets are to be retrieved
890
887
  """
891
888
  ...
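A minimal sketch of the `@secrets` usage described above. The secret source name and the injected environment variable are placeholders; both depend on the secrets backend configured for the deployment.

```python
import os

from metaflow import FlowSpec, step, secrets


class SecretFlow(FlowSpec):
    @secrets(sources=["my-db-credentials"])  # placeholder secret spec
    @step
    def start(self):
        # The backend injects the secret's keys as environment variables before the step runs.
        self.db_user = os.environ.get("DB_USER", "<missing>")
        self.next(self.end)

    @step
    def end(self):
        print("user:", self.db_user)


if __name__ == "__main__":
    SecretFlow()
```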
892
889
 
893
890
  @typing.overload
894
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
895
- ...
896
-
897
- @typing.overload
898
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
899
- ...
900
-
901
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
891
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
902
892
  """
903
- Enables checkpointing for a step.
893
+ Specifies the Conda environment for the step.
904
894
 
895
+ Information in this decorator will augment any
896
+ attributes set in the `@conda_base` flow-level decorator. Hence,
897
+ you can use `@conda_base` to set packages required by all
898
+ steps and use `@conda` to specify step-specific overrides.
905
899
 
906
900
 
907
901
  Parameters
908
902
  ----------
909
- load_policy : str, default: "fresh"
910
- The policy for loading the checkpoint. The following policies are supported:
911
- - "eager": Loads the the latest available checkpoint within the namespace.
912
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
913
- will be loaded at the start of the task.
914
- - "none": Do not load any checkpoint
915
- - "fresh": Loads the lastest checkpoint created within the running Task.
916
- This mode helps loading checkpoints across various retry attempts of the same task.
917
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
918
- created within the task will be loaded when the task is retries execution on failure.
919
-
920
- temp_dir_root : str, default: None
921
- The root directory under which `current.checkpoint.directory` will be created.
903
+ packages : Dict[str, str], default {}
904
+ Packages to use for this step. The key is the name of the package
905
+ and the value is the version to use.
906
+ libraries : Dict[str, str], default {}
907
+ Supported for backward compatibility. When used with packages, packages will take precedence.
908
+ python : str, optional, default None
909
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
910
+ that the version used will correspond to the version of the Python interpreter used to start the run.
911
+ disabled : bool, default False
912
+ If set to True, disables @conda.
922
913
  """
923
914
  ...
924
915
 
925
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
916
+ @typing.overload
917
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
918
+ ...
919
+
920
+ @typing.overload
921
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
922
+ ...
923
+
924
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
926
925
  """
927
- Specifies that this step should execute on DGX cloud.
926
+ Specifies the Conda environment for the step.
927
+
928
+ Information in this decorator will augment any
929
+ attributes set in the `@conda_base` flow-level decorator. Hence,
930
+ you can use `@conda_base` to set packages required by all
931
+ steps and use `@conda` to specify step-specific overrides.
928
932
 
929
933
 
930
934
  Parameters
931
935
  ----------
932
- gpu : int
933
- Number of GPUs to use.
934
- gpu_type : str
935
- Type of Nvidia GPU to use.
936
+ packages : Dict[str, str], default {}
937
+ Packages to use for this step. The key is the name of the package
938
+ and the value is the version to use.
939
+ libraries : Dict[str, str], default {}
940
+ Supported for backward compatibility. When used with packages, packages will take precedence.
941
+ python : str, optional, default None
942
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
943
+ that the version used will correspond to the version of the Python interpreter used to start the run.
944
+ disabled : bool, default False
945
+ If set to True, disables @conda.
936
946
  """
937
947
  ...
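A short example of a step-level `@conda` environment matching the signature above; the pins are illustrative.

```python
from metaflow import FlowSpec, step, conda


class SklearnFlow(FlowSpec):
    @conda(packages={"scikit-learn": "1.4.2"}, python="3.11")
    @step
    def start(self):
        from sklearn.linear_model import LinearRegression  # resolved inside the conda env
        self.model_name = type(LinearRegression()).__name__
        self.next(self.end)

    @step
    def end(self):
        print("trained with", self.model_name)


if __name__ == "__main__":
    SklearnFlow()
```

Such a flow is typically run with `--environment=conda` so the step environment is created before execution.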
938
948
 
939
949
  @typing.overload
940
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
950
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
941
951
  """
942
- Specifies a timeout for your step.
943
-
944
- This decorator is useful if this step may hang indefinitely.
945
-
946
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
947
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
948
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
952
+ Creates a human-readable report, a Metaflow Card, after this step completes.
949
953
 
950
- Note that all the values specified in parameters are added together so if you specify
951
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
954
+ Note that you may add multiple `@card` decorators in a step with different parameters.
952
955
 
953
956
 
954
957
  Parameters
955
958
  ----------
956
- seconds : int, default 0
957
- Number of seconds to wait prior to timing out.
958
- minutes : int, default 0
959
- Number of minutes to wait prior to timing out.
960
- hours : int, default 0
961
- Number of hours to wait prior to timing out.
959
+ type : str, default 'default'
960
+ Card type.
961
+ id : str, optional, default None
962
+ If multiple cards are present, use this id to identify this card.
963
+ options : Dict[str, Any], default {}
964
+ Options passed to the card. The contents depend on the card type.
965
+ timeout : int, default 45
966
+ Interrupt reporting if it takes more than this many seconds.
962
967
  """
963
968
  ...
964
969
 
965
970
  @typing.overload
966
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
971
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
967
972
  ...
968
973
 
969
974
  @typing.overload
970
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
971
- ...
972
-
973
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
974
- """
975
- Specifies a timeout for your step.
976
-
977
- This decorator is useful if this step may hang indefinitely.
978
-
979
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
980
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
981
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
982
-
983
- Note that all the values specified in parameters are added together so if you specify
984
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
985
-
986
-
987
- Parameters
988
- ----------
989
- seconds : int, default 0
990
- Number of seconds to wait prior to timing out.
991
- minutes : int, default 0
992
- Number of minutes to wait prior to timing out.
993
- hours : int, default 0
994
- Number of hours to wait prior to timing out.
995
- """
975
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
996
976
  ...
997
977
 
998
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
978
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
999
979
  """
1000
- Specifies what flows belong to the same project.
980
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1001
981
 
1002
- A project-specific namespace is created for all flows that
1003
- use the same `@project(name)`.
982
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1004
983
 
1005
984
 
1006
985
  Parameters
1007
986
  ----------
1008
- name : str
1009
- Project name. Make sure that the name is unique amongst all
1010
- projects that use the same production scheduler. The name may
1011
- contain only lowercase alphanumeric characters and underscores.
1012
-
1013
- branch : Optional[str], default None
1014
- The branch to use. If not specified, the branch is set to
1015
- `user.<username>` unless `production` is set to `True`. This can
1016
- also be set on the command line using `--branch` as a top-level option.
1017
- It is an error to specify `branch` in the decorator and on the command line.
1018
-
1019
- production : bool, default False
1020
- Whether or not the branch is the production branch. This can also be set on the
1021
- command line using `--production` as a top-level option. It is an error to specify
1022
- `production` in the decorator and on the command line.
1023
- The project branch name will be:
1024
- - if `branch` is specified:
1025
- - if `production` is True: `prod.<branch>`
1026
- - if `production` is False: `test.<branch>`
1027
- - if `branch` is not specified:
1028
- - if `production` is True: `prod`
1029
- - if `production` is False: `user.<username>`
987
+ type : str, default 'default'
988
+ Card type.
989
+ id : str, optional, default None
990
+ If multiple cards are present, use this id to identify this card.
991
+ options : Dict[str, Any], default {}
992
+ Options passed to the card. The contents depend on the card type.
993
+ timeout : int, default 45
994
+ Interrupt reporting if it takes more than this many seconds.
1030
995
  """
1031
996
  ...
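To ground the `@card` parameters above, a sketch that renders a default card and appends a Markdown component through `current.card`; the `metaflow.cards.Markdown` import mirrors the `cards` stub shipped in this package, and the metric value is illustrative.

```python
from metaflow import FlowSpec, step, card, current
from metaflow.cards import Markdown


class ReportFlow(FlowSpec):
    @card(type="default", timeout=60)  # one card per decorator; use `id` to tell several apart
    @step
    def start(self):
        self.accuracy = 0.93
        current.card.append(Markdown(f"## Accuracy: {self.accuracy:.2f}"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ReportFlow()
```

After a run, the card can normally be inspected with `python report_flow.py card view start`.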
1032
997
 
1033
998
  @typing.overload
1034
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
999
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1035
1000
  """
1036
- Specifies the Conda environment for all steps of the flow.
1037
-
1038
- Use `@conda_base` to set common libraries required by all
1039
- steps and use `@conda` to specify step-specific additions.
1001
+ Specifies the PyPI packages for all steps of the flow.
1040
1002
 
1003
+ Use `@pypi_base` to set common packages required by all
1004
+ steps and use `@pypi` to specify step-specific overrides.
1041
1005
 
1042
1006
  Parameters
1043
1007
  ----------
1044
- packages : Dict[str, str], default {}
1008
+ packages : Dict[str, str], default: {}
1045
1009
  Packages to use for this flow. The key is the name of the package
1046
1010
  and the value is the version to use.
1047
- libraries : Dict[str, str], default {}
1048
- Supported for backward compatibility. When used with packages, packages will take precedence.
1049
- python : str, optional, default None
1011
+ python : str, optional, default: None
1050
1012
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1051
1013
  that the version used will correspond to the version of the Python interpreter used to start the run.
1052
- disabled : bool, default False
1053
- If set to True, disables Conda.
1054
1014
  """
1055
1015
  ...
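A compact sketch of combining the flow-level `@pypi_base` above with a step-level `@pypi` override, as the docstring describes; the pins are illustrative.

```python
from metaflow import FlowSpec, step, pypi, pypi_base


@pypi_base(packages={"requests": "2.32.3"}, python="3.11")  # shared by every step
class BasePackagesFlow(FlowSpec):
    @step
    def start(self):
        import requests
        self.ua = requests.utils.default_user_agent()
        self.next(self.end)

    @pypi(packages={"pandas": "2.2.2"})  # step-specific addition on top of the base set
    @step
    def end(self):
        import pandas as pd
        print(self.ua, pd.__version__)


if __name__ == "__main__":
    BasePackagesFlow()
```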
1056
1016
 
1057
1017
  @typing.overload
1058
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1018
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1059
1019
  ...
1060
1020
 
1061
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1021
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1062
1022
  """
1063
- Specifies the Conda environment for all steps of the flow.
1064
-
1065
- Use `@conda_base` to set common libraries required by all
1066
- steps and use `@conda` to specify step-specific additions.
1023
+ Specifies the PyPI packages for all steps of the flow.
1067
1024
 
1025
+ Use `@pypi_base` to set common packages required by all
1026
+ steps and use `@pypi` to specify step-specific overrides.
1068
1027
 
1069
1028
  Parameters
1070
1029
  ----------
1071
- packages : Dict[str, str], default {}
1030
+ packages : Dict[str, str], default: {}
1072
1031
  Packages to use for this flow. The key is the name of the package
1073
1032
  and the value is the version to use.
1074
- libraries : Dict[str, str], default {}
1075
- Supported for backward compatibility. When used with packages, packages will take precedence.
1076
- python : str, optional, default None
1033
+ python : str, optional, default: None
1077
1034
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1078
1035
  that the version used will correspond to the version of the Python interpreter used to start the run.
1079
- disabled : bool, default False
1080
- If set to True, disables Conda.
1081
1036
  """
1082
1037
  ...
1083
1038
 
@@ -1132,158 +1087,89 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1132
1087
  """
1133
1088
  ...
1134
1089
 
1135
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1090
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1136
1091
  """
1137
- Allows setting external datastores to save data for the
1138
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1139
-
1140
- This decorator is useful when users wish to save data to a different datastore
1141
- than what is configured in Metaflow. This can be for variety of reasons:
1142
-
1143
- 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1144
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1145
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1146
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1147
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1148
-
1149
- Usage:
1150
- ----------
1151
-
1152
- - Using a custom IAM role to access the datastore.
1153
-
1154
- ```python
1155
- @with_artifact_store(
1156
- type="s3",
1157
- config=lambda: {
1158
- "root": "s3://my-bucket-foo/path/to/root",
1159
- "role_arn": ROLE,
1160
- },
1161
- )
1162
- class MyFlow(FlowSpec):
1163
-
1164
- @checkpoint
1165
- @step
1166
- def start(self):
1167
- with open("my_file.txt", "w") as f:
1168
- f.write("Hello, World!")
1169
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1170
- self.next(self.end)
1171
-
1172
- ```
1173
-
1174
- - Using credentials to access the s3-compatible datastore.
1175
-
1176
- ```python
1177
- @with_artifact_store(
1178
- type="s3",
1179
- config=lambda: {
1180
- "root": "s3://my-bucket-foo/path/to/root",
1181
- "client_params": {
1182
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1183
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1184
- },
1185
- },
1186
- )
1187
- class MyFlow(FlowSpec):
1188
-
1189
- @checkpoint
1190
- @step
1191
- def start(self):
1192
- with open("my_file.txt", "w") as f:
1193
- f.write("Hello, World!")
1194
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1195
- self.next(self.end)
1196
-
1197
- ```
1198
-
1199
- - Accessing objects stored in external datastores after task execution.
1200
-
1201
- ```python
1202
- run = Run("CheckpointsTestsFlow/8992")
1203
- with artifact_store_from(run=run, config={
1204
- "client_params": {
1205
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1206
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1207
- },
1208
- }):
1209
- with Checkpoint() as cp:
1210
- latest = cp.list(
1211
- task=run["start"].task
1212
- )[0]
1213
- print(latest)
1214
- cp.load(
1215
- latest,
1216
- "test-checkpoints"
1217
- )
1092
+ Specifies what flows belong to the same project.
1218
1093
 
1219
- task = Task("TorchTuneFlow/8484/train/53673")
1220
- with artifact_store_from(run=run, config={
1221
- "client_params": {
1222
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1223
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1224
- },
1225
- }):
1226
- load_model(
1227
- task.data.model_ref,
1228
- "test-models"
1229
- )
1230
- ```
1231
- Parameters:
1094
+ A project-specific namespace is created for all flows that
1095
+ use the same `@project(name)`.
1096
+
1097
+
1098
+ Parameters
1232
1099
  ----------
1100
+ name : str
1101
+ Project name. Make sure that the name is unique amongst all
1102
+ projects that use the same production scheduler. The name may
1103
+ contain only lowercase alphanumeric characters and underscores.
1233
1104
 
1234
- type: str
1235
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1105
+ branch : Optional[str], default None
1106
+ The branch to use. If not specified, the branch is set to
1107
+ `user.<username>` unless `production` is set to `True`. This can
1108
+ also be set on the command line using `--branch` as a top-level option.
1109
+ It is an error to specify `branch` in the decorator and on the command line.
1236
1110
 
1237
- config: dict or Callable
1238
- Dictionary of configuration options for the datastore. The following keys are required:
1239
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1240
- - example: 's3://bucket-name/path/to/root'
1241
- - example: 'gs://bucket-name/path/to/root'
1242
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1243
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1244
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1245
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1111
+ production : bool, default False
1112
+ Whether or not the branch is the production branch. This can also be set on the
1113
+ command line using `--production` as a top-level option. It is an error to specify
1114
+ `production` in the decorator and on the command line.
1115
+ The project branch name will be:
1116
+ - if `branch` is specified:
1117
+ - if `production` is True: `prod.<branch>`
1118
+ - if `production` is False: `test.<branch>`
1119
+ - if `branch` is not specified:
1120
+ - if `production` is True: `prod`
1121
+ - if `production` is False: `user.<username>`
1246
1122
  """
1247
1123
  ...
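A small sketch of the `@project` namespacing described above; the project name is illustrative.

```python
from metaflow import FlowSpec, project, step


@project(name="fraud_model")  # flows sharing this name share a project namespace
class TrainingFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainingFlow()
```

Per the rules listed above, deploying with `--branch staging` yields the `test.staging` branch, while `--production` without a branch yields `prod`.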
1248
1124
 
1249
1125
  @typing.overload
1250
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1126
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1251
1127
  """
1252
- Specifies the PyPI packages for all steps of the flow.
1128
+ Specifies the Conda environment for all steps of the flow.
1129
+
1130
+ Use `@conda_base` to set common libraries required by all
1131
+ steps and use `@conda` to specify step-specific additions.
1253
1132
 
1254
- Use `@pypi_base` to set common packages required by all
1255
- steps and use `@pypi` to specify step-specific overrides.
1256
1133
 
1257
1134
  Parameters
1258
1135
  ----------
1259
- packages : Dict[str, str], default: {}
1136
+ packages : Dict[str, str], default {}
1260
1137
  Packages to use for this flow. The key is the name of the package
1261
1138
  and the value is the version to use.
1262
- python : str, optional, default: None
1139
+ libraries : Dict[str, str], default {}
1140
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1141
+ python : str, optional, default None
1263
1142
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1264
1143
  that the version used will correspond to the version of the Python interpreter used to start the run.
1144
+ disabled : bool, default False
1145
+ If set to True, disables Conda.
1265
1146
  """
1266
1147
  ...
1267
1148
 
1268
1149
  @typing.overload
1269
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1150
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1270
1151
  ...
1271
1152
 
1272
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1153
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1273
1154
  """
1274
- Specifies the PyPI packages for all steps of the flow.
1155
+ Specifies the Conda environment for all steps of the flow.
1156
+
1157
+ Use `@conda_base` to set common libraries required by all
1158
+ steps and use `@conda` to specify step-specific additions.
1275
1159
 
1276
- Use `@pypi_base` to set common packages required by all
1277
- steps and use `@pypi` to specify step-specific overrides.
1278
1160
 
1279
1161
  Parameters
1280
1162
  ----------
1281
- packages : Dict[str, str], default: {}
1163
+ packages : Dict[str, str], default {}
1282
1164
  Packages to use for this flow. The key is the name of the package
1283
1165
  and the value is the version to use.
1284
- python : str, optional, default: None
1166
+ libraries : Dict[str, str], default {}
1167
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1168
+ python : str, optional, default None
1285
1169
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1286
1170
  that the version used will correspond to the version of the Python interpreter used to start the run.
1171
+ disabled : bool, default False
1172
+ If set to True, disables Conda.
1287
1173
  """
1288
1174
  ...
1289
1175
 
@@ -1481,46 +1367,117 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1481
1367
  """
1482
1368
  ...
1483
1369
 
1484
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1370
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1485
1371
  """
1486
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1487
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1372
+ Allows setting external datastores to save data for the
1373
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1488
1374
 
1375
+ This decorator is useful when users wish to save data to a different datastore
1376
+ than what is configured in Metaflow. This can be for a variety of reasons:
1489
1377
 
1490
- Parameters
1378
+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1379
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1380
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1381
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1382
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1383
+
1384
+ Usage:
1491
1385
  ----------
1492
- timeout : int
1493
- Time, in seconds before the task times out and fails. (Default: 3600)
1494
- poke_interval : int
1495
- Time in seconds that the job should wait in between each try. (Default: 60)
1496
- mode : str
1497
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1498
- exponential_backoff : bool
1499
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1500
- pool : str
1501
- the slot pool this task should run in,
1502
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1503
- soft_fail : bool
1504
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1505
- name : str
1506
- Name of the sensor on Airflow
1507
- description : str
1508
- Description of sensor in the Airflow UI
1509
- external_dag_id : str
1510
- The dag_id that contains the task you want to wait for.
1511
- external_task_ids : List[str]
1512
- The list of task_ids that you want to wait for.
1513
- If None (default value) the sensor waits for the DAG. (Default: None)
1514
- allowed_states : List[str]
1515
- Iterable of allowed states, (Default: ['success'])
1516
- failed_states : List[str]
1517
- Iterable of failed or dis-allowed states. (Default: None)
1518
- execution_delta : datetime.timedelta
1519
- time difference with the previous execution to look at,
1520
- the default is the same logical date as the current task or DAG. (Default: None)
1521
- check_existence: bool
1522
- Set to True to check if the external task exists or check if
1523
- the DAG to wait for exists. (Default: True)
1386
+
+ - Using a custom IAM role to access the datastore.
+
+ ```python
+ @with_artifact_store(
+     type="s3",
+     config=lambda: {
+         "root": "s3://my-bucket-foo/path/to/root",
+         "role_arn": ROLE,
+     },
+ )
+ class MyFlow(FlowSpec):
+
+     @checkpoint
+     @step
+     def start(self):
+         with open("my_file.txt", "w") as f:
+             f.write("Hello, World!")
+         self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+         self.next(self.end)
+
+ ```
+
+ - Using credentials to access the s3-compatible datastore.
+
+ ```python
+ @with_artifact_store(
+     type="s3",
+     config=lambda: {
+         "root": "s3://my-bucket-foo/path/to/root",
+         "client_params": {
+             "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+             "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+         },
+     },
+ )
+ class MyFlow(FlowSpec):
+
+     @checkpoint
+     @step
+     def start(self):
+         with open("my_file.txt", "w") as f:
+             f.write("Hello, World!")
+         self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
+         self.next(self.end)
+
+ ```
+
+ - Accessing objects stored in external datastores after task execution.
+
+ ```python
+ run = Run("CheckpointsTestsFlow/8992")
+ with artifact_store_from(run=run, config={
+     "client_params": {
+         "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+         "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+     },
+ }):
+     with Checkpoint() as cp:
+         latest = cp.list(
+             task=run["start"].task
+         )[0]
+         print(latest)
+         cp.load(
+             latest,
+             "test-checkpoints"
+         )
+
+ task = Task("TorchTuneFlow/8484/train/53673")
+ with artifact_store_from(run=run, config={
+     "client_params": {
+         "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
+         "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
+     },
+ }):
+     load_model(
+         task.data.model_ref,
+         "test-models"
+     )
+ ```
+ Parameters:
+ ----------
+
+ type: str
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
+
+ config: dict or Callable
+ Dictionary of configuration options for the datastore. The following keys are required:
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
+ - example: 's3://bucket-name/path/to/root'
+ - example: 'gs://bucket-name/path/to/root'
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
  """
  ...
 
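For comparison with the S3 examples in the docstring added above, here is a minimal sketch of the GCS variant implied by the `type` and `root` parameter descriptions. The flow name, bucket path, and the top-level imports of `with_artifact_store` and `checkpoint` are assumptions for illustration, not code taken from this package.

```python
from metaflow import FlowSpec, step, current, checkpoint, with_artifact_store


# Illustrative sketch: same pattern as the S3 examples above, but with a
# GCS root as described under the `config` parameter ('gs://bucket-name/path/to/root').
@with_artifact_store(
    type="gcs",
    config=lambda: {
        # Placeholder bucket/path; the root must use the format the datastore expects.
        "root": "gs://my-bucket-foo/path/to/root",
    },
)
class GCSCheckpointFlow(FlowSpec):

    @checkpoint
    @step
    def start(self):
        with open("my_file.txt", "w") as f:
            f.write("Hello, World!")
        # The checkpoint is written under the GCS root configured above.
        self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    GCSCheckpointFlow()
```

Unlike the S3 examples, no credential keys are shown here; the docstring only documents `role_arn`, `session_vars`, and `client_params` for the 's3' type, so how GCS credentials are supplied is left to the execution environment.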
@@ -1567,5 +1524,48 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...
 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states, (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ time difference with the previous execution to look at,
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
+ """
+ ...
+
  pkg_name: str
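To make the sensor's parameter list concrete, here is a short sketch of how the decorator might be attached to a flow. The DAG id, flow name, and the top-level import are placeholders for illustration, not taken from this diff, and the parameter values simply restate the defaults documented above.

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor


# Illustrative sketch: wait for a hypothetical upstream Airflow DAG
# ("nightly_etl") to succeed before the start step runs.
@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_nightly_etl",
    description="Wait for the nightly ETL DAG before starting",
    external_dag_id="nightly_etl",
    external_task_ids=None,   # None -> wait for the whole DAG, per the docstring
    allowed_states=["success"],
    failed_states=None,
    execution_delta=None,     # same logical date as the current DAG
    check_existence=True,
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the sensor reports the upstream DAG as successful.
        print("Upstream DAG finished; starting.")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```

Per the docstring, the sensor only takes effect when the flow is scheduled on Airflow, i.e. compiled with `airflow create`; running the flow locally ignores it.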