ob-metaflow-stubs 6.0.3.179rc0__py2.py3-none-any.whl → 6.0.3.179rc2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +688 -688
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +3 -3
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +1 -1
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +100 -100
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +2 -2
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +2 -2
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +3 -3
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +2 -2
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +2 -2
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +3 -3
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +2 -2
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +2 -2
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +2 -2
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +2 -2
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +2 -2
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +2 -2
  95. metaflow-stubs/plugins/__init__.pyi +9 -9
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +1 -1
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +1 -1
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +1 -1
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +1 -1
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +1 -1
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +1 -1
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +28 -28
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +2 -2
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  216. {ob_metaflow_stubs-6.0.3.179rc0.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.179rc2.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.179rc0.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.179rc0.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.179rc0.dist-info → ob_metaflow_stubs-6.0.3.179rc2.dist-info}/top_level.txt +0 -0
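Almost all of the change below is in metaflow-stubs/__init__.pyi, whose auto-generated decorator stubs (@card, @checkpoint, @timeout, @pypi, @kubernetes, @secrets, @model, @ollama, @conda, @huggingface_hub, and others) are re-emitted in a different order between the two release candidates. For orientation only, here is a minimal sketch, not part of the package, of how a few of the decorators covered by these stubs are typically applied in user code; the flow name, step bodies, and package pin are illustrative assumptions:

    from metaflow import FlowSpec, card, pypi, step, timeout

    class ExampleFlow(FlowSpec):  # hypothetical flow, for illustration only

        @card(type="default", timeout=45)    # render a Metaflow Card after the step completes
        @timeout(minutes=30)                 # seconds/minutes/hours are added together into one timeout
        @pypi(packages={"pandas": "2.2.2"})  # step-specific PyPI packages; the version pin is illustrative
        @step
        def start(self):
            import pandas as pd
            self.table = pd.DataFrame({"x": [1, 2, 3]})
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ExampleFlow()

To actually resolve the @pypi environment you would typically run the flow with Metaflow's --environment=pypi option.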
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-13T18:47:55.254260 #
+ # Generated on 2025-06-13T20:28:05.258106 #
  ######################################################################################################

  from __future__ import annotations
@@ -44,8 +44,8 @@ from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import client as client
  from .client.core import namespace as namespace
@@ -155,70 +155,182 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Enables checkpointing for a step.
+
+
+
+ Parameters
+ ----------
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ @typing.overload
+ def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
  """
- Specifies that the step will success under all circumstances.
+ Enables checkpointing for a step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ load_policy : str, default: "fresh"
+ The policy for loading the checkpoint. The following policies are supported:
+ - "eager": Loads the the latest available checkpoint within the namespace.
+ With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
+ will be loaded at the start of the task.
+ - "none": Do not load any checkpoint
+ - "fresh": Loads the lastest checkpoint created within the running Task.
+ This mode helps loading checkpoints across various retry attempts of the same task.
+ With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
+ created within the task will be loaded when the task is retries execution on failure.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.checkpoint.directory` will be created.
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
  """
  ...

@@ -273,172 +385,336 @@ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepF
  ...

  @typing.overload
- def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Enables loading / saving of models within a step.
-
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
- - `current.checkpoint`
- - `current.model`
- - `current.huggingface_hub`
-
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Enables loading / saving of models within a step.
-
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
- Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
- These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
- - `current.checkpoint`
- - `current.model`
- - `current.huggingface_hub`
-
- If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
- the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
- If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
-
- temp_dir_root : str, default: None
- The root directory under which `current.model.loaded` will store loaded models
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def model(*, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Enables loading / saving of models within a step.
+


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def model(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load: typing.Union[typing.List[str], str, typing.List[typing.Tuple[str, typing.Optional[str]]]] = None, temp_dir_root: str = None):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Enables loading / saving of models within a step.
+


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ load : Union[List[str],str,List[Tuple[str,Union[str,None]]]], default: None
+ Artifact name/s referencing the models/checkpoints to load. Artifact names refer to the names of the instance variables set to `self`.
+ These artifact names give to `load` be reference objects or reference `key` string's from objects created by:
+ - `current.checkpoint`
+ - `current.model`
+ - `current.huggingface_hub`
+
+ If a list of tuples is provided, the first element is the artifact name and the second element is the path the artifact needs be unpacked on
+ the local filesystem. If the second element is None, the artifact will be unpacked in the current working directory.
+ If a string is provided, then the artifact corresponding to that name will be loaded in the current working directory.
+
+ temp_dir_root : str, default: None
+ The root directory under which `current.model.loaded` will store loaded models
+ """
+ ...
+
+ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run Ollama APIs as Metaflow task sidecars.
+
+ User code call
+ --------------
+ @ollama(
+ models=[...],
+ ...
+ )
+
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.
+ - (TODO) 'managed': Outerbounds hosts and selects compute provider.
+ - (TODO) 'remote': Spin up separate instance to serve Ollama models.
+
+ Valid model options
+ -------------------
+ Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
+
+
+ Parameters
+ ----------
+ models: list[str]
+ List of Ollama containers running models in sidecars.
+ backend: str
+ Determines where and how to run the Ollama process.
+ force_pull: bool
+ Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
+ cache_update_policy: str
+ Cache update policy: "auto", "force", or "never".
+ force_cache_update: bool
+ Simple override for "force" cache update policy.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ circuit_breaker_config: dict
+ Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
+ timeout_config: dict
+ Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

@@ -521,52 +797,9 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  """
  ...

- def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy: str, force_cache_update: bool, debug: bool, circuit_breaker_config: dict, timeout_config: dict) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run Ollama APIs as Metaflow task sidecars.
-
- User code call
- --------------
- @ollama(
- models=[...],
- ...
- )
-
- Valid backend options
- ---------------------
- - 'local': Run as a separate process on the local task machine.
- - (TODO) 'managed': Outerbounds hosts and selects compute provider.
- - (TODO) 'remote': Spin up separate instance to serve Ollama models.
-
- Valid model options
- -------------------
- Any model here https://ollama.com/search, e.g. 'llama3.2', 'llama3.3'
-
-
- Parameters
- ----------
- models: list[str]
- List of Ollama containers running models in sidecars.
- backend: str
- Determines where and how to run the Ollama process.
- force_pull: bool
- Whether to run `ollama pull` no matter what, or first check the remote cache in Metaflow datastore for this model key.
- cache_update_policy: str
- Cache update policy: "auto", "force", or "never".
- force_cache_update: bool
- Simple override for "force" cache update policy.
- debug: bool
- Whether to turn on verbose debugging logs.
- circuit_breaker_config: dict
- Configuration for circuit breaker protection. Keys: failure_threshold, recovery_timeout, reset_timeout.
- timeout_config: dict
- Configuration for various operation timeouts. Keys: pull, stop, health_check, install, server_startup.
- """
- ...
-
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.


  Parameters
@@ -590,102 +823,53 @@ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.
590
823
  ...
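A hedged sketch of `@huggingface_hub` as declared above: only the `load` keyword from the signature is shown, the repo id is a placeholder, and the runtime accessors for the cached artifacts are not part of this stub excerpt, so they are omitted. Assumes the Outerbounds extension that provides the decorator is installed.

```python
from metaflow import FlowSpec, huggingface_hub, step

class HFModelFlow(FlowSpec):

    # `load` accepts a list of repo ids (see the signature above); this id is a placeholder.
    @huggingface_hub(load=["bert-base-uncased"])
    @step
    def start(self):
        # The decorator caches/loads the listed repos before this step runs.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HFModelFlow()
```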
591
824
 
592
825
  @typing.overload
593
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
594
- """
595
- Creates a human-readable report, a Metaflow Card, after this step completes.
596
-
597
- Note that you may add multiple `@card` decorators in a step with different parameters.
598
-
599
-
600
- Parameters
601
- ----------
602
- type : str, default 'default'
603
- Card type.
604
- id : str, optional, default None
605
- If multiple cards are present, use this id to identify this card.
606
- options : Dict[str, Any], default {}
607
- Options passed to the card. The contents depend on the card type.
608
- timeout : int, default 45
609
- Interrupt reporting if it takes more than this many seconds.
610
- """
611
- ...
612
-
613
- @typing.overload
614
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
615
- ...
616
-
617
- @typing.overload
618
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
619
- ...
620
-
621
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
622
- """
623
- Creates a human-readable report, a Metaflow Card, after this step completes.
624
-
625
- Note that you may add multiple `@card` decorators in a step with different parameters.
626
-
627
-
628
- Parameters
629
- ----------
630
- type : str, default 'default'
631
- Card type.
632
- id : str, optional, default None
633
- If multiple cards are present, use this id to identify this card.
634
- options : Dict[str, Any], default {}
635
- Options passed to the card. The contents depend on the card type.
636
- timeout : int, default 45
637
- Interrupt reporting if it takes more than this many seconds.
638
- """
639
- ...
640
-
641
- @typing.overload
642
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
826
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
643
827
  """
644
- Specifies the PyPI packages for the step.
828
+ Specifies that the step will succeed under all circumstances.
645
829
 
646
- Information in this decorator will augment any
647
- attributes set in the `@pyi_base` flow-level decorator. Hence,
648
- you can use `@pypi_base` to set packages required by all
649
- steps and use `@pypi` to specify step-specific overrides.
830
+ The decorator will create an optional artifact, specified by `var`, which
831
+ contains the exception raised. You can use it to detect the presence
832
+ of errors, indicating that all happy-path artifacts produced by the step
833
+ are missing.
650
834
 
651
835
 
652
836
  Parameters
653
837
  ----------
654
- packages : Dict[str, str], default: {}
655
- Packages to use for this step. The key is the name of the package
656
- and the value is the version to use.
657
- python : str, optional, default: None
658
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
659
- that the version used will correspond to the version of the Python interpreter used to start the run.
838
+ var : str, optional, default None
839
+ Name of the artifact in which to store the caught exception.
840
+ If not specified, the exception is not stored.
841
+ print_exception : bool, default True
842
+ Determines whether or not the exception is printed to
843
+ stdout when caught.
660
844
  """
661
845
  ...
662
846
 
663
847
  @typing.overload
664
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
848
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
665
849
  ...
666
850
 
667
851
  @typing.overload
668
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
852
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
669
853
  ...
670
854
 
671
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
855
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
672
856
  """
673
- Specifies the PyPI packages for the step.
857
+ Specifies that the step will succeed under all circumstances.
674
858
 
675
- Information in this decorator will augment any
676
- attributes set in the `@pyi_base` flow-level decorator. Hence,
677
- you can use `@pypi_base` to set packages required by all
678
- steps and use `@pypi` to specify step-specific overrides.
859
+ The decorator will create an optional artifact, specified by `var`, which
860
+ contains the exception raised. You can use it to detect the presence
861
+ of errors, indicating that all happy-path artifacts produced by the step
862
+ are missing.
679
863
 
680
864
 
681
865
  Parameters
682
866
  ----------
683
- packages : Dict[str, str], default: {}
684
- Packages to use for this step. The key is the name of the package
685
- and the value is the version to use.
686
- python : str, optional, default: None
687
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
688
- that the version used will correspond to the version of the Python interpreter used to start the run.
867
+ var : str, optional, default None
868
+ Name of the artifact in which to store the caught exception.
869
+ If not specified, the exception is not stored.
870
+ print_exception : bool, default True
871
+ Determines whether or not the exception is printed to
872
+ stdout when caught.
689
873
  """
690
874
  ...
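A minimal sketch of `@catch` as documented above; the failing computation and the artifact name are invented for illustration.

```python
from metaflow import FlowSpec, catch, step

class CatchDemoFlow(FlowSpec):

    @catch(var="compute_failed", print_exception=True)
    @step
    def start(self):
        # Deliberately raises; the exception ends up in self.compute_failed.
        self.result = 1 / 0
        self.next(self.end)

    @step
    def end(self):
        # Assumption: the artifact is falsy when the step succeeded; the docstring
        # above only documents its presence on failure.
        if self.compute_failed:
            print("start failed with:", self.compute_failed)

if __name__ == "__main__":
    CatchDemoFlow()
```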
691
875
 
@@ -704,65 +888,6 @@ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union
704
888
  """
705
889
  ...
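The hunk above also shows the `@app_deploy` signature; a heavily hedged sketch follows. Only the `app_port` and `app_name` keywords come from that signature; the port, the name, and the idea that the step serves something on that port are illustrative, and the decorator is assumed to come from the Outerbounds extensions bundled with this stub package.

```python
from metaflow import FlowSpec, app_deploy, step

class AppDeployFlow(FlowSpec):

    # Port and name are placeholders.
    @app_deploy(app_port=8000, app_name="demo-app")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    AppDeployFlow()
```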
706
890
 
707
- @typing.overload
708
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
709
- """
710
- Specifies a timeout for your step.
711
-
712
- This decorator is useful if this step may hang indefinitely.
713
-
714
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
715
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
716
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
717
-
718
- Note that all the values specified in parameters are added together so if you specify
719
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
720
-
721
-
722
- Parameters
723
- ----------
724
- seconds : int, default 0
725
- Number of seconds to wait prior to timing out.
726
- minutes : int, default 0
727
- Number of minutes to wait prior to timing out.
728
- hours : int, default 0
729
- Number of hours to wait prior to timing out.
730
- """
731
- ...
732
-
733
- @typing.overload
734
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
735
- ...
736
-
737
- @typing.overload
738
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
739
- ...
740
-
741
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
742
- """
743
- Specifies a timeout for your step.
744
-
745
- This decorator is useful if this step may hang indefinitely.
746
-
747
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
748
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
749
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
750
-
751
- Note that all the values specified in parameters are added together so if you specify
752
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
753
-
754
-
755
- Parameters
756
- ----------
757
- seconds : int, default 0
758
- Number of seconds to wait prior to timing out.
759
- minutes : int, default 0
760
- Number of minutes to wait prior to timing out.
761
- hours : int, default 0
762
- Number of hours to wait prior to timing out.
763
- """
764
- ...
765
-
766
891
  @typing.overload
767
892
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
768
893
  """
@@ -818,343 +943,142 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
818
943
  """
819
944
  ...
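A small sketch of `@retry` using the keywords from the signature above; the flaky step body is invented to show what gets retried.

```python
import random

from metaflow import FlowSpec, retry, step

class RetryDemoFlow(FlowSpec):

    # Retry up to 3 times with no wait between attempts.
    @retry(times=3, minutes_between_retries=0)
    @step
    def start(self):
        # Hypothetical transient failure: roughly half of the attempts raise.
        if random.random() < 0.5:
            raise RuntimeError("flaky upstream dependency")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryDemoFlow()
```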
820
945
 
821
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
946
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
822
947
  """
823
- Specifies that this step should execute on Kubernetes.
948
+ Specifies that this step should execute on DGX cloud.
824
949
 
825
950
 
826
951
  Parameters
827
952
  ----------
828
- cpu : int, default 1
829
- Number of CPUs required for this step. If `@resources` is
830
- also present, the maximum value from all decorators is used.
831
- memory : int, default 4096
832
- Memory size (in MB) required for this step. If
833
- `@resources` is also present, the maximum value from all decorators is
834
- used.
835
- disk : int, default 10240
836
- Disk size (in MB) required for this step. If
837
- `@resources` is also present, the maximum value from all decorators is
838
- used.
839
- image : str, optional, default None
840
- Docker image to use when launching on Kubernetes. If not specified, and
841
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
842
- not, a default Docker image mapping to the current version of Python is used.
843
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
844
- If given, the imagePullPolicy to be applied to the Docker image of the step.
845
- image_pull_secrets: List[str], default []
846
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
847
- Kubernetes image pull secrets to use when pulling container images
848
- in Kubernetes.
849
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
850
- Kubernetes service account to use when launching pod in Kubernetes.
851
- secrets : List[str], optional, default None
852
- Kubernetes secrets to use when launching pod in Kubernetes. These
853
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
854
- in Metaflow configuration.
855
- node_selector: Union[Dict[str,str], str], optional, default None
856
- Kubernetes node selector(s) to apply to the pod running the task.
857
- Can be passed in as a comma separated string of values e.g.
858
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
859
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
860
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
861
- Kubernetes namespace to use when launching pod in Kubernetes.
862
- gpu : int, optional, default None
863
- Number of GPUs required for this step. A value of zero implies that
864
- the scheduled node should not have GPUs.
865
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
866
- The vendor of the GPUs to be used for this step.
867
- tolerations : List[str], default []
868
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
869
- Kubernetes tolerations to use when launching pod in Kubernetes.
870
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
871
- Kubernetes labels to use when launching pod in Kubernetes.
872
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
873
- Kubernetes annotations to use when launching pod in Kubernetes.
874
- use_tmpfs : bool, default False
875
- This enables an explicit tmpfs mount for this step.
876
- tmpfs_tempdir : bool, default True
877
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
878
- tmpfs_size : int, optional, default: None
879
- The value for the size (in MiB) of the tmpfs mount for this step.
880
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
881
- memory allocated for this step.
882
- tmpfs_path : str, optional, default /metaflow_temp
883
- Path to tmpfs mount for this step.
884
- persistent_volume_claims : Dict[str, str], optional, default None
885
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
886
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
887
- shared_memory: int, optional
888
- Shared memory size (in MiB) required for this step
889
- port: int, optional
890
- Port number to specify in the Kubernetes job object
891
- compute_pool : str, optional, default None
892
- Compute pool to be used for for this step.
893
- If not specified, any accessible compute pool within the perimeter is used.
894
- hostname_resolution_timeout: int, default 10 * 60
895
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
896
- Only applicable when @parallel is used.
897
- qos: str, default: Burstable
898
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
899
-
900
- security_context: Dict[str, Any], optional, default None
901
- Container security context. Applies to the task container. Allows the following keys:
902
- - privileged: bool, optional, default None
903
- - allow_privilege_escalation: bool, optional, default None
904
- - run_as_user: int, optional, default None
905
- - run_as_group: int, optional, default None
906
- - run_as_non_root: bool, optional, default None
953
+ gpu : int
954
+ Number of GPUs to use.
955
+ gpu_type : str
956
+ Type of Nvidia GPU to use.
957
+ queue_timeout : int
958
+ Time to keep the job in NVCF's queue.
907
959
  """
908
960
  ...
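A hedged sketch of `@nvidia` for DGX cloud execution as documented above; the GPU type string and queue timeout are illustrative, and the decorator is assumed to require the Outerbounds NVCF integration to be configured.

```python
from metaflow import FlowSpec, nvidia, step

class GpuTrainFlow(FlowSpec):

    # gpu_type and queue_timeout values are placeholders.
    @nvidia(gpu=1, gpu_type="H100", queue_timeout=3600)
    @step
    def start(self):
        # GPU-bound work would go here.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    GpuTrainFlow()
```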
909
961
 
910
962
  @typing.overload
911
- def checkpoint(*, load_policy: str = 'fresh', temp_dir_root: str = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
963
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
912
964
  """
913
- Enables checkpointing for a step.
914
-
915
-
916
-
917
- Parameters
918
- ----------
919
- load_policy : str, default: "fresh"
920
- The policy for loading the checkpoint. The following policies are supported:
921
- - "eager": Loads the the latest available checkpoint within the namespace.
922
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
923
- will be loaded at the start of the task.
924
- - "none": Do not load any checkpoint
925
- - "fresh": Loads the lastest checkpoint created within the running Task.
926
- This mode helps loading checkpoints across various retry attempts of the same task.
927
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
928
- created within the task will be loaded when the task is retries execution on failure.
929
-
930
- temp_dir_root : str, default: None
931
- The root directory under which `current.checkpoint.directory` will be created.
965
+ Decorator prototype for all step decorators. This function gets specialized
966
+ and imported for all decorator types by _import_plugin_decorators().
932
967
  """
933
968
  ...
934
969
 
935
970
  @typing.overload
936
- def checkpoint(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
937
- ...
938
-
939
- @typing.overload
940
- def checkpoint(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
941
- ...
942
-
943
- def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, load_policy: str = 'fresh', temp_dir_root: str = None):
944
- """
945
- Enables checkpointing for a step.
946
-
947
-
948
-
949
- Parameters
950
- ----------
951
- load_policy : str, default: "fresh"
952
- The policy for loading the checkpoint. The following policies are supported:
953
- - "eager": Loads the the latest available checkpoint within the namespace.
954
- With this mode, the latest checkpoint written by any previous task (can be even a different run) of the step
955
- will be loaded at the start of the task.
956
- - "none": Do not load any checkpoint
957
- - "fresh": Loads the lastest checkpoint created within the running Task.
958
- This mode helps loading checkpoints across various retry attempts of the same task.
959
- With this mode, no checkpoint will be loaded at the start of a task but any checkpoints
960
- created within the task will be loaded when the task is retries execution on failure.
961
-
962
- temp_dir_root : str, default: None
963
- The root directory under which `current.checkpoint.directory` will be created.
964
- """
971
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
965
972
  ...
966
973
 
967
- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
974
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
968
975
  """
969
- Specifies that this step should execute on DGX cloud.
970
-
971
-
972
- Parameters
973
- ----------
974
- gpu : int
975
- Number of GPUs to use.
976
- gpu_type : str
977
- Type of Nvidia GPU to use.
976
+ Decorator prototype for all step decorators. This function gets specialized
977
+ and imported for all decorator types by _import_plugin_decorators().
978
978
  """
979
979
  ...
980
980
 
981
981
  @typing.overload
982
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
982
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
983
983
  """
984
- Specifies the event(s) that this flow depends on.
984
+ Specifies the flow(s) that this flow depends on.
985
985
 
986
986
  ```
987
- @trigger(event='foo')
987
+ @trigger_on_finish(flow='FooFlow')
988
988
  ```
989
989
  or
990
990
  ```
991
- @trigger(events=['foo', 'bar'])
991
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
992
992
  ```
993
+ This decorator respects the @project decorator and triggers the flow
994
+ when upstream runs within the same namespace complete successfully.
993
995
 
994
- Additionally, you can specify the parameter mappings
995
- to map event payload to Metaflow parameters for the flow.
996
+ Additionally, you can specify project-aware upstream flow dependencies
997
+ by specifying the fully qualified project_flow_name.
996
998
  ```
997
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
999
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
998
1000
  ```
999
1001
  or
1000
1002
  ```
1001
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1002
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1003
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1003
1004
  ```
1004
1005
 
1005
- 'parameters' can also be a list of strings and tuples like so:
1006
- ```
1007
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1008
- ```
1009
- This is equivalent to:
1006
+ You can also specify just the project or project branch (other values will be
1007
+ inferred from the current project or project branch):
1010
1008
  ```
1011
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1009
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1012
1010
  ```
1013
1011
 
1012
+ Note that `branch` is typically one of:
1013
+ - `prod`
1014
+ - `user.bob`
1015
+ - `test.my_experiment`
1016
+ - `prod.staging`
1017
+
1014
1018
 
1015
1019
  Parameters
1016
1020
  ----------
1017
- event : Union[str, Dict[str, Any]], optional, default None
1018
- Event dependency for this flow.
1019
- events : List[Union[str, Dict[str, Any]]], default []
1020
- Events dependency for this flow.
1021
+ flow : Union[str, Dict[str, str]], optional, default None
1022
+ Upstream flow dependency for this flow.
1023
+ flows : List[Union[str, Dict[str, str]]], default []
1024
+ Upstream flow dependencies for this flow.
1021
1025
  options : Dict[str, Any], default {}
1022
1026
  Backend-specific configuration for tuning eventing behavior.
1023
1027
  """
1024
1028
  ...
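A minimal sketch of `@trigger_on_finish`; `FooFlow` is a placeholder, and the trigger only takes effect once the flow is deployed to a production orchestrator.

```python
from metaflow import FlowSpec, step, trigger_on_finish

# Fires whenever a run of FooFlow (placeholder name) completes successfully.
@trigger_on_finish(flow="FooFlow")
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```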
1025
1029
 
1026
1030
  @typing.overload
1027
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1031
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1028
1032
  ...
1029
1033
 
1030
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1034
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1031
1035
  """
1032
- Specifies the event(s) that this flow depends on.
1036
+ Specifies the flow(s) that this flow depends on.
1033
1037
 
1034
1038
  ```
1035
- @trigger(event='foo')
1039
+ @trigger_on_finish(flow='FooFlow')
1036
1040
  ```
1037
1041
  or
1038
1042
  ```
1039
- @trigger(events=['foo', 'bar'])
1043
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1040
1044
  ```
1045
+ This decorator respects the @project decorator and triggers the flow
1046
+ when upstream runs within the same namespace complete successfully.
1041
1047
 
1042
- Additionally, you can specify the parameter mappings
1043
- to map event payload to Metaflow parameters for the flow.
1048
+ Additionally, you can specify project-aware upstream flow dependencies
1049
+ by specifying the fully qualified project_flow_name.
1044
1050
  ```
1045
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1051
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1046
1052
  ```
1047
1053
  or
1048
1054
  ```
1049
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1050
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1055
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1051
1056
  ```
1052
1057
 
1053
- 'parameters' can also be a list of strings and tuples like so:
1054
- ```
1055
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1056
- ```
1057
- This is equivalent to:
1058
+ You can also specify just the project or project branch (other values will be
1059
+ inferred from the current project or project branch):
1058
1060
  ```
1059
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1061
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1060
1062
  ```
1061
1063
 
1064
+ Note that `branch` is typically one of:
1065
+ - `prod`
1066
+ - `user.bob`
1067
+ - `test.my_experiment`
1068
+ - `prod.staging`
1069
+
1062
1070
 
1063
1071
  Parameters
1064
1072
  ----------
1065
- event : Union[str, Dict[str, Any]], optional, default None
1066
- Event dependency for this flow.
1067
- events : List[Union[str, Dict[str, Any]]], default []
1068
- Events dependency for this flow.
1073
+ flow : Union[str, Dict[str, str]], optional, default None
1074
+ Upstream flow dependency for this flow.
1075
+ flows : List[Union[str, Dict[str, str]]], default []
1076
+ Upstream flow dependencies for this flow.
1069
1077
  options : Dict[str, Any], default {}
1070
1078
  Backend-specific configuration for tuning eventing behavior.
1071
1079
  """
1072
1080
  ...
1073
1081
 
1074
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1075
- """
1076
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1077
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1078
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1079
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1080
- starts only after all sensors finish.
1081
-
1082
-
1083
- Parameters
1084
- ----------
1085
- timeout : int
1086
- Time, in seconds before the task times out and fails. (Default: 3600)
1087
- poke_interval : int
1088
- Time in seconds that the job should wait in between each try. (Default: 60)
1089
- mode : str
1090
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1091
- exponential_backoff : bool
1092
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1093
- pool : str
1094
- the slot pool this task should run in,
1095
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1096
- soft_fail : bool
1097
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1098
- name : str
1099
- Name of the sensor on Airflow
1100
- description : str
1101
- Description of sensor in the Airflow UI
1102
- bucket_key : Union[str, List[str]]
1103
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1104
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1105
- bucket_name : str
1106
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1107
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1108
- wildcard_match : bool
1109
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1110
- aws_conn_id : str
1111
- a reference to the s3 connection on Airflow. (Default: None)
1112
- verify : bool
1113
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1114
- """
1115
- ...
1116
-
1117
- @typing.overload
1118
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1119
- """
1120
- Specifies the PyPI packages for all steps of the flow.
1121
-
1122
- Use `@pypi_base` to set common packages required by all
1123
- steps and use `@pypi` to specify step-specific overrides.
1124
-
1125
- Parameters
1126
- ----------
1127
- packages : Dict[str, str], default: {}
1128
- Packages to use for this flow. The key is the name of the package
1129
- and the value is the version to use.
1130
- python : str, optional, default: None
1131
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1132
- that the version used will correspond to the version of the Python interpreter used to start the run.
1133
- """
1134
- ...
1135
-
1136
- @typing.overload
1137
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1138
- ...
1139
-
1140
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1141
- """
1142
- Specifies the PyPI packages for all steps of the flow.
1143
-
1144
- Use `@pypi_base` to set common packages required by all
1145
- steps and use `@pypi` to specify step-specific overrides.
1146
-
1147
- Parameters
1148
- ----------
1149
- packages : Dict[str, str], default: {}
1150
- Packages to use for this flow. The key is the name of the package
1151
- and the value is the version to use.
1152
- python : str, optional, default: None
1153
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1154
- that the version used will correspond to the version of the Python interpreter used to start the run.
1155
- """
1156
- ...
1157
-
1158
1082
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1159
1083
  """
1160
1084
  The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1198,92 +1122,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1198
1122
  """
1199
1123
  ...
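A hedged sketch of `@airflow_external_task_sensor`: every keyword comes from the signature above, all values are placeholders, and the sensor only applies when the flow is compiled with `airflow create`.

```python
import datetime

from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_upstream_dag",
    description="Block start until the upstream Airflow DAG task succeeds",
    external_dag_id="upstream_etl",    # placeholder DAG id
    external_task_ids=["publish"],     # placeholder task id
    allowed_states=["success"],
    failed_states=["failed"],
    execution_delta=datetime.timedelta(hours=1),
    check_existence=True,
)
class DagGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DagGatedFlow()
```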
1200
1124
 
1201
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1202
- """
1203
- Specifies what flows belong to the same project.
1204
-
1205
- A project-specific namespace is created for all flows that
1206
- use the same `@project(name)`.
1207
-
1208
-
1209
- Parameters
1210
- ----------
1211
- name : str
1212
- Project name. Make sure that the name is unique amongst all
1213
- projects that use the same production scheduler. The name may
1214
- contain only lowercase alphanumeric characters and underscores.
1215
-
1216
- branch : Optional[str], default None
1217
- The branch to use. If not specified, the branch is set to
1218
- `user.<username>` unless `production` is set to `True`. This can
1219
- also be set on the command line using `--branch` as a top-level option.
1220
- It is an error to specify `branch` in the decorator and on the command line.
1221
-
1222
- production : bool, default False
1223
- Whether or not the branch is the production branch. This can also be set on the
1224
- command line using `--production` as a top-level option. It is an error to specify
1225
- `production` in the decorator and on the command line.
1226
- The project branch name will be:
1227
- - if `branch` is specified:
1228
- - if `production` is True: `prod.<branch>`
1229
- - if `production` is False: `test.<branch>`
1230
- - if `branch` is not specified:
1231
- - if `production` is True: `prod`
1232
- - if `production` is False: `user.<username>`
1233
- """
1234
- ...
1235
-
1236
- @typing.overload
1237
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1238
- """
1239
- Specifies the times when the flow should be run when running on a
1240
- production scheduler.
1241
-
1242
-
1243
- Parameters
1244
- ----------
1245
- hourly : bool, default False
1246
- Run the workflow hourly.
1247
- daily : bool, default True
1248
- Run the workflow daily.
1249
- weekly : bool, default False
1250
- Run the workflow weekly.
1251
- cron : str, optional, default None
1252
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1253
- specified by this expression.
1254
- timezone : str, optional, default None
1255
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1256
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1257
- """
1258
- ...
1259
-
1260
- @typing.overload
1261
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1262
- ...
1263
-
1264
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1265
- """
1266
- Specifies the times when the flow should be run when running on a
1267
- production scheduler.
1268
-
1269
-
1270
- Parameters
1271
- ----------
1272
- hourly : bool, default False
1273
- Run the workflow hourly.
1274
- daily : bool, default True
1275
- Run the workflow daily.
1276
- weekly : bool, default False
1277
- Run the workflow weekly.
1278
- cron : str, optional, default None
1279
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1280
- specified by this expression.
1281
- timezone : str, optional, default None
1282
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1283
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1284
- """
1285
- ...
1286
-
1287
1125
  def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1288
1126
  """
1289
1127
  Allows setting external datastores to save data for the
@@ -1398,102 +1236,129 @@ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None)
1398
1236
  """
1399
1237
  ...
1400
1238
 
1239
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1240
+ """
1241
+ Specifies what flows belong to the same project.
1242
+
1243
+ A project-specific namespace is created for all flows that
1244
+ use the same `@project(name)`.
1245
+
1246
+
1247
+ Parameters
1248
+ ----------
1249
+ name : str
1250
+ Project name. Make sure that the name is unique amongst all
1251
+ projects that use the same production scheduler. The name may
1252
+ contain only lowercase alphanumeric characters and underscores.
1253
+
1254
+ branch : Optional[str], default None
1255
+ The branch to use. If not specified, the branch is set to
1256
+ `user.<username>` unless `production` is set to `True`. This can
1257
+ also be set on the command line using `--branch` as a top-level option.
1258
+ It is an error to specify `branch` in the decorator and on the command line.
1259
+
1260
+ production : bool, default False
1261
+ Whether or not the branch is the production branch. This can also be set on the
1262
+ command line using `--production` as a top-level option. It is an error to specify
1263
+ `production` in the decorator and on the command line.
1264
+ The project branch name will be:
1265
+ - if `branch` is specified:
1266
+ - if `production` is True: `prod.<branch>`
1267
+ - if `production` is False: `test.<branch>`
1268
+ - if `branch` is not specified:
1269
+ - if `production` is True: `prod`
1270
+ - if `production` is False: `user.<username>`
1271
+ """
1272
+ ...
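A minimal sketch of `@project`; the project name is a placeholder, and branch/production resolution follows the rules listed in the docstring above.

```python
from metaflow import FlowSpec, project, step

# All flows sharing name="demo_project" get a common, isolated namespace.
@project(name="demo_project")
class ProjectScopedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```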
1273
+
1401
1274
  @typing.overload
1402
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1275
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1403
1276
  """
1404
- Specifies the flow(s) that this flow depends on.
1277
+ Specifies the event(s) that this flow depends on.
1405
1278
 
1406
1279
  ```
1407
- @trigger_on_finish(flow='FooFlow')
1280
+ @trigger(event='foo')
1408
1281
  ```
1409
1282
  or
1410
1283
  ```
1411
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1284
+ @trigger(events=['foo', 'bar'])
1412
1285
  ```
1413
- This decorator respects the @project decorator and triggers the flow
1414
- when upstream runs within the same namespace complete successfully
1415
1286
 
1416
- Additionally, you can specify project aware upstream flow dependencies
1417
- by specifying the fully qualified project_flow_name.
1287
+ Additionally, you can specify the parameter mappings
1288
+ to map event payload to Metaflow parameters for the flow.
1418
1289
  ```
1419
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1290
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1420
1291
  ```
1421
1292
  or
1422
1293
  ```
1423
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1294
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1295
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1424
1296
  ```
1425
1297
 
1426
- You can also specify just the project or project branch (other values will be
1427
- inferred from the current project or project branch):
1298
+ 'parameters' can also be a list of strings and tuples like so:
1428
1299
  ```
1429
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1300
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1301
+ ```
1302
+ This is equivalent to:
1303
+ ```
1304
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1430
1305
  ```
1431
-
1432
- Note that `branch` is typically one of:
1433
- - `prod`
1434
- - `user.bob`
1435
- - `test.my_experiment`
1436
- - `prod.staging`
1437
1306
 
1438
1307
 
1439
1308
  Parameters
1440
1309
  ----------
1441
- flow : Union[str, Dict[str, str]], optional, default None
1442
- Upstream flow dependency for this flow.
1443
- flows : List[Union[str, Dict[str, str]]], default []
1444
- Upstream flow dependencies for this flow.
1310
+ event : Union[str, Dict[str, Any]], optional, default None
1311
+ Event dependency for this flow.
1312
+ events : List[Union[str, Dict[str, Any]]], default []
1313
+ Events dependency for this flow.
1445
1314
  options : Dict[str, Any], default {}
1446
1315
  Backend-specific configuration for tuning eventing behavior.
1447
1316
  """
1448
1317
  ...
1449
1318
 
1450
1319
  @typing.overload
1451
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1320
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1452
1321
  ...
1453
1322
 
1454
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1323
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1455
1324
  """
1456
- Specifies the flow(s) that this flow depends on.
1325
+ Specifies the event(s) that this flow depends on.
1457
1326
 
1458
1327
  ```
1459
- @trigger_on_finish(flow='FooFlow')
1328
+ @trigger(event='foo')
1460
1329
  ```
1461
1330
  or
1462
1331
  ```
1463
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1332
+ @trigger(events=['foo', 'bar'])
1464
1333
  ```
1465
- This decorator respects the @project decorator and triggers the flow
1466
- when upstream runs within the same namespace complete successfully
1467
1334
 
1468
- Additionally, you can specify project aware upstream flow dependencies
1469
- by specifying the fully qualified project_flow_name.
1335
+ Additionally, you can specify the parameter mappings
1336
+ to map event payload to Metaflow parameters for the flow.
1470
1337
  ```
1471
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1338
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1472
1339
  ```
1473
1340
  or
1474
1341
  ```
1475
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1342
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1343
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1476
1344
  ```
1477
1345
 
1478
- You can also specify just the project or project branch (other values will be
1479
- inferred from the current project or project branch):
1346
+ 'parameters' can also be a list of strings and tuples like so:
1480
1347
  ```
1481
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1348
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1349
+ ```
1350
+ This is equivalent to:
1351
+ ```
1352
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1482
1353
  ```
1483
-
1484
- Note that `branch` is typically one of:
1485
- - `prod`
1486
- - `user.bob`
1487
- - `test.my_experiment`
1488
- - `prod.staging`
1489
1354
 
1490
1355
 
1491
1356
  Parameters
1492
1357
  ----------
1493
- flow : Union[str, Dict[str, str]], optional, default None
1494
- Upstream flow dependency for this flow.
1495
- flows : List[Union[str, Dict[str, str]]], default []
1496
- Upstream flow dependencies for this flow.
1358
+ event : Union[str, Dict[str, Any]], optional, default None
1359
+ Event dependency for this flow.
1360
+ events : List[Union[str, Dict[str, Any]]], default []
1361
+ Events dependency for this flow.
1497
1362
  options : Dict[str, Any], default {}
1498
1363
  Backend-specific configuration for tuning eventing behavior.
1499
1364
  """
@@ -1550,5 +1415,140 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1550
1415
  """
1551
1416
  ...
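A sketch combining `@conda_base` with a step-level `@conda` override, as the docstrings above describe; all version pins are hypothetical.

```python
from metaflow import FlowSpec, conda, conda_base, step

# Flow-wide baseline environment; pins are placeholders.
@conda_base(python="3.11.0", packages={"numpy": "1.26.4"})
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np
        self.mean = float(np.mean([1.0, 2.0, 3.0]))
        self.next(self.end)

    # Step-level @conda augments the flow-level baseline with one extra package.
    @conda(packages={"scikit-learn": "1.4.2"})
    @step
    def end(self):
        import sklearn  # available only in this step's environment
        print(self.mean, sklearn.__version__)

if __name__ == "__main__":
    CondaBaseFlow()
```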
1552
1417
 
1418
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1419
+ """
1420
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1421
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1422
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1423
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
1424
+ starts only after all sensors finish.
1425
+
1426
+
1427
+ Parameters
1428
+ ----------
1429
+ timeout : int
1430
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1431
+ poke_interval : int
1432
+ Time in seconds that the job should wait in between each try. (Default: 60)
1433
+ mode : str
1434
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1435
+ exponential_backoff : bool
1436
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1437
+ pool : str
1438
+ The slot pool this task should run in;
1439
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1440
+ soft_fail : bool
1441
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1442
+ name : str
1443
+ Name of the sensor on Airflow
1444
+ description : str
1445
+ Description of sensor in the Airflow UI
1446
+ bucket_key : Union[str, List[str]]
1447
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1448
+ When it's specified as a full s3:// URL, please leave `bucket_name` as None.
1449
+ bucket_name : str
1450
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
1451
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
1452
+ wildcard_match : bool
1453
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1454
+ aws_conn_id : str
1455
+ A reference to the S3 connection on Airflow. (Default: None)
1456
+ verify : bool
1457
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1458
+ """
1459
+ ...
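A hedged sketch of `@airflow_s3_key_sensor`; the bucket, key, and remaining values are placeholders chosen to mirror the defaults quoted in the docstring, and the decorator is only meaningful for flows compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool="default_pool",
    soft_fail=False,
    name="wait_for_daily_file",
    description="Wait for the daily input object to land in S3",
    bucket_key="s3://example-bucket/input/daily.csv",  # full s3:// URL, so bucket_name stays None
    bucket_name=None,
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```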
1460
+
1461
+ @typing.overload
1462
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1463
+ """
1464
+ Specifies the PyPI packages for all steps of the flow.
1465
+
1466
+ Use `@pypi_base` to set common packages required by all
1467
+ steps and use `@pypi` to specify step-specific overrides.
1468
+
1469
+ Parameters
1470
+ ----------
1471
+ packages : Dict[str, str], default: {}
1472
+ Packages to use for this flow. The key is the name of the package
1473
+ and the value is the version to use.
1474
+ python : str, optional, default: None
1475
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1476
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1477
+ """
1478
+ ...
1479
+
1480
+ @typing.overload
1481
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1482
+ ...
1483
+
1484
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1485
+ """
1486
+ Specifies the PyPI packages for all steps of the flow.
1487
+
1488
+ Use `@pypi_base` to set common packages required by all
1489
+ steps and use `@pypi` to specify step-specific overrides.
1490
+
1491
+ Parameters
1492
+ ----------
1493
+ packages : Dict[str, str], default: {}
1494
+ Packages to use for this flow. The key is the name of the package
1495
+ and the value is the version to use.
1496
+ python : str, optional, default: None
1497
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1498
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1499
+ """
1500
+ ...
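A minimal sketch of `@pypi_base`; the pinned package and its version are placeholders.

```python
from metaflow import FlowSpec, pypi_base, step

# Flow-wide PyPI environment; the pin below is illustrative.
@pypi_base(python="3.11.0", packages={"requests": "2.32.3"})
class PypiBaseFlow(FlowSpec):

    @step
    def start(self):
        import requests
        self.user_agent = requests.utils.default_user_agent()
        self.next(self.end)

    @step
    def end(self):
        print(self.user_agent)

if __name__ == "__main__":
    PypiBaseFlow()
```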
1501
+
1502
+ @typing.overload
1503
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1504
+ """
1505
+ Specifies the times when the flow should be run when running on a
1506
+ production scheduler.
1507
+
1508
+
1509
+ Parameters
1510
+ ----------
1511
+ hourly : bool, default False
1512
+ Run the workflow hourly.
1513
+ daily : bool, default True
1514
+ Run the workflow daily.
1515
+ weekly : bool, default False
1516
+ Run the workflow weekly.
1517
+ cron : str, optional, default None
1518
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1519
+ specified by this expression.
1520
+ timezone : str, optional, default None
1521
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1522
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1523
+ """
1524
+ ...
1525
+
1526
+ @typing.overload
1527
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1528
+ ...
1529
+
1530
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1531
+ """
1532
+ Specifies the times when the flow should be run when running on a
1533
+ production scheduler.
1534
+
1535
+
1536
+ Parameters
1537
+ ----------
1538
+ hourly : bool, default False
1539
+ Run the workflow hourly.
1540
+ daily : bool, default True
1541
+ Run the workflow daily.
1542
+ weekly : bool, default False
1543
+ Run the workflow weekly.
1544
+ cron : str, optional, default None
1545
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1546
+ specified by this expression.
1547
+ timezone : str, optional, default None
1548
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1549
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1550
+ """
1551
+ ...
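A minimal sketch of `@schedule`; the cron expression is illustrative and, as the docstring above notes, the schedule only applies once the flow is deployed to a production scheduler.

```python
from metaflow import FlowSpec, schedule, step

# Run every day at 03:00 (illustrative cron); timezone support is
# orchestrator-dependent, per the docstring above.
@schedule(cron="0 3 * * *")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```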
1552
+
1553
1553
  pkg_name: str
1554
1554