ob-metaflow-stubs 6.0.3.179rc4__py2.py3-none-any.whl → 6.0.3.180rc0__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their respective public registries.
Files changed (220)
  1. metaflow-stubs/__init__.pyi +780 -742
  2. metaflow-stubs/cards.pyi +1 -1
  3. metaflow-stubs/cli.pyi +1 -1
  4. metaflow-stubs/cli_components/__init__.pyi +1 -1
  5. metaflow-stubs/cli_components/utils.pyi +1 -1
  6. metaflow-stubs/client/__init__.pyi +1 -1
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +1 -1
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +1 -1
  11. metaflow-stubs/flowspec.pyi +3 -3
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +1 -1
  14. metaflow-stubs/info_file.pyi +1 -1
  15. metaflow-stubs/metadata_provider/__init__.pyi +1 -1
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +1 -1
  17. metaflow-stubs/metadata_provider/metadata.pyi +1 -1
  18. metaflow-stubs/metadata_provider/util.pyi +1 -1
  19. metaflow-stubs/metaflow_config.pyi +1 -1
  20. metaflow-stubs/metaflow_current.pyi +117 -117
  21. metaflow-stubs/metaflow_git.pyi +1 -1
  22. metaflow-stubs/mf_extensions/__init__.pyi +1 -1
  23. metaflow-stubs/mf_extensions/obcheckpoint/__init__.pyi +1 -1
  24. metaflow-stubs/mf_extensions/obcheckpoint/plugins/__init__.pyi +1 -1
  25. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/__init__.pyi +1 -1
  26. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/__init__.pyi +1 -1
  27. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/async_cards.pyi +1 -1
  28. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/deco_injection_mixin.pyi +1 -1
  29. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/card_utils/extra_components.pyi +2 -2
  30. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/__init__.pyi +1 -1
  31. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/__init__.pyi +1 -1
  32. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/checkpoint_lister.pyi +3 -3
  33. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/cards/lineage_card.pyi +1 -1
  34. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/checkpoint_storage.pyi +2 -2
  35. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/constructors.pyi +1 -1
  36. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/core.pyi +1 -1
  37. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/decorator.pyi +2 -2
  38. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/exceptions.pyi +1 -1
  39. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/final_api.pyi +1 -1
  40. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/checkpoints/lineage.pyi +1 -1
  41. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/__init__.pyi +1 -1
  42. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/context.pyi +2 -2
  43. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/core.pyi +2 -2
  44. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/decorator.pyi +1 -1
  45. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/exceptions.pyi +1 -1
  46. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/task_utils.pyi +1 -1
  47. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastore/utils.pyi +1 -1
  48. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/datastructures.pyi +1 -1
  49. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/exceptions.pyi +1 -1
  50. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/__init__.pyi +1 -1
  51. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/hf_hub/decorator.pyi +2 -2
  52. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/__init__.pyi +1 -1
  53. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/core.pyi +3 -3
  54. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/exceptions.pyi +1 -1
  55. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/modeling_utils/model_storage.pyi +2 -2
  56. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/__init__.pyi +1 -1
  57. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/flowspec_utils.pyi +1 -1
  58. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/general.pyi +1 -1
  59. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/identity_utils.pyi +1 -1
  60. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/__init__.pyi +1 -1
  61. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/base.pyi +1 -1
  62. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/serialization_handler/tar.pyi +1 -1
  63. metaflow-stubs/mf_extensions/obcheckpoint/plugins/machine_learning_utilities/utils/tar_utils.pyi +1 -1
  64. metaflow-stubs/mf_extensions/outerbounds/__init__.pyi +1 -1
  65. metaflow-stubs/mf_extensions/outerbounds/plugins/__init__.pyi +1 -1
  66. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/__init__.pyi +1 -1
  67. metaflow-stubs/mf_extensions/outerbounds/plugins/aws/assume_role_decorator.pyi +1 -1
  68. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/__init__.pyi +1 -1
  69. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/async_cards.pyi +2 -2
  70. metaflow-stubs/mf_extensions/outerbounds/plugins/card_utilities/injector.pyi +1 -1
  71. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/__init__.pyi +1 -1
  72. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/coreweave.pyi +1 -1
  73. metaflow-stubs/mf_extensions/outerbounds/plugins/checkpoint_datastores/nebius.pyi +1 -1
  74. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/__init__.pyi +1 -1
  75. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/baker.pyi +1 -1
  76. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/docker_environment.pyi +1 -1
  77. metaflow-stubs/mf_extensions/outerbounds/plugins/fast_bakery/fast_bakery.pyi +1 -1
  78. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/__init__.pyi +1 -1
  79. metaflow-stubs/mf_extensions/outerbounds/plugins/kubernetes/pod_killer.pyi +1 -1
  80. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/__init__.pyi +1 -1
  81. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/constants.pyi +1 -1
  82. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/exceptions.pyi +1 -1
  83. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/ollama.pyi +1 -1
  84. metaflow-stubs/mf_extensions/outerbounds/plugins/ollama/status_card.pyi +1 -1
  85. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/__init__.pyi +1 -1
  86. metaflow-stubs/mf_extensions/outerbounds/plugins/snowflake/snowflake.pyi +1 -1
  87. metaflow-stubs/mf_extensions/outerbounds/profilers/__init__.pyi +1 -1
  88. metaflow-stubs/mf_extensions/outerbounds/profilers/gpu.pyi +1 -1
  89. metaflow-stubs/mf_extensions/outerbounds/remote_config.pyi +1 -1
  90. metaflow-stubs/mf_extensions/outerbounds/toplevel/__init__.pyi +1 -1
  91. metaflow-stubs/mf_extensions/outerbounds/toplevel/global_aliases_for_metaflow_package.pyi +1 -1
  92. metaflow-stubs/multicore_utils.pyi +1 -1
  93. metaflow-stubs/ob_internal.pyi +1 -1
  94. metaflow-stubs/parameters.pyi +3 -3
  95. metaflow-stubs/plugins/__init__.pyi +13 -13
  96. metaflow-stubs/plugins/airflow/__init__.pyi +1 -1
  97. metaflow-stubs/plugins/airflow/airflow_utils.pyi +1 -1
  98. metaflow-stubs/plugins/airflow/exception.pyi +1 -1
  99. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +1 -1
  100. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +1 -1
  101. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +1 -1
  102. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +1 -1
  103. metaflow-stubs/plugins/argo/__init__.pyi +1 -1
  104. metaflow-stubs/plugins/argo/argo_client.pyi +1 -1
  105. metaflow-stubs/plugins/argo/argo_events.pyi +1 -1
  106. metaflow-stubs/plugins/argo/argo_workflows.pyi +1 -1
  107. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +1 -1
  109. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  110. metaflow-stubs/plugins/aws/__init__.pyi +1 -1
  111. metaflow-stubs/plugins/aws/aws_client.pyi +1 -1
  112. metaflow-stubs/plugins/aws/aws_utils.pyi +1 -1
  113. metaflow-stubs/plugins/aws/batch/__init__.pyi +1 -1
  114. metaflow-stubs/plugins/aws/batch/batch.pyi +1 -1
  115. metaflow-stubs/plugins/aws/batch/batch_client.pyi +1 -1
  116. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +1 -1
  117. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +1 -1
  118. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  119. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +1 -1
  120. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +1 -1
  121. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +1 -1
  122. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +1 -1
  123. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +1 -1
  124. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +1 -1
  125. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  126. metaflow-stubs/plugins/azure/__init__.pyi +1 -1
  127. metaflow-stubs/plugins/azure/azure_credential.pyi +1 -1
  128. metaflow-stubs/plugins/azure/azure_exceptions.pyi +1 -1
  129. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  130. metaflow-stubs/plugins/azure/azure_utils.pyi +1 -1
  131. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +1 -1
  132. metaflow-stubs/plugins/azure/includefile_support.pyi +1 -1
  133. metaflow-stubs/plugins/cards/__init__.pyi +1 -1
  134. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  135. metaflow-stubs/plugins/cards/card_creator.pyi +1 -1
  136. metaflow-stubs/plugins/cards/card_datastore.pyi +1 -1
  137. metaflow-stubs/plugins/cards/card_decorator.pyi +1 -1
  138. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +1 -1
  139. metaflow-stubs/plugins/cards/card_modules/basic.pyi +1 -1
  140. metaflow-stubs/plugins/cards/card_modules/card.pyi +1 -1
  141. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  142. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +1 -1
  143. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +1 -1
  144. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +1 -1
  145. metaflow-stubs/plugins/cards/card_resolver.pyi +1 -1
  146. metaflow-stubs/plugins/cards/component_serializer.pyi +1 -1
  147. metaflow-stubs/plugins/cards/exception.pyi +1 -1
  148. metaflow-stubs/plugins/catch_decorator.pyi +1 -1
  149. metaflow-stubs/plugins/datatools/__init__.pyi +1 -1
  150. metaflow-stubs/plugins/datatools/local.pyi +1 -1
  151. metaflow-stubs/plugins/datatools/s3/__init__.pyi +1 -1
  152. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  153. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +1 -1
  154. metaflow-stubs/plugins/datatools/s3/s3util.pyi +1 -1
  155. metaflow-stubs/plugins/debug_logger.pyi +1 -1
  156. metaflow-stubs/plugins/debug_monitor.pyi +1 -1
  157. metaflow-stubs/plugins/environment_decorator.pyi +1 -1
  158. metaflow-stubs/plugins/events_decorator.pyi +1 -1
  159. metaflow-stubs/plugins/frameworks/__init__.pyi +1 -1
  160. metaflow-stubs/plugins/frameworks/pytorch.pyi +1 -1
  161. metaflow-stubs/plugins/gcp/__init__.pyi +1 -1
  162. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  163. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +1 -1
  164. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +1 -1
  165. metaflow-stubs/plugins/gcp/gs_utils.pyi +1 -1
  166. metaflow-stubs/plugins/gcp/includefile_support.pyi +1 -1
  167. metaflow-stubs/plugins/kubernetes/__init__.pyi +1 -1
  168. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +1 -1
  169. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +1 -1
  170. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +1 -1
  171. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +1 -1
  172. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +1 -1
  173. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +1 -1
  174. metaflow-stubs/plugins/ollama/__init__.pyi +1 -1
  175. metaflow-stubs/plugins/parallel_decorator.pyi +1 -1
  176. metaflow-stubs/plugins/perimeters.pyi +1 -1
  177. metaflow-stubs/plugins/project_decorator.pyi +1 -1
  178. metaflow-stubs/plugins/pypi/__init__.pyi +1 -1
  179. metaflow-stubs/plugins/pypi/conda_decorator.pyi +1 -1
  180. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  181. metaflow-stubs/plugins/pypi/parsers.pyi +1 -1
  182. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +1 -1
  183. metaflow-stubs/plugins/pypi/pypi_environment.pyi +1 -1
  184. metaflow-stubs/plugins/pypi/utils.pyi +1 -1
  185. metaflow-stubs/plugins/resources_decorator.pyi +1 -1
  186. metaflow-stubs/plugins/retry_decorator.pyi +1 -1
  187. metaflow-stubs/plugins/secrets/__init__.pyi +1 -1
  188. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  189. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +1 -1
  190. metaflow-stubs/plugins/snowflake/__init__.pyi +1 -1
  191. metaflow-stubs/plugins/storage_executor.pyi +1 -1
  192. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +1 -1
  193. metaflow-stubs/plugins/timeout_decorator.pyi +1 -1
  194. metaflow-stubs/plugins/torchtune/__init__.pyi +1 -1
  195. metaflow-stubs/plugins/uv/__init__.pyi +1 -1
  196. metaflow-stubs/plugins/uv/uv_environment.pyi +1 -1
  197. metaflow-stubs/profilers/__init__.pyi +1 -1
  198. metaflow-stubs/pylint_wrapper.pyi +1 -1
  199. metaflow-stubs/runner/__init__.pyi +1 -1
  200. metaflow-stubs/runner/deployer.pyi +4 -4
  201. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  202. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  203. metaflow-stubs/runner/nbdeploy.pyi +1 -1
  204. metaflow-stubs/runner/nbrun.pyi +1 -1
  205. metaflow-stubs/runner/subprocess_manager.pyi +1 -1
  206. metaflow-stubs/runner/utils.pyi +3 -3
  207. metaflow-stubs/system/__init__.pyi +1 -1
  208. metaflow-stubs/system/system_logger.pyi +2 -2
  209. metaflow-stubs/system/system_monitor.pyi +1 -1
  210. metaflow-stubs/tagging_util.pyi +1 -1
  211. metaflow-stubs/tuple_util.pyi +1 -1
  212. metaflow-stubs/user_configs/__init__.pyi +1 -1
  213. metaflow-stubs/user_configs/config_decorators.pyi +5 -5
  214. metaflow-stubs/user_configs/config_options.pyi +2 -2
  215. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  216. {ob_metaflow_stubs-6.0.3.179rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc0.dist-info}/METADATA +1 -1
  217. ob_metaflow_stubs-6.0.3.180rc0.dist-info/RECORD +220 -0
  218. ob_metaflow_stubs-6.0.3.179rc4.dist-info/RECORD +0 -220
  219. {ob_metaflow_stubs-6.0.3.179rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc0.dist-info}/WHEEL +0 -0
  220. {ob_metaflow_stubs-6.0.3.179rc4.dist-info → ob_metaflow_stubs-6.0.3.180rc0.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
  # MF version: 2.15.17.1+obcheckpoint(0.2.1);ob(v1) #
- # Generated on 2025-06-13T21:20:48.635887 #
+ # Generated on 2025-06-17T08:34:56.575242 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,18 +35,18 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
+ from . import events as events
+ from . import tuple_util as tuple_util
  from . import cards as cards
  from . import metaflow_git as metaflow_git
- from . import tuple_util as tuple_util
- from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .mf_extensions.outerbounds.toplevel.global_aliases_for_metaflow_package import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from . import client as client
  from .client.core import namespace as namespace
  from .client.core import get_namespace as get_namespace
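The import moves above are pure re-ordering of re-exports; in particular `requirements_txt_parser` stays importable from the package top level. As a rough sketch of how these parsers are meant to be wired up, assuming Metaflow's documented `Config(..., parser=...)` pattern (the config name, file name, and the `deps.packages` key are illustrative, not confirmed by this diff):

```
from metaflow import FlowSpec, step, pypi_base, Config, config_expr
from metaflow import requirements_txt_parser

# `packages` is resolved from the parsed requirements.txt when the flow is run.
@pypi_base(packages=config_expr("deps.packages"))
class ParsedDepsFlow(FlowSpec):
    # The parser turns the file's text into a config mapping.
    deps = Config("deps", default="requirements.txt", parser=requirements_txt_parser)

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ParsedDepsFlow()
```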
@@ -155,117 +155,28 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Decorator that helps cache, version and store models/datasets from huggingface hub.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ temp_dir_root : str, optional
+ The root directory that will hold the temporary directory where objects will be downloaded.

+ load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
+ The list of repos (models/datasets) to load.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:

+ - If repo (model/dataset) is not found in the datastore:
+ - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
+ - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
+ - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.

- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ - If repo is found in the datastore:
+ - Loads it directly from datastore to local path (can be temporary directory or specified path)
  """
  ...
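For orientation, the newly added `@huggingface_hub` stub corresponds to usage roughly like the sketch below. The repo id is illustrative, and treating `current.huggingface_hub.loaded` as a mapping from repo id to local path is an assumption read off the docstring above, not a confirmed API shape:

```
from metaflow import FlowSpec, step, huggingface_hub, current

class HFDownloadFlow(FlowSpec):

    @huggingface_hub(load=["bert-base-uncased"])  # illustrative repo id
    @step
    def start(self):
        # Per the docstring, loaded repos are exposed via
        # current.huggingface_hub.loaded (assumed: repo id -> local path).
        print(current.huggingface_hub.loaded["bert-base-uncased"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    HFDownloadFlow()
```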
@@ -313,126 +224,281 @@ def ollama(*, models: list, backend: str, force_pull: bool, cache_update_policy:
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
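The `@secrets` stub replacing `@pypi` here is a move within the file, not a behavior change (the `@pypi` stub reappears later in this hunk). A minimal usage sketch; the source spec and the environment variable name are illustrative:

```
import os
from metaflow import FlowSpec, step, secrets

class SecretsFlow(FlowSpec):

    @secrets(sources=["my-secret-store.api-credentials"])  # illustrative secret spec
    @step
    def start(self):
        # Per the docstring, the secret's values are injected as environment
        # variables before the step body executes.
        self.has_token = "API_TOKEN" in os.environ
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsFlow()
```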

- def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on DGX cloud.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
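The relocated `@retry` stub maps onto usage like this sketch, with the transient failure simulated:

```
import random
from metaflow import FlowSpec, step, retry

class RetryFlow(FlowSpec):

    @retry(times=4, minutes_between_retries=1)
    @step
    def start(self):
        # Each raised exception triggers a retry, up to `times` attempts,
        # with the configured wait between attempts.
        if random.random() < 0.5:
            raise RuntimeError("simulated flaky network call")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryFlow()
```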

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies a timeout for your step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ def vllm(*, model: str, backend: str, debug: bool, kwargs: typing.Any) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ This decorator is used to run vllm APIs as Metaflow task sidecars.

- This decorator is useful if this step may hang indefinitely.
+ User code call
+ --------------
+ @vllm(
+ model="...",
+ ...
+ )

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ Valid backend options
+ ---------------------
+ - 'local': Run as a separate process on the local task machine.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Valid model options
+ -------------------
+ Any HuggingFace model identifier, e.g. 'meta-llama/Llama-3.2-1B'
+
+ NOTE: vLLM's OpenAI-compatible server serves ONE model per server instance.
+ If you need multiple models, you must create multiple @vllm decorators.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ model: str
+ HuggingFace model identifier to be served by vLLM.
+ backend: str
+ Determines where and how to run the vLLM process.
+ debug: bool
+ Whether to turn on verbose debugging logs.
+ kwargs : Any
+ Any other keyword arguments are passed directly to the vLLM engine.
+ This allows for flexible configuration of vLLM server settings.
+ For example, `tensor_parallel_size=2`.
+ """
+ ...
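Note that the new `@vllm` stub flattens the engine pass-through options into a literal `kwargs` parameter, while the docstring says extra keyword arguments go straight to the vLLM engine. A hedged sketch; the model id and `tensor_parallel_size=2` are the docstring's own examples, and how the step body talks to the sidecar is not specified here:

```
from metaflow import FlowSpec, step, vllm

class VLLMFlow(FlowSpec):

    # One @vllm decorator serves exactly one model (see the NOTE above);
    # extra keyword arguments are forwarded to the vLLM engine.
    @vllm(model="meta-llama/Llama-3.2-1B", backend="local", debug=False,
          tensor_parallel_size=2)
    @step
    def start(self):
        # The sidecar runs vLLM's OpenAI-compatible server for this model.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    VLLMFlow()
```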
+
+ def nvct(*, gpu: int, gpu_type: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ """
+ ...
+
+ def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on DGX cloud.
+
+
+ Parameters
+ ----------
+ gpu : int
+ Number of GPUs to use.
+ gpu_type : str
+ Type of Nvidia GPU to use.
+ queue_timeout : int
+ Time to keep the job in NVCF's queue.
+ """
+ ...
+
+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
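The `@catch` stub matches Metaflow's documented pattern of pairing the decorator with a named artifact; a minimal sketch:

```
from metaflow import FlowSpec, step, catch

class CatchFlow(FlowSpec):

    @catch(var="compute_failed")
    @step
    def start(self):
        self.x = 1 / 0  # raises; @catch stores the exception in self.compute_failed
        self.next(self.end)

    @step
    def end(self):
        # A truthy artifact signals that the happy-path artifacts
        # (here, self.x) are missing.
        if self.compute_failed:
            print("start failed:", self.compute_failed)

if __name__ == "__main__":
    CatchFlow()
```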
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
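The `@pypi` stub lands here unchanged apart from position (note that `@pyi_base` in its docstring is an upstream typo for `@pypi_base`, which the docstring itself names correctly two lines later). Usage per the docstring, with illustrative version pins:

```
from metaflow import FlowSpec, step, pypi, pypi_base

@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})  # flow-wide baseline, illustrative pins
class PypiFlow(FlowSpec):

    @pypi(packages={"pandas": "2.2.2"})  # step-specific addition on top of @pypi_base
    @step
    def start(self):
        import pandas as pd  # resolvable only inside this step's environment
        self.rows = len(pd.DataFrame({"a": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiFlow()
```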
@@ -493,6 +559,54 @@ def model(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step is used to deploy an instance of the app.
+ Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+
+
+ Parameters
+ ----------
+ app_port : int
+ Number of GPUs to use.
+ app_name : str
+ Name of the app to deploy.
+ """
+ ...
+
+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
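The added `@environment` stub is self-explanatory; a minimal sketch with an illustrative variable:

```
import os
from metaflow import FlowSpec, step, environment

class EnvFlow(FlowSpec):

    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @step
    def start(self):
        # The variable is set before the step body runs.
        assert os.environ["TOKENIZERS_PARALLELISM"] == "false"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EnvFlow()
```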
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
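For context, the (unchanged) `@kubernetes` signature shown above is exercised like this; the resource values and pool name are illustrative, and everything else falls back to the defaults in the signature:

```
from metaflow import FlowSpec, step, kubernetes

class K8sFlow(FlowSpec):

    @kubernetes(cpu=2, memory=8192, gpu=1, compute_pool="default-pool")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    K8sFlow()
```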
@@ -582,112 +696,62 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  """
  ...

- def app_deploy(*, app_port: int, app_name: str) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step is used to deploy an instance of the app.
- Requires that self.app_name, self.app_port, self.entrypoint and self.deployDir is set.
+ Specifies a timeout for your step.

+ This decorator is useful if this step may hang indefinitely.

- Parameters
- ----------
- app_port : int
- Number of GPUs to use.
- app_name : str
- Name of the app to deploy.
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the Conda environment for the step.
+ Specifies a timeout for your step.

- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- def nvidia(*, gpu: int, gpu_type: str, queue_timeout: int) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on DGX cloud.
-
-
- Parameters
- ----------
- gpu : int
- Number of GPUs to use.
- gpu_type : str
- Type of Nvidia GPU to use.
- queue_timeout : int
- Time to keep the job in NVCF's queue.
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
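The relocated `@timeout` stub; per its docstring the duration fields are summed, so the sketch below sets an effective limit of 1 hour 30 minutes:

```
import time
from metaflow import FlowSpec, step, timeout

class TimeoutFlow(FlowSpec):

    @timeout(hours=1, minutes=30)  # seconds + minutes + hours are added together
    @step
    def start(self):
        time.sleep(5)  # stand-in for work that might hang
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutFlow()
```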
@@ -748,86 +812,6 @@ def checkpoint(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None
748
812
  """
749
813
  ...
750
814
 
751
- @typing.overload
752
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
753
- """
754
- Specifies the number of times the task corresponding
755
- to a step needs to be retried.
756
-
757
- This decorator is useful for handling transient errors, such as networking issues.
758
- If your task contains operations that can't be retried safely, e.g. database updates,
759
- it is advisable to annotate it with `@retry(times=0)`.
760
-
761
- This can be used in conjunction with the `@catch` decorator. The `@catch`
762
- decorator will execute a no-op task after all retries have been exhausted,
763
- ensuring that the flow execution can continue.
764
-
765
-
766
- Parameters
767
- ----------
768
- times : int, default 3
769
- Number of times to retry this task.
770
- minutes_between_retries : int, default 2
771
- Number of minutes between retries.
772
- """
773
- ...
774
-
775
- @typing.overload
776
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
777
- ...
778
-
779
- @typing.overload
780
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
781
- ...
782
-
783
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
784
- """
785
- Specifies the number of times the task corresponding
786
- to a step needs to be retried.
787
-
788
- This decorator is useful for handling transient errors, such as networking issues.
789
- If your task contains operations that can't be retried safely, e.g. database updates,
790
- it is advisable to annotate it with `@retry(times=0)`.
791
-
792
- This can be used in conjunction with the `@catch` decorator. The `@catch`
793
- decorator will execute a no-op task after all retries have been exhausted,
794
- ensuring that the flow execution can continue.
795
-
796
-
797
- Parameters
798
- ----------
799
- times : int, default 3
800
- Number of times to retry this task.
801
- minutes_between_retries : int, default 2
802
- Number of minutes between retries.
803
- """
804
- ...
805
-
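Since the `@retry` docstring above recommends pairing it with `@catch`, here is a minimal sketch of that combination; the flow, the artifact name, and the simulated failure are illustrative:

```python
# Sketch: @catch stores the exception once @retry gives up, so the flow continues.
import random

from metaflow import FlowSpec, catch, retry, step

class RobustFlow(FlowSpec):

    @catch(var='start_error')                   # record the exception after retries are exhausted
    @retry(times=3, minutes_between_retries=2)  # re-run the task on transient failures
    @step
    def start(self):
        if random.random() < 0.5:
            raise RuntimeError('simulated transient failure')
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, 'start_error', None):
            print('start ultimately failed:', self.start_error)

if __name__ == '__main__':
    RobustFlow()
```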
- def huggingface_hub(*, temp_dir_root: typing.Optional[str] = None, load: typing.Union[typing.List[str], typing.List[typing.Tuple[typing.Dict, str]], typing.List[typing.Tuple[str, str]], typing.List[typing.Dict], None]) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Decorator that helps cache, version and store models/datasets from huggingface hub.
-
-
- Parameters
- ----------
- temp_dir_root : str, optional
- The root directory that will hold the temporary directory where objects will be downloaded.
-
- load: Union[List[str], List[Tuple[Dict, str]], List[Tuple[str, str]], List[Dict], None]
- The list of repos (models/datasets) to load.
-
- Loaded repos can be accessed via `current.huggingface_hub.loaded`. If load is set, then the following happens:
-
- - If repo (model/dataset) is not found in the datastore:
- - Downloads the repo from Hugging Face Hub to a temporary directory (or uses specified path) for local access
- - Stores it in Metaflow's datastore (s3/gcs/azure etc.) with a unique name based on repo_type/repo_id
- - All HF models loaded for a `@step` will be cached separately under flow/step/namespace.
-
- - If repo is found in the datastore:
- - Loads it directly from datastore to local path (can be temporary directory or specified path)
- """
- ...
-
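A hedged usage sketch of `@huggingface_hub` as documented in the removed block above (the stub is dropped from this position, not the feature); the repo id is illustrative and `current.huggingface_hub.loaded` is assumed to map repo ids to local paths:

```python
# Sketch based on the docstring above; requires Hugging Face Hub access.
from metaflow import FlowSpec, current, huggingface_hub, step

class HFFlow(FlowSpec):

    @huggingface_hub(load=['bert-base-uncased'])
    @step
    def start(self):
        # assumed: loaded maps the repo id to the local download/cache path
        model_path = current.huggingface_hub.loaded['bert-base-uncased']
        print('model available at', model_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    HFFlow()
```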
  @typing.overload
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -878,138 +862,157 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...
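For orientation, a minimal sketch of the `@card` signature shown above, appending to the default card; the Markdown content is illustrative and the card can be inspected afterwards with `card view`:

```python
# Sketch: attach a card to a step and append a Markdown component to it.
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    @card(type='default', timeout=45)
    @step
    def start(self):
        current.card.append(Markdown('# Run summary'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardFlow()
```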
 
  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Internal decorator to support Fast bakery
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
 
  @typing.overload
- def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...
 
- def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Internal decorator to support Fast bakery
- """
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...
 
- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies that the step will succeed under all circumstances.
+ Specifies the resources needed when executing this step.
 
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
 
 
  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
  ...
918
942
 
919
943
  @typing.overload
920
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
944
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
945
+ """
946
+ Internal decorator to support Fast bakery
947
+ """
921
948
  ...
922
949
 
923
950
  @typing.overload
924
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
951
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
925
952
  ...
926
953
 
927
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
954
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
928
955
  """
929
- Specifies that the step will success under all circumstances.
930
-
931
- The decorator will create an optional artifact, specified by `var`, which
932
- contains the exception raised. You can use it to detect the presence
933
- of errors, indicating that all happy-path artifacts produced by the step
934
- are missing.
935
-
936
-
937
- Parameters
938
- ----------
939
- var : str, optional, default None
940
- Name of the artifact in which to store the caught exception.
941
- If not specified, the exception is not stored.
942
- print_exception : bool, default True
943
- Determines whether or not the exception is printed to
944
- stdout when caught.
956
+ Internal decorator to support Fast bakery
945
957
  """
946
958
  ...
947
959
 
948
960
  @typing.overload
949
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
961
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
950
962
  """
951
- Specifies environment variables to be set prior to the execution of a step.
963
+ Specifies the Conda environment for the step.
964
+
965
+ Information in this decorator will augment any
966
+ attributes set in the `@conda_base` flow-level decorator. Hence,
967
+ you can use `@conda_base` to set packages required by all
968
+ steps and use `@conda` to specify step-specific overrides.
952
969
 
953
970
 
954
971
  Parameters
955
972
  ----------
956
- vars : Dict[str, str], default {}
957
- Dictionary of environment variables to set.
973
+ packages : Dict[str, str], default {}
974
+ Packages to use for this step. The key is the name of the package
975
+ and the value is the version to use.
976
+ libraries : Dict[str, str], default {}
977
+ Supported for backward compatibility. When used with packages, packages will take precedence.
978
+ python : str, optional, default None
979
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
980
+ that the version used will correspond to the version of the Python interpreter used to start the run.
981
+ disabled : bool, default False
982
+ If set to True, disables @conda.
958
983
  """
959
984
  ...
960
985
 
961
986
  @typing.overload
962
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
987
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
963
988
  ...
964
989
 
965
990
  @typing.overload
966
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
991
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
967
992
  ...
968
993
 
969
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
970
- """
971
- Specifies environment variables to be set prior to the execution of a step.
972
-
973
-
974
- Parameters
975
- ----------
976
- vars : Dict[str, str], default {}
977
- Dictionary of environment variables to set.
978
- """
979
- ...
980
-
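A minimal sketch of `@environment` as documented in the removed block above; the variable name is illustrative:

```python
# Sketch: the variables are set in the task's environment before the step runs.
import os

from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):

    @environment(vars={'TOKENIZERS_PARALLELISM': 'false'})
    @step
    def start(self):
        print(os.environ['TOKENIZERS_PARALLELISM'])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EnvFlow()
```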
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies what flows belong to the same project.
+ Specifies the Conda environment for the step.
 
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
 
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
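A sketch of the `@conda_base`/`@conda` layering the docstring above recommends; the package pins are illustrative, and the flow must run with the Conda environment enabled (e.g. `--environment=conda`):

```python
# Sketch: flow-wide pins from @conda_base, plus a step-specific addition.
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python='3.10.4', packages={'pandas': '2.1.0'})
class CondaFlow(FlowSpec):

    @conda(packages={'scikit-learn': '1.3.2'})  # available only in this step
    @step
    def start(self):
        import sklearn  # resolvable thanks to the step-level override
        print(sklearn.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CondaFlow()
```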
 
@@ -1054,310 +1057,246 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...
 
- def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Allows setting external datastores to save data for the
- `@checkpoint`/`@model`/`@huggingface_hub` decorators.
-
- This decorator is useful when users wish to save data to a different datastore
- than what is configured in Metaflow. This can be for a variety of reasons:
-
- 1. Data security: The objects need to be stored in a bucket (object storage) that is not accessible by other flows.
- 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
- - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
- 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
- - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
+ Specifies the flow(s) that this flow depends on.
 
- Usage:
- ----------
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- - Using a custom IAM role to access the datastore.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
 
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "role_arn": ROLE,
- },
- )
- class MyFlow(FlowSpec):
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
 
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
- ```
 
- - Using credentials to access the s3-compatible datastore.
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
 
- ```python
- @with_artifact_store(
- type="s3",
- config=lambda: {
- "root": "s3://my-bucket-foo/path/to/root",
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- },
- )
- class MyFlow(FlowSpec):
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- @checkpoint
- @step
- def start(self):
- with open("my_file.txt", "w") as f:
- f.write("Hello, World!")
- self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
- self.next(self.end)
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
 
- ```
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
 
- - Accessing objects stored in external datastores after task execution.
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
 
- ```python
- run = Run("CheckpointsTestsFlow/8992")
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- with Checkpoint() as cp:
- latest = cp.list(
- task=run["start"].task
- )[0]
- print(latest)
- cp.load(
- latest,
- "test-checkpoints"
- )
 
- task = Task("TorchTuneFlow/8484/train/53673")
- with artifact_store_from(run=run, config={
- "client_params": {
- "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
- "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
- },
- }):
- load_model(
- task.data.model_ref,
- "test-models"
- )
- ```
- Parameters:
+ Parameters
  ----------
-
- type: str
- The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
-
- config: dict or Callable
- Dictionary of configuration options for the datastore. The following keys are required:
- - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
- - example: 's3://bucket-name/path/to/root'
- - example: 'gs://bucket-name/path/to/root'
- - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
- - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
- - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
- - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
1160
 
1171
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1161
+ @typing.overload
1162
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1172
1163
  """
1173
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1174
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1175
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1176
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1177
- starts only after all sensors finish.
1164
+ Specifies the Conda environment for all steps of the flow.
1165
+
1166
+ Use `@conda_base` to set common libraries required by all
1167
+ steps and use `@conda` to specify step-specific additions.
1178
1168
 
1179
1169
 
1180
1170
  Parameters
1181
1171
  ----------
1182
- timeout : int
1183
- Time, in seconds before the task times out and fails. (Default: 3600)
1184
- poke_interval : int
1185
- Time in seconds that the job should wait in between each try. (Default: 60)
1186
- mode : str
1187
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1188
- exponential_backoff : bool
1189
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1190
- pool : str
1191
- the slot pool this task should run in,
1192
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1193
- soft_fail : bool
1194
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1195
- name : str
1196
- Name of the sensor on Airflow
1197
- description : str
1198
- Description of sensor in the Airflow UI
1199
- bucket_key : Union[str, List[str]]
1200
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1201
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1202
- bucket_name : str
1203
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1204
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1205
- wildcard_match : bool
1206
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1207
- aws_conn_id : str
1208
- a reference to the s3 connection on Airflow. (Default: None)
1209
- verify : bool
1210
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1211
- """
1212
- ...
1213
-
1214
- @typing.overload
1215
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1216
- """
1217
- Specifies the times when the flow should be run when running on a
1218
- production scheduler.
1219
-
1220
-
1221
- Parameters
1222
- ----------
1223
- hourly : bool, default False
1224
- Run the workflow hourly.
1225
- daily : bool, default True
1226
- Run the workflow daily.
1227
- weekly : bool, default False
1228
- Run the workflow weekly.
1229
- cron : str, optional, default None
1230
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1231
- specified by this expression.
1232
- timezone : str, optional, default None
1233
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1234
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1172
+ packages : Dict[str, str], default {}
1173
+ Packages to use for this flow. The key is the name of the package
1174
+ and the value is the version to use.
1175
+ libraries : Dict[str, str], default {}
1176
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1177
+ python : str, optional, default None
1178
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1179
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1180
+ disabled : bool, default False
1181
+ If set to True, disables Conda.
1235
1182
  """
1236
1183
  ...
1237
1184
 
1238
1185
  @typing.overload
1239
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1186
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1240
1187
  ...
1241
1188
 
1242
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1189
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1243
1190
  """
1244
- Specifies the times when the flow should be run when running on a
1245
- production scheduler.
1191
+ Specifies the Conda environment for all steps of the flow.
1192
+
1193
+ Use `@conda_base` to set common libraries required by all
1194
+ steps and use `@conda` to specify step-specific additions.
1246
1195
 
1247
1196
 
1248
1197
  Parameters
1249
1198
  ----------
1250
- hourly : bool, default False
1251
- Run the workflow hourly.
1252
- daily : bool, default True
1253
- Run the workflow daily.
1254
- weekly : bool, default False
1255
- Run the workflow weekly.
1256
- cron : str, optional, default None
1257
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1258
- specified by this expression.
1259
- timezone : str, optional, default None
1260
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1261
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1199
+ packages : Dict[str, str], default {}
1200
+ Packages to use for this flow. The key is the name of the package
1201
+ and the value is the version to use.
1202
+ libraries : Dict[str, str], default {}
1203
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1204
+ python : str, optional, default None
1205
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1206
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1207
+ disabled : bool, default False
1208
+ If set to True, disables Conda.
1262
1209
  """
1263
1210
  ...
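And a flow-level-only sketch of the `@conda_base` decorator added above; the pins are illustrative and apply to every step:

```python
# Sketch: run with e.g. `python versioned_flow.py --environment=conda run`.
from metaflow import FlowSpec, conda_base, step

@conda_base(python='3.10.4', packages={'numpy': '1.26.4'})
class VersionedFlow(FlowSpec):

    @step
    def start(self):
        import numpy as np
        print(np.__version__)  # pinned by @conda_base for every step
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    VersionedFlow()
```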
 
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
@@ -1406,147 +1345,246 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...
 
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies what flows belong to the same project.
 
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
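A one-liner sketch of `@project` namespacing per the added docstring; the project name is illustrative, and deploying with `--production` switches the branch from `user.<username>` to `prod`:

```python
# Sketch: all flows sharing this name share one project namespace.
from metaflow import FlowSpec, project, step

@project(name='fraud_detection')
class TrainingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TrainingFlow()
```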
 
  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
 
  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
 
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
1433
 
1505
- @typing.overload
1506
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1507
- ...
1508
-
1509
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1434
+ def with_artifact_store(f: typing.Optional[typing.Type[FlowSpecDerived]] = None):
1510
1435
  """
1511
- Specifies the event(s) that this flow depends on.
1436
+ Allows setting external datastores to save data for the
1437
+ `@checkpoint`/`@model`/`@huggingface_hub` decorators.
1512
1438
 
1513
- ```
1514
- @trigger(event='foo')
1515
- ```
1516
- or
1517
- ```
1518
- @trigger(events=['foo', 'bar'])
1519
- ```
1439
+ This decorator is useful when users wish to save data to a different datastore
1440
+ than what is configured in Metaflow. This can be for variety of reasons:
1520
1441
 
1521
- Additionally, you can specify the parameter mappings
1522
- to map event payload to Metaflow parameters for the flow.
1523
- ```
1524
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1525
- ```
1526
- or
1527
- ```
1528
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1529
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1530
- ```
1442
+ 1. Data security: The objects needs to be stored in a bucket (object storage) that is not accessible by other flows.
1443
+ 2. Data Locality: The location where the task is executing is not located in the same region as the datastore.
1444
+ - Example: Metaflow datastore lives in US East, but the task is executing in Finland datacenters.
1445
+ 3. Data Lifecycle Policies: The objects need to be archived / managed separately from the Metaflow managed objects.
1446
+ - Example: Flow is training very large models that need to be stored separately and will be deleted more aggressively than the Metaflow managed objects.
1531
1447
 
1532
- 'parameters' can also be a list of strings and tuples like so:
1533
- ```
1534
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1535
- ```
1536
- This is equivalent to:
1537
- ```
1538
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1539
- ```
1448
+ Usage:
1449
+ ----------
1450
+
1451
+ - Using a custom IAM role to access the datastore.
1452
+
1453
+ ```python
1454
+ @with_artifact_store(
1455
+ type="s3",
1456
+ config=lambda: {
1457
+ "root": "s3://my-bucket-foo/path/to/root",
1458
+ "role_arn": ROLE,
1459
+ },
1460
+ )
1461
+ class MyFlow(FlowSpec):
1462
+
1463
+ @checkpoint
1464
+ @step
1465
+ def start(self):
1466
+ with open("my_file.txt", "w") as f:
1467
+ f.write("Hello, World!")
1468
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1469
+ self.next(self.end)
1470
+
1471
+ ```
1472
+
1473
+ - Using credentials to access the s3-compatible datastore.
1474
+
1475
+ ```python
1476
+ @with_artifact_store(
1477
+ type="s3",
1478
+ config=lambda: {
1479
+ "root": "s3://my-bucket-foo/path/to/root",
1480
+ "client_params": {
1481
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1482
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1483
+ },
1484
+ },
1485
+ )
1486
+ class MyFlow(FlowSpec):
1487
+
1488
+ @checkpoint
1489
+ @step
1490
+ def start(self):
1491
+ with open("my_file.txt", "w") as f:
1492
+ f.write("Hello, World!")
1493
+ self.external_bucket_checkpoint = current.checkpoint.save("my_file.txt")
1494
+ self.next(self.end)
1495
+
1496
+ ```
1497
+
1498
+ - Accessing objects stored in external datastores after task execution.
1499
+
1500
+ ```python
1501
+ run = Run("CheckpointsTestsFlow/8992")
1502
+ with artifact_store_from(run=run, config={
1503
+ "client_params": {
1504
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1505
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1506
+ },
1507
+ }):
1508
+ with Checkpoint() as cp:
1509
+ latest = cp.list(
1510
+ task=run["start"].task
1511
+ )[0]
1512
+ print(latest)
1513
+ cp.load(
1514
+ latest,
1515
+ "test-checkpoints"
1516
+ )
1517
+
1518
+ task = Task("TorchTuneFlow/8484/train/53673")
1519
+ with artifact_store_from(run=run, config={
1520
+ "client_params": {
1521
+ "aws_access_key_id": os.environ.get("MY_CUSTOM_ACCESS_KEY"),
1522
+ "aws_secret_access_key": os.environ.get("MY_CUSTOM_SECRET_KEY"),
1523
+ },
1524
+ }):
1525
+ load_model(
1526
+ task.data.model_ref,
1527
+ "test-models"
1528
+ )
1529
+ ```
1530
+ Parameters:
1531
+ ----------
1532
+
1533
+ type: str
1534
+ The type of the datastore. Can be one of 's3', 'gcs', 'azure' or any other supported metaflow Datastore.
1535
+
1536
+ config: dict or Callable
1537
+ Dictionary of configuration options for the datastore. The following keys are required:
1538
+ - root: The root path in the datastore where the data will be saved. (needs to be in the format expected by the datastore)
1539
+ - example: 's3://bucket-name/path/to/root'
1540
+ - example: 'gs://bucket-name/path/to/root'
1541
+ - example: 'https://myblockacc.blob.core.windows.net/metaflow/'
1542
+ - role_arn (optional): AWS IAM role to access s3 bucket (only when `type` is 's3')
1543
+ - session_vars (optional): AWS session variables to access s3 bucket (only when `type` is 's3')
1544
+ - client_params (optional): AWS client parameters to access s3 bucket (only when `type` is 's3')
1545
+ """
1546
+ ...
1547
+
1548
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1549
+ """
1550
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1551
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1552
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1553
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1554
+ starts only after all sensors finish.
1540
1555
 
1541
1556
 
1542
1557
  Parameters
1543
1558
  ----------
1544
- event : Union[str, Dict[str, Any]], optional, default None
1545
- Event dependency for this flow.
1546
- events : List[Union[str, Dict[str, Any]]], default []
1547
- Events dependency for this flow.
1548
- options : Dict[str, Any], default {}
1549
- Backend-specific configuration for tuning eventing behavior.
1559
+ timeout : int
1560
+ Time, in seconds before the task times out and fails. (Default: 3600)
1561
+ poke_interval : int
1562
+ Time in seconds that the job should wait in between each try. (Default: 60)
1563
+ mode : str
1564
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1565
+ exponential_backoff : bool
1566
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1567
+ pool : str
1568
+ the slot pool this task should run in,
1569
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1570
+ soft_fail : bool
1571
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1572
+ name : str
1573
+ Name of the sensor on Airflow
1574
+ description : str
1575
+ Description of sensor in the Airflow UI
1576
+ bucket_key : Union[str, List[str]]
1577
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1578
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1579
+ bucket_name : str
1580
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1581
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1582
+ wildcard_match : bool
1583
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1584
+ aws_conn_id : str
1585
+ a reference to the s3 connection on Airflow. (Default: None)
1586
+ verify : bool
1587
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1550
1588
  """
1551
1589
  ...
1552
1590
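Finally, a hedged sketch of gating a flow on an S3 key with `@airflow_s3_key_sensor` when compiling to Airflow (e.g. `python sensor_flow.py airflow create sensor_dag.py`); only a subset of the parameters above is shown, relying on the documented defaults for the rest, and the bucket/key are illustrative:

```python
# Sketch: the sensor is attached before `start` when the flow is compiled for Airflow.
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    name='wait_for_daily_dump',
    description='Block start until the daily dump lands',
    bucket_key='s3://example-bucket/daily/dump.parquet',  # full s3:// url, so bucket_name stays None
    timeout=3600,
    poke_interval=60,
)
class SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SensorFlow()
```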