metaflow-stubs 2.18.11__py2.py3-none-any.whl → 2.18.13__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +595 -595
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +6 -6
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +18 -6
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +34 -34
  143. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  144. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +4 -4
  156. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  160. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.18.11.dist-info → metaflow_stubs-2.18.13.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.18.13.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.18.11.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.18.11.dist-info → metaflow_stubs-2.18.13.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.18.11.dist-info → metaflow_stubs-2.18.13.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.11 #
- # Generated on 2025-10-07T00:51:30.224324 #
+ # MF version: 2.18.13 #
+ # Generated on 2025-10-20T17:35:52.643612 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,17 +39,17 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import metaflow_git as metaflow_git
  from . import events as events
  from . import tuple_util as tuple_util
- from . import metaflow_git as metaflow_git
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.parsers import yaml_parser as yaml_parser
  from . import cards as cards
  from . import client as client
@@ -173,53 +173,61 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

@@ -262,6 +270,57 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
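For context only (not part of the diff above): a minimal sketch of how the `@pypi_base` / `@pypi` decorators documented in these stubs are typically applied to a flow. The package names and version pins below are illustrative assumptions, not values taken from the package.

```python
# Hypothetical usage sketch; requires Metaflow installed locally.
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(packages={"requests": "2.32.3"}, python="3.11.9")  # packages shared by all steps
class PypiExampleFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved inside the flow-level @pypi_base environment
        self.status = requests.get("https://api.github.com").status_code
        self.next(self.train)

    @pypi(packages={"scikit-learn": "1.5.1"})  # step-specific override on top of @pypi_base
    @step
    def train(self):
        from sklearn.linear_model import LinearRegression
        self.model_type = type(LinearRegression()).__name__
        self.next(self.end)

    @step
    def end(self):
        print(self.status, self.model_type)


if __name__ == "__main__":
    PypiExampleFlow()
```

The same flow-level/step-level layering applies to `@conda_base` / `@conda`, per the docstrings shown in the hunk above.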
@@ -351,6 +410,88 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  """
  ...

+ @typing.overload
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ """
+ Specifies environment variables to be set prior to the execution of a step.
+
+
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
  @typing.overload
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -407,195 +548,50 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
  memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ Memory size (in MB) required for this step.
  shared_memory : int, optional, default None
  The value for the size (in MiB) of the /dev/shm volume for this step.
  This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
  Specifies the resources needed when executing this step.

@@ -631,45 +627,53 @@ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Op
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies that the step will success under all circumstances.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

@@ -733,161 +737,402 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
733
737
  ...
734
738
 
735
739
  @typing.overload
736
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
737
- """
738
- Specifies the Conda environment for the step.
739
-
740
- Information in this decorator will augment any
741
- attributes set in the `@conda_base` flow-level decorator. Hence,
742
- you can use `@conda_base` to set packages required by all
743
- steps and use `@conda` to specify step-specific overrides.
744
-
745
-
746
- Parameters
747
- ----------
748
- packages : Dict[str, str], default {}
749
- Packages to use for this step. The key is the name of the package
750
- and the value is the version to use.
751
- libraries : Dict[str, str], default {}
752
- Supported for backward compatibility. When used with packages, packages will take precedence.
753
- python : str, optional, default None
754
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
755
- that the version used will correspond to the version of the Python interpreter used to start the run.
756
- disabled : bool, default False
757
- If set to True, disables @conda.
758
- """
759
- ...
760
-
761
- @typing.overload
762
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
763
- ...
764
-
765
- @typing.overload
766
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
767
- ...
768
-
769
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
740
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
770
741
  """
771
- Specifies the Conda environment for the step.
772
-
773
- Information in this decorator will augment any
774
- attributes set in the `@conda_base` flow-level decorator. Hence,
775
- you can use `@conda_base` to set packages required by all
776
- steps and use `@conda` to specify step-specific overrides.
742
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
777
743
 
778
744
 
779
745
  Parameters
780
746
  ----------
781
- packages : Dict[str, str], default {}
782
- Packages to use for this step. The key is the name of the package
783
- and the value is the version to use.
784
- libraries : Dict[str, str], default {}
785
- Supported for backward compatibility. When used with packages, packages will take precedence.
786
- python : str, optional, default None
787
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
788
- that the version used will correspond to the version of the Python interpreter used to start the run.
789
- disabled : bool, default False
790
- If set to True, disables @conda.
791
- """
792
- ...
747
+ cpu : int, default 1
748
+ Number of CPUs required for this step. If `@resources` is
749
+ also present, the maximum value from all decorators is used.
750
+ gpu : int, default 0
751
+ Number of GPUs required for this step. If `@resources` is
752
+ also present, the maximum value from all decorators is used.
753
+ memory : int, default 4096
754
+ Memory size (in MB) required for this step. If
755
+ `@resources` is also present, the maximum value from all decorators is
756
+ used.
757
+ image : str, optional, default None
758
+ Docker image to use when launching on AWS Batch. If not specified, and
759
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
760
+ not, a default Docker image mapping to the current version of Python is used.
761
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
762
+ AWS Batch Job Queue to submit the job to.
763
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
764
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
765
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
766
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
767
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
768
+ shared_memory : int, optional, default None
769
+ The value for the size (in MiB) of the /dev/shm volume for this step.
770
+ This parameter maps to the `--shm-size` option in Docker.
771
+ max_swap : int, optional, default None
772
+ The total amount of swap memory (in MiB) a container can use for this
773
+ step. This parameter is translated to the `--memory-swap` option in
774
+ Docker where the value is the sum of the container memory plus the
775
+ `max_swap` value.
776
+ swappiness : int, optional, default None
777
+ This allows you to tune memory swappiness behavior for this step.
778
+ A swappiness value of 0 causes swapping not to happen unless absolutely
779
+ necessary. A swappiness value of 100 causes pages to be swapped very
780
+ aggressively. Accepted values are whole numbers between 0 and 100.
781
+ aws_batch_tags: Dict[str, str], optional, default None
782
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
783
+ Set as string key-value pairs.
784
+ use_tmpfs : bool, default False
785
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
786
+ not available on Fargate compute environments
787
+ tmpfs_tempdir : bool, default True
788
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
789
+ tmpfs_size : int, optional, default None
790
+ The value for the size (in MiB) of the tmpfs mount for this step.
791
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
792
+ memory allocated for this step.
793
+ tmpfs_path : str, optional, default None
794
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
795
+ inferentia : int, default 0
796
+ Number of Inferentia chips required for this step.
797
+ trainium : int, default None
798
+ Alias for inferentia. Use only one of the two.
799
+ efa : int, default 0
800
+ Number of elastic fabric adapter network devices to attach to container
801
+ ephemeral_storage : int, default None
802
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
803
+ This is only relevant for Fargate compute environments
804
+ log_driver: str, optional, default None
805
+ The log driver to use for the Amazon ECS container.
806
+ log_options: List[str], optional, default None
807
+ List of strings containing options for the chosen log driver. The configurable values
808
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
809
+ Example: [`awslogs-group:aws/batch/job`]
810
+ """
811
+ ...
793
812
 
794
813
  @typing.overload
795
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
814
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
815
+ ...
816
+
817
+ @typing.overload
818
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
+ ...
820
+
821
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
796
822
  """
797
- Creates a human-readable report, a Metaflow Card, after this step completes.
823
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
798
824
 
799
- Note that you may add multiple `@card` decorators in a step with different parameters.
825
+
826
+ Parameters
827
+ ----------
828
+ cpu : int, default 1
829
+ Number of CPUs required for this step. If `@resources` is
830
+ also present, the maximum value from all decorators is used.
831
+ gpu : int, default 0
832
+ Number of GPUs required for this step. If `@resources` is
833
+ also present, the maximum value from all decorators is used.
834
+ memory : int, default 4096
835
+ Memory size (in MB) required for this step. If
836
+ `@resources` is also present, the maximum value from all decorators is
837
+ used.
838
+ image : str, optional, default None
839
+ Docker image to use when launching on AWS Batch. If not specified, and
840
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
841
+ not, a default Docker image mapping to the current version of Python is used.
842
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
843
+ AWS Batch Job Queue to submit the job to.
844
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
845
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
846
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
847
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
848
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
849
+ shared_memory : int, optional, default None
850
+ The value for the size (in MiB) of the /dev/shm volume for this step.
851
+ This parameter maps to the `--shm-size` option in Docker.
852
+ max_swap : int, optional, default None
853
+ The total amount of swap memory (in MiB) a container can use for this
854
+ step. This parameter is translated to the `--memory-swap` option in
855
+ Docker where the value is the sum of the container memory plus the
856
+ `max_swap` value.
857
+ swappiness : int, optional, default None
858
+ This allows you to tune memory swappiness behavior for this step.
859
+ A swappiness value of 0 causes swapping not to happen unless absolutely
860
+ necessary. A swappiness value of 100 causes pages to be swapped very
861
+ aggressively. Accepted values are whole numbers between 0 and 100.
862
+ aws_batch_tags : Dict[str, str], optional, default None
863
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
864
+ Set as string key-value pairs.
865
+ use_tmpfs : bool, default False
866
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
867
+ not available on Fargate compute environments.
868
+ tmpfs_tempdir : bool, default True
869
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
870
+ tmpfs_size : int, optional, default None
871
+ The value for the size (in MiB) of the tmpfs mount for this step.
872
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
873
+ memory allocated for this step.
874
+ tmpfs_path : str, optional, default None
875
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
876
+ inferentia : int, default 0
877
+ Number of Inferentia chips required for this step.
878
+ trainium : int, default None
879
+ Alias for inferentia. Use only one of the two.
880
+ efa : int, default 0
881
+ Number of Elastic Fabric Adapter network devices to attach to the container.
882
+ ephemeral_storage : int, default None
883
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200 GiB.
884
+ This is only relevant for Fargate compute environments.
885
+ log_driver : str, optional, default None
886
+ The log driver to use for the Amazon ECS container.
887
+ log_options : List[str], optional, default None
888
+ List of strings containing options for the chosen log driver. The configurable values
889
+ depend on the `log_driver` chosen. Validation of these options is not supported yet.
890
+ Example: [`awslogs-group:aws/batch/job`]
891
+ """
892
+ ...
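A minimal usage sketch of the `@batch` step decorator documented above; the flow name, job queue, and image are illustrative placeholders, and running it assumes a configured AWS Batch backend. As the docstring notes, when `@resources` is also present the larger of the overlapping values is requested.
```
from metaflow import FlowSpec, batch, resources, step


class BatchDemoFlow(FlowSpec):

    # @batch and @resources overlap: the maximum memory value (8192 MB) is used.
    @batch(cpu=2, memory=4096, queue="my-batch-queue", image="python:3.11")
    @resources(memory=8192)
    @step
    def start(self):
        self.message = "ran on AWS Batch"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    BatchDemoFlow()
```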
893
+
894
+ @typing.overload
895
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
896
+ """
897
+ Specifies the event(s) that this flow depends on.
898
+
899
+ ```
900
+ @trigger(event='foo')
901
+ ```
902
+ or
903
+ ```
904
+ @trigger(events=['foo', 'bar'])
905
+ ```
906
+
907
+ Additionally, you can specify the parameter mappings
908
+ to map event payload to Metaflow parameters for the flow.
909
+ ```
910
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
911
+ ```
912
+ or
913
+ ```
914
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
915
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
916
+ ```
917
+
918
+ 'parameters' can also be a list of strings and tuples like so:
919
+ ```
920
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
921
+ ```
922
+ This is equivalent to:
923
+ ```
924
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
925
+ ```
800
926
 
801
927
 
802
928
  Parameters
803
929
  ----------
804
- type : str, default 'default'
805
- Card type.
806
- id : str, optional, default None
807
- If multiple cards are present, use this id to identify this card.
930
+ event : Union[str, Dict[str, Any]], optional, default None
931
+ Event dependency for this flow.
932
+ events : List[Union[str, Dict[str, Any]]], default []
933
+ Events dependency for this flow.
808
934
  options : Dict[str, Any], default {}
809
- Options passed to the card. The contents depend on the card type.
810
- timeout : int, default 45
811
- Interrupt reporting if it takes more than this many seconds.
935
+ Backend-specific configuration for tuning eventing behavior.
812
936
  """
813
937
  ...
814
938
 
815
939
  @typing.overload
816
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
940
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
941
+ ...
942
+
943
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
944
+ """
945
+ Specifies the event(s) that this flow depends on.
946
+
947
+ ```
948
+ @trigger(event='foo')
949
+ ```
950
+ or
951
+ ```
952
+ @trigger(events=['foo', 'bar'])
953
+ ```
954
+
955
+ Additionally, you can specify the parameter mappings
956
+ to map event payload to Metaflow parameters for the flow.
957
+ ```
958
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
959
+ ```
960
+ or
961
+ ```
962
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
963
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
964
+ ```
965
+
966
+ 'parameters' can also be a list of strings and tuples like so:
967
+ ```
968
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
969
+ ```
970
+ This is equivalent to:
971
+ ```
972
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
973
+ ```
974
+
975
+
976
+ Parameters
977
+ ----------
978
+ event : Union[str, Dict[str, Any]], optional, default None
979
+ Event dependency for this flow.
980
+ events : List[Union[str, Dict[str, Any]]], default []
981
+ Events dependency for this flow.
982
+ options : Dict[str, Any], default {}
983
+ Backend-specific configuration for tuning eventing behavior.
984
+ """
817
985
  ...
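A hedged sketch of `@trigger` with a parameter mapping, following the docstring above; the event name `data_updated` and event field `table_name` are hypothetical, and the trigger only takes effect once the flow is deployed to a production orchestrator such as Argo Workflows.
```
from metaflow import FlowSpec, Parameter, step, trigger


# Map the hypothetical event field 'table_name' onto the flow parameter 'table'.
@trigger(event={"name": "data_updated", "parameters": {"table": "table_name"}})
class TriggeredFlow(FlowSpec):

    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("triggered for table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```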
818
986
 
819
987
  @typing.overload
820
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
988
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
989
+ """
990
+ Specifies the flow(s) that this flow depends on.
991
+
992
+ ```
993
+ @trigger_on_finish(flow='FooFlow')
994
+ ```
995
+ or
996
+ ```
997
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
998
+ ```
999
+ This decorator respects the @project decorator and triggers the flow
1000
+ when upstream runs within the same namespace complete successfully.
1001
+
1002
+ Additionally, you can specify project-aware upstream flow dependencies
1003
+ by specifying the fully qualified project_flow_name.
1004
+ ```
1005
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1006
+ ```
1007
+ or
1008
+ ```
1009
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1010
+ ```
1011
+
1012
+ You can also specify just the project or project branch (other values will be
1013
+ inferred from the current project or project branch):
1014
+ ```
1015
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1016
+ ```
1017
+
1018
+ Note that `branch` is typically one of:
1019
+ - `prod`
1020
+ - `user.bob`
1021
+ - `test.my_experiment`
1022
+ - `prod.staging`
1023
+
1024
+
1025
+ Parameters
1026
+ ----------
1027
+ flow : Union[str, Dict[str, str]], optional, default None
1028
+ Upstream flow dependency for this flow.
1029
+ flows : List[Union[str, Dict[str, str]]], default []
1030
+ Upstream flow dependencies for this flow.
1031
+ options : Dict[str, Any], default {}
1032
+ Backend-specific configuration for tuning eventing behavior.
1033
+ """
821
1034
  ...
822
1035
 
823
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1036
+ @typing.overload
1037
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1038
+ ...
1039
+
1040
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
824
1041
  """
825
- Creates a human-readable report, a Metaflow Card, after this step completes.
1042
+ Specifies the flow(s) that this flow depends on.
826
1043
 
827
- Note that you may add multiple `@card` decorators in a step with different parameters.
1044
+ ```
1045
+ @trigger_on_finish(flow='FooFlow')
1046
+ ```
1047
+ or
1048
+ ```
1049
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1050
+ ```
1051
+ This decorator respects the @project decorator and triggers the flow
1052
+ when upstream runs within the same namespace complete successfully.
1053
+
1054
+ Additionally, you can specify project-aware upstream flow dependencies
1055
+ by specifying the fully qualified project_flow_name.
1056
+ ```
1057
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1058
+ ```
1059
+ or
1060
+ ```
1061
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1062
+ ```
1063
+
1064
+ You can also specify just the project or project branch (other values will be
1065
+ inferred from the current project or project branch):
1066
+ ```
1067
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1068
+ ```
1069
+
1070
+ Note that `branch` is typically one of:
1071
+ - `prod`
1072
+ - `user.bob`
1073
+ - `test.my_experiment`
1074
+ - `prod.staging`
828
1075
 
829
1076
 
830
1077
  Parameters
831
1078
  ----------
832
- type : str, default 'default'
833
- Card type.
834
- id : str, optional, default None
835
- If multiple cards are present, use this id to identify this card.
1079
+ flow : Union[str, Dict[str, str]], optional, default None
1080
+ Upstream flow dependency for this flow.
1081
+ flows : List[Union[str, Dict[str, str]]], default []
1082
+ Upstream flow dependencies for this flow.
836
1083
  options : Dict[str, Any], default {}
837
- Options passed to the card. The contents depend on the card type.
838
- timeout : int, default 45
839
- Interrupt reporting if it takes more than this many seconds.
1084
+ Backend-specific configuration for tuning eventing behavior.
840
1085
  """
841
1086
  ...
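A minimal sketch of `@trigger_on_finish`; `FooFlow` is the placeholder upstream flow used in the docstring, and, as noted there, the dependency is resolved within the current `@project` namespace once deployed.
```
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="FooFlow")  # placeholder upstream flow name
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        print("upstream FooFlow finished successfully")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```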
842
1087
 
843
1088
  @typing.overload
844
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1089
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
845
1090
  """
846
- Specifies the PyPI packages for the step.
1091
+ Specifies the Conda environment for all steps of the flow.
847
1092
 
848
- Information in this decorator will augment any
849
- attributes set in the `@pyi_base` flow-level decorator. Hence,
850
- you can use `@pypi_base` to set packages required by all
851
- steps and use `@pypi` to specify step-specific overrides.
1093
+ Use `@conda_base` to set common libraries required by all
1094
+ steps and use `@conda` to specify step-specific additions.
852
1095
 
853
1096
 
854
1097
  Parameters
855
1098
  ----------
856
- packages : Dict[str, str], default: {}
857
- Packages to use for this step. The key is the name of the package
1099
+ packages : Dict[str, str], default {}
1100
+ Packages to use for this flow. The key is the name of the package
858
1101
  and the value is the version to use.
859
- python : str, optional, default: None
1102
+ libraries : Dict[str, str], default {}
1103
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1104
+ python : str, optional, default None
860
1105
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
861
1106
  that the version used will correspond to the version of the Python interpreter used to start the run.
1107
+ disabled : bool, default False
1108
+ If set to True, disables Conda.
862
1109
  """
863
1110
  ...
864
1111
 
865
1112
  @typing.overload
866
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
867
- ...
868
-
869
- @typing.overload
870
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1113
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
871
1114
  ...
872
1115
 
873
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1116
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
874
1117
  """
875
- Specifies the PyPI packages for the step.
1118
+ Specifies the Conda environment for all steps of the flow.
876
1119
 
877
- Information in this decorator will augment any
878
- attributes set in the `@pyi_base` flow-level decorator. Hence,
879
- you can use `@pypi_base` to set packages required by all
880
- steps and use `@pypi` to specify step-specific overrides.
1120
+ Use `@conda_base` to set common libraries required by all
1121
+ steps and use `@conda` to specify step-specific additions.
881
1122
 
882
1123
 
883
1124
  Parameters
884
1125
  ----------
885
- packages : Dict[str, str], default: {}
886
- Packages to use for this step. The key is the name of the package
1126
+ packages : Dict[str, str], default {}
1127
+ Packages to use for this flow. The key is the name of the package
887
1128
  and the value is the version to use.
888
- python : str, optional, default: None
1129
+ libraries : Dict[str, str], default {}
1130
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1131
+ python : str, optional, default None
889
1132
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
890
1133
  that the version used will correspond to the version of the Python interpreter used to start the run.
1134
+ disabled : bool, default False
1135
+ If set to True, disables Conda.
891
1136
  """
892
1137
  ...
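A sketch of `@conda_base` at the flow level, assuming a Conda-enabled Metaflow environment; the Python and pandas versions are illustrative pins, not values shipped with this package.
```
from metaflow import FlowSpec, conda_base, step


@conda_base(python="3.11.5", packages={"pandas": "2.2.2"})  # illustrative pins
class CondaBaseFlow(FlowSpec):

    @step
    def start(self):
        # pandas is available in every step because it is pinned at the flow level
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print(self.rows, "rows")


if __name__ == "__main__":
    CondaBaseFlow()
```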
893
1138
 
@@ -933,105 +1178,45 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
933
1178
  the DAG to wait for exists. (Default: True)
934
1179
  """
935
1180
  ...
936
-
937
- @typing.overload
938
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
939
- """
940
- Specifies the flow(s) that this flow depends on.
941
-
942
- ```
943
- @trigger_on_finish(flow='FooFlow')
944
- ```
945
- or
946
- ```
947
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
948
- ```
949
- This decorator respects the @project decorator and triggers the flow
950
- when upstream runs within the same namespace complete successfully
951
-
952
- Additionally, you can specify project aware upstream flow dependencies
953
- by specifying the fully qualified project_flow_name.
954
- ```
955
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
956
- ```
957
- or
958
- ```
959
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
960
- ```
961
-
962
- You can also specify just the project or project branch (other values will be
963
- inferred from the current project or project branch):
964
- ```
965
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
966
- ```
967
-
968
- Note that `branch` is typically one of:
969
- - `prod`
970
- - `user.bob`
971
- - `test.my_experiment`
972
- - `prod.staging`
1181
+
1182
+ @typing.overload
1183
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1184
+ """
1185
+ Specifies the PyPI packages for all steps of the flow.
973
1186
 
1187
+ Use `@pypi_base` to set common packages required by all
1188
+ steps and use `@pypi` to specify step-specific overrides.
974
1189
 
975
1190
  Parameters
976
1191
  ----------
977
- flow : Union[str, Dict[str, str]], optional, default None
978
- Upstream flow dependency for this flow.
979
- flows : List[Union[str, Dict[str, str]]], default []
980
- Upstream flow dependencies for this flow.
981
- options : Dict[str, Any], default {}
982
- Backend-specific configuration for tuning eventing behavior.
1192
+ packages : Dict[str, str], default: {}
1193
+ Packages to use for this flow. The key is the name of the package
1194
+ and the value is the version to use.
1195
+ python : str, optional, default: None
1196
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1197
+ that the version used will correspond to the version of the Python interpreter used to start the run.
983
1198
  """
984
1199
  ...
985
1200
 
986
1201
  @typing.overload
987
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1202
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
988
1203
  ...
989
1204
 
990
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1205
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
991
1206
  """
992
- Specifies the flow(s) that this flow depends on.
993
-
994
- ```
995
- @trigger_on_finish(flow='FooFlow')
996
- ```
997
- or
998
- ```
999
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1000
- ```
1001
- This decorator respects the @project decorator and triggers the flow
1002
- when upstream runs within the same namespace complete successfully
1003
-
1004
- Additionally, you can specify project aware upstream flow dependencies
1005
- by specifying the fully qualified project_flow_name.
1006
- ```
1007
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1008
- ```
1009
- or
1010
- ```
1011
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1012
- ```
1013
-
1014
- You can also specify just the project or project branch (other values will be
1015
- inferred from the current project or project branch):
1016
- ```
1017
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1018
- ```
1019
-
1020
- Note that `branch` is typically one of:
1021
- - `prod`
1022
- - `user.bob`
1023
- - `test.my_experiment`
1024
- - `prod.staging`
1207
+ Specifies the PyPI packages for all steps of the flow.
1025
1208
 
1209
+ Use `@pypi_base` to set common packages required by all
1210
+ steps and use `@pypi` to specify step-specific overrides.
1026
1211
 
1027
1212
  Parameters
1028
1213
  ----------
1029
- flow : Union[str, Dict[str, str]], optional, default None
1030
- Upstream flow dependency for this flow.
1031
- flows : List[Union[str, Dict[str, str]]], default []
1032
- Upstream flow dependencies for this flow.
1033
- options : Dict[str, Any], default {}
1034
- Backend-specific configuration for tuning eventing behavior.
1214
+ packages : Dict[str, str], default: {}
1215
+ Packages to use for this flow. The key is the name of the package
1216
+ and the value is the version to use.
1217
+ python : str, optional, default: None
1218
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1219
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1035
1220
  """
1036
1221
  ...
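A sketch combining `@pypi_base` with a step-level `@pypi` override, as the docstring suggests; the package names and versions are illustrative.
```
from metaflow import FlowSpec, pypi, pypi_base, step


@pypi_base(python="3.11.5", packages={"requests": "2.32.3"})  # flow-wide packages
class PypiBaseFlow(FlowSpec):

    @pypi(packages={"rich": "13.7.1"})  # step-specific addition on top of the base
    @step
    def start(self):
        import requests
        import rich  # available only in this step
        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseFlow()
```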
1037
1222
 
@@ -1078,99 +1263,6 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1078
1263
  """
1079
1264
  ...
1080
1265
 
1081
- @typing.overload
1082
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1083
- """
1084
- Specifies the event(s) that this flow depends on.
1085
-
1086
- ```
1087
- @trigger(event='foo')
1088
- ```
1089
- or
1090
- ```
1091
- @trigger(events=['foo', 'bar'])
1092
- ```
1093
-
1094
- Additionally, you can specify the parameter mappings
1095
- to map event payload to Metaflow parameters for the flow.
1096
- ```
1097
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1098
- ```
1099
- or
1100
- ```
1101
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1102
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1103
- ```
1104
-
1105
- 'parameters' can also be a list of strings and tuples like so:
1106
- ```
1107
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1108
- ```
1109
- This is equivalent to:
1110
- ```
1111
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1112
- ```
1113
-
1114
-
1115
- Parameters
1116
- ----------
1117
- event : Union[str, Dict[str, Any]], optional, default None
1118
- Event dependency for this flow.
1119
- events : List[Union[str, Dict[str, Any]]], default []
1120
- Events dependency for this flow.
1121
- options : Dict[str, Any], default {}
1122
- Backend-specific configuration for tuning eventing behavior.
1123
- """
1124
- ...
1125
-
1126
- @typing.overload
1127
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1128
- ...
1129
-
1130
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1131
- """
1132
- Specifies the event(s) that this flow depends on.
1133
-
1134
- ```
1135
- @trigger(event='foo')
1136
- ```
1137
- or
1138
- ```
1139
- @trigger(events=['foo', 'bar'])
1140
- ```
1141
-
1142
- Additionally, you can specify the parameter mappings
1143
- to map event payload to Metaflow parameters for the flow.
1144
- ```
1145
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1146
- ```
1147
- or
1148
- ```
1149
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1150
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1151
- ```
1152
-
1153
- 'parameters' can also be a list of strings and tuples like so:
1154
- ```
1155
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1156
- ```
1157
- This is equivalent to:
1158
- ```
1159
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1160
- ```
1161
-
1162
-
1163
- Parameters
1164
- ----------
1165
- event : Union[str, Dict[str, Any]], optional, default None
1166
- Event dependency for this flow.
1167
- events : List[Union[str, Dict[str, Any]]], default []
1168
- Events dependency for this flow.
1169
- options : Dict[str, Any], default {}
1170
- Backend-specific configuration for tuning eventing behavior.
1171
- """
1172
- ...
1173
-
1174
1266
  @typing.overload
1175
1267
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1176
1268
  """
@@ -1222,47 +1314,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1222
1314
  """
1223
1315
  ...
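A sketch of `@schedule` using an explicit cron expression; the cron string and timezone are illustrative, and the schedule only applies once the flow is deployed to a production scheduler.
```
from metaflow import FlowSpec, schedule, step


@schedule(cron="0 6 * * *", timezone="UTC")  # run daily at 06:00 UTC once deployed
class ScheduledFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ScheduledFlow()
```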
1224
1316
 
1225
- @typing.overload
1226
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1227
- """
1228
- Specifies the PyPI packages for all steps of the flow.
1229
-
1230
- Use `@pypi_base` to set common packages required by all
1231
- steps and use `@pypi` to specify step-specific overrides.
1232
-
1233
- Parameters
1234
- ----------
1235
- packages : Dict[str, str], default: {}
1236
- Packages to use for this flow. The key is the name of the package
1237
- and the value is the version to use.
1238
- python : str, optional, default: None
1239
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1240
- that the version used will correspond to the version of the Python interpreter used to start the run.
1241
- """
1242
- ...
1243
-
1244
- @typing.overload
1245
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1246
- ...
1247
-
1248
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1249
- """
1250
- Specifies the PyPI packages for all steps of the flow.
1251
-
1252
- Use `@pypi_base` to set common packages required by all
1253
- steps and use `@pypi` to specify step-specific overrides.
1254
-
1255
- Parameters
1256
- ----------
1257
- packages : Dict[str, str], default: {}
1258
- Packages to use for this flow. The key is the name of the package
1259
- and the value is the version to use.
1260
- python : str, optional, default: None
1261
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1262
- that the version used will correspond to the version of the Python interpreter used to start the run.
1263
- """
1264
- ...
1265
-
1266
1317
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1267
1318
  """
1268
1319
  Specifies what flows belong to the same project.
@@ -1298,54 +1349,3 @@ def project(*, name: str, branch: typing.Optional[str] = None, production: bool
1298
1349
  """
1299
1350
  ...
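A sketch of `@project`; `demo_project` is a placeholder name, and the optional `branch`/`production` arguments shown in the signature above are typically left to deployment-time configuration.
```
from metaflow import FlowSpec, project, step


@project(name="demo_project")  # placeholder project name
class ProjectFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectFlow()
```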
1300
1351
 
1301
- @typing.overload
1302
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1303
- """
1304
- Specifies the Conda environment for all steps of the flow.
1305
-
1306
- Use `@conda_base` to set common libraries required by all
1307
- steps and use `@conda` to specify step-specific additions.
1308
-
1309
-
1310
- Parameters
1311
- ----------
1312
- packages : Dict[str, str], default {}
1313
- Packages to use for this flow. The key is the name of the package
1314
- and the value is the version to use.
1315
- libraries : Dict[str, str], default {}
1316
- Supported for backward compatibility. When used with packages, packages will take precedence.
1317
- python : str, optional, default None
1318
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1319
- that the version used will correspond to the version of the Python interpreter used to start the run.
1320
- disabled : bool, default False
1321
- If set to True, disables Conda.
1322
- """
1323
- ...
1324
-
1325
- @typing.overload
1326
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1327
- ...
1328
-
1329
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1330
- """
1331
- Specifies the Conda environment for all steps of the flow.
1332
-
1333
- Use `@conda_base` to set common libraries required by all
1334
- steps and use `@conda` to specify step-specific additions.
1335
-
1336
-
1337
- Parameters
1338
- ----------
1339
- packages : Dict[str, str], default {}
1340
- Packages to use for this flow. The key is the name of the package
1341
- and the value is the version to use.
1342
- libraries : Dict[str, str], default {}
1343
- Supported for backward compatibility. When used with packages, packages will take precedence.
1344
- python : str, optional, default None
1345
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1346
- that the version used will correspond to the version of the Python interpreter used to start the run.
1347
- disabled : bool, default False
1348
- If set to True, disables Conda.
1349
- """
1350
- ...
1351
-