metaflow-stubs 2.18.12__py2.py3-none-any.whl → 2.18.13__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their public registry. It is provided for informational purposes only.

Potentially problematic release.

This version of metaflow-stubs might be problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +668 -668
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +7 -7
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +3 -3
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +24 -24
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +6 -6
  143. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  144. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +6 -6
  160. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.18.12.dist-info → metaflow_stubs-2.18.13.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.18.13.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.18.12.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.18.12.dist-info → metaflow_stubs-2.18.13.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.18.12.dist-info → metaflow_stubs-2.18.13.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.12 #
- # Generated on 2025-10-15T20:44:18.181602 #
+ # MF version: 2.18.13 #
+ # Generated on 2025-10-20T17:35:52.643612 #
  ######################################################################################################

  from __future__ import annotations
@@ -39,18 +39,18 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
- from . import tuple_util as tuple_util
  from . import metaflow_git as metaflow_git
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.parsers import yaml_parser as yaml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.parsers import yaml_parser as yaml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
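The hunk above only reorders top-level re-exports (`tuple_util`, the PyPI parsers, `yaml_parser`); no public symbols are added or removed. For orientation, here is a hedged sketch of how these re-exported parsers are typically attached to a `Config` object in a flow; the flow name, file paths, and the `Config(..., parser=...)` usage are assumptions based on Metaflow's documented config API, not something stated in this diff:

```python
from metaflow import Config, FlowSpec, step, pyproject_toml_parser, yaml_parser


class ParserDemoFlow(FlowSpec):
    # Hypothetical configs: each parser converts a text file into a config mapping.
    project = Config("project", default="pyproject.toml", parser=pyproject_toml_parser)
    settings = Config("settings", default="settings.yaml", parser=yaml_parser)

    @step
    def start(self):
        # Parsed values are available as dict-like config objects on the flow.
        print("settings keys:", list(self.settings))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ParserDemoFlow()
```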
@@ -154,197 +154,176 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
-
-
- Parameters
- ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the Conda environment for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on Kubernetes.


  Parameters
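The churn in this hunk appears to come from the stub generator emitting the step decorators in a different order; the `@retry`, `@catch`, `@card`, and `@environment` blocks removed here reappear later in the regenerated file, and the decorator semantics are unchanged. A hedged usage sketch of the `@conda`/`@conda_base` and `@secrets` decorators documented above; package versions and the secret source name are illustrative assumptions, not values taken from this diff:

```python
from metaflow import FlowSpec, conda, conda_base, secrets, step


@conda_base(python="3.11.5", packages={"pandas": "2.2.2"})  # flow-wide defaults (illustrative)
class EnvDemoFlow(FlowSpec):

    # Step-level @conda augments/overrides @conda_base; @secrets injects the
    # resolved secret values as environment variables before the step runs.
    @conda(packages={"pyarrow": "17.0.0"})
    @secrets(sources=["my-secret-source"])  # hypothetical secret spec
    @step
    def start(self):
        import os
        import pandas as pd
        print(pd.__version__, "token present:", "MY_TOKEN" in os.environ)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```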
@@ -352,249 +331,17 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
  image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
  image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
  If given, the imagePullPolicy to be applied to the Docker image of the step.
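The removed lines above are the old `@batch`, `@parallel`, and `@conda` stub blocks that the regenerated file now emits elsewhere; only the `@kubernetes` docstring continues into the next hunk. As a hedged sketch of combining the `@kubernetes` and `@resources` decorators described in this file (step names, resource values, and the image name are illustrative assumptions):

```python
from metaflow import FlowSpec, kubernetes, resources, step


class ComputeDemoFlow(FlowSpec):

    # Portable resource hints; honored by whichever compute layer runs the step.
    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        self.next(self.train)

    # Pin this step to Kubernetes explicitly; disk is in MB per the docstring above.
    @kubernetes(cpu=4, memory=16384, disk=20480, image="python:3.11-slim")
    @step
    def train(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ComputeDemoFlow()
```

Running `python compute_demo.py run --with kubernetes` would apply the `@resources` hints on Kubernetes, as the `@resources` docstring in the next hunk describes.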
@@ -664,314 +411,576 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
664
411
  ...
665
412
 
666
413
  @typing.overload
667
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
414
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
668
415
  """
669
- Specifies the resources needed when executing this step.
670
-
671
- Use `@resources` to specify the resource requirements
672
- independently of the specific compute layer (`@batch`, `@kubernetes`).
673
-
674
- You can choose the compute layer on the command line by executing e.g.
675
- ```
676
- python myflow.py run --with batch
677
- ```
678
- or
679
- ```
680
- python myflow.py run --with kubernetes
681
- ```
682
- which executes the flow on the desired system using the
683
- requirements specified in `@resources`.
416
+ Specifies environment variables to be set prior to the execution of a step.
684
417
 
685
418
 
686
419
  Parameters
687
420
  ----------
688
- cpu : int, default 1
689
- Number of CPUs required for this step.
690
- gpu : int, optional, default None
691
- Number of GPUs required for this step.
692
- disk : int, optional, default None
693
- Disk size (in MB) required for this step. Only applies on Kubernetes.
694
- memory : int, default 4096
695
- Memory size (in MB) required for this step.
696
- shared_memory : int, optional, default None
697
- The value for the size (in MiB) of the /dev/shm volume for this step.
698
- This parameter maps to the `--shm-size` option in Docker.
421
+ vars : Dict[str, str], default {}
422
+ Dictionary of environment variables to set.
699
423
  """
700
424
  ...
701
425
 
702
426
  @typing.overload
703
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
427
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
704
428
  ...
705
429
 
706
430
  @typing.overload
707
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
431
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
708
432
  ...
709
433
 
710
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
434
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
711
435
  """
712
- Specifies the resources needed when executing this step.
713
-
714
- Use `@resources` to specify the resource requirements
715
- independently of the specific compute layer (`@batch`, `@kubernetes`).
716
-
717
- You can choose the compute layer on the command line by executing e.g.
718
- ```
719
- python myflow.py run --with batch
720
- ```
721
- or
722
- ```
723
- python myflow.py run --with kubernetes
724
- ```
725
- which executes the flow on the desired system using the
726
- requirements specified in `@resources`.
436
+ Specifies environment variables to be set prior to the execution of a step.
727
437
 
728
438
 
729
439
  Parameters
730
440
  ----------
731
- cpu : int, default 1
732
- Number of CPUs required for this step.
733
- gpu : int, optional, default None
734
- Number of GPUs required for this step.
735
- disk : int, optional, default None
736
- Disk size (in MB) required for this step. Only applies on Kubernetes.
737
- memory : int, default 4096
738
- Memory size (in MB) required for this step.
739
- shared_memory : int, optional, default None
740
- The value for the size (in MiB) of the /dev/shm volume for this step.
741
- This parameter maps to the `--shm-size` option in Docker.
441
+ vars : Dict[str, str], default {}
442
+ Dictionary of environment variables to set.
742
443
  """
743
444
  ...
744
445
 
745
446
  @typing.overload
746
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
447
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
747
448
  """
748
- Specifies a timeout for your step.
749
-
750
- This decorator is useful if this step may hang indefinitely.
751
-
752
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
753
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
754
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
449
+ Creates a human-readable report, a Metaflow Card, after this step completes.
755
450
 
756
- Note that all the values specified in parameters are added together so if you specify
757
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
451
+ Note that you may add multiple `@card` decorators in a step with different parameters.
758
452
 
759
453
 
760
454
  Parameters
761
455
  ----------
762
- seconds : int, default 0
763
- Number of seconds to wait prior to timing out.
764
- minutes : int, default 0
765
- Number of minutes to wait prior to timing out.
766
- hours : int, default 0
767
- Number of hours to wait prior to timing out.
456
+ type : str, default 'default'
457
+ Card type.
458
+ id : str, optional, default None
459
+ If multiple cards are present, use this id to identify this card.
460
+ options : Dict[str, Any], default {}
461
+ Options passed to the card. The contents depend on the card type.
462
+ timeout : int, default 45
463
+ Interrupt reporting if it takes more than this many seconds.
768
464
  """
769
465
  ...
770
466
 
771
467
  @typing.overload
772
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
468
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
773
469
  ...
774
470
 
775
471
  @typing.overload
776
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
472
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
777
473
  ...
778
474
 
779
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
475
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
780
476
  """
781
- Specifies a timeout for your step.
782
-
783
- This decorator is useful if this step may hang indefinitely.
784
-
785
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
786
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
787
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
477
+ Creates a human-readable report, a Metaflow Card, after this step completes.
788
478
 
789
- Note that all the values specified in parameters are added together so if you specify
790
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
479
+ Note that you may add multiple `@card` decorators in a step with different parameters.
791
480
 
792
481
 
793
482
  Parameters
794
483
  ----------
795
- seconds : int, default 0
796
- Number of seconds to wait prior to timing out.
797
- minutes : int, default 0
798
- Number of minutes to wait prior to timing out.
799
- hours : int, default 0
800
- Number of hours to wait prior to timing out.
484
+ type : str, default 'default'
485
+ Card type.
486
+ id : str, optional, default None
487
+ If multiple cards are present, use this id to identify this card.
488
+ options : Dict[str, Any], default {}
489
+ Options passed to the card. The contents depend on the card type.
490
+ timeout : int, default 45
491
+ Interrupt reporting if it takes more than this many seconds.
801
492
  """
802
493
  ...
803
494
 
804
495
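For reference, a minimal usage sketch of the `@card` step decorator declared above; the flow, step, and Markdown content are illustrative assumptions, not part of these stubs:
```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardFlow(FlowSpec):

    @card(type='default', timeout=45)
    @step
    def start(self):
        # Components appended here are rendered into the card after the step completes.
        current.card.append(Markdown('# Hello from a Metaflow card'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardFlow()
```
After `python card_flow.py run`, the generated report can typically be opened with `python card_flow.py card view start`.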
  @typing.overload
805
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
496
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
806
497
  """
807
- Specifies the PyPI packages for the step.
498
+ Specifies the number of times the task corresponding
499
+ to a step needs to be retried.
808
500
 
809
- Information in this decorator will augment any
810
- attributes set in the `@pypi_base` flow-level decorator. Hence,
811
- you can use `@pypi_base` to set packages required by all
812
- steps and use `@pypi` to specify step-specific overrides.
501
+ This decorator is useful for handling transient errors, such as networking issues.
502
+ If your task contains operations that can't be retried safely, e.g. database updates,
503
+ it is advisable to annotate it with `@retry(times=0)`.
504
+
505
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
506
+ decorator will execute a no-op task after all retries have been exhausted,
507
+ ensuring that the flow execution can continue.
813
508
 
814
509
 
815
510
  Parameters
816
511
  ----------
817
- packages : Dict[str, str], default: {}
818
- Packages to use for this step. The key is the name of the package
819
- and the value is the version to use.
820
- python : str, optional, default: None
821
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
822
- that the version used will correspond to the version of the Python interpreter used to start the run.
512
+ times : int, default 3
513
+ Number of times to retry this task.
514
+ minutes_between_retries : int, default 2
515
+ Number of minutes between retries.
823
516
  """
824
517
  ...
825
518
 
826
519
  @typing.overload
827
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
520
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
828
521
  ...
829
522
 
830
523
  @typing.overload
831
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
524
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
832
525
  ...
833
526
 
834
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
527
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
835
528
  """
836
- Specifies the PyPI packages for the step.
529
+ Specifies the number of times the task corresponding
530
+ to a step needs to be retried.
837
531
 
838
- Information in this decorator will augment any
839
- attributes set in the `@pypi_base` flow-level decorator. Hence,
840
- you can use `@pypi_base` to set packages required by all
841
- steps and use `@pypi` to specify step-specific overrides.
532
+ This decorator is useful for handling transient errors, such as networking issues.
533
+ If your task contains operations that can't be retried safely, e.g. database updates,
534
+ it is advisable to annotate it with `@retry(times=0)`.
535
+
536
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
537
+ decorator will execute a no-op task after all retries have been exhausted,
538
+ ensuring that the flow execution can continue.
842
539
 
843
540
 
844
541
  Parameters
845
542
  ----------
846
- packages : Dict[str, str], default: {}
847
- Packages to use for this step. The key is the name of the package
848
- and the value is the version to use.
849
- python : str, optional, default: None
850
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
851
- that the version used will correspond to the version of the Python interpreter used to start the run.
543
+ times : int, default 3
544
+ Number of times to retry this task.
545
+ minutes_between_retries : int, default 2
546
+ Number of minutes between retries.
852
547
  """
853
548
  ...
854
549
 
855
550
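A minimal sketch of `@retry` as specified above; the simulated transient failure is an illustrative assumption:
```
import random

from metaflow import FlowSpec, retry, step

class RetryFlow(FlowSpec):

    @retry(times=2, minutes_between_retries=0)
    @step
    def start(self):
        # Fails roughly half the time; Metaflow reruns the task up to 2 more times.
        if random.random() < 0.5:
            raise RuntimeError("simulated transient failure")
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    RetryFlow()
```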
  @typing.overload
856
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
551
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
857
552
  """
858
- Specifies secrets to be retrieved and injected as environment variables prior to
859
- the execution of a step.
553
+ Specifies the resources needed when executing this step.
554
+
555
+ Use `@resources` to specify the resource requirements
556
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
557
+
558
+ You can choose the compute layer on the command line by executing e.g.
559
+ ```
560
+ python myflow.py run --with batch
561
+ ```
562
+ or
563
+ ```
564
+ python myflow.py run --with kubernetes
565
+ ```
566
+ which executes the flow on the desired system using the
567
+ requirements specified in `@resources`.
860
568
 
861
569
 
862
570
  Parameters
863
571
  ----------
864
- sources : List[Union[str, Dict[str, Any]]], default: []
865
- List of secret specs, defining how the secrets are to be retrieved
866
- role : str, optional, default: None
867
- Role to use for fetching secrets
572
+ cpu : int, default 1
573
+ Number of CPUs required for this step.
574
+ gpu : int, optional, default None
575
+ Number of GPUs required for this step.
576
+ disk : int, optional, default None
577
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
578
+ memory : int, default 4096
579
+ Memory size (in MB) required for this step.
580
+ shared_memory : int, optional, default None
581
+ The value for the size (in MiB) of the /dev/shm volume for this step.
582
+ This parameter maps to the `--shm-size` option in Docker.
868
583
  """
869
584
  ...
870
585
 
871
586
  @typing.overload
872
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
587
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
873
588
  ...
874
589
 
875
590
  @typing.overload
876
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
591
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
877
592
  ...
878
593
 
879
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
594
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
880
595
  """
881
- Specifies secrets to be retrieved and injected as environment variables prior to
882
- the execution of a step.
596
+ Specifies the resources needed when executing this step.
883
597
 
598
+ Use `@resources` to specify the resource requirements
599
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
884
600
 
885
- Parameters
886
- ----------
887
- sources : List[Union[str, Dict[str, Any]]], default: []
888
- List of secret specs, defining how the secrets are to be retrieved
889
- role : str, optional, default: None
890
- Role to use for fetching secrets
891
- """
892
- ...
893
-
894
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
895
- """
896
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
897
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
601
+ You can choose the compute layer on the command line by executing e.g.
602
+ ```
603
+ python myflow.py run --with batch
604
+ ```
605
+ or
606
+ ```
607
+ python myflow.py run --with kubernetes
608
+ ```
609
+ which executes the flow on the desired system using the
610
+ requirements specified in `@resources`.
898
611
 
899
612
 
900
613
  Parameters
901
614
  ----------
902
- timeout : int
903
- Time, in seconds before the task times out and fails. (Default: 3600)
904
- poke_interval : int
905
- Time in seconds that the job should wait in between each try. (Default: 60)
906
- mode : str
907
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
908
- exponential_backoff : bool
909
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
910
- pool : str
911
- the slot pool this task should run in,
912
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
913
- soft_fail : bool
914
- Set to true to mark the task as SKIPPED on failure. (Default: False)
915
- name : str
916
- Name of the sensor on Airflow
917
- description : str
918
- Description of sensor in the Airflow UI
919
- external_dag_id : str
920
- The dag_id that contains the task you want to wait for.
921
- external_task_ids : List[str]
922
- The list of task_ids that you want to wait for.
923
- If None (default value) the sensor waits for the DAG. (Default: None)
924
- allowed_states : List[str]
925
- Iterable of allowed states, (Default: ['success'])
926
- failed_states : List[str]
927
- Iterable of failed or dis-allowed states. (Default: None)
928
- execution_delta : datetime.timedelta
929
- time difference with the previous execution to look at,
930
- the default is the same logical date as the current task or DAG. (Default: None)
931
- check_existence: bool
932
- Set to True to check if the external task exists or check if
933
- the DAG to wait for exists. (Default: True)
615
+ cpu : int, default 1
616
+ Number of CPUs required for this step.
617
+ gpu : int, optional, default None
618
+ Number of GPUs required for this step.
619
+ disk : int, optional, default None
620
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
621
+ memory : int, default 4096
622
+ Memory size (in MB) required for this step.
623
+ shared_memory : int, optional, default None
624
+ The value for the size (in MiB) of the /dev/shm volume for this step.
625
+ This parameter maps to the `--shm-size` option in Docker.
934
626
  """
935
627
  ...
936
628
 
937
629
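A minimal sketch of `@resources` as specified above; the sizes are illustrative and only take effect when a compute layer such as `--with batch` or `--with kubernetes` is selected at run time:
```
from metaflow import FlowSpec, resources, step

class ResourceFlow(FlowSpec):

    @resources(cpu=2, memory=8192)
    @step
    def start(self):
        # Placeholder workload standing in for something that needs 2 CPUs / 8 GB.
        self.total = sum(range(10_000_000))
        self.next(self.end)

    @step
    def end(self):
        print(self.total)

if __name__ == '__main__':
    ResourceFlow()
```
Run locally with `python resource_flow.py run`, or e.g. `python resource_flow.py run --with batch` to apply the same requirements on AWS Batch.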
  @typing.overload
938
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
630
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
939
631
  """
940
- Specifies the PyPI packages for all steps of the flow.
632
+ Specifies that the step will succeed under all circumstances.
633
+
634
+ The decorator will create an optional artifact, specified by `var`, which
635
+ contains the exception raised. You can use it to detect the presence
636
+ of errors, indicating that all happy-path artifacts produced by the step
637
+ are missing.
941
638
 
942
- Use `@pypi_base` to set common packages required by all
943
- steps and use `@pypi` to specify step-specific overrides.
944
639
 
945
640
  Parameters
946
641
  ----------
947
- packages : Dict[str, str], default: {}
948
- Packages to use for this flow. The key is the name of the package
949
- and the value is the version to use.
950
- python : str, optional, default: None
951
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
952
- that the version used will correspond to the version of the Python interpreter used to start the run.
642
+ var : str, optional, default None
643
+ Name of the artifact in which to store the caught exception.
644
+ If not specified, the exception is not stored.
645
+ print_exception : bool, default True
646
+ Determines whether or not the exception is printed to
647
+ stdout when caught.
953
648
  """
954
649
  ...
955
650
 
956
651
  @typing.overload
957
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
652
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
958
653
  ...
959
654
 
960
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
655
+ @typing.overload
656
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
657
+ ...
658
+
659
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
961
660
  """
962
- Specifies the PyPI packages for all steps of the flow.
661
+ Specifies that the step will succeed under all circumstances.
662
+
663
+ The decorator will create an optional artifact, specified by `var`, which
664
+ contains the exception raised. You can use it to detect the presence
665
+ of errors, indicating that all happy-path artifacts produced by the step
666
+ are missing.
667
+
668
+
669
+ Parameters
670
+ ----------
671
+ var : str, optional, default None
672
+ Name of the artifact in which to store the caught exception.
673
+ If not specified, the exception is not stored.
674
+ print_exception : bool, default True
675
+ Determines whether or not the exception is printed to
676
+ stdout when caught.
677
+ """
678
+ ...
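A minimal sketch of `@catch` as specified above; the artifact name `err` and the deliberate failure are illustrative assumptions:
```
from metaflow import FlowSpec, catch, step

class CatchFlow(FlowSpec):

    @catch(var='err', print_exception=True)
    @step
    def start(self):
        self.result = 1 / 0  # raises ZeroDivisionError; stored in self.err instead of failing the run
        self.next(self.end)

    @step
    def end(self):
        # Happy-path artifacts (self.result) are missing; self.err holds the caught exception.
        print("caught:", self.err)

if __name__ == '__main__':
    CatchFlow()
```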
679
+
680
+ @typing.overload
681
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
682
+ """
683
+ Specifies a timeout for your step.
684
+
685
+ This decorator is useful if this step may hang indefinitely.
686
+
687
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
688
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
689
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
690
+
691
+ Note that all the values specified in parameters are added together so if you specify
692
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
693
+
694
+
695
+ Parameters
696
+ ----------
697
+ seconds : int, default 0
698
+ Number of seconds to wait prior to timing out.
699
+ minutes : int, default 0
700
+ Number of minutes to wait prior to timing out.
701
+ hours : int, default 0
702
+ Number of hours to wait prior to timing out.
703
+ """
704
+ ...
705
+
706
+ @typing.overload
707
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
708
+ ...
709
+
710
+ @typing.overload
711
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
712
+ ...
713
+
714
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
715
+ """
716
+ Specifies a timeout for your step.
717
+
718
+ This decorator is useful if this step may hang indefinitely.
719
+
720
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
721
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
722
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
723
+
724
+ Note that all the values specified in parameters are added together so if you specify
725
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
726
+
727
+
728
+ Parameters
729
+ ----------
730
+ seconds : int, default 0
731
+ Number of seconds to wait prior to timing out.
732
+ minutes : int, default 0
733
+ Number of minutes to wait prior to timing out.
734
+ hours : int, default 0
735
+ Number of hours to wait prior to timing out.
736
+ """
737
+ ...
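A minimal sketch of `@timeout` combined with `@retry`, as the docstring above suggests; the sleep is an illustrative stand-in for work that might hang:
```
import time

from metaflow import FlowSpec, retry, step, timeout

class TimeoutFlow(FlowSpec):

    @retry(times=1, minutes_between_retries=0)
    @timeout(seconds=30)
    @step
    def start(self):
        time.sleep(1)  # stays well under the 30-second budget; a hang would raise and be retried once
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TimeoutFlow()
```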
738
+
739
+ @typing.overload
740
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
741
+ """
742
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
743
+
744
+
745
+ Parameters
746
+ ----------
747
+ cpu : int, default 1
748
+ Number of CPUs required for this step. If `@resources` is
749
+ also present, the maximum value from all decorators is used.
750
+ gpu : int, default 0
751
+ Number of GPUs required for this step. If `@resources` is
752
+ also present, the maximum value from all decorators is used.
753
+ memory : int, default 4096
754
+ Memory size (in MB) required for this step. If
755
+ `@resources` is also present, the maximum value from all decorators is
756
+ used.
757
+ image : str, optional, default None
758
+ Docker image to use when launching on AWS Batch. If not specified, and
759
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
760
+ not, a default Docker image mapping to the current version of Python is used.
761
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
762
+ AWS Batch Job Queue to submit the job to.
763
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
764
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
765
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
766
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
767
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
768
+ shared_memory : int, optional, default None
769
+ The value for the size (in MiB) of the /dev/shm volume for this step.
770
+ This parameter maps to the `--shm-size` option in Docker.
771
+ max_swap : int, optional, default None
772
+ The total amount of swap memory (in MiB) a container can use for this
773
+ step. This parameter is translated to the `--memory-swap` option in
774
+ Docker where the value is the sum of the container memory plus the
775
+ `max_swap` value.
776
+ swappiness : int, optional, default None
777
+ This allows you to tune memory swappiness behavior for this step.
778
+ A swappiness value of 0 causes swapping not to happen unless absolutely
779
+ necessary. A swappiness value of 100 causes pages to be swapped very
780
+ aggressively. Accepted values are whole numbers between 0 and 100.
781
+ aws_batch_tags: Dict[str, str], optional, default None
782
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
783
+ Set as string key-value pairs.
784
+ use_tmpfs : bool, default False
785
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
786
+ not available on Fargate compute environments
787
+ tmpfs_tempdir : bool, default True
788
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
789
+ tmpfs_size : int, optional, default None
790
+ The value for the size (in MiB) of the tmpfs mount for this step.
791
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
792
+ memory allocated for this step.
793
+ tmpfs_path : str, optional, default None
794
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
795
+ inferentia : int, default 0
796
+ Number of Inferentia chips required for this step.
797
+ trainium : int, default None
798
+ Alias for inferentia. Use only one of the two.
799
+ efa : int, default 0
800
+ Number of elastic fabric adapter network devices to attach to container
801
+ ephemeral_storage : int, default None
802
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
803
+ This is only relevant for Fargate compute environments
804
+ log_driver: str, optional, default None
805
+ The log driver to use for the Amazon ECS container.
806
+ log_options: List[str], optional, default None
807
+ List of strings containing options for the chosen log driver. The configurable values
808
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
809
+ Example: [`awslogs-group:aws/batch/job`]
810
+ """
811
+ ...
812
+
813
+ @typing.overload
814
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
815
+ ...
816
+
817
+ @typing.overload
818
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
819
+ ...
820
+
821
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
822
+ """
823
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
824
+
825
+
826
+ Parameters
827
+ ----------
828
+ cpu : int, default 1
829
+ Number of CPUs required for this step. If `@resources` is
830
+ also present, the maximum value from all decorators is used.
831
+ gpu : int, default 0
832
+ Number of GPUs required for this step. If `@resources` is
833
+ also present, the maximum value from all decorators is used.
834
+ memory : int, default 4096
835
+ Memory size (in MB) required for this step. If
836
+ `@resources` is also present, the maximum value from all decorators is
837
+ used.
838
+ image : str, optional, default None
839
+ Docker image to use when launching on AWS Batch. If not specified, and
840
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
841
+ not, a default Docker image mapping to the current version of Python is used.
842
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
843
+ AWS Batch Job Queue to submit the job to.
844
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
845
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
846
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
847
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
848
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
849
+ shared_memory : int, optional, default None
850
+ The value for the size (in MiB) of the /dev/shm volume for this step.
851
+ This parameter maps to the `--shm-size` option in Docker.
852
+ max_swap : int, optional, default None
853
+ The total amount of swap memory (in MiB) a container can use for this
854
+ step. This parameter is translated to the `--memory-swap` option in
855
+ Docker where the value is the sum of the container memory plus the
856
+ `max_swap` value.
857
+ swappiness : int, optional, default None
858
+ This allows you to tune memory swappiness behavior for this step.
859
+ A swappiness value of 0 causes swapping not to happen unless absolutely
860
+ necessary. A swappiness value of 100 causes pages to be swapped very
861
+ aggressively. Accepted values are whole numbers between 0 and 100.
862
+ aws_batch_tags: Dict[str, str], optional, default None
863
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
864
+ Set as string key-value pairs.
865
+ use_tmpfs : bool, default False
866
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
867
+ not available on Fargate compute environments
868
+ tmpfs_tempdir : bool, default True
869
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
870
+ tmpfs_size : int, optional, default None
871
+ The value for the size (in MiB) of the tmpfs mount for this step.
872
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
873
+ memory allocated for this step.
874
+ tmpfs_path : str, optional, default None
875
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
876
+ inferentia : int, default 0
877
+ Number of Inferentia chips required for this step.
878
+ trainium : int, default None
879
+ Alias for inferentia. Use only one of the two.
880
+ efa : int, default 0
881
+ Number of elastic fabric adapter network devices to attach to container
882
+ ephemeral_storage : int, default None
883
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
884
+ This is only relevant for Fargate compute environments
885
+ log_driver: str, optional, default None
886
+ The log driver to use for the Amazon ECS container.
887
+ log_options: List[str], optional, default None
888
+ List of strings containing options for the chosen log driver. The configurable values
889
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
890
+ Example: [`awslogs-group:aws/batch/job`]
891
+ """
892
+ ...
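A minimal sketch of `@batch` as specified above, assuming AWS credentials and the Metaflow Batch configuration (job queue, IAM roles, container image) are already set up; all values shown are illustrative:
```
from metaflow import FlowSpec, batch, step

class BatchFlow(FlowSpec):

    @batch(cpu=2, memory=8192)
    @step
    def start(self):
        # Runs inside an AWS Batch container; unspecified parameters fall back to the configured defaults.
        self.rows = list(range(1000))
        self.next(self.end)

    @step
    def end(self):
        print(len(self.rows), "rows")

if __name__ == '__main__':
    BatchFlow()
```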
893
+
894
+ @typing.overload
895
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
896
+ """
897
+ Specifies the event(s) that this flow depends on.
898
+
899
+ ```
900
+ @trigger(event='foo')
901
+ ```
902
+ or
903
+ ```
904
+ @trigger(events=['foo', 'bar'])
905
+ ```
906
+
907
+ Additionally, you can specify the parameter mappings
908
+ to map event payload to Metaflow parameters for the flow.
909
+ ```
910
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
911
+ ```
912
+ or
913
+ ```
914
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
915
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
916
+ ```
917
+
918
+ 'parameters' can also be a list of strings and tuples like so:
919
+ ```
920
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
921
+ ```
922
+ This is equivalent to:
923
+ ```
924
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
925
+ ```
926
+
927
+
928
+ Parameters
929
+ ----------
930
+ event : Union[str, Dict[str, Any]], optional, default None
931
+ Event dependency for this flow.
932
+ events : List[Union[str, Dict[str, Any]]], default []
933
+ Events dependency for this flow.
934
+ options : Dict[str, Any], default {}
935
+ Backend-specific configuration for tuning eventing behavior.
936
+ """
937
+ ...
938
+
939
+ @typing.overload
940
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
941
+ ...
942
+
943
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
944
+ """
945
+ Specifies the event(s) that this flow depends on.
946
+
947
+ ```
948
+ @trigger(event='foo')
949
+ ```
950
+ or
951
+ ```
952
+ @trigger(events=['foo', 'bar'])
953
+ ```
954
+
955
+ Additionally, you can specify the parameter mappings
956
+ to map event payload to Metaflow parameters for the flow.
957
+ ```
958
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
959
+ ```
960
+ or
961
+ ```
962
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
963
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
964
+ ```
965
+
966
+ 'parameters' can also be a list of strings and tuples like so:
967
+ ```
968
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
969
+ ```
970
+ This is equivalent to:
971
+ ```
972
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
973
+ ```
963
974
 
964
- Use `@pypi_base` to set common packages required by all
965
- steps and use `@pypi` to specify step-specific overrides.
966
975
 
967
976
  Parameters
968
977
  ----------
969
- packages : Dict[str, str], default: {}
970
- Packages to use for this flow. The key is the name of the package
971
- and the value is the version to use.
972
- python : str, optional, default: None
973
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
974
- that the version used will correspond to the version of the Python interpreter used to start the run.
978
+ event : Union[str, Dict[str, Any]], optional, default None
979
+ Event dependency for this flow.
980
+ events : List[Union[str, Dict[str, Any]]], default []
981
+ Events dependency for this flow.
982
+ options : Dict[str, Any], default {}
983
+ Backend-specific configuration for tuning eventing behavior.
975
984
  """
976
985
  ...
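A minimal sketch of `@trigger` as specified above; the event name `data_ready` and the parameter mapping are illustrative assumptions, and the trigger only takes effect once the flow is deployed to an event-capable scheduler (e.g. Argo Workflows):
```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={'name': 'data_ready', 'parameters': {'date': 'run_date'}})
class TriggeredFlow(FlowSpec):

    # Populated from the 'run_date' field of the triggering event when deployed.
    date = Parameter('date', default='1970-01-01')

    @step
    def start(self):
        print("processing data for", self.date)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    TriggeredFlow()
```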
977
986
 
@@ -1076,57 +1085,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1076
1085
  """
1077
1086
  ...
1078
1087
 
1079
- @typing.overload
1080
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1081
- """
1082
- Specifies the times when the flow should be run when running on a
1083
- production scheduler.
1084
-
1085
-
1086
- Parameters
1087
- ----------
1088
- hourly : bool, default False
1089
- Run the workflow hourly.
1090
- daily : bool, default True
1091
- Run the workflow daily.
1092
- weekly : bool, default False
1093
- Run the workflow weekly.
1094
- cron : str, optional, default None
1095
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1096
- specified by this expression.
1097
- timezone : str, optional, default None
1098
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1099
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1100
- """
1101
- ...
1102
-
1103
- @typing.overload
1104
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1105
- ...
1106
-
1107
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1108
- """
1109
- Specifies the times when the flow should be run when running on a
1110
- production scheduler.
1111
-
1112
-
1113
- Parameters
1114
- ----------
1115
- hourly : bool, default False
1116
- Run the workflow hourly.
1117
- daily : bool, default True
1118
- Run the workflow daily.
1119
- weekly : bool, default False
1120
- Run the workflow weekly.
1121
- cron : str, optional, default None
1122
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1123
- specified by this expression.
1124
- timezone : str, optional, default None
1125
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1126
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1127
- """
1128
- ...
1129
-
1130
1088
  @typing.overload
1131
1089
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1132
1090
  """
@@ -1178,96 +1136,87 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1178
1136
  """
1179
1137
  ...
1180
1138
 
1181
- @typing.overload
1182
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1139
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1183
1140
  """
1184
- Specifies the event(s) that this flow depends on.
1185
-
1186
- ```
1187
- @trigger(event='foo')
1188
- ```
1189
- or
1190
- ```
1191
- @trigger(events=['foo', 'bar'])
1192
- ```
1141
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1142
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
1193
1143
 
1194
- Additionally, you can specify the parameter mappings
1195
- to map event payload to Metaflow parameters for the flow.
1196
- ```
1197
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1198
- ```
1199
- or
1200
- ```
1201
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1202
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1203
- ```
1204
1144
 
1205
- 'parameters' can also be a list of strings and tuples like so:
1206
- ```
1207
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1208
- ```
1209
- This is equivalent to:
1210
- ```
1211
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1212
- ```
1145
+ Parameters
1146
+ ----------
1147
+ timeout : int
1148
+ Time, in seconds before the task times out and fails. (Default: 3600)
1149
+ poke_interval : int
1150
+ Time in seconds that the job should wait in between each try. (Default: 60)
1151
+ mode : str
1152
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1153
+ exponential_backoff : bool
1154
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1155
+ pool : str
1156
+ the slot pool this task should run in,
1157
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1158
+ soft_fail : bool
1159
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1160
+ name : str
1161
+ Name of the sensor on Airflow
1162
+ description : str
1163
+ Description of sensor in the Airflow UI
1164
+ external_dag_id : str
1165
+ The dag_id that contains the task you want to wait for.
1166
+ external_task_ids : List[str]
1167
+ The list of task_ids that you want to wait for.
1168
+ If None (default value) the sensor waits for the DAG. (Default: None)
1169
+ allowed_states : List[str]
1170
+ Iterable of allowed states, (Default: ['success'])
1171
+ failed_states : List[str]
1172
+ Iterable of failed or dis-allowed states. (Default: None)
1173
+ execution_delta : datetime.timedelta
1174
+ time difference with the previous execution to look at,
1175
+ the default is the same logical date as the current task or DAG. (Default: None)
1176
+ check_existence: bool
1177
+ Set to True to check if the external task exists or check if
1178
+ the DAG to wait for exists. (Default: True)
1179
+ """
1180
+ ...
1181
+
1182
+ @typing.overload
1183
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1184
+ """
1185
+ Specifies the PyPI packages for all steps of the flow.
1213
1186
 
1187
+ Use `@pypi_base` to set common packages required by all
1188
+ steps and use `@pypi` to specify step-specific overrides.
1214
1189
 
1215
1190
  Parameters
1216
1191
  ----------
1217
- event : Union[str, Dict[str, Any]], optional, default None
1218
- Event dependency for this flow.
1219
- events : List[Union[str, Dict[str, Any]]], default []
1220
- Events dependency for this flow.
1221
- options : Dict[str, Any], default {}
1222
- Backend-specific configuration for tuning eventing behavior.
1192
+ packages : Dict[str, str], default: {}
1193
+ Packages to use for this flow. The key is the name of the package
1194
+ and the value is the version to use.
1195
+ python : str, optional, default: None
1196
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1197
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1223
1198
  """
1224
1199
  ...
1225
1200
 
1226
1201
  @typing.overload
1227
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1202
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1228
1203
  ...
1229
1204
 
1230
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1205
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1231
1206
  """
1232
- Specifies the event(s) that this flow depends on.
1233
-
1234
- ```
1235
- @trigger(event='foo')
1236
- ```
1237
- or
1238
- ```
1239
- @trigger(events=['foo', 'bar'])
1240
- ```
1241
-
1242
- Additionally, you can specify the parameter mappings
1243
- to map event payload to Metaflow parameters for the flow.
1244
- ```
1245
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1246
- ```
1247
- or
1248
- ```
1249
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1250
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1251
- ```
1252
-
1253
- 'parameters' can also be a list of strings and tuples like so:
1254
- ```
1255
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1256
- ```
1257
- This is equivalent to:
1258
- ```
1259
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1260
- ```
1207
+ Specifies the PyPI packages for all steps of the flow.
1261
1208
 
1209
+ Use `@pypi_base` to set common packages required by all
1210
+ steps and use `@pypi` to specify step-specific overrides.
1262
1211
 
1263
1212
  Parameters
1264
1213
  ----------
1265
- event : Union[str, Dict[str, Any]], optional, default None
1266
- Event dependency for this flow.
1267
- events : List[Union[str, Dict[str, Any]]], default []
1268
- Events dependency for this flow.
1269
- options : Dict[str, Any], default {}
1270
- Backend-specific configuration for tuning eventing behavior.
1214
+ packages : Dict[str, str], default: {}
1215
+ Packages to use for this flow. The key is the name of the package
1216
+ and the value is the version to use.
1217
+ python : str, optional, default: None
1218
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1219
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1271
1220
  """
1272
1221
  ...
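A minimal sketch combining `@pypi_base` with a step-level `@pypi` override, as described above; the package pins and Python version are illustrative assumptions:
```
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(packages={'requests': '2.31.0'}, python='3.10.12')
class PypiFlow(FlowSpec):

    @step
    def start(self):
        import requests  # resolved from the flow-level @pypi_base environment
        self.agent = requests.utils.default_user_agent()
        self.next(self.end)

    @pypi(packages={'pandas': '2.1.4'})
    @step
    def end(self):
        import pandas as pd  # step-specific override augments the base packages
        print(pd.__version__, self.agent)

if __name__ == '__main__':
    PypiFlow()
```
Such a flow is typically run with the pypi environment enabled, e.g. `python pypi_flow.py --environment=pypi run`.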
1273
1222
 
@@ -1314,6 +1263,57 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1314
1263
  """
1315
1264
  ...
1316
1265
 
1266
+ @typing.overload
1267
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1268
+ """
1269
+ Specifies the times when the flow should be run when running on a
1270
+ production scheduler.
1271
+
1272
+
1273
+ Parameters
1274
+ ----------
1275
+ hourly : bool, default False
1276
+ Run the workflow hourly.
1277
+ daily : bool, default True
1278
+ Run the workflow daily.
1279
+ weekly : bool, default False
1280
+ Run the workflow weekly.
1281
+ cron : str, optional, default None
1282
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1283
+ specified by this expression.
1284
+ timezone : str, optional, default None
1285
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1286
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1287
+ """
1288
+ ...
1289
+
1290
+ @typing.overload
1291
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1292
+ ...
1293
+
1294
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1295
+ """
1296
+ Specifies the times when the flow should be run when running on a
1297
+ production scheduler.
1298
+
1299
+
1300
+ Parameters
1301
+ ----------
1302
+ hourly : bool, default False
1303
+ Run the workflow hourly.
1304
+ daily : bool, default True
1305
+ Run the workflow daily.
1306
+ weekly : bool, default False
1307
+ Run the workflow weekly.
1308
+ cron : str, optional, default None
1309
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1310
+ specified by this expression.
1311
+ timezone : str, optional, default None
1312
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1313
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1314
+ """
1315
+ ...
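A minimal sketch of `@schedule` as specified above; the schedule only applies once the flow is deployed to a production scheduler (e.g. via `argo-workflows create`) and has no effect on local runs:
```
from metaflow import FlowSpec, schedule, step

@schedule(daily=True)
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```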
1316
+
1317
1317
  def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1318
1318
  """
1319
1319
  Specifies what flows belong to the same project.