metaflow-stubs 2.18.4__py2.py3-none-any.whl → 2.18.5__py2.py3-none-any.whl

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +580 -580
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +2 -2
  30. metaflow-stubs/plugins/__init__.pyi +12 -12
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +32 -32
  141. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +4 -4
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.5.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.5.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.18.4.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.5.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.18.4.dist-info → metaflow_stubs-2.18.5.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.4 #
- # Generated on 2025-09-12T00:00:14.759393 #
+ # MF version: 2.18.5 #
+ # Generated on 2025-09-16T00:24:37.068070 #
  ######################################################################################################

  from __future__ import annotations
@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
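
The hunk above only reorders re-exported imports; notably, the three dependency-file parsers (requirements_txt_parser, pyproject_toml_parser, conda_environment_yml_parser) remain importable from the top-level metaflow package. For orientation, a minimal sketch (not part of the diff) of how such a parser is typically wired to @pypi_base through a Config; the config name "deps" and the requirements.txt default are illustrative assumptions:

    # Sketch: parse requirements.txt into the packages mapping that
    # @pypi_base accepts. "deps" and the file name are placeholders.
    from metaflow import (Config, FlowSpec, config_expr, pypi_base,
                          requirements_txt_parser, step)

    @pypi_base(packages=config_expr("deps.packages"))
    class ParsedDepsFlow(FlowSpec):
        # requirements_txt_parser turns the file contents into a dict
        # of pinned packages consumable by @pypi_base.
        deps = Config("deps", default="requirements.txt",
                      parser=requirements_txt_parser)

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ParsedDepsFlow()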
@@ -153,53 +153,170 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

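Most of the hunk above is the stub generator re-emitting decorators in a different order; the substantive deltas are the `libraries` and `disabled` parameters now documented on @conda. For orientation, a hedged sketch (not part of the diff) of how the decorators stubbed above compose on a step; the version pins and secret source name are placeholders:

    # Sketch of the decorators stubbed above; @step stays innermost.
    # The version pins and secret source below are placeholders.
    from metaflow import FlowSpec, catch, conda, secrets, step

    class StubbedDecoratorsFlow(FlowSpec):

        @secrets(sources=["demo-secret-source"])   # env vars injected pre-step
        @conda(packages={"pandas": "2.2.2"}, python="3.11.0")
        @catch(var="failure")                      # exception lands in self.failure
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            # @catch guarantees we reach here; inspect the stored exception.
            print("start failed with:", getattr(self, "failure", None))

    if __name__ == "__main__":
        StubbedDecoratorsFlow()
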
@@ -259,27 +376,90 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ aws_batch_tags: Dict[str, str], optional, default None
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
+ Set as string key-value pairs.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


  Parameters
@@ -287,82 +467,125 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
+ gpu : int, default 0
+ Number of GPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
  image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on AWS Batch. If not specified, and
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[Dict[str,str]], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
+ AWS Batch Job Queue to submit the job to.
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ max_swap : int, optional, default None
+ The total amount of swap memory (in MiB) a container can use for this
+ step. This parameter is translated to the `--memory-swap` option in
+ Docker where the value is the sum of the container memory plus the
+ `max_swap` value.
+ swappiness : int, optional, default None
+ This allows you to tune memory swappiness behavior for this step.
+ A swappiness value of 0 causes swapping not to happen unless absolutely
+ necessary. A swappiness value of 100 causes pages to be swapped very
+ aggressively. Accepted values are whole numbers between 0 and 100.
+ aws_batch_tags: Dict[str, str], optional, default None
+ Sets arbitrary AWS tags on the AWS Batch compute environment.
+ Set as string key-value pairs.
  use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
+ not available on Fargate compute environments
  tmpfs_tempdir : bool, default True
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
+ tmpfs_size : int, optional, default None
  The value for the size (in MiB) of the tmpfs mount for this step.
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
  memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ tmpfs_path : str, optional, default None
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+ inferentia : int, default 0
+ Number of Inferentia chips required for this step.
+ trainium : int, default None
+ Alias for inferentia. Use only one of the two.
+ efa : int, default 0
+ Number of elastic fabric adapter network devices to attach to container
+ ephemeral_storage : int, default None
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+ This is only relevant for Fargate compute environments
+ log_driver: str, optional, default None
+ The log driver to use for the Amazon ECS container.
+ log_options: List[str], optional, default None
+ List of strings containing options for the chosen log driver. The configurable values
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
+ Example: [`awslogs-group:aws/batch/job`]
+ """
+ ...
+
+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

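The relocated @batch stub above documents the new `aws_batch_tags` parameter, and the @timeout docstring spells out the composition rule: durations add up, and a timeout surfaces as an ordinary step exception. A hedged sketch (not part of the diff) of these decorators stacked on one step; the resource sizes, retry count, and tag values are illustrative:

    # Sketch: @timeout raises inside the step, so @retry re-runs it;
    # sizes, retry count, and the tag value are placeholders.
    from metaflow import FlowSpec, batch, retry, step, timeout

    class BatchTimeoutFlow(FlowSpec):

        @retry(times=2)                  # a timed-out attempt is retried
        @timeout(hours=1, minutes=30)    # values add up: 90 minutes total
        @batch(cpu=4, memory=16384,
               aws_batch_tags={"team": "ml-platform"})  # placeholder tag
        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        BatchTimeoutFlow()
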
@@ -416,127 +639,92 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on Kubernetes.


  Parameters
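
This hunk is again relocation plus docstring churn: @pypi loses the conda-only `libraries`/`disabled` text, and @environment replaces @timeout at this position (note that `@pyi_base` in the @pypi docstring is an upstream typo for `@pypi_base`). A hedged sketch (not part of the diff) combining the two step decorators; the variable name and version pin are placeholders:

    # Sketch: @environment sets env vars before the step body runs, and
    # @pypi layers step-level pins over @pypi_base; values are placeholders.
    import os

    from metaflow import FlowSpec, environment, pypi, pypi_base, step

    @pypi_base(python="3.11.0")
    class PypiEnvFlow(FlowSpec):

        @environment(vars={"OMP_NUM_THREADS": "1"})
        @pypi(packages={"scikit-learn": "1.5.0"})
        @step
        def start(self):
            print("threads:", os.environ["OMP_NUM_THREADS"])
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        PypiEnvFlow()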
@@ -544,147 +732,82 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
  cpu : int, default 1
  Number of CPUs required for this step. If `@resources` is
  also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
  memory : int, default 4096
  Memory size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
- image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- max_swap : int, optional, default None
- The total amount of swap memory (in MiB) a container can use for this
- step. This parameter is translated to the `--memory-swap` option in
- Docker where the value is the sum of the container memory plus the
- `max_swap` value.
- swappiness : int, optional, default None
- This allows you to tune memory swappiness behavior for this step.
- A swappiness value of 0 causes swapping not to happen unless absolutely
- necessary. A swappiness value of 100 causes pages to be swapped very
- aggressively. Accepted values are whole numbers between 0 and 100.
- aws_batch_tags: Dict[str, str], optional, default None
- Sets arbitrary AWS tags on the AWS Batch compute environment.
- Set as string key-value pairs.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
- not available on Fargate compute environments
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default None
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
- inferentia : int, default 0
- Number of Inferentia chips required for this step.
- trainium : int, default None
- Alias for inferentia. Use only one of the two.
- efa : int, default 0
- Number of elastic fabric adapter network devices to attach to container
- ephemeral_storage : int, default None
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
- This is only relevant for Fargate compute environments
- log_driver: str, optional, default None
- The log driver to use for the Amazon ECS container.
- log_options: List[str], optional, default None
- List of strings containing options for the chosen log driver. The configurable values
- depend on the `log driver` chosen. Validation of these options is not supported yet.
- Example: [`awslogs-group:aws/batch/job`]
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- gpu : int, default 0
- Number of GPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
  `@resources` is also present, the maximum value from all decorators is
  used.
  image : str, optional, default None
- Docker image to use when launching on AWS Batch. If not specified, and
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
  not, a default Docker image mapping to the current version of Python is used.
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
- AWS Batch Job Queue to submit the job to.
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
- shared_memory : int, optional, default None
647
- The value for the size (in MiB) of the /dev/shm volume for this step.
648
- This parameter maps to the `--shm-size` option in Docker.
649
- max_swap : int, optional, default None
650
- The total amount of swap memory (in MiB) a container can use for this
651
- step. This parameter is translated to the `--memory-swap` option in
652
- Docker where the value is the sum of the container memory plus the
653
- `max_swap` value.
654
- swappiness : int, optional, default None
655
- This allows you to tune memory swappiness behavior for this step.
656
- A swappiness value of 0 causes swapping not to happen unless absolutely
657
- necessary. A swappiness value of 100 causes pages to be swapped very
658
- aggressively. Accepted values are whole numbers between 0 and 100.
659
- aws_batch_tags: Dict[str, str], optional, default None
660
- Sets arbitrary AWS tags on the AWS Batch compute environment.
661
- Set as string key-value pairs.
747
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
748
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
749
+ image_pull_secrets: List[str], default []
750
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
751
+ Kubernetes image pull secrets to use when pulling container images
752
+ in Kubernetes.
753
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
754
+ Kubernetes service account to use when launching pod in Kubernetes.
755
+ secrets : List[str], optional, default None
756
+ Kubernetes secrets to use when launching pod in Kubernetes. These
757
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
758
+ in Metaflow configuration.
759
+ node_selector: Union[Dict[str,str], str], optional, default None
760
+ Kubernetes node selector(s) to apply to the pod running the task.
761
+ Can be passed in as a comma separated string of values e.g.
762
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
763
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
764
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
765
+ Kubernetes namespace to use when launching pod in Kubernetes.
766
+ gpu : int, optional, default None
767
+ Number of GPUs required for this step. A value of zero implies that
768
+ the scheduled node should not have GPUs.
769
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
770
+ The vendor of the GPUs to be used for this step.
771
+ tolerations : List[Dict[str,str]], default []
772
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
773
+ Kubernetes tolerations to use when launching pod in Kubernetes.
774
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
775
+ Kubernetes labels to use when launching pod in Kubernetes.
776
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
777
+ Kubernetes annotations to use when launching pod in Kubernetes.
662
778
  use_tmpfs : bool, default False
663
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
664
- not available on Fargate compute environments
779
+ This enables an explicit tmpfs mount for this step.
665
780
  tmpfs_tempdir : bool, default True
666
781
  sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
667
- tmpfs_size : int, optional, default None
782
+ tmpfs_size : int, optional, default: None
668
783
  The value for the size (in MiB) of the tmpfs mount for this step.
669
784
  This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
670
785
  memory allocated for this step.
671
- tmpfs_path : str, optional, default None
672
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
673
- inferentia : int, default 0
674
- Number of Inferentia chips required for this step.
675
- trainium : int, default None
676
- Alias for inferentia. Use only one of the two.
677
- efa : int, default 0
678
- Number of elastic fabric adapter network devices to attach to container
679
- ephemeral_storage : int, default None
680
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
681
- This is only relevant for Fargate compute environments
682
- log_driver: str, optional, default None
683
- The log driver to use for the Amazon ECS container.
684
- log_options: List[str], optional, default None
685
- List of strings containing options for the chosen log driver. The configurable values
686
- depend on the `log driver` chosen. Validation of these options is not supported yet.
687
- Example: [`awslogs-group:aws/batch/job`]
786
+ tmpfs_path : str, optional, default /metaflow_temp
787
+ Path to tmpfs mount for this step.
788
+ persistent_volume_claims : Dict[str, str], optional, default None
789
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
790
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
791
+ shared_memory: int, optional
792
+ Shared memory size (in MiB) required for this step
793
+ port: int, optional
794
+ Port number to specify in the Kubernetes job object
795
+ compute_pool : str, optional, default None
796
+ Compute pool to be used for for this step.
797
+ If not specified, any accessible compute pool within the perimeter is used.
798
+ hostname_resolution_timeout: int, default 10 * 60
799
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
800
+ Only applicable when @parallel is used.
801
+ qos: str, default: Burstable
802
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
803
+
804
+ security_context: Dict[str, Any], optional, default None
805
+ Container security context. Applies to the task container. Allows the following keys:
806
+ - privileged: bool, optional, default None
807
+ - allow_privilege_escalation: bool, optional, default None
808
+ - run_as_user: int, optional, default None
809
+ - run_as_group: int, optional, default None
810
+ - run_as_non_root: bool, optional, default None
688
811
  """
689
812
  ...
690
813
 
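For orientation, here is a minimal sketch of how the new `@kubernetes` decorator documented above is typically applied to a step. The flow, step names, and resource values are illustrative assumptions, not part of the stubs:

```python
from metaflow import FlowSpec, kubernetes, step


class HelloK8sFlow(FlowSpec):  # hypothetical flow for illustration

    @kubernetes(cpu=2, memory=8192, disk=20480)  # MB-denominated, like the defaults above
    @step
    def start(self):
        # This step runs in a Kubernetes pod sized by the decorator arguments;
        # if @resources were also present, the maximum of the two would win.
        self.message = "hello from a pod"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)


if __name__ == "__main__":
    HelloK8sFlow()
```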
@@ -768,125 +891,232 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the flow(s) that this flow depends on.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the flow(s) that this flow depends on.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
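As a hedged illustration of the `trigger_on_finish` variants documented above, a downstream flow could be declared as follows. The flow names are placeholders, and the trigger only takes effect once the flow is deployed to a production orchestrator (e.g., Argo Workflows):

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow='FooFlow')  # fire after a successful run of FooFlow
class BarFlow(FlowSpec):  # hypothetical downstream flow

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    BarFlow()
```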
  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.


  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
  """
  ...
 
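A minimal sketch of `@conda_base` as documented above; the package and Python versions below are arbitrary examples, not defaults from the stubs:

```python
from metaflow import FlowSpec, conda_base, step


@conda_base(python='3.10.4', packages={'numpy': '1.26.4'})  # versions illustrative
class CondaFlow(FlowSpec):  # hypothetical flow

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        self.total = int(np.arange(5).sum())
        self.next(self.end)

    @step
    def end(self):
        print(self.total)


if __name__ == "__main__":
    CondaFlow()
```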
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one ensures that the `start` step
+ starts only after all sensors finish.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...
 
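To make the parameter list above concrete, here is a hypothetical sketch of the sensor attached to a flow. The keyword values simply mirror the documented defaults; the bucket URL and sensor name are invented, and the decorator only has an effect when the flow is compiled with `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name='wait_for_input_file',                   # shown in the Airflow UI
    description='Block start until the input lands in S3',
    bucket_key='s3://my-bucket/daily/input.csv',  # placeholder full s3:// URL
    bucket_name=None,                             # None because bucket_key is a full URL
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorGatedFlow(FlowSpec):  # hypothetical flow

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass
```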
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+   - if `production` is True: `prod.<branch>`
+   - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+   - if `production` is True: `prod`
+   - if `production` is False: `user.<username>`
  """
  ...
 
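A short, illustrative use of `@project`: run by user `bob` without `--production`, the effective branch would be `user.bob` per the rules above. The project name is a placeholder:

```python
from metaflow import FlowSpec, project, step


@project(name='my_project')  # flows sharing this name share one namespace
class FirstFlow(FlowSpec):  # hypothetical flow

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    FirstFlow()
```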
@@ -983,57 +1213,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1169,182 +1348,3 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully
-
- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
- """
- ...
-
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
-   - if `production` is True: `prod.<branch>`
-   - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
-   - if `production` is True: `prod`
-   - if `production` is False: `user.<username>`
- """
- ...
-