metaflow-stubs 2.18.10__py2.py3-none-any.whl → 2.18.11__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in their public registry.

Potentially problematic release: this version of metaflow-stubs has been flagged as possibly problematic.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +608 -608
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +23 -23
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +5 -5
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +10 -10
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +6 -4
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +3 -3
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +4 -4
  143. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  144. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  160. metaflow-stubs/user_decorators/mutable_step.pyi +5 -5
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.18.10.dist-info → metaflow_stubs-2.18.11.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.18.11.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.18.10.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.18.10.dist-info → metaflow_stubs-2.18.11.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.18.10.dist-info → metaflow_stubs-2.18.11.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.18.10 #
-# Generated on 2025-10-02T16:05:06.703542 #
+# MF version: 2.18.11 #
+# Generated on 2025-10-07T00:51:30.224324 #
 ######################################################################################################

 from __future__ import annotations
@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
 from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
 from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
 from . import events as events
-from . import metaflow_git as metaflow_git
 from . import tuple_util as tuple_util
+from . import metaflow_git as metaflow_git
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
 from . import includefile as includefile
 from .includefile import IncludeFile as IncludeFile
+from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
 from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
-from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
 from .plugins.parsers import yaml_parser as yaml_parser
 from . import cards as cards
 from . import client as client
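
Nothing in these first two hunks changes the public API: the banner records the new version, and a handful of re-exports swap places (stub generation is order-sensitive). As a sketch of how one of the moved re-exports is typically used, assuming Metaflow's documented `Config` API and a hypothetical `deps.yml` file (this example is not part of the diff):

```
from metaflow import Config, FlowSpec, conda_environment_yml_parser, step

class CondaConfigFlow(FlowSpec):
    # Parse a conda environment.yml file into a flow-level config value.
    # "deps.yml" is a hypothetical default path.
    environment = Config("environment", default="deps.yml",
                         parser=conda_environment_yml_parser)

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaConfigFlow()
```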
@@ -154,57 +154,21 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
     """
     ...

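This hunk and the ones below are the stub generator reordering blocks: `@retry` disappears here and reappears verbatim further down, with `@parallel` taking its slot. The paired overloads, one taking the decorated function and one taking only keyword arguments, are what let each decorator be used with or without parentheses. A minimal sketch of `@retry` as the relocated docstring describes it (hypothetical flow, not part of the diff):

```
from metaflow import FlowSpec, catch, retry, step

class RetryDemoFlow(FlowSpec):

    @catch(var="fetch_error")                    # no-op task once retries run out
    @retry(times=3, minutes_between_retries=2)   # keyword-only overload
    @step
    def start(self):
        self.fetch_error = None
        self.next(self.end)

    @retry                                       # bare overload, defaults apply
    @step
    def end(self):
        pass

if __name__ == "__main__":
    RetryDemoFlow()
```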
@@ -260,35 +224,41 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.


     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    sources : List[Union[str, Dict[str, Any]]], default: []
+        List of secret specs, defining how the secrets are to be retrieved
+    role : str, optional, default: None
+        Role to use for fetching secrets
     """
     ...

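Functionally, `@secrets` resolves each entry in `sources` before the step body runs and injects the result as environment variables. A minimal sketch based on the docstring above; the secret id and the injected key are hypothetical and depend on the configured secrets backend:

```
import os

from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):

    @secrets(sources=["metaflow-example-password"])  # hypothetical secret spec
    @step
    def start(self):
        # Each key stored in the secret arrives as an environment variable.
        self.db_user = os.environ.get("username")    # key defined by the secret
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```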
@@ -382,206 +352,155 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     ...

 @typing.overload
-def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.


     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
+    times : int, default 3
+        Number of times to retry this task.
+    minutes_between_retries : int, default 2
+        Number of minutes between retries.
     """
     ...

 @typing.overload
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
-        Memory size (in MB) required for this step.
+        Memory size (in MB) required for this step. If
+        `@resources` is also present, the maximum value from all decorators is
+        used.
+    image : str, optional, default None
+        Docker image to use when launching on AWS Batch. If not specified, and
+        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
+        not, a default Docker image mapping to the current version of Python is used.
+    queue : str, default METAFLOW_BATCH_JOB_QUEUE
+        AWS Batch Job Queue to submit the job to.
+    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
+        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
+    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
+        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
+        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
+    max_swap : int, optional, default None
+        The total amount of swap memory (in MiB) a container can use for this
+        step. This parameter is translated to the `--memory-swap` option in
+        Docker where the value is the sum of the container memory plus the
+        `max_swap` value.
+    swappiness : int, optional, default None
+        This allows you to tune memory swappiness behavior for this step.
+        A swappiness value of 0 causes swapping not to happen unless absolutely
+        necessary. A swappiness value of 100 causes pages to be swapped very
+        aggressively. Accepted values are whole numbers between 0 and 100.
+    aws_batch_tags: Dict[str, str], optional, default None
+        Sets arbitrary AWS tags on the AWS Batch compute environment.
+        Set as string key-value pairs.
+    use_tmpfs : bool, default False
+        This enables an explicit tmpfs mount for this step. Note that tmpfs is
+        not available on Fargate compute environments
+    tmpfs_tempdir : bool, default True
+        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+    tmpfs_size : int, optional, default None
+        The value for the size (in MiB) of the tmpfs mount for this step.
+        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+        memory allocated for this step.
+    tmpfs_path : str, optional, default None
+        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
+    inferentia : int, default 0
+        Number of Inferentia chips required for this step.
+    trainium : int, default None
+        Alias for inferentia. Use only one of the two.
+    efa : int, default 0
+        Number of elastic fabric adapter network devices to attach to container
+    ephemeral_storage : int, default None
+        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
+        This is only relevant for Fargate compute environments
+    log_driver: str, optional, default None
+        The log driver to use for the Amazon ECS container.
+    log_options: List[str], optional, default None
+        List of strings containing options for the chosen log driver. The configurable values
+        depend on the `log driver` chosen. Validation of these options is not supported yet.
+        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies the resources needed when executing this step.
-
-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
-
-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).


     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step.
-    gpu : int, optional, default None
-        Number of GPUs required for this step.
-    disk : int, optional, default None
-        Disk size (in MB) required for this step. Only applies on Kubernetes.
-    memory : int, default 4096
-        Memory size (in MB) required for this step.
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    """
-    ...
-
-@typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the Conda environment for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
-    """
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
-    """
-    Specifies the Conda environment for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
-    """
-    ...
-
-@typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
-
-
-    Parameters
-    ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
+        Number of CPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
+    gpu : int, default 0
+        Number of GPUs required for this step. If `@resources` is
+        also present, the maximum value from all decorators is used.
     memory : int, default 4096
         Memory size (in MB) required for this step. If
         `@resources` is also present, the maximum value from all decorators is
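The hunk above swaps whole stub blocks around (`@card` and `@resources` move out, `@retry` and `@batch` move in) without touching any signature. The relocated `@resources` docstring describes declaring requirements once and picking the compute layer at run time; a minimal sketch of that pattern (hypothetical flow, not part of the diff):

```
from metaflow import FlowSpec, resources, step

class ResourceDemoFlow(FlowSpec):

    @resources(cpu=2, memory=8192)  # memory in MB, per the docstring
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResourceDemoFlow()
```

Running `python resource_demo.py run --with batch` (or `--with kubernetes`) then applies these requirements on the chosen compute layer, exactly as the docstring describes.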
@@ -643,208 +562,130 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies environment variables to be set prior to the execution of a step.


     Parameters
     ----------
-    cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
-    memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
-    shared_memory : int, optional, default None
-        The value for the size (in MiB) of the /dev/shm volume for this step.
-        This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    aws_batch_tags: Dict[str, str], optional, default None
-        Sets arbitrary AWS tags on the AWS Batch compute environment.
-        Set as string key-value pairs.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
-    """
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+    Specifies environment variables to be set prior to the execution of a step.
+
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

 @typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.


     Parameters
     ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
-    role : str, optional, default: None
-        Role to use for fetching secrets
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, optional, default None
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the PyPI packages for the step.
+    Specifies a timeout for your step.

-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
-    """
-    Specifies the PyPI packages for the step.
-
-    Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
-
-
-    Parameters
-    ----------
-    packages : Dict[str, str], default: {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    python : str, optional, default: None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    """
-    ...
-
-@typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.


     Parameters
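The tail of this hunk moves `@timeout` into place; its docstring spells out that `seconds`, `minutes`, and `hours` are summed, and that a timeout surfaces as an ordinary step exception, so `@retry` and `@catch` see it. A minimal sketch of that interplay (hypothetical flow, not part of the diff):

```
from metaflow import FlowSpec, catch, step, timeout

class TimeoutDemoFlow(FlowSpec):

    @catch(var="timed_out")          # a timeout is thrown as a step exception
    @timeout(hours=1, seconds=30)    # durations add up: 1 hour and 30 seconds
    @step
    def start(self):
        self.timed_out = None
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutDemoFlow()
```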
@@ -892,181 +733,161 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
892
733
  ...
893
734
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```
+ Specifies the Conda environment for the step.

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the Conda environment for the step.

- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
- ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
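A minimal sketch of step-level `@conda` usage per the docstring above; the package pins and Python version are assumptions for illustration only:

```
from metaflow import FlowSpec, conda, step

class CondaStepFlow(FlowSpec):

    @conda(packages={'scikit-learn': '1.4.0'}, python='3.10.4')
    @step
    def start(self):
        import sklearn  # resolved inside this step's Conda environment
        print(sklearn.__version__)
        self.next(self.end)

    @conda(disabled=True)  # opts this step out of Conda entirely
    @step
    def end(self):
        pass

if __name__ == '__main__':
    CondaStepFlow()
```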
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
  options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies what flows belong to the same project.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
-
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

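A minimal sketch of `@card` per the docstring above, including two cards on one step addressed by `id`; `Markdown` comes from `metaflow.cards`, and the card contents are illustrative:

```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):

    @card                                        # default card, rendered automatically
    @card(type='blank', id='notes', timeout=60)  # second card, addressed by id
    @step
    def start(self):
        current.card['notes'].append(Markdown('# Run notes'))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    CardDemoFlow()
```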
  @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone in which the schedule runs (default: None). Currently supported only for Argo Workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the times when the flow should be run when running on a
- production scheduler.
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone in which the schedule runs (default: None). Currently supported only for Argo Workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

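A minimal sketch combining `@pypi_base` and `@pypi` as described above; the package names and versions are assumptions:

```
from metaflow import FlowSpec, pypi, pypi_base, step

@pypi_base(python='3.10.4', packages={'requests': '2.31.0'})  # baseline for every step
class PypiDemoFlow(FlowSpec):

    @pypi(packages={'pandas': '2.2.0'})  # augments the base packages for this step only
    @step
    def start(self):
        import pandas as pd
        self.df = pd.DataFrame({'x': [1, 2, 3]})
        self.next(self.end)

    @step
    def end(self):
        print(self.df)

if __name__ == '__main__':
    PypiDemoFlow()
```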
@@ -1113,201 +934,294 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
+ Specifies the flow(s) that this flow depends on.

+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When it is specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies the flow(s) that this flow depends on.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
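A minimal sketch of `@trigger_on_finish`; the trigger only fires once the flow is deployed to a production orchestrator such as Argo Workflows, and the upstream flow names here are illustrative:

```
from metaflow import FlowSpec, current, step, trigger_on_finish

@trigger_on_finish(flows=['FooFlow', 'BarFlow'])  # runs after both upstream flows succeed
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        # current.trigger exposes the upstream run(s) that triggered this one
        print('triggered by:', [run.pathspec for run in current.trigger.runs])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    DownstreamFlow()
```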
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When it is specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
  """
  ...

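A minimal sketch of `@airflow_s3_key_sensor`, which only takes effect when the flow is compiled with `airflow create`; the sensor name and bucket path are illustrative, and arguments left out here are assumed to fall back to the defaults listed above:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    name='wait_for_daily_dump',
    description='Block the start step until the daily dump lands in S3',
    bucket_key='s3://my-bucket/daily/dump.parquet',  # full s3:// URL, so bucket_name stays unset
    poke_interval=300,
    mode='reschedule',  # frees the worker slot between pokes
)
class SensorGatedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    SensorGatedFlow()
```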
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.

- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`


  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
  """
  ...

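A minimal sketch of `@trigger` with a parameter mapping, per the docstring above; the event name `data_updated` and its payload field are hypothetical, and the trigger fires only on an events-enabled production deployment:

```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={'name': 'data_updated', 'parameters': {'table': 'table_name'}})
class EventDrivenFlow(FlowSpec):

    table = Parameter('table', default='raw_events')  # populated from the event payload

    @step
    def start(self):
        print('processing table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    EventDrivenFlow()
```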
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo Workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo Workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
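A minimal sketch of `@schedule`; it only takes effect once the flow is deployed to a production scheduler, and the cron expression and timezone shown are arbitrary (the exact cron syntax expected depends on the backing scheduler):

```
from metaflow import FlowSpec, schedule, step

@schedule(cron='0 6 * * *', timezone='Europe/Berlin')  # 06:00 every day, Berlin time
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    NightlyFlow()
```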
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1349,3 +1263,89 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...

+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
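A minimal sketch of `@project`; the project name is illustrative, and the branch or production variant of the namespace is then chosen at run or deploy time with the top-level `--branch` and `--production` options documented above:

```
from metaflow import FlowSpec, project, step

@project(name='fraud_detection')  # all flows with this name share one namespace
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == '__main__':
    ScoringFlow()
```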
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
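A minimal sketch of `@conda_base` combined with a step-level `@conda` addition, per the docstrings above; the package pins and Python version are assumptions:

```
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python='3.10.4', packages={'pandas': '2.2.0'})  # shared by every step
class CondaBaseFlow(FlowSpec):

    @conda(packages={'pyarrow': '15.0.0'})  # step-specific addition on top of the base
    @step
    def start(self):
        import pyarrow  # available here thanks to the step-level override
        self.next(self.end)

    @step
    def end(self):
        import pandas  # only the base packages are available here
        print(pandas.__version__)

if __name__ == '__main__':
    CondaBaseFlow()
```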