metaflow-stubs 2.17.4__py2.py3-none-any.whl → 2.18.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +539 -539
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +18 -18
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +7 -7
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +2 -2
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +13 -13
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -2
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +2 -2
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +3 -3
  141. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  142. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +3 -3
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +2 -2
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +3 -3
  154. metaflow-stubs/user_configs/config_parameters.pyi +9 -7
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +4 -4
  158. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.18.0.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.0.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.17.4.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.18.0.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.17.4.dist-info → metaflow_stubs-2.18.0.dist-info}/top_level.txt +0 -0
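
Most of the 166 files differ only in their regenerated stub headers (version string and timestamp); the substantive churn is in metaflow-stubs/__init__.pyi, whose diff follows, where the step- and flow-level decorator stubs are regenerated in a different order. For orientation, here is a hedged sketch (not part of the package; the flow, step, and artifact names are invented) of the kind of user code these stubs type-check:

```python
# Hypothetical flow illustrating the decorator stubs diffed below.
# @step, @retry and @card are public Metaflow API re-exported by __init__.pyi.
from metaflow import FlowSpec, card, retry, step

class HelloFlow(FlowSpec):

    @card                    # typed by the card() overloads in the stub
    @retry(times=3)          # typed by the retry() overloads in the stub
    @step
    def start(self):
        self.message = "hello"
        self.next(self.end)

    @step
    def end(self):
        print(self.message)

if __name__ == "__main__":
    HelloFlow()
```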
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.17.4 #
- # Generated on 2025-08-25T17:38:02.332471 #
+ # MF version: 2.18.0 #
+ # Generated on 2025-08-27T01:57:08.613943 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -39,9 +39,9 @@ from .user_decorators.user_step_decorator import UserStepDecorator as UserStepDe
  from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
+ from . import events as events
  from . import metaflow_git as metaflow_git
  from . import tuple_util as tuple_util
- from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
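
The hunk above only moves the `events` re-export; the public import surface is unchanged. A minimal, hedged sketch of code that relies on these re-exports (the S3 path is a placeholder, not a real bucket):

```python
from metaflow import FlowSpec, S3, step

class S3Flow(FlowSpec):

    @step
    def start(self):
        # S3 is the datatool re-exported from plugins/datatools/s3/s3.pyi.
        with S3(s3root="s3://example-bucket/prefix") as s3:  # placeholder bucket
            obj = s3.get("data.txt")
            self.text = obj.text
        self.next(self.end)

    @step
    def end(self):
        print(len(self.text))

if __name__ == "__main__":
    S3Flow()
```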
@@ -153,92 +153,70 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
  """
  ...
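
As the @card docstring above notes, several cards with different parameters can be attached to one step. A hedged usage sketch (the card id and content are arbitrary; 'blank' is a built-in card type):

```python
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class ReportFlow(FlowSpec):

    # Two cards on one step, distinguished by id, as the docstring allows.
    @card(type="blank", id="summary", timeout=60)
    @card
    @step
    def start(self):
        self.rows = 1000
        # Append a component to the card identified by id="summary".
        current.card["summary"].append(Markdown(f"# Processed {self.rows} rows"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportFlow()
```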
@@ -293,85 +271,6 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -431,25 +330,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
  @typing.overload
  def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -484,54 +364,281 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
- times : int, default 3
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
+ """
+ ...
+
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[Dict[str,str]], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
  Number of times to retry this task.
  minutes_between_retries : int, default 2
  Number of minutes between retries.
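
The block added above relocates the @resources, @conda, @kubernetes and @retry stubs without changing their signatures. A hedged sketch combining them as the docstrings describe (package pins, resource sizes and step names are illustrative; @conda steps additionally require running with --environment=conda):

```python
from metaflow import FlowSpec, conda, kubernetes, resources, retry, step

class TrainFlow(FlowSpec):

    @retry(times=2, minutes_between_retries=1)
    @resources(cpu=2, memory=8192)          # honored by `run --with batch` or `--with kubernetes`
    @conda(packages={"pandas": "2.2.2"})    # version pin is illustrative
    @step
    def start(self):
        import pandas as pd
        self.n_rows = len(pd.DataFrame({"x": range(10)}))
        self.next(self.train)

    # Direct @kubernetes use; the values mirror the documented defaults.
    @kubernetes(cpu=1, memory=4096, disk=10240)
    @step
    def train(self):
        self.trained = True
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TrainFlow()
```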
@@ -688,199 +795,133 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
688
795
  ...
689
796
 
690
797
  @typing.overload
691
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
798
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
692
799
  """
693
- Creates a human-readable report, a Metaflow Card, after this step completes.
694
-
695
- Note that you may add multiple `@card` decorators in a step with different parameters.
800
+ Specifies secrets to be retrieved and injected as environment variables prior to
801
+ the execution of a step.
696
802
 
697
803
 
698
804
  Parameters
699
805
  ----------
700
- type : str, default 'default'
701
- Card type.
702
- id : str, optional, default None
703
- If multiple cards are present, use this id to identify this card.
704
- options : Dict[str, Any], default {}
705
- Options passed to the card. The contents depend on the card type.
706
- timeout : int, default 45
707
- Interrupt reporting if it takes more than this many seconds.
806
+ sources : List[Union[str, Dict[str, Any]]], default: []
807
+ List of secret specs, defining how the secrets are to be retrieved
808
+ role : str, optional, default: None
809
+ Role to use for fetching secrets
708
810
  """
709
811
  ...
710
812
 
711
813
  @typing.overload
712
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
814
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
713
815
  ...
714
816
 
715
817
  @typing.overload
716
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
818
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
717
819
  ...
718
820
 
719
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
821
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
720
822
  """
721
- Creates a human-readable report, a Metaflow Card, after this step completes.
722
-
723
- Note that you may add multiple `@card` decorators in a step with different parameters.
823
+ Specifies secrets to be retrieved and injected as environment variables prior to
824
+ the execution of a step.
724
825
 
725
826
 
726
827
  Parameters
727
828
  ----------
728
- type : str, default 'default'
729
- Card type.
730
- id : str, optional, default None
731
- If multiple cards are present, use this id to identify this card.
732
- options : Dict[str, Any], default {}
733
- Options passed to the card. The contents depend on the card type.
734
- timeout : int, default 45
735
- Interrupt reporting if it takes more than this many seconds.
829
+ sources : List[Union[str, Dict[str, Any]]], default: []
830
+ List of secret specs, defining how the secrets are to be retrieved
831
+ role : str, optional, default: None
832
+ Role to use for fetching secrets
736
833
  """
737
834
  ...
738
835
 
739
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
836
+ @typing.overload
837
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
740
838
  """
741
- Specifies that this step should execute on Kubernetes.
839
+ Specifies that the step will success under all circumstances.
840
+
841
+ The decorator will create an optional artifact, specified by `var`, which
842
+ contains the exception raised. You can use it to detect the presence
843
+ of errors, indicating that all happy-path artifacts produced by the step
844
+ are missing.
742
845
 
743
846
 
744
847
  Parameters
745
848
  ----------
746
- cpu : int, default 1
747
- Number of CPUs required for this step. If `@resources` is
748
- also present, the maximum value from all decorators is used.
749
- memory : int, default 4096
750
- Memory size (in MB) required for this step. If
751
- `@resources` is also present, the maximum value from all decorators is
752
- used.
753
- disk : int, default 10240
754
- Disk size (in MB) required for this step. If
755
- `@resources` is also present, the maximum value from all decorators is
756
- used.
757
- image : str, optional, default None
758
- Docker image to use when launching on Kubernetes. If not specified, and
759
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
760
- not, a default Docker image mapping to the current version of Python is used.
761
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
762
- If given, the imagePullPolicy to be applied to the Docker image of the step.
763
- image_pull_secrets: List[str], default []
764
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
765
- Kubernetes image pull secrets to use when pulling container images
766
- in Kubernetes.
767
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
768
- Kubernetes service account to use when launching pod in Kubernetes.
769
- secrets : List[str], optional, default None
770
- Kubernetes secrets to use when launching pod in Kubernetes. These
771
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
772
- in Metaflow configuration.
773
- node_selector: Union[Dict[str,str], str], optional, default None
774
- Kubernetes node selector(s) to apply to the pod running the task.
775
- Can be passed in as a comma separated string of values e.g.
776
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
777
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
778
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
779
- Kubernetes namespace to use when launching pod in Kubernetes.
780
- gpu : int, optional, default None
781
- Number of GPUs required for this step. A value of zero implies that
782
- the scheduled node should not have GPUs.
783
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
784
- The vendor of the GPUs to be used for this step.
785
- tolerations : List[Dict[str,str]], default []
786
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
787
- Kubernetes tolerations to use when launching pod in Kubernetes.
788
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
789
- Kubernetes labels to use when launching pod in Kubernetes.
790
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
791
- Kubernetes annotations to use when launching pod in Kubernetes.
792
- use_tmpfs : bool, default False
793
- This enables an explicit tmpfs mount for this step.
794
- tmpfs_tempdir : bool, default True
795
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
796
- tmpfs_size : int, optional, default: None
797
- The value for the size (in MiB) of the tmpfs mount for this step.
798
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
799
- memory allocated for this step.
800
- tmpfs_path : str, optional, default /metaflow_temp
801
- Path to tmpfs mount for this step.
802
- persistent_volume_claims : Dict[str, str], optional, default None
803
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
804
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
805
- shared_memory: int, optional
806
- Shared memory size (in MiB) required for this step
807
- port: int, optional
808
- Port number to specify in the Kubernetes job object
809
- compute_pool : str, optional, default None
810
- Compute pool to be used for for this step.
811
- If not specified, any accessible compute pool within the perimeter is used.
812
- hostname_resolution_timeout: int, default 10 * 60
813
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
814
- Only applicable when @parallel is used.
815
- qos: str, default: Burstable
816
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
817
-
818
- security_context: Dict[str, Any], optional, default None
819
- Container security context. Applies to the task container. Allows the following keys:
820
- - privileged: bool, optional, default None
821
- - allow_privilege_escalation: bool, optional, default None
822
- - run_as_user: int, optional, default None
823
- - run_as_group: int, optional, default None
824
- - run_as_non_root: bool, optional, default None
849
+ var : str, optional, default None
850
+ Name of the artifact in which to store the caught exception.
851
+ If not specified, the exception is not stored.
852
+ print_exception : bool, default True
853
+ Determines whether or not the exception is printed to
854
+ stdout when caught.
825
855
  """
826
856
  ...
827
857
 
828
858
  @typing.overload
829
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
859
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
860
+ ...
861
+
862
+ @typing.overload
863
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
864
+ ...
865
+
866
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
830
867
  """
831
- Specifies the Conda environment for the step.
868
+ Specifies that the step will success under all circumstances.
832
869
 
833
- Information in this decorator will augment any
834
- attributes set in the `@conda_base` flow-level decorator. Hence,
835
- you can use `@conda_base` to set packages required by all
836
- steps and use `@conda` to specify step-specific overrides.
870
+ The decorator will create an optional artifact, specified by `var`, which
871
+ contains the exception raised. You can use it to detect the presence
872
+ of errors, indicating that all happy-path artifacts produced by the step
873
+ are missing.
837
874
 
838
875
 
839
876
  Parameters
840
877
  ----------
841
- packages : Dict[str, str], default {}
842
- Packages to use for this step. The key is the name of the package
843
- and the value is the version to use.
844
- libraries : Dict[str, str], default {}
845
- Supported for backward compatibility. When used with packages, packages will take precedence.
846
- python : str, optional, default None
847
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
848
- that the version used will correspond to the version of the Python interpreter used to start the run.
849
- disabled : bool, default False
850
- If set to True, disables @conda.
878
+ var : str, optional, default None
879
+ Name of the artifact in which to store the caught exception.
880
+ If not specified, the exception is not stored.
881
+ print_exception : bool, default True
882
+ Determines whether or not the exception is printed to
883
+ stdout when caught.
851
884
  """
852
885
  ...
853
886
 
854
887
  @typing.overload
855
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
888
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
889
+ """
890
+ Specifies the PyPI packages for all steps of the flow.
891
+
892
+ Use `@pypi_base` to set common packages required by all
893
+ steps and use `@pypi` to specify step-specific overrides.
894
+
895
+ Parameters
896
+ ----------
897
+ packages : Dict[str, str], default: {}
898
+ Packages to use for this flow. The key is the name of the package
899
+ and the value is the version to use.
900
+ python : str, optional, default: None
901
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
902
+ that the version used will correspond to the version of the Python interpreter used to start the run.
903
+ """
856
904
  ...
857
905
 
858
906
  @typing.overload
859
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
907
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
860
908
  ...
861
909
 
862
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
910
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
863
911
  """
864
- Specifies the Conda environment for the step.
865
-
866
- Information in this decorator will augment any
867
- attributes set in the `@conda_base` flow-level decorator. Hence,
868
- you can use `@conda_base` to set packages required by all
869
- steps and use `@conda` to specify step-specific overrides.
912
+ Specifies the PyPI packages for all steps of the flow.
870
913
 
914
+ Use `@pypi_base` to set common packages required by all
915
+ steps and use `@pypi` to specify step-specific overrides.
871
916
 
872
917
  Parameters
873
918
  ----------
874
- packages : Dict[str, str], default {}
875
- Packages to use for this step. The key is the name of the package
919
+ packages : Dict[str, str], default: {}
920
+ Packages to use for this flow. The key is the name of the package
876
921
  and the value is the version to use.
877
- libraries : Dict[str, str], default {}
878
- Supported for backward compatibility. When used with packages, packages will take precedence.
879
- python : str, optional, default None
922
+ python : str, optional, default: None
880
923
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
881
924
  that the version used will correspond to the version of the Python interpreter used to start the run.
882
- disabled : bool, default False
883
- If set to True, disables @conda.
884
925
  """
885
926
  ...
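A minimal sketch of how the `@pypi_base` stub documented above might be used; the flow name and the package/Python pins are placeholder assumptions.

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={'pandas': '2.1.4'},  # placeholder pin, not a recommendation
           python='3.11.5')               # placeholder interpreter version
class PypiBaseDemoFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        import pandas as pd  # resolved from the flow-level PyPI environment
        self.rows = len(pd.DataFrame({'a': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.rows)


if __name__ == '__main__':
    PypiBaseDemoFlow()
```

Such a flow is typically launched with the `--environment=pypi` top-level option so that Metaflow resolves the environment, and step-level `@pypi` decorators layer overrides on top, as the docstring notes.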
886
927
 
@@ -927,196 +968,131 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
927
968
  """
928
969
  ...
929
970
 
930
- @typing.overload
931
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
971
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
932
972
  """
933
- Specifies the PyPI packages for all steps of the flow.
934
-
935
- Use `@pypi_base` to set common packages required by all
936
- steps and use `@pypi` to specify step-specific overrides.
973
+ Specifies what flows belong to the same project.
937
974
 
938
- Parameters
939
- ----------
940
- packages : Dict[str, str], default: {}
941
- Packages to use for this flow. The key is the name of the package
942
- and the value is the version to use.
943
- python : str, optional, default: None
944
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
945
- that the version used will correspond to the version of the Python interpreter used to start the run.
946
- """
947
- ...
948
-
949
- @typing.overload
950
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
951
- ...
952
-
953
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
954
- """
955
- Specifies the PyPI packages for all steps of the flow.
975
+ A project-specific namespace is created for all flows that
976
+ use the same `@project(name)`.
956
977
 
957
- Use `@pypi_base` to set common packages required by all
958
- steps and use `@pypi` to specify step-specific overrides.
959
978
 
960
979
  Parameters
961
980
  ----------
962
- packages : Dict[str, str], default: {}
963
- Packages to use for this flow. The key is the name of the package
964
- and the value is the version to use.
965
- python : str, optional, default: None
966
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
967
- that the version used will correspond to the version of the Python interpreter used to start the run.
968
- """
969
- ...
970
-
971
- @typing.overload
972
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
973
- """
974
- Specifies the flow(s) that this flow depends on.
975
-
976
- ```
977
- @trigger_on_finish(flow='FooFlow')
978
- ```
979
- or
980
- ```
981
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
982
- ```
983
- This decorator respects the @project decorator and triggers the flow
984
- when upstream runs within the same namespace complete successfully
985
-
986
- Additionally, you can specify project aware upstream flow dependencies
987
- by specifying the fully qualified project_flow_name.
988
- ```
989
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
990
- ```
991
- or
992
- ```
993
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
994
- ```
995
-
996
- You can also specify just the project or project branch (other values will be
997
- inferred from the current project or project branch):
998
- ```
999
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1000
- ```
1001
-
1002
- Note that `branch` is typically one of:
1003
- - `prod`
1004
- - `user.bob`
1005
- - `test.my_experiment`
1006
- - `prod.staging`
981
+ name : str
982
+ Project name. Make sure that the name is unique amongst all
983
+ projects that use the same production scheduler. The name may
984
+ contain only lowercase alphanumeric characters and underscores.
1007
985
 
986
+ branch : Optional[str], default None
987
+ The branch to use. If not specified, the branch is set to
988
+ `user.<username>` unless `production` is set to `True`. This can
989
+ also be set on the command line using `--branch` as a top-level option.
990
+ It is an error to specify `branch` in the decorator and on the command line.
1008
991
 
1009
- Parameters
1010
- ----------
1011
- flow : Union[str, Dict[str, str]], optional, default None
1012
- Upstream flow dependency for this flow.
1013
- flows : List[Union[str, Dict[str, str]]], default []
1014
- Upstream flow dependencies for this flow.
1015
- options : Dict[str, Any], default {}
1016
- Backend-specific configuration for tuning eventing behavior.
992
+ production : bool, default False
993
+ Whether or not the branch is the production branch. This can also be set on the
994
+ command line using `--production` as a top-level option. It is an error to specify
995
+ `production` in the decorator and on the command line.
996
+ The project branch name will be:
997
+ - if `branch` is specified:
998
+ - if `production` is True: `prod.<branch>`
999
+ - if `production` is False: `test.<branch>`
1000
+ - if `branch` is not specified:
1001
+ - if `production` is True: `prod`
1002
+ - if `production` is False: `user.<username>`
1017
1003
  """
1018
1004
  ...
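A usage sketch for the `@project` stub above; the project and flow names are illustrative assumptions.

```python
from metaflow import FlowSpec, project, step


@project(name='demo_project')  # illustrative name; lowercase alphanumerics and underscores only
class ProjectDemoFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ProjectDemoFlow()
```

Following the naming rules listed above, running with `--branch my_branch` would place the run under the project branch `test.my_branch`, while `--production` alone would place it under `prod`.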
1019
1005
 
1020
1006
  @typing.overload
1021
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1022
- ...
1023
-
1024
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1007
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1025
1008
  """
1026
- Specifies the flow(s) that this flow depends on.
1009
+ Specifies the event(s) that this flow depends on.
1027
1010
 
1028
1011
  ```
1029
- @trigger_on_finish(flow='FooFlow')
1012
+ @trigger(event='foo')
1030
1013
  ```
1031
1014
  or
1032
1015
  ```
1033
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1016
+ @trigger(events=['foo', 'bar'])
1034
1017
  ```
1035
- This decorator respects the @project decorator and triggers the flow
1036
- when upstream runs within the same namespace complete successfully
1037
1018
 
1038
- Additionally, you can specify project aware upstream flow dependencies
1039
- by specifying the fully qualified project_flow_name.
1019
+ Additionally, you can specify the parameter mappings
1020
+ to map event payload to Metaflow parameters for the flow.
1040
1021
  ```
1041
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1022
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1042
1023
  ```
1043
1024
  or
1044
1025
  ```
1045
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1026
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1027
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1046
1028
  ```
1047
1029
 
1048
- You can also specify just the project or project branch (other values will be
1049
- inferred from the current project or project branch):
1030
+ 'parameters' can also be a list of strings and tuples like so:
1050
1031
  ```
1051
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1032
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1033
+ ```
1034
+ This is equivalent to:
1035
+ ```
1036
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1052
1037
  ```
1053
-
1054
- Note that `branch` is typically one of:
1055
- - `prod`
1056
- - `user.bob`
1057
- - `test.my_experiment`
1058
- - `prod.staging`
1059
1038
 
1060
1039
 
1061
1040
  Parameters
1062
1041
  ----------
1063
- flow : Union[str, Dict[str, str]], optional, default None
1064
- Upstream flow dependency for this flow.
1065
- flows : List[Union[str, Dict[str, str]]], default []
1066
- Upstream flow dependencies for this flow.
1042
+ event : Union[str, Dict[str, Any]], optional, default None
1043
+ Event dependency for this flow.
1044
+ events : List[Union[str, Dict[str, Any]]], default []
1045
+ Events dependency for this flow.
1067
1046
  options : Dict[str, Any], default {}
1068
1047
  Backend-specific configuration for tuning eventing behavior.
1069
1048
  """
1070
1049
  ...
1071
1050
 
1072
1051
  @typing.overload
1073
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1074
- """
1075
- Specifies the Conda environment for all steps of the flow.
1076
-
1077
- Use `@conda_base` to set common libraries required by all
1078
- steps and use `@conda` to specify step-specific additions.
1079
-
1080
-
1081
- Parameters
1082
- ----------
1083
- packages : Dict[str, str], default {}
1084
- Packages to use for this flow. The key is the name of the package
1085
- and the value is the version to use.
1086
- libraries : Dict[str, str], default {}
1087
- Supported for backward compatibility. When used with packages, packages will take precedence.
1088
- python : str, optional, default None
1089
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1090
- that the version used will correspond to the version of the Python interpreter used to start the run.
1091
- disabled : bool, default False
1092
- If set to True, disables Conda.
1093
- """
1094
- ...
1095
-
1096
- @typing.overload
1097
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1052
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1098
1053
  ...
1099
1054
 
1100
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1055
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1101
1056
  """
1102
- Specifies the Conda environment for all steps of the flow.
1057
+ Specifies the event(s) that this flow depends on.
1103
1058
 
1104
- Use `@conda_base` to set common libraries required by all
1105
- steps and use `@conda` to specify step-specific additions.
1059
+ ```
1060
+ @trigger(event='foo')
1061
+ ```
1062
+ or
1063
+ ```
1064
+ @trigger(events=['foo', 'bar'])
1065
+ ```
1066
+
1067
+ Additionally, you can specify the parameter mappings
1068
+ to map event payload to Metaflow parameters for the flow.
1069
+ ```
1070
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1071
+ ```
1072
+ or
1073
+ ```
1074
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
1075
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
1076
+ ```
1077
+
1078
+ 'parameters' can also be a list of strings and tuples like so:
1079
+ ```
1080
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1081
+ ```
1082
+ This is equivalent to:
1083
+ ```
1084
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1085
+ ```
1106
1086
 
1107
1087
 
1108
1088
  Parameters
1109
1089
  ----------
1110
- packages : Dict[str, str], default {}
1111
- Packages to use for this flow. The key is the name of the package
1112
- and the value is the version to use.
1113
- libraries : Dict[str, str], default {}
1114
- Supported for backward compatibility. When used with packages, packages will take precedence.
1115
- python : str, optional, default None
1116
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1117
- that the version used will correspond to the version of the Python interpreter used to start the run.
1118
- disabled : bool, default False
1119
- If set to True, disables Conda.
1090
+ event : Union[str, Dict[str, Any]], optional, default None
1091
+ Event dependency for this flow.
1092
+ events : List[Union[str, Dict[str, Any]]], default []
1093
+ Events dependency for this flow.
1094
+ options : Dict[str, Any], default {}
1095
+ Backend-specific configuration for tuning eventing behavior.
1120
1096
  """
1121
1097
  ...
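A usage sketch for the `@trigger` stub above, assuming a hypothetical event named 'foo' whose payload field 'event_field' maps to a flow parameter, as in the docstring examples.

```python
from metaflow import FlowSpec, Parameter, step, trigger


# The event name 'foo' and the payload field 'event_field' mirror the
# docstring examples above; they are illustrative, not real event names.
@trigger(event={'name': 'foo', 'parameters': {'flow_param': 'event_field'}})
class TriggerDemoFlow(FlowSpec):  # illustrative flow name

    flow_param = Parameter('flow_param', default='unset')

    @step
    def start(self):
        print('triggered with flow_param =', self.flow_param)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    TriggerDemoFlow()
```

The trigger only fires once the flow is deployed to an event-capable orchestrator such as Argo Workflows; a plain local `run` simply uses the parameter default.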
1122
1098
 
@@ -1171,41 +1147,6 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1171
1147
  """
1172
1148
  ...
1173
1149
 
1174
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1175
- """
1176
- Specifies what flows belong to the same project.
1177
-
1178
- A project-specific namespace is created for all flows that
1179
- use the same `@project(name)`.
1180
-
1181
-
1182
- Parameters
1183
- ----------
1184
- name : str
1185
- Project name. Make sure that the name is unique amongst all
1186
- projects that use the same production scheduler. The name may
1187
- contain only lowercase alphanumeric characters and underscores.
1188
-
1189
- branch : Optional[str], default None
1190
- The branch to use. If not specified, the branch is set to
1191
- `user.<username>` unless `production` is set to `True`. This can
1192
- also be set on the command line using `--branch` as a top-level option.
1193
- It is an error to specify `branch` in the decorator and on the command line.
1194
-
1195
- production : bool, default False
1196
- Whether or not the branch is the production branch. This can also be set on the
1197
- command line using `--production` as a top-level option. It is an error to specify
1198
- `production` in the decorator and on the command line.
1199
- The project branch name will be:
1200
- - if `branch` is specified:
1201
- - if `production` is True: `prod.<branch>`
1202
- - if `production` is False: `test.<branch>`
1203
- - if `branch` is not specified:
1204
- - if `production` is True: `prod`
1205
- - if `production` is False: `user.<username>`
1206
- """
1207
- ...
1208
-
1209
1150
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1210
1151
  """
1211
1152
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1250,93 +1191,152 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
1250
1191
  ...
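A usage sketch for the `@airflow_s3_key_sensor` stub above; only the parameter names come from the signature shown earlier, and every argument value is a placeholder assumption.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


# All argument values below are placeholders; the sensor takes effect when
# the flow is exported to an Airflow DAG, not during a local run.
@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool='default_pool',
    soft_fail=False,
    name='wait_for_input_file',
    description='Wait for the daily input file to land in S3',
    bucket_key='input/latest.csv',
    bucket_name='example-bucket',
    wildcard_match=False,
    aws_conn_id='aws_default',
    verify=True,
)
class S3SensorDemoFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    S3SensorDemoFlow()
```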
1251
1192
 
1252
1193
  @typing.overload
1253
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1194
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1254
1195
  """
1255
- Specifies the event(s) that this flow depends on.
1196
+ Specifies the Conda environment for all steps of the flow.
1197
+
1198
+ Use `@conda_base` to set common libraries required by all
1199
+ steps and use `@conda` to specify step-specific additions.
1200
+
1201
+
1202
+ Parameters
1203
+ ----------
1204
+ packages : Dict[str, str], default {}
1205
+ Packages to use for this flow. The key is the name of the package
1206
+ and the value is the version to use.
1207
+ libraries : Dict[str, str], default {}
1208
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1209
+ python : str, optional, default None
1210
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1211
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1212
+ disabled : bool, default False
1213
+ If set to True, disables Conda.
1214
+ """
1215
+ ...
1216
+
1217
+ @typing.overload
1218
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1219
+ ...
1220
+
1221
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1222
+ """
1223
+ Specifies the Conda environment for all steps of the flow.
1224
+
1225
+ Use `@conda_base` to set common libraries required by all
1226
+ steps and use `@conda` to specify step-specific additions.
1227
+
1228
+
1229
+ Parameters
1230
+ ----------
1231
+ packages : Dict[str, str], default {}
1232
+ Packages to use for this flow. The key is the name of the package
1233
+ and the value is the version to use.
1234
+ libraries : Dict[str, str], default {}
1235
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1236
+ python : str, optional, default None
1237
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1238
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1239
+ disabled : bool, default False
1240
+ If set to True, disables Conda.
1241
+ """
1242
+ ...
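A usage sketch for the `@conda_base` stub added above; the flow name and the package/Python pins are placeholder assumptions.

```python
from metaflow import FlowSpec, conda_base, step


@conda_base(packages={'numpy': '1.26.4'},  # placeholder pin, not a recommendation
            python='3.11.5')               # placeholder interpreter version
class CondaBaseDemoFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        import numpy as np  # resolved from the flow-level Conda environment
        self.total = int(np.arange(5).sum())
        self.next(self.end)

    @step
    def end(self):
        print('total:', self.total)


if __name__ == '__main__':
    CondaBaseDemoFlow()
```

Step-specific additions would come from `@conda`, as the docstring notes, and the run is typically launched with the `--environment=conda` top-level option.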
1243
+
1244
+ @typing.overload
1245
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1246
+ """
1247
+ Specifies the flow(s) that this flow depends on.
1256
1248
 
1257
1249
  ```
1258
- @trigger(event='foo')
1250
+ @trigger_on_finish(flow='FooFlow')
1259
1251
  ```
1260
1252
  or
1261
1253
  ```
1262
- @trigger(events=['foo', 'bar'])
1254
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1263
1255
  ```
1256
+ This decorator respects the @project decorator and triggers the flow
1257
+ when upstream runs within the same namespace complete successfully
1264
1258
 
1265
- Additionally, you can specify the parameter mappings
1266
- to map event payload to Metaflow parameters for the flow.
1259
+ Additionally, you can specify project aware upstream flow dependencies
1260
+ by specifying the fully qualified project_flow_name.
1267
1261
  ```
1268
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1262
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1269
1263
  ```
1270
1264
  or
1271
1265
  ```
1272
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1273
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1266
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1274
1267
  ```
1275
1268
 
1276
- 'parameters' can also be a list of strings and tuples like so:
1277
- ```
1278
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1279
- ```
1280
- This is equivalent to:
1269
+ You can also specify just the project or project branch (other values will be
1270
+ inferred from the current project or project branch):
1281
1271
  ```
1282
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1272
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1283
1273
  ```
1284
1274
 
1275
+ Note that `branch` is typically one of:
1276
+ - `prod`
1277
+ - `user.bob`
1278
+ - `test.my_experiment`
1279
+ - `prod.staging`
1280
+
1285
1281
 
1286
1282
  Parameters
1287
1283
  ----------
1288
- event : Union[str, Dict[str, Any]], optional, default None
1289
- Event dependency for this flow.
1290
- events : List[Union[str, Dict[str, Any]]], default []
1291
- Events dependency for this flow.
1284
+ flow : Union[str, Dict[str, str]], optional, default None
1285
+ Upstream flow dependency for this flow.
1286
+ flows : List[Union[str, Dict[str, str]]], default []
1287
+ Upstream flow dependencies for this flow.
1292
1288
  options : Dict[str, Any], default {}
1293
1289
  Backend-specific configuration for tuning eventing behavior.
1294
1290
  """
1295
1291
  ...
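A usage sketch for the `@trigger_on_finish` stub added above; 'FooFlow' is the upstream flow name used in the docstring examples, and the downstream flow name is an illustrative assumption.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# The trigger fires only after this flow is deployed alongside the
# upstream flow on an event-capable orchestrator; a local run ignores it.
@trigger_on_finish(flow='FooFlow')
class DownstreamDemoFlow(FlowSpec):  # illustrative flow name

    @step
    def start(self):
        print('upstream FooFlow finished; starting downstream work')
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    DownstreamDemoFlow()
```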
1296
1292
 
1297
1293
  @typing.overload
1298
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1294
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1299
1295
  ...
1300
1296
 
1301
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1297
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1302
1298
  """
1303
- Specifies the event(s) that this flow depends on.
1299
+ Specifies the flow(s) that this flow depends on.
1304
1300
 
1305
1301
  ```
1306
- @trigger(event='foo')
1302
+ @trigger_on_finish(flow='FooFlow')
1307
1303
  ```
1308
1304
  or
1309
1305
  ```
1310
- @trigger(events=['foo', 'bar'])
1306
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1311
1307
  ```
1308
+ This decorator respects the @project decorator and triggers the flow
1309
+ when upstream runs within the same namespace complete successfully
1312
1310
 
1313
- Additionally, you can specify the parameter mappings
1314
- to map event payload to Metaflow parameters for the flow.
1311
+ Additionally, you can specify project aware upstream flow dependencies
1312
+ by specifying the fully qualified project_flow_name.
1315
1313
  ```
1316
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1314
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1317
1315
  ```
1318
1316
  or
1319
1317
  ```
1320
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1321
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1318
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1322
1319
  ```
1323
1320
 
1324
- 'parameters' can also be a list of strings and tuples like so:
1325
- ```
1326
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1327
- ```
1328
- This is equivalent to:
1321
+ You can also specify just the project or project branch (other values will be
1322
+ inferred from the current project or project branch):
1329
1323
  ```
1330
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1324
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1331
1325
  ```
1332
1326
 
1327
+ Note that `branch` is typically one of:
1328
+ - `prod`
1329
+ - `user.bob`
1330
+ - `test.my_experiment`
1331
+ - `prod.staging`
1332
+
1333
1333
 
1334
1334
  Parameters
1335
1335
  ----------
1336
- event : Union[str, Dict[str, Any]], optional, default None
1337
- Event dependency for this flow.
1338
- events : List[Union[str, Dict[str, Any]]], default []
1339
- Events dependency for this flow.
1336
+ flow : Union[str, Dict[str, str]], optional, default None
1337
+ Upstream flow dependency for this flow.
1338
+ flows : List[Union[str, Dict[str, str]]], default []
1339
+ Upstream flow dependencies for this flow.
1340
1340
  options : Dict[str, Any], default {}
1341
1341
  Backend-specific configuration for tuning eventing behavior.
1342
1342
  """