metaflow-stubs 2.13.4__py2.py3-none-any.whl → 2.13.5__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to their public registry. It is provided for informational purposes only.
Files changed (145)
  1. metaflow-stubs/__init__.pyi +240 -240
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +4 -4
  23. metaflow-stubs/plugins/__init__.pyi +14 -14
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +3 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +20 -0
  106. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  109. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  111. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  114. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  117. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  118. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  120. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  122. metaflow-stubs/pylint_wrapper.pyi +2 -2
  123. metaflow-stubs/runner/__init__.pyi +2 -2
  124. metaflow-stubs/runner/deployer.pyi +5 -5
  125. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  126. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  127. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  128. metaflow-stubs/runner/nbrun.pyi +2 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  130. metaflow-stubs/runner/utils.pyi +2 -2
  131. metaflow-stubs/system/__init__.pyi +2 -2
  132. metaflow-stubs/system/system_logger.pyi +3 -3
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. metaflow-stubs/user_configs/__init__.pyi +2 -2
  137. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  138. metaflow-stubs/user_configs/config_options.pyi +2 -2
  139. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  140. metaflow-stubs/version.pyi +2 -2
  141. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.5.dist-info}/METADATA +2 -2
  142. metaflow_stubs-2.13.5.dist-info/RECORD +145 -0
  143. metaflow_stubs-2.13.4.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.5.dist-info}/WHEEL +0 -0
  145. {metaflow_stubs-2.13.4.dist-info → metaflow_stubs-2.13.5.dist-info}/top_level.txt +0 -0
metaflow-stubs/__init__.pyi
@@ -1,15 +1,15 @@
 ######################################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.13.4 #
-# Generated on 2025-01-15T17:53:58.657195 #
+# MF version: 2.13.5 #
+# Generated on 2025-01-16T23:35:54.592023 #
 ######################################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import datetime
     import typing
+    import datetime
 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
 from .user_configs.config_parameters import config_expr as config_expr
 from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
 from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
-from . import events as events
 from . import tuple_util as tuple_util
+from . import events as events
 from . import runner as runner
 from . import plugins as plugins
 from .plugins.datatools.s3.s3 import S3 as S3
@@ -142,6 +142,37 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     """
     ...

+@typing.overload
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+    """
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+    """
+    Specifies the Conda environment for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.
+    """
+    ...
+
 @typing.overload
 def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     """
@@ -187,109 +218,44 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     ...

 @typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-    """
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
-    """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
-    """
-    ...
-
-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-    """
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies that this step should execute on Kubernetes.
+    Specifies environment variables to be set prior to the execution of a step.
     """
     ...

 @typing.overload
-def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
    ...

 @typing.overload
-def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
     """
-    Specifies a timeout for your step.
-
-    This decorator is useful if this step may hang indefinitely.
-
-    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
-    A timeout is considered to be an exception thrown by the step. It will cause the step to be
-    retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
-    Note that all the values specified in parameters are added together so if you specify
-    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
     """
     ...

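For context, the two step decorators that move into this position, `@environment` and `@batch`, are commonly stacked on one step. A minimal sketch, assuming a configured AWS Batch job queue; the variable name and resource numbers are illustrative:
```
from metaflow import FlowSpec, batch, environment, step

class BatchDemoFlow(FlowSpec):

    @environment(vars={"MODEL_DIR": "/tmp/models"})  # set before the task executes
    @batch(cpu=2, memory=8192)  # run this step as an AWS Batch job
    @step
    def start(self):
        import os
        print("models will be written to", os.environ["MODEL_DIR"])
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    BatchDemoFlow()
```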
@@ -325,23 +291,51 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.
     """
     ...

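The `@resources` docstring above explains that requirements are declared independently of the compute layer. A minimal sketch; the numbers are illustrative:
```
from metaflow import FlowSpec, resources, step

class ResourcesDemoFlow(FlowSpec):

    @resources(cpu=4, memory=16000)  # requirements only; no compute layer implied
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ResourcesDemoFlow()
```
Running `python resources_demo.py run --with batch` (or `--with kubernetes`) then applies these requirements on the chosen layer, per the docstring.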
@@ -376,186 +370,191 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
     """
     ...

+def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies that this step should execute on Kubernetes.
+    """
+    ...
+
 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies secrets to be retrieved and injected as environment variables prior to
+    the execution of a step.
     """
     ...

 @typing.overload
-def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the resources needed when executing this step.
+    Specifies a timeout for your step.

-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    This decorator is useful if this step may hang indefinitely.

-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies the resources needed when executing this step.
+    Specifies a timeout for your step.

-    Use `@resources` to specify the resource requirements
-    independently of the specific compute layer (`@batch`, `@kubernetes`).
+    This decorator is useful if this step may hang indefinitely.

-    You can choose the compute layer on the command line by executing e.g.
-    ```
-    python myflow.py run --with batch
-    ```
-    or
-    ```
-    python myflow.py run --with kubernetes
-    ```
-    which executes the flow on the desired system using the
-    requirements specified in `@resources`.
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
     """
     ...

 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
     """
-    Specifies the Conda environment for the step.
+    Specifies the number of times the task corresponding
+    to a step needs to be retried.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    This decorator is useful for handling transient errors, such as networking issues.
+    If your task contains operations that can't be retried safely, e.g. database updates,
+    it is advisable to annotate it with `@retry(times=0)`.
+
+    This can be used in conjunction with the `@catch` decorator. The `@catch`
+    decorator will execute a no-op task after all retries have been exhausted,
+    ensuring that the flow execution can continue.
     """
     ...

 @typing.overload
-def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the event(s) that this flow depends on.
+    Specifies the flow(s) that this flow depends on.

     ```
-    @trigger(event='foo')
+    @trigger_on_finish(flow='FooFlow')
     ```
     or
     ```
-    @trigger(events=['foo', 'bar'])
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
     ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.

-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
     ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
     ```
     or
     ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
     ```

-    'parameters' can also be a list of strings and tuples like so:
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
     ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
-    ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
     ```
+
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
     """
     ...

 @typing.overload
-def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the event(s) that this flow depends on.
+    Specifies the flow(s) that this flow depends on.

     ```
-    @trigger(event='foo')
+    @trigger_on_finish(flow='FooFlow')
     ```
     or
     ```
-    @trigger(events=['foo', 'bar'])
+    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
     ```
+    This decorator respects the @project decorator and triggers the flow
+    when upstream runs within the same namespace complete successfully.

-    Additionally, you can specify the parameter mappings
-    to map event payload to Metaflow parameters for the flow.
+    Additionally, you can specify project-aware upstream flow dependencies
+    by specifying the fully qualified project_flow_name.
     ```
-    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
     ```
     or
     ```
-    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
-    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
     ```

-    'parameters' can also be a list of strings and tuples like so:
-    ```
-    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
-    ```
-    This is equivalent to:
+    You can also specify just the project or project branch (other values will be
+    inferred from the current project or project branch):
     ```
-    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
     ```
-    """
-    ...
-
-def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies what flows belong to the same project.

-    A project-specific namespace is created for all flows that
-    use the same `@project(name)`.
+    Note that `branch` is typically one of:
+    - `prod`
+    - `user.bob`
+    - `test.my_experiment`
+    - `prod.staging`
     """
     ...

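The `@timeout` and `@retry` docstrings above describe how the two compose with `@catch`: a timeout raises, retries re-run the task, and `@catch` finally records the failure so the flow can proceed. A minimal sketch of that combination; `fetch_remote_data` is a hypothetical helper:
```
from metaflow import FlowSpec, catch, retry, step, timeout

class RobustFlow(FlowSpec):

    @catch(var="fetch_error")  # after retries are exhausted, store the error and continue
    @retry(times=3, minutes_between_retries=2)  # re-run transient failures
    @timeout(minutes=30)  # a hang raises an exception, retried like any other failure
    @step
    def start(self):
        self.data = fetch_remote_data()  # hypothetical call that may hang or fail
        self.next(self.end)

    @step
    def end(self):
        if self.fetch_error:  # set by @catch when start ultimately failed
            print("start failed after all retries:", self.fetch_error)

if __name__ == "__main__":
    RobustFlow()
```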
@@ -566,94 +565,95 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
     """
     ...

-def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
-    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
-    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
-    added as flow decorators. Adding more than one decorator will ensure that the `start` step
-    starts only after all sensors finish.
-    """
-    ...
-
 @typing.overload
-def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the flow(s) that this flow depends on.
+    Specifies the event(s) that this flow depends on.

     ```
-    @trigger_on_finish(flow='FooFlow')
+    @trigger(event='foo')
     ```
     or
     ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.

-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```

-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+    ```
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
     """
     ...

 @typing.overload
-def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
     ...

-def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
     """
-    Specifies the flow(s) that this flow depends on.
+    Specifies the event(s) that this flow depends on.

     ```
-    @trigger_on_finish(flow='FooFlow')
+    @trigger(event='foo')
     ```
     or
     ```
-    @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+    @trigger(events=['foo', 'bar'])
     ```
-    This decorator respects the @project decorator and triggers the flow
-    when upstream runs within the same namespace complete successfully.

-    Additionally, you can specify project-aware upstream flow dependencies
-    by specifying the fully qualified project_flow_name.
+    Additionally, you can specify the parameter mappings
+    to map event payload to Metaflow parameters for the flow.
     ```
-    @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+    @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
     ```
     or
     ```
-    @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+    @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+    {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
     ```

-    You can also specify just the project or project branch (other values will be
-    inferred from the current project or project branch):
+    'parameters' can also be a list of strings and tuples like so:
     ```
-    @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+    @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
     ```
-
-    Note that `branch` is typically one of:
-    - `prod`
-    - `user.bob`
-    - `test.my_experiment`
-    - `prod.staging`
+    This is equivalent to:
+    ```
+    @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+    ```
+    """
+    ...
+
+@typing.overload
+def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
+    """
+    ...
+
+@typing.overload
+def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+    ...
+
+def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+    """
+    Specifies the times when the flow should be run when running on a
+    production scheduler.
     """
     ...

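The restored `@trigger` stub above documents event-based triggering, including mapping event payload fields to flow parameters. A minimal sketch; the event and field names are hypothetical:
```
from metaflow import FlowSpec, Parameter, step, trigger

@trigger(event={"name": "data_refreshed", "parameters": {"table": "table_name"}})
class TriggeredFlow(FlowSpec):
    # Populated from the event payload's `table_name` field when triggered.
    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("processing table", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TriggeredFlow()
```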
@@ -680,6 +680,16 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
     """
     ...

+def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+    """
+    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+    added as flow decorators. Adding more than one decorator will ensure that the `start` step
+    starts only after all sensors finish.
+    """
+    ...
+
 @typing.overload
 def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
@@ -703,22 +713,12 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
     """
     ...

-@typing.overload
-def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-    """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
-    """
-    ...
-
-@typing.overload
-def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
-    ...
-
-def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
     """
-    Specifies the times when the flow should be run when running on a
-    production scheduler.
+    Specifies what flows belong to the same project.
+
+    A project-specific namespace is created for all flows that
+    use the same `@project(name)`.
     """
     ...

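Finally, the relocated `@project` stub describes project-wide namespacing. A minimal sketch with a hypothetical project name:
```
from metaflow import FlowSpec, project, step

@project(name="demo_project")  # hypothetical; all flows sharing it get one namespace
class ProjectedFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectedFlow()
```
Deployed branches then surface as fully qualified names of the form `demo_project.branch.my_branch.ProjectedFlow`, matching the `branch` flavors (`prod`, `user.bob`, `test.my_experiment`, `prod.staging`) listed in the `@trigger_on_finish` docstring above.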