metaflow-stubs 2.19.1__py2.py3-none-any.whl → 2.19.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic; see the registry's advisory page for more details.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +601 -601
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +4 -4
  8. metaflow-stubs/client/filecache.pyi +4 -4
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +2 -2
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +16 -16
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +4 -4
  24. metaflow-stubs/packaging_sys/backend.pyi +3 -3
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +4 -4
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +3 -3
  30. metaflow-stubs/plugins/__init__.pyi +12 -12
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +4 -4
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +4 -4
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  137. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +33 -33
  143. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  144. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +4 -4
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  160. metaflow-stubs/user_decorators/mutable_step.pyi +2 -2
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +3 -3
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +2 -2
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.3.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.3.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.19.1.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.3.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.19.1.dist-info → metaflow_stubs-2.19.3.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.19.1 #
4
- # Generated on 2025-10-28T01:39:06.721172 #
3
+ # MF version: 2.19.3 #
4
+ # Generated on 2025-10-28T12:26:25.237849 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -47,10 +47,10 @@ from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
50
+ from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
51
51
  from .plugins.parsers import yaml_parser as yaml_parser
52
52
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
53
- from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
53
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
54
54
  from . import cards as cards
55
55
  from . import client as client
56
56
  from .client.core import namespace as namespace
@@ -155,187 +155,61 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
155
155
  ...
156
156
 
157
157
  @typing.overload
158
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
159
- """
160
- Specifies the number of times the task corresponding
161
- to a step needs to be retried.
162
-
163
- This decorator is useful for handling transient errors, such as networking issues.
164
- If your task contains operations that can't be retried safely, e.g. database updates,
165
- it is advisable to annotate it with `@retry(times=0)`.
166
-
167
- This can be used in conjunction with the `@catch` decorator. The `@catch`
168
- decorator will execute a no-op task after all retries have been exhausted,
169
- ensuring that the flow execution can continue.
170
-
171
-
172
- Parameters
173
- ----------
174
- times : int, default 3
175
- Number of times to retry this task.
176
- minutes_between_retries : int, default 2
177
- Number of minutes between retries.
178
- """
179
- ...
180
-
181
- @typing.overload
182
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
183
- ...
184
-
185
- @typing.overload
186
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
187
- ...
188
-
189
- def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
190
- """
191
- Specifies the number of times the task corresponding
192
- to a step needs to be retried.
193
-
194
- This decorator is useful for handling transient errors, such as networking issues.
195
- If your task contains operations that can't be retried safely, e.g. database updates,
196
- it is advisable to annotate it with `@retry(times=0)`.
197
-
198
- This can be used in conjunction with the `@catch` decorator. The `@catch`
199
- decorator will execute a no-op task after all retries have been exhausted,
200
- ensuring that the flow execution can continue.
201
-
202
-
203
- Parameters
204
- ----------
205
- times : int, default 3
206
- Number of times to retry this task.
207
- minutes_between_retries : int, default 2
208
- Number of minutes between retries.
209
- """
210
- ...
211
-
212
- @typing.overload
213
- def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
214
- """
215
- Specifies the resources needed when executing this step.
216
-
217
- Use `@resources` to specify the resource requirements
218
- independently of the specific compute layer (`@batch`, `@kubernetes`).
219
-
220
- You can choose the compute layer on the command line by executing e.g.
221
- ```
222
- python myflow.py run --with batch
223
- ```
224
- or
225
- ```
226
- python myflow.py run --with kubernetes
227
- ```
228
- which executes the flow on the desired system using the
229
- requirements specified in `@resources`.
230
-
231
-
232
- Parameters
233
- ----------
234
- cpu : int, default 1
235
- Number of CPUs required for this step.
236
- gpu : int, optional, default None
237
- Number of GPUs required for this step.
238
- disk : int, optional, default None
239
- Disk size (in MB) required for this step. Only applies on Kubernetes.
240
- memory : int, default 4096
241
- Memory size (in MB) required for this step.
242
- shared_memory : int, optional, default None
243
- The value for the size (in MiB) of the /dev/shm volume for this step.
244
- This parameter maps to the `--shm-size` option in Docker.
245
- """
246
- ...
247
-
248
- @typing.overload
249
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
250
- ...
251
-
252
- @typing.overload
253
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
254
- ...
255
-
256
- def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
257
- """
258
- Specifies the resources needed when executing this step.
259
-
260
- Use `@resources` to specify the resource requirements
261
- independently of the specific compute layer (`@batch`, `@kubernetes`).
262
-
263
- You can choose the compute layer on the command line by executing e.g.
264
- ```
265
- python myflow.py run --with batch
266
- ```
267
- or
268
- ```
269
- python myflow.py run --with kubernetes
270
- ```
271
- which executes the flow on the desired system using the
272
- requirements specified in `@resources`.
273
-
274
-
275
- Parameters
276
- ----------
277
- cpu : int, default 1
278
- Number of CPUs required for this step.
279
- gpu : int, optional, default None
280
- Number of GPUs required for this step.
281
- disk : int, optional, default None
282
- Disk size (in MB) required for this step. Only applies on Kubernetes.
283
- memory : int, default 4096
284
- Memory size (in MB) required for this step.
285
- shared_memory : int, optional, default None
286
- The value for the size (in MiB) of the /dev/shm volume for this step.
287
- This parameter maps to the `--shm-size` option in Docker.
288
- """
289
- ...
290
-
291
- @typing.overload
292
- def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
158
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
293
159
  """
294
- Specifies the PyPI packages for the step.
160
+ Specifies the Conda environment for the step.
295
161
 
296
162
  Information in this decorator will augment any
297
- attributes set in the `@pyi_base` flow-level decorator. Hence,
298
- you can use `@pypi_base` to set packages required by all
299
- steps and use `@pypi` to specify step-specific overrides.
163
+ attributes set in the `@conda_base` flow-level decorator. Hence,
164
+ you can use `@conda_base` to set packages required by all
165
+ steps and use `@conda` to specify step-specific overrides.
300
166
 
301
167
 
302
168
  Parameters
303
169
  ----------
304
- packages : Dict[str, str], default: {}
170
+ packages : Dict[str, str], default {}
305
171
  Packages to use for this step. The key is the name of the package
306
172
  and the value is the version to use.
307
- python : str, optional, default: None
173
+ libraries : Dict[str, str], default {}
174
+ Supported for backward compatibility. When used with packages, packages will take precedence.
175
+ python : str, optional, default None
308
176
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
309
177
  that the version used will correspond to the version of the Python interpreter used to start the run.
178
+ disabled : bool, default False
179
+ If set to True, disables @conda.
310
180
  """
311
181
  ...
312
182
 
313
183
  @typing.overload
314
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
184
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
315
185
  ...
316
186
 
317
187
  @typing.overload
318
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
188
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
319
189
  ...
320
190
 
321
- def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
191
+ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
322
192
  """
323
- Specifies the PyPI packages for the step.
193
+ Specifies the Conda environment for the step.
324
194
 
325
195
  Information in this decorator will augment any
326
- attributes set in the `@pyi_base` flow-level decorator. Hence,
327
- you can use `@pypi_base` to set packages required by all
328
- steps and use `@pypi` to specify step-specific overrides.
196
+ attributes set in the `@conda_base` flow-level decorator. Hence,
197
+ you can use `@conda_base` to set packages required by all
198
+ steps and use `@conda` to specify step-specific overrides.
329
199
 
330
200
 
331
201
  Parameters
332
202
  ----------
333
- packages : Dict[str, str], default: {}
203
+ packages : Dict[str, str], default {}
334
204
  Packages to use for this step. The key is the name of the package
335
205
  and the value is the version to use.
336
- python : str, optional, default: None
206
+ libraries : Dict[str, str], default {}
207
+ Supported for backward compatibility. When used with packages, packages will take precedence.
208
+ python : str, optional, default None
337
209
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
338
210
  that the version used will correspond to the version of the Python interpreter used to start the run.
211
+ disabled : bool, default False
212
+ If set to True, disables @conda.
339
213
  """
340
214
  ...
341
215
 
@@ -494,66 +368,175 @@ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_genera
494
368
  """
495
369
  ...
496
370
 
497
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
371
+ @typing.overload
372
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
498
373
  """
499
- Specifies that this step should execute on Kubernetes.
374
+ Decorator prototype for all step decorators. This function gets specialized
375
+ and imported for all decorators types by _import_plugin_decorators().
376
+ """
377
+ ...
378
+
379
+ @typing.overload
380
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
381
+ ...
382
+
383
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
384
+ """
385
+ Decorator prototype for all step decorators. This function gets specialized
386
+ and imported for all decorators types by _import_plugin_decorators().
387
+ """
388
+ ...
389
+
390
+ @typing.overload
391
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
392
+ """
393
+ Specifies secrets to be retrieved and injected as environment variables prior to
394
+ the execution of a step.
500
395
 
501
396
 
502
397
  Parameters
503
398
  ----------
504
- cpu : int, default 1
505
- Number of CPUs required for this step. If `@resources` is
506
- also present, the maximum value from all decorators is used.
507
- memory : int, default 4096
508
- Memory size (in MB) required for this step. If
509
- `@resources` is also present, the maximum value from all decorators is
510
- used.
511
- disk : int, default 10240
512
- Disk size (in MB) required for this step. If
513
- `@resources` is also present, the maximum value from all decorators is
514
- used.
515
- image : str, optional, default None
516
- Docker image to use when launching on Kubernetes. If not specified, and
517
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
518
- not, a default Docker image mapping to the current version of Python is used.
519
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
520
- If given, the imagePullPolicy to be applied to the Docker image of the step.
521
- image_pull_secrets: List[str], default []
522
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
523
- Kubernetes image pull secrets to use when pulling container images
524
- in Kubernetes.
525
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
526
- Kubernetes service account to use when launching pod in Kubernetes.
527
- secrets : List[str], optional, default None
528
- Kubernetes secrets to use when launching pod in Kubernetes. These
529
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
530
- in Metaflow configuration.
531
- node_selector: Union[Dict[str,str], str], optional, default None
532
- Kubernetes node selector(s) to apply to the pod running the task.
533
- Can be passed in as a comma separated string of values e.g.
534
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
535
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
536
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
537
- Kubernetes namespace to use when launching pod in Kubernetes.
538
- gpu : int, optional, default None
539
- Number of GPUs required for this step. A value of zero implies that
540
- the scheduled node should not have GPUs.
541
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
542
- The vendor of the GPUs to be used for this step.
543
- tolerations : List[Dict[str,str]], default []
544
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
545
- Kubernetes tolerations to use when launching pod in Kubernetes.
546
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
547
- Kubernetes labels to use when launching pod in Kubernetes.
548
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
549
- Kubernetes annotations to use when launching pod in Kubernetes.
550
- use_tmpfs : bool, default False
551
- This enables an explicit tmpfs mount for this step.
552
- tmpfs_tempdir : bool, default True
553
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
554
- tmpfs_size : int, optional, default: None
555
- The value for the size (in MiB) of the tmpfs mount for this step.
556
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
399
+ sources : List[Union[str, Dict[str, Any]]], default: []
400
+ List of secret specs, defining how the secrets are to be retrieved
401
+ role : str, optional, default: None
402
+ Role to use for fetching secrets
403
+ """
404
+ ...
405
+
406
+ @typing.overload
407
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
408
+ ...
409
+
410
+ @typing.overload
411
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
412
+ ...
413
+
414
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
415
+ """
416
+ Specifies secrets to be retrieved and injected as environment variables prior to
417
+ the execution of a step.
418
+
419
+
420
+ Parameters
421
+ ----------
422
+ sources : List[Union[str, Dict[str, Any]]], default: []
423
+ List of secret specs, defining how the secrets are to be retrieved
424
+ role : str, optional, default: None
425
+ Role to use for fetching secrets
426
+ """
427
+ ...
428
+
429
+ @typing.overload
430
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
431
+ """
432
+ Specifies that the step will success under all circumstances.
433
+
434
+ The decorator will create an optional artifact, specified by `var`, which
435
+ contains the exception raised. You can use it to detect the presence
436
+ of errors, indicating that all happy-path artifacts produced by the step
437
+ are missing.
438
+
439
+
440
+ Parameters
441
+ ----------
442
+ var : str, optional, default None
443
+ Name of the artifact in which to store the caught exception.
444
+ If not specified, the exception is not stored.
445
+ print_exception : bool, default True
446
+ Determines whether or not the exception is printed to
447
+ stdout when caught.
448
+ """
449
+ ...
450
+
451
+ @typing.overload
452
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
453
+ ...
454
+
455
+ @typing.overload
456
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
457
+ ...
458
+
459
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
460
+ """
461
+ Specifies that the step will success under all circumstances.
462
+
463
+ The decorator will create an optional artifact, specified by `var`, which
464
+ contains the exception raised. You can use it to detect the presence
465
+ of errors, indicating that all happy-path artifacts produced by the step
466
+ are missing.
467
+
468
+
469
+ Parameters
470
+ ----------
471
+ var : str, optional, default None
472
+ Name of the artifact in which to store the caught exception.
473
+ If not specified, the exception is not stored.
474
+ print_exception : bool, default True
475
+ Determines whether or not the exception is printed to
476
+ stdout when caught.
477
+ """
478
+ ...
479
+
480
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
481
+ """
482
+ Specifies that this step should execute on Kubernetes.
483
+
484
+
485
+ Parameters
486
+ ----------
487
+ cpu : int, default 1
488
+ Number of CPUs required for this step. If `@resources` is
489
+ also present, the maximum value from all decorators is used.
490
+ memory : int, default 4096
491
+ Memory size (in MB) required for this step. If
492
+ `@resources` is also present, the maximum value from all decorators is
493
+ used.
494
+ disk : int, default 10240
495
+ Disk size (in MB) required for this step. If
496
+ `@resources` is also present, the maximum value from all decorators is
497
+ used.
498
+ image : str, optional, default None
499
+ Docker image to use when launching on Kubernetes. If not specified, and
500
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
501
+ not, a default Docker image mapping to the current version of Python is used.
502
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
503
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
504
+ image_pull_secrets: List[str], default []
505
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
506
+ Kubernetes image pull secrets to use when pulling container images
507
+ in Kubernetes.
508
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
509
+ Kubernetes service account to use when launching pod in Kubernetes.
510
+ secrets : List[str], optional, default None
511
+ Kubernetes secrets to use when launching pod in Kubernetes. These
512
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
513
+ in Metaflow configuration.
514
+ node_selector: Union[Dict[str,str], str], optional, default None
515
+ Kubernetes node selector(s) to apply to the pod running the task.
516
+ Can be passed in as a comma separated string of values e.g.
517
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
518
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
519
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
520
+ Kubernetes namespace to use when launching pod in Kubernetes.
521
+ gpu : int, optional, default None
522
+ Number of GPUs required for this step. A value of zero implies that
523
+ the scheduled node should not have GPUs.
524
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
525
+ The vendor of the GPUs to be used for this step.
526
+ tolerations : List[Dict[str,str]], default []
527
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
528
+ Kubernetes tolerations to use when launching pod in Kubernetes.
529
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
530
+ Kubernetes labels to use when launching pod in Kubernetes.
531
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
532
+ Kubernetes annotations to use when launching pod in Kubernetes.
533
+ use_tmpfs : bool, default False
534
+ This enables an explicit tmpfs mount for this step.
535
+ tmpfs_tempdir : bool, default True
536
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
537
+ tmpfs_size : int, optional, default: None
538
+ The value for the size (in MiB) of the tmpfs mount for this step.
539
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
557
540
  memory allocated for this step.
558
541
  tmpfs_path : str, optional, default /metaflow_temp
559
542
  Path to tmpfs mount for this step.
@@ -584,60 +567,169 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: st
584
567
  ...
585
568
 
586
569
  @typing.overload
587
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
570
+ def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
588
571
  """
589
- Decorator prototype for all step decorators. This function gets specialized
590
- and imported for all decorators types by _import_plugin_decorators().
572
+ Specifies the resources needed when executing this step.
573
+
574
+ Use `@resources` to specify the resource requirements
575
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
576
+
577
+ You can choose the compute layer on the command line by executing e.g.
578
+ ```
579
+ python myflow.py run --with batch
580
+ ```
581
+ or
582
+ ```
583
+ python myflow.py run --with kubernetes
584
+ ```
585
+ which executes the flow on the desired system using the
586
+ requirements specified in `@resources`.
587
+
588
+
589
+ Parameters
590
+ ----------
591
+ cpu : int, default 1
592
+ Number of CPUs required for this step.
593
+ gpu : int, optional, default None
594
+ Number of GPUs required for this step.
595
+ disk : int, optional, default None
596
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
597
+ memory : int, default 4096
598
+ Memory size (in MB) required for this step.
599
+ shared_memory : int, optional, default None
600
+ The value for the size (in MiB) of the /dev/shm volume for this step.
601
+ This parameter maps to the `--shm-size` option in Docker.
591
602
  """
592
603
  ...
593
604
 
594
605
  @typing.overload
595
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
606
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
596
607
  ...
597
608
 
598
- def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
609
+ @typing.overload
610
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
611
+ ...
612
+
613
+ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
599
614
  """
600
- Decorator prototype for all step decorators. This function gets specialized
601
- and imported for all decorators types by _import_plugin_decorators().
615
+ Specifies the resources needed when executing this step.
616
+
617
+ Use `@resources` to specify the resource requirements
618
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
619
+
620
+ You can choose the compute layer on the command line by executing e.g.
621
+ ```
622
+ python myflow.py run --with batch
623
+ ```
624
+ or
625
+ ```
626
+ python myflow.py run --with kubernetes
627
+ ```
628
+ which executes the flow on the desired system using the
629
+ requirements specified in `@resources`.
630
+
631
+
632
+ Parameters
633
+ ----------
634
+ cpu : int, default 1
635
+ Number of CPUs required for this step.
636
+ gpu : int, optional, default None
637
+ Number of GPUs required for this step.
638
+ disk : int, optional, default None
639
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
640
+ memory : int, default 4096
641
+ Memory size (in MB) required for this step.
642
+ shared_memory : int, optional, default None
643
+ The value for the size (in MiB) of the /dev/shm volume for this step.
644
+ This parameter maps to the `--shm-size` option in Docker.
602
645
  """
603
646
  ...
604
647
 
605
648
  @typing.overload
606
- def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
649
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
607
650
  """
608
- Specifies secrets to be retrieved and injected as environment variables prior to
609
- the execution of a step.
651
+ Specifies the number of times the task corresponding
652
+ to a step needs to be retried.
653
+
654
+ This decorator is useful for handling transient errors, such as networking issues.
655
+ If your task contains operations that can't be retried safely, e.g. database updates,
656
+ it is advisable to annotate it with `@retry(times=0)`.
657
+
658
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
659
+ decorator will execute a no-op task after all retries have been exhausted,
660
+ ensuring that the flow execution can continue.
610
661
 
611
662
 
612
663
  Parameters
613
664
  ----------
614
- sources : List[Union[str, Dict[str, Any]]], default: []
615
- List of secret specs, defining how the secrets are to be retrieved
616
- role : str, optional, default: None
617
- Role to use for fetching secrets
665
+ times : int, default 3
666
+ Number of times to retry this task.
667
+ minutes_between_retries : int, default 2
668
+ Number of minutes between retries.
618
669
  """
619
670
  ...
620
671
 
621
672
  @typing.overload
622
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
673
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
623
674
  ...
624
675
 
625
676
  @typing.overload
626
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
677
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
627
678
  ...
628
679
 
629
- def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
680
+ def retry(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, times: int = 3, minutes_between_retries: int = 2):
630
681
  """
631
- Specifies secrets to be retrieved and injected as environment variables prior to
632
- the execution of a step.
682
+ Specifies the number of times the task corresponding
683
+ to a step needs to be retried.
684
+
685
+ This decorator is useful for handling transient errors, such as networking issues.
686
+ If your task contains operations that can't be retried safely, e.g. database updates,
687
+ it is advisable to annotate it with `@retry(times=0)`.
688
+
689
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
690
+ decorator will execute a no-op task after all retries have been exhausted,
691
+ ensuring that the flow execution can continue.
633
692
 
634
693
 
635
694
  Parameters
636
695
  ----------
637
- sources : List[Union[str, Dict[str, Any]]], default: []
638
- List of secret specs, defining how the secrets are to be retrieved
639
- role : str, optional, default: None
640
- Role to use for fetching secrets
696
+ times : int, default 3
697
+ Number of times to retry this task.
698
+ minutes_between_retries : int, default 2
699
+ Number of minutes between retries.
700
+ """
701
+ ...
702
+
703
+ @typing.overload
704
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
705
+ """
706
+ Specifies environment variables to be set prior to the execution of a step.
707
+
708
+
709
+ Parameters
710
+ ----------
711
+ vars : Dict[str, str], default {}
712
+ Dictionary of environment variables to set.
713
+ """
714
+ ...
715
+
716
+ @typing.overload
717
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
718
+ ...
719
+
720
+ @typing.overload
721
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
722
+ ...
723
+
724
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
725
+ """
726
+ Specifies environment variables to be set prior to the execution of a step.
727
+
728
+
729
+ Parameters
730
+ ----------
731
+ vars : Dict[str, str], default {}
732
+ Dictionary of environment variables to set.
641
733
  """
642
734
  ...
643
735
 
@@ -690,6 +782,57 @@ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generat
690
782
  """
691
783
  ...
692
784
 
785
+ @typing.overload
786
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
787
+ """
788
+ Specifies the PyPI packages for the step.
789
+
790
+ Information in this decorator will augment any
791
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
792
+ you can use `@pypi_base` to set packages required by all
793
+ steps and use `@pypi` to specify step-specific overrides.
794
+
795
+
796
+ Parameters
797
+ ----------
798
+ packages : Dict[str, str], default: {}
799
+ Packages to use for this step. The key is the name of the package
800
+ and the value is the version to use.
801
+ python : str, optional, default: None
802
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
803
+ that the version used will correspond to the version of the Python interpreter used to start the run.
804
+ """
805
+ ...
806
+
807
+ @typing.overload
808
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
809
+ ...
810
+
811
+ @typing.overload
812
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
813
+ ...
814
+
815
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
816
+ """
817
+ Specifies the PyPI packages for the step.
818
+
819
+ Information in this decorator will augment any
820
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
821
+ you can use `@pypi_base` to set packages required by all
822
+ steps and use `@pypi` to specify step-specific overrides.
823
+
824
+
825
+ Parameters
826
+ ----------
827
+ packages : Dict[str, str], default: {}
828
+ Packages to use for this step. The key is the name of the package
829
+ and the value is the version to use.
830
+ python : str, optional, default: None
831
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
832
+ that the version used will correspond to the version of the Python interpreter used to start the run.
833
+ """
834
+ ...
835
+
693
836
  @typing.overload
694
837
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
695
838
  """
@@ -749,283 +892,183 @@ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_gene
749
892
  """
750
893
  ...
751
894
 
752
- @typing.overload
753
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
754
- """
755
- Specifies the Conda environment for the step.
756
-
757
- Information in this decorator will augment any
758
- attributes set in the `@conda_base` flow-level decorator. Hence,
759
- you can use `@conda_base` to set packages required by all
760
- steps and use `@conda` to specify step-specific overrides.
761
-
762
-
763
- Parameters
764
- ----------
765
- packages : Dict[str, str], default {}
766
- Packages to use for this step. The key is the name of the package
767
- and the value is the version to use.
768
- libraries : Dict[str, str], default {}
769
- Supported for backward compatibility. When used with packages, packages will take precedence.
770
- python : str, optional, default None
771
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
772
- that the version used will correspond to the version of the Python interpreter used to start the run.
773
- disabled : bool, default False
774
- If set to True, disables @conda.
775
- """
776
- ...
777
-
778
- @typing.overload
779
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
780
- ...
781
-
782
- @typing.overload
783
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
784
- ...
785
-
786
- def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
787
- """
788
- Specifies the Conda environment for the step.
789
-
790
- Information in this decorator will augment any
791
- attributes set in the `@conda_base` flow-level decorator. Hence,
792
- you can use `@conda_base` to set packages required by all
793
- steps and use `@conda` to specify step-specific overrides.
794
-
795
-
796
- Parameters
797
- ----------
798
- packages : Dict[str, str], default {}
799
- Packages to use for this step. The key is the name of the package
800
- and the value is the version to use.
801
- libraries : Dict[str, str], default {}
802
- Supported for backward compatibility. When used with packages, packages will take precedence.
803
- python : str, optional, default None
804
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
805
- that the version used will correspond to the version of the Python interpreter used to start the run.
806
- disabled : bool, default False
807
- If set to True, disables @conda.
808
- """
809
- ...
810
-
811
- @typing.overload
812
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
895
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
813
896
  """
814
- Specifies environment variables to be set prior to the execution of a step.
815
-
897
+ Specifies what flows belong to the same project.
816
898
 
817
- Parameters
818
- ----------
819
- vars : Dict[str, str], default {}
820
- Dictionary of environment variables to set.
821
- """
822
- ...
823
-
824
- @typing.overload
825
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
826
- ...
827
-
828
- @typing.overload
829
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
830
- ...
831
-
832
- def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
833
- """
834
- Specifies environment variables to be set prior to the execution of a step.
899
+ A project-specific namespace is created for all flows that
900
+ use the same `@project(name)`.
835
901
 
836
902
 
837
903
  Parameters
838
904
  ----------
839
- vars : Dict[str, str], default {}
840
- Dictionary of environment variables to set.
905
+ name : str
906
+ Project name. Make sure that the name is unique amongst all
907
+ projects that use the same production scheduler. The name may
908
+ contain only lowercase alphanumeric characters and underscores.
909
+
910
+ branch : Optional[str], default None
911
+ The branch to use. If not specified, the branch is set to
912
+ `user.<username>` unless `production` is set to `True`. This can
913
+ also be set on the command line using `--branch` as a top-level option.
914
+ It is an error to specify `branch` in the decorator and on the command line.
915
+
916
+ production : bool, default False
917
+ Whether or not the branch is the production branch. This can also be set on the
918
+ command line using `--production` as a top-level option. It is an error to specify
919
+ `production` in the decorator and on the command line.
920
+ The project branch name will be:
921
+ - if `branch` is specified:
922
+ - if `production` is True: `prod.<branch>`
923
+ - if `production` is False: `test.<branch>`
924
+ - if `branch` is not specified:
925
+ - if `production` is True: `prod`
926
+ - if `production` is False: `user.<username>`
841
927
  """
842
928
  ...
843
929
 
844
930
  @typing.overload
845
- def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
931
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
846
932
  """
847
- Specifies that the step will success under all circumstances.
848
-
849
- The decorator will create an optional artifact, specified by `var`, which
850
- contains the exception raised. You can use it to detect the presence
851
- of errors, indicating that all happy-path artifacts produced by the step
852
- are missing.
933
+ Specifies the PyPI packages for all steps of the flow.
853
934
 
935
+ Use `@pypi_base` to set common packages required by all
936
+ steps and use `@pypi` to specify step-specific overrides.
854
937
 
855
938
  Parameters
856
939
  ----------
857
- var : str, optional, default None
858
- Name of the artifact in which to store the caught exception.
859
- If not specified, the exception is not stored.
860
- print_exception : bool, default True
861
- Determines whether or not the exception is printed to
862
- stdout when caught.
940
+ packages : Dict[str, str], default: {}
941
+ Packages to use for this flow. The key is the name of the package
942
+ and the value is the version to use.
943
+ python : str, optional, default: None
944
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
945
+ that the version used will correspond to the version of the Python interpreter used to start the run.
863
946
  """
864
947
  ...
865
948
 
866
949
  @typing.overload
867
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
868
- ...
869
-
870
- @typing.overload
871
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
950
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
872
951
  ...
873
952
 
874
- def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
953
+ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
875
954
  """
876
- Specifies that the step will success under all circumstances.
877
-
878
- The decorator will create an optional artifact, specified by `var`, which
879
- contains the exception raised. You can use it to detect the presence
880
- of errors, indicating that all happy-path artifacts produced by the step
881
- are missing.
955
+ Specifies the PyPI packages for all steps of the flow.
882
956
 
957
+ Use `@pypi_base` to set common packages required by all
958
+ steps and use `@pypi` to specify step-specific overrides.
883
959
 
884
960
  Parameters
885
961
  ----------
886
- var : str, optional, default None
887
- Name of the artifact in which to store the caught exception.
888
- If not specified, the exception is not stored.
889
- print_exception : bool, default True
890
- Determines whether or not the exception is printed to
891
- stdout when caught.
962
+ packages : Dict[str, str], default: {}
963
+ Packages to use for this flow. The key is the name of the package
964
+ and the value is the version to use.
965
+ python : str, optional, default: None
966
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
967
+ that the version used will correspond to the version of the Python interpreter used to start the run.
892
968
  """
893
969
  ...
894
970
 
895
971
  @typing.overload
896
- def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
972
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
897
973
  """
898
- Specifies the event(s) that this flow depends on.
974
+ Specifies the flow(s) that this flow depends on.
899
975
 
900
976
  ```
901
- @trigger(event='foo')
977
+ @trigger_on_finish(flow='FooFlow')
902
978
  ```
903
979
  or
904
980
  ```
905
- @trigger(events=['foo', 'bar'])
981
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
906
982
  ```
983
+ This decorator respects the @project decorator and triggers the flow
984
+ when upstream runs within the same namespace complete successfully
907
985
 
908
- Additionally, you can specify the parameter mappings
909
- to map event payload to Metaflow parameters for the flow.
986
+ Additionally, you can specify project aware upstream flow dependencies
987
+ by specifying the fully qualified project_flow_name.
910
988
  ```
911
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
989
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
912
990
  ```
913
991
  or
914
992
  ```
915
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
916
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
993
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
917
994
  ```
918
995
 
919
- 'parameters' can also be a list of strings and tuples like so:
920
- ```
921
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
922
- ```
923
- This is equivalent to:
996
+ You can also specify just the project or project branch (other values will be
997
+ inferred from the current project or project branch):
924
998
  ```
925
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
999
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
926
1000
  ```
927
1001
 
1002
+ Note that `branch` is typically one of:
1003
+ - `prod`
1004
+ - `user.bob`
1005
+ - `test.my_experiment`
1006
+ - `prod.staging`
1007
+
928
1008
 
929
1009
  Parameters
930
1010
  ----------
931
- event : Union[str, Dict[str, Any]], optional, default None
932
- Event dependency for this flow.
933
- events : List[Union[str, Dict[str, Any]]], default []
934
- Events dependency for this flow.
1011
+ flow : Union[str, Dict[str, str]], optional, default None
1012
+ Upstream flow dependency for this flow.
1013
+ flows : List[Union[str, Dict[str, str]]], default []
1014
+ Upstream flow dependencies for this flow.
935
1015
  options : Dict[str, Any], default {}
936
1016
  Backend-specific configuration for tuning eventing behavior.
937
1017
  """
938
1018
  ...
939
1019
 
940
1020
  @typing.overload
941
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1021
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
942
1022
  ...
943
1023
 
944
- def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
1024
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
945
1025
  """
946
- Specifies the event(s) that this flow depends on.
1026
+ Specifies the flow(s) that this flow depends on.
947
1027
 
948
1028
  ```
949
- @trigger(event='foo')
1029
+ @trigger_on_finish(flow='FooFlow')
950
1030
  ```
951
1031
  or
952
1032
  ```
953
- @trigger(events=['foo', 'bar'])
1033
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
954
1034
  ```
1035
+ This decorator respects the @project decorator and triggers the flow
1036
+ when upstream runs within the same namespace complete successfully
955
1037
 
956
- Additionally, you can specify the parameter mappings
957
- to map event payload to Metaflow parameters for the flow.
1038
+ Additionally, you can specify project aware upstream flow dependencies
1039
+ by specifying the fully qualified project_flow_name.
958
1040
  ```
959
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1041
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
960
1042
  ```
961
1043
  or
962
1044
  ```
963
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
964
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1045
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
965
1046
  ```
966
1047
 
967
- 'parameters' can also be a list of strings and tuples like so:
968
- ```
969
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
970
- ```
971
- This is equivalent to:
1048
+ You can also specify just the project or project branch (other values will be
1049
+ inferred from the current project or project branch):
972
1050
  ```
973
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1051
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
974
1052
  ```
975
1053
 
1054
+ Note that `branch` is typically one of:
1055
+ - `prod`
1056
+ - `user.bob`
1057
+ - `test.my_experiment`
1058
+ - `prod.staging`
1059
+
976
1060
 
977
1061
  Parameters
978
1062
  ----------
979
- event : Union[str, Dict[str, Any]], optional, default None
980
- Event dependency for this flow.
981
- events : List[Union[str, Dict[str, Any]]], default []
982
- Events dependency for this flow.
1063
+ flow : Union[str, Dict[str, str]], optional, default None
1064
+ Upstream flow dependency for this flow.
1065
+ flows : List[Union[str, Dict[str, str]]], default []
1066
+ Upstream flow dependencies for this flow.
983
1067
  options : Dict[str, Any], default {}
984
1068
  Backend-specific configuration for tuning eventing behavior.
985
1069
  """
986
1070
  ...
987
1071
 
988
- @typing.overload
989
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
990
- """
991
- Specifies the PyPI packages for all steps of the flow.
992
-
993
- Use `@pypi_base` to set common packages required by all
994
- steps and use `@pypi` to specify step-specific overrides.
995
-
996
- Parameters
997
- ----------
998
- packages : Dict[str, str], default: {}
999
- Packages to use for this flow. The key is the name of the package
1000
- and the value is the version to use.
1001
- python : str, optional, default: None
1002
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1003
- that the version used will correspond to the version of the Python interpreter used to start the run.
1004
- """
1005
- ...
1006
-
1007
- @typing.overload
1008
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1009
- ...
1010
-
1011
- def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
1012
- """
1013
- Specifies the PyPI packages for all steps of the flow.
1014
-
1015
- Use `@pypi_base` to set common packages required by all
1016
- steps and use `@pypi` to specify step-specific overrides.
1017
-
1018
- Parameters
1019
- ----------
1020
- packages : Dict[str, str], default: {}
1021
- Packages to use for this flow. The key is the name of the package
1022
- and the value is the version to use.
1023
- python : str, optional, default: None
1024
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1025
- that the version used will correspond to the version of the Python interpreter used to start the run.
1026
- """
1027
- ...
1028
-
1029
1072
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1030
1073
  """
1031
1074
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1069,139 +1112,97 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1069
1112
  """
1070
1113
  ...
1071
1114
 
1072
- def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1115
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1073
1116
  """
1074
- Specifies what flows belong to the same project.
1075
-
1076
- A project-specific namespace is created for all flows that
1077
- use the same `@project(name)`.
1117
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1118
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1119
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1120
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1121
+ starts only after all sensors finish.
1078
1122
 
1079
1123
 
1080
1124
  Parameters
1081
1125
  ----------
1126
+ timeout : int
1127
+ Time, in seconds before the task times out and fails. (Default: 3600)
1128
+ poke_interval : int
1129
+ Time in seconds that the job should wait in between each try. (Default: 60)
1130
+ mode : str
1131
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1132
+ exponential_backoff : bool
1133
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1134
+ pool : str
1135
+ the slot pool this task should run in,
1136
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1137
+ soft_fail : bool
1138
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1082
1139
  name : str
1083
- Project name. Make sure that the name is unique amongst all
1084
- projects that use the same production scheduler. The name may
1085
- contain only lowercase alphanumeric characters and underscores.
1086
-
1087
- branch : Optional[str], default None
1088
- The branch to use. If not specified, the branch is set to
1089
- `user.<username>` unless `production` is set to `True`. This can
1090
- also be set on the command line using `--branch` as a top-level option.
1091
- It is an error to specify `branch` in the decorator and on the command line.
1092
-
1093
- production : bool, default False
1094
- Whether or not the branch is the production branch. This can also be set on the
1095
- command line using `--production` as a top-level option. It is an error to specify
1096
- `production` in the decorator and on the command line.
1097
- The project branch name will be:
1098
- - if `branch` is specified:
1099
- - if `production` is True: `prod.<branch>`
1100
- - if `production` is False: `test.<branch>`
1101
- - if `branch` is not specified:
1102
- - if `production` is True: `prod`
1103
- - if `production` is False: `user.<username>`
1140
+ Name of the sensor on Airflow
1141
+ description : str
1142
+ Description of sensor in the Airflow UI
1143
+ bucket_key : Union[str, List[str]]
1144
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1145
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1146
+ bucket_name : str
1147
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1148
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1149
+ wildcard_match : bool
1150
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1151
+ aws_conn_id : str
1152
+ a reference to the s3 connection on Airflow. (Default: None)
1153
+ verify : bool
1154
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1104
1155
  """
1105
1156
  ...
1106
1157
 
1107
1158
  @typing.overload
1108
- def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1159
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1109
1160
  """
1110
- Specifies the flow(s) that this flow depends on.
1111
-
1112
- ```
1113
- @trigger_on_finish(flow='FooFlow')
1114
- ```
1115
- or
1116
- ```
1117
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1118
- ```
1119
- This decorator respects the @project decorator and triggers the flow
1120
- when upstream runs within the same namespace complete successfully
1121
-
1122
- Additionally, you can specify project aware upstream flow dependencies
1123
- by specifying the fully qualified project_flow_name.
1124
- ```
1125
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1126
- ```
1127
- or
1128
- ```
1129
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1130
- ```
1131
-
1132
- You can also specify just the project or project branch (other values will be
1133
- inferred from the current project or project branch):
1134
- ```
1135
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1136
- ```
1161
+ Specifies the Conda environment for all steps of the flow.
1137
1162
 
1138
- Note that `branch` is typically one of:
1139
- - `prod`
1140
- - `user.bob`
1141
- - `test.my_experiment`
1142
- - `prod.staging`
1163
+ Use `@conda_base` to set common libraries required by all
1164
+ steps and use `@conda` to specify step-specific additions.
1143
1165
 
1144
1166
 
1145
1167
  Parameters
1146
1168
  ----------
1147
- flow : Union[str, Dict[str, str]], optional, default None
1148
- Upstream flow dependency for this flow.
1149
- flows : List[Union[str, Dict[str, str]]], default []
1150
- Upstream flow dependencies for this flow.
1151
- options : Dict[str, Any], default {}
1152
- Backend-specific configuration for tuning eventing behavior.
1169
+ packages : Dict[str, str], default {}
1170
+ Packages to use for this flow. The key is the name of the package
1171
+ and the value is the version to use.
1172
+ libraries : Dict[str, str], default {}
1173
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1174
+ python : str, optional, default None
1175
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1176
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1177
+ disabled : bool, default False
1178
+ If set to True, disables Conda.
1153
1179
  """
1154
1180
  ...
1155
1181
 
1156
1182
  @typing.overload
1157
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1183
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1158
1184
  ...
1159
1185
 
1160
- def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
1186
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1161
1187
  """
1162
- Specifies the flow(s) that this flow depends on.
1163
-
1164
- ```
1165
- @trigger_on_finish(flow='FooFlow')
1166
- ```
1167
- or
1168
- ```
1169
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1170
- ```
1171
- This decorator respects the @project decorator and triggers the flow
1172
- when upstream runs within the same namespace complete successfully
1173
-
1174
- Additionally, you can specify project aware upstream flow dependencies
1175
- by specifying the fully qualified project_flow_name.
1176
- ```
1177
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1178
- ```
1179
- or
1180
- ```
1181
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1182
- ```
1183
-
1184
- You can also specify just the project or project branch (other values will be
1185
- inferred from the current project or project branch):
1186
- ```
1187
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1188
- ```
1188
+ Specifies the Conda environment for all steps of the flow.
1189
1189
 
1190
- Note that `branch` is typically one of:
1191
- - `prod`
1192
- - `user.bob`
1193
- - `test.my_experiment`
1194
- - `prod.staging`
1190
+ Use `@conda_base` to set common libraries required by all
1191
+ steps and use `@conda` to specify step-specific additions.
1195
1192
 
1196
1193
 
1197
1194
  Parameters
1198
1195
  ----------
1199
- flow : Union[str, Dict[str, str]], optional, default None
1200
- Upstream flow dependency for this flow.
1201
- flows : List[Union[str, Dict[str, str]]], default []
1202
- Upstream flow dependencies for this flow.
1203
- options : Dict[str, Any], default {}
1204
- Backend-specific configuration for tuning eventing behavior.
1196
+ packages : Dict[str, str], default {}
1197
+ Packages to use for this flow. The key is the name of the package
1198
+ and the value is the version to use.
1199
+ libraries : Dict[str, str], default {}
1200
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1201
+ python : str, optional, default None
1202
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1203
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1204
+ disabled : bool, default False
1205
+ If set to True, disables Conda.
1205
1206
  """
1206
1207
  ...
1207
1208
 
@@ -1257,96 +1258,95 @@ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = F
1257
1258
  ...
1258
1259
 
1259
1260
  @typing.overload
1260
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1261
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1261
1262
  """
1262
- Specifies the Conda environment for all steps of the flow.
1263
+ Specifies the event(s) that this flow depends on.
1263
1264
 
1264
- Use `@conda_base` to set common libraries required by all
1265
- steps and use `@conda` to specify step-specific additions.
1265
+ ```
1266
+ @trigger(event='foo')
1267
+ ```
1268
+ or
1269
+ ```
1270
+ @trigger(events=['foo', 'bar'])
1271
+ ```
1272
+
1273
+ Additionally, you can specify the parameter mappings
1274
+ to map event payload to Metaflow parameters for the flow.
1275
+ ```
1276
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1277
+ ```
1278
+ or
1279
+ ```
1280
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1281
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1282
+ ```
1283
+
1284
+ 'parameters' can also be a list of strings and tuples like so:
1285
+ ```
1286
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1287
+ ```
1288
+ This is equivalent to:
1289
+ ```
1290
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1291
+ ```
1266
1292
 
1267
1293
 
1268
1294
  Parameters
1269
1295
  ----------
1270
- packages : Dict[str, str], default {}
1271
- Packages to use for this flow. The key is the name of the package
1272
- and the value is the version to use.
1273
- libraries : Dict[str, str], default {}
1274
- Supported for backward compatibility. When used with packages, packages will take precedence.
1275
- python : str, optional, default None
1276
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1277
- that the version used will correspond to the version of the Python interpreter used to start the run.
1278
- disabled : bool, default False
1279
- If set to True, disables Conda.
1296
+ event : Union[str, Dict[str, Any]], optional, default None
1297
+ Event dependency for this flow.
1298
+ events : List[Union[str, Dict[str, Any]]], default []
1299
+ Events dependency for this flow.
1300
+ options : Dict[str, Any], default {}
1301
+ Backend-specific configuration for tuning eventing behavior.
1280
1302
  """
1281
1303
  ...
1282
1304
 
1283
1305
  @typing.overload
1284
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1306
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
1307
  ...
1286
1308
 
1287
- def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1309
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
1288
1310
  """
1289
- Specifies the Conda environment for all steps of the flow.
1311
+ Specifies the event(s) that this flow depends on.
1290
1312
 
1291
- Use `@conda_base` to set common libraries required by all
1292
- steps and use `@conda` to specify step-specific additions.
1313
+ ```
1314
+ @trigger(event='foo')
1315
+ ```
1316
+ or
1317
+ ```
1318
+ @trigger(events=['foo', 'bar'])
1319
+ ```
1293
1320
 
1321
+ Additionally, you can specify the parameter mappings
1322
+ to map event payload to Metaflow parameters for the flow.
1323
+ ```
1324
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1325
+ ```
1326
+ or
1327
+ ```
1328
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1329
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1330
+ ```
1294
1331
 
1295
- Parameters
1296
- ----------
1297
- packages : Dict[str, str], default {}
1298
- Packages to use for this flow. The key is the name of the package
1299
- and the value is the version to use.
1300
- libraries : Dict[str, str], default {}
1301
- Supported for backward compatibility. When used with packages, packages will take precedence.
1302
- python : str, optional, default None
1303
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1304
- that the version used will correspond to the version of the Python interpreter used to start the run.
1305
- disabled : bool, default False
1306
- If set to True, disables Conda.
1307
- """
1308
- ...
1309
-
1310
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1311
- """
1312
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1313
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1314
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1315
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1316
- starts only after all sensors finish.
1332
+ 'parameters' can also be a list of strings and tuples like so:
1333
+ ```
1334
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1335
+ ```
1336
+ This is equivalent to:
1337
+ ```
1338
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1339
+ ```
1317
1340
 
1318
1341
 
1319
1342
  Parameters
1320
1343
  ----------
1321
- timeout : int
1322
- Time, in seconds before the task times out and fails. (Default: 3600)
1323
- poke_interval : int
1324
- Time in seconds that the job should wait in between each try. (Default: 60)
1325
- mode : str
1326
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1327
- exponential_backoff : bool
1328
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1329
- pool : str
1330
- the slot pool this task should run in,
1331
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1332
- soft_fail : bool
1333
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1334
- name : str
1335
- Name of the sensor on Airflow
1336
- description : str
1337
- Description of sensor in the Airflow UI
1338
- bucket_key : Union[str, List[str]]
1339
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1340
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1341
- bucket_name : str
1342
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1343
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1344
- wildcard_match : bool
1345
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1346
- aws_conn_id : str
1347
- a reference to the s3 connection on Airflow. (Default: None)
1348
- verify : bool
1349
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1344
+ event : Union[str, Dict[str, Any]], optional, default None
1345
+ Event dependency for this flow.
1346
+ events : List[Union[str, Dict[str, Any]]], default []
1347
+ Events dependency for this flow.
1348
+ options : Dict[str, Any], default {}
1349
+ Backend-specific configuration for tuning eventing behavior.
1350
1350
  """
1351
1351
  ...
1352
1352