metaflow-stubs 2.19.2__py2.py3-none-any.whl → 2.19.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of metaflow-stubs might be problematic. See the package registry's advisory page for more details.

Files changed (168)
  1. metaflow-stubs/__init__.pyi +557 -557
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +5 -5
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +5 -5
  24. metaflow-stubs/packaging_sys/backend.pyi +2 -2
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +3 -3
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +3 -3
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +12 -12
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/json_viewer.pyi +4 -4
  83. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  85. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  86. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  87. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  88. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3.pyi +3 -3
  93. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  94. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  95. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  96. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  97. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  102. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  105. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  108. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  116. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  117. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/parsers.pyi +2 -2
  119. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  121. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  123. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  126. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  127. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  128. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  131. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  133. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  134. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  135. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  136. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  137. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  138. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  139. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  140. metaflow-stubs/pylint_wrapper.pyi +2 -2
  141. metaflow-stubs/runner/__init__.pyi +2 -2
  142. metaflow-stubs/runner/deployer.pyi +34 -34
  143. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  144. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  145. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  146. metaflow-stubs/runner/nbrun.pyi +2 -2
  147. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  148. metaflow-stubs/runner/utils.pyi +3 -3
  149. metaflow-stubs/system/__init__.pyi +2 -2
  150. metaflow-stubs/system/system_logger.pyi +2 -2
  151. metaflow-stubs/system/system_monitor.pyi +2 -2
  152. metaflow-stubs/tagging_util.pyi +2 -2
  153. metaflow-stubs/tuple_util.pyi +2 -2
  154. metaflow-stubs/user_configs/__init__.pyi +2 -2
  155. metaflow-stubs/user_configs/config_options.pyi +3 -3
  156. metaflow-stubs/user_configs/config_parameters.pyi +7 -7
  157. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  158. metaflow-stubs/user_decorators/common.pyi +2 -2
  159. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  160. metaflow-stubs/user_decorators/mutable_step.pyi +2 -2
  161. metaflow-stubs/user_decorators/user_flow_decorator.pyi +4 -4
  162. metaflow-stubs/user_decorators/user_step_decorator.pyi +5 -5
  163. metaflow-stubs/version.pyi +2 -2
  164. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.4.dist-info}/METADATA +2 -2
  165. metaflow_stubs-2.19.4.dist-info/RECORD +168 -0
  166. metaflow_stubs-2.19.2.dist-info/RECORD +0 -168
  167. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.4.dist-info}/WHEEL +0 -0
  168. {metaflow_stubs-2.19.2.dist-info → metaflow_stubs-2.19.4.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.19.2 #
4
- # Generated on 2025-10-28T11:13:58.765115 #
3
+ # MF version: 2.19.4 #
4
+ # Generated on 2025-10-29T20:17:51.770903 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
@@ -47,9 +47,9 @@ from . import plugins as plugins
47
47
  from .plugins.datatools.s3.s3 import S3 as S3
48
48
  from . import includefile as includefile
49
49
  from .includefile import IncludeFile as IncludeFile
50
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
51
50
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
52
51
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
52
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
53
53
  from .plugins.parsers import yaml_parser as yaml_parser
54
54
  from . import cards as cards
55
55
  from . import client as client
@@ -155,165 +155,200 @@ def step(f: typing.Callable[[~FlowSpecDerived], NoneType] | typing.Callable[[~Fl
155
155
  ...
156
156
 
157
157
  @typing.overload
158
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
158
+ def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
159
159
  """
160
- Specifies environment variables to be set prior to the execution of a step.
160
+ Specifies secrets to be retrieved and injected as environment variables prior to
161
+ the execution of a step.
161
162
 
162
163
 
163
164
  Parameters
164
165
  ----------
165
- vars : Dict[str, str], default {}
166
- Dictionary of environment variables to set.
166
+ sources : List[Union[str, Dict[str, Any]]], default: []
167
+ List of secret specs, defining how the secrets are to be retrieved
168
+ role : str, optional, default: None
169
+ Role to use for fetching secrets
167
170
  """
168
171
  ...
169
172
 
170
173
  @typing.overload
171
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
174
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
172
175
  ...
173
176
 
174
177
  @typing.overload
175
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
178
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
176
179
  ...
177
180
 
178
- def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
181
+ def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
179
182
  """
180
- Specifies environment variables to be set prior to the execution of a step.
183
+ Specifies secrets to be retrieved and injected as environment variables prior to
184
+ the execution of a step.
181
185
 
182
186
 
183
187
  Parameters
184
188
  ----------
185
- vars : Dict[str, str], default {}
186
- Dictionary of environment variables to set.
189
+ sources : List[Union[str, Dict[str, Any]]], default: []
190
+ List of secret specs, defining how the secrets are to be retrieved
191
+ role : str, optional, default: None
192
+ Role to use for fetching secrets
187
193
  """
188
194
  ...
189
195
 
190
196
  @typing.overload
191
- def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
197
+ def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
192
198
  """
193
- Specifies the resources needed when executing this step.
194
-
195
- Use `@resources` to specify the resource requirements
196
- independently of the specific compute layer (`@batch`, `@kubernetes`).
199
+ Creates a human-readable report, a Metaflow Card, after this step completes.
197
200
 
198
- You can choose the compute layer on the command line by executing e.g.
199
- ```
200
- python myflow.py run --with batch
201
- ```
202
- or
203
- ```
204
- python myflow.py run --with kubernetes
205
- ```
206
- which executes the flow on the desired system using the
207
- requirements specified in `@resources`.
201
+ Note that you may add multiple `@card` decorators in a step with different parameters.
208
202
 
209
203
 
210
204
  Parameters
211
205
  ----------
212
- cpu : int, default 1
213
- Number of CPUs required for this step.
214
- gpu : int, optional, default None
215
- Number of GPUs required for this step.
216
- disk : int, optional, default None
217
- Disk size (in MB) required for this step. Only applies on Kubernetes.
218
- memory : int, default 4096
219
- Memory size (in MB) required for this step.
220
- shared_memory : int, optional, default None
221
- The value for the size (in MiB) of the /dev/shm volume for this step.
222
- This parameter maps to the `--shm-size` option in Docker.
206
+ type : str, default 'default'
207
+ Card type.
208
+ id : str, optional, default None
209
+ If multiple cards are present, use this id to identify this card.
210
+ options : Dict[str, Any], default {}
211
+ Options passed to the card. The contents depend on the card type.
212
+ timeout : int, default 45
213
+ Interrupt reporting if it takes more than this many seconds.
223
214
  """
224
215
  ...
225
216
 
226
217
  @typing.overload
227
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
218
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
228
219
  ...
229
220
 
230
221
  @typing.overload
231
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
222
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
232
223
  ...
233
224
 
234
- def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
225
+ def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
235
226
  """
236
- Specifies the resources needed when executing this step.
227
+ Creates a human-readable report, a Metaflow Card, after this step completes.
237
228
 
238
- Use `@resources` to specify the resource requirements
239
- independently of the specific compute layer (`@batch`, `@kubernetes`).
229
+ Note that you may add multiple `@card` decorators in a step with different parameters.
240
230
 
241
- You can choose the compute layer on the command line by executing e.g.
242
- ```
243
- python myflow.py run --with batch
244
- ```
245
- or
246
- ```
247
- python myflow.py run --with kubernetes
248
- ```
249
- which executes the flow on the desired system using the
250
- requirements specified in `@resources`.
231
+
232
+ Parameters
233
+ ----------
234
+ type : str, default 'default'
235
+ Card type.
236
+ id : str, optional, default None
237
+ If multiple cards are present, use this id to identify this card.
238
+ options : Dict[str, Any], default {}
239
+ Options passed to the card. The contents depend on the card type.
240
+ timeout : int, default 45
241
+ Interrupt reporting if it takes more than this many seconds.
242
+ """
243
+ ...
244
+
245
+ @typing.overload
246
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
247
+ """
248
+ Specifies the PyPI packages for the step.
249
+
250
+ Information in this decorator will augment any
251
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
252
+ you can use `@pypi_base` to set packages required by all
253
+ steps and use `@pypi` to specify step-specific overrides.
251
254
 
252
255
 
253
256
  Parameters
254
257
  ----------
255
- cpu : int, default 1
256
- Number of CPUs required for this step.
257
- gpu : int, optional, default None
258
- Number of GPUs required for this step.
259
- disk : int, optional, default None
260
- Disk size (in MB) required for this step. Only applies on Kubernetes.
261
- memory : int, default 4096
262
- Memory size (in MB) required for this step.
263
- shared_memory : int, optional, default None
264
- The value for the size (in MiB) of the /dev/shm volume for this step.
265
- This parameter maps to the `--shm-size` option in Docker.
258
+ packages : Dict[str, str], default: {}
259
+ Packages to use for this step. The key is the name of the package
260
+ and the value is the version to use.
261
+ python : str, optional, default: None
262
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
263
+ that the version used will correspond to the version of the Python interpreter used to start the run.
266
264
  """
267
265
  ...
268
266
 
269
267
  @typing.overload
270
- def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
268
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
269
+ ...
270
+
271
+ @typing.overload
272
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
273
+ ...
274
+
275
+ def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
271
276
  """
272
- Specifies that the step will success under all circumstances.
277
+ Specifies the PyPI packages for the step.
273
278
 
274
- The decorator will create an optional artifact, specified by `var`, which
275
- contains the exception raised. You can use it to detect the presence
276
- of errors, indicating that all happy-path artifacts produced by the step
277
- are missing.
279
+ Information in this decorator will augment any
280
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
281
+ you can use `@pypi_base` to set packages required by all
282
+ steps and use `@pypi` to specify step-specific overrides.
278
283
 
279
284
 
280
285
  Parameters
281
286
  ----------
282
- var : str, optional, default None
283
- Name of the artifact in which to store the caught exception.
284
- If not specified, the exception is not stored.
285
- print_exception : bool, default True
286
- Determines whether or not the exception is printed to
287
- stdout when caught.
287
+ packages : Dict[str, str], default: {}
288
+ Packages to use for this step. The key is the name of the package
289
+ and the value is the version to use.
290
+ python : str, optional, default: None
291
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
292
+ that the version used will correspond to the version of the Python interpreter used to start the run.
288
293
  """
289
294
  ...
290
295
 
291
296
  @typing.overload
292
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
297
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
298
+ """
299
+ Specifies the Conda environment for the step.
300
+
301
+ Information in this decorator will augment any
302
+ attributes set in the `@conda_base` flow-level decorator. Hence,
303
+ you can use `@conda_base` to set packages required by all
304
+ steps and use `@conda` to specify step-specific overrides.
305
+
306
+
307
+ Parameters
308
+ ----------
309
+ packages : Dict[str, str], default {}
310
+ Packages to use for this step. The key is the name of the package
311
+ and the value is the version to use.
312
+ libraries : Dict[str, str], default {}
313
+ Supported for backward compatibility. When used with packages, packages will take precedence.
314
+ python : str, optional, default None
315
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
316
+ that the version used will correspond to the version of the Python interpreter used to start the run.
317
+ disabled : bool, default False
318
+ If set to True, disables @conda.
319
+ """
293
320
  ...
294
321
 
295
322
  @typing.overload
296
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
323
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
297
324
  ...
298
325
 
299
- def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
326
+ @typing.overload
327
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
328
+ ...
329
+
330
+ def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
300
331
  """
301
- Specifies that the step will success under all circumstances.
332
+ Specifies the Conda environment for the step.
302
333
 
303
- The decorator will create an optional artifact, specified by `var`, which
304
- contains the exception raised. You can use it to detect the presence
305
- of errors, indicating that all happy-path artifacts produced by the step
306
- are missing.
334
+ Information in this decorator will augment any
335
+ attributes set in the `@conda_base` flow-level decorator. Hence,
336
+ you can use `@conda_base` to set packages required by all
337
+ steps and use `@conda` to specify step-specific overrides.
307
338
 
308
339
 
309
340
  Parameters
310
341
  ----------
311
- var : str, optional, default None
312
- Name of the artifact in which to store the caught exception.
313
- If not specified, the exception is not stored.
314
- print_exception : bool, default True
315
- Determines whether or not the exception is printed to
316
- stdout when caught.
342
+ packages : Dict[str, str], default {}
343
+ Packages to use for this step. The key is the name of the package
344
+ and the value is the version to use.
345
+ libraries : Dict[str, str], default {}
346
+ Supported for backward compatibility. When used with packages, packages will take precedence.
347
+ python : str, optional, default None
348
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
349
+ that the version used will correspond to the version of the Python interpreter used to start the run.
350
+ disabled : bool, default False
351
+ If set to True, disables @conda.
317
352
  """
318
353
  ...
319
354
 
@@ -431,159 +466,52 @@ def timeout(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_gene
431
466
  """
432
467
  ...
433
468
 
434
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
469
+ @typing.overload
470
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
435
471
  """
436
- Specifies that this step should execute on Kubernetes.
472
+ Specifies environment variables to be set prior to the execution of a step.
473
+
474
+
475
+ Parameters
476
+ ----------
477
+ vars : Dict[str, str], default {}
478
+ Dictionary of environment variables to set.
479
+ """
480
+ ...
481
+
482
+ @typing.overload
483
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
484
+ ...
485
+
486
+ @typing.overload
487
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
488
+ ...
489
+
490
+ def environment(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, vars: typing.Dict[str, str] = {}):
491
+ """
492
+ Specifies environment variables to be set prior to the execution of a step.
493
+
494
+
495
+ Parameters
496
+ ----------
497
+ vars : Dict[str, str], default {}
498
+ Dictionary of environment variables to set.
499
+ """
500
+ ...
501
+
502
+ @typing.overload
503
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
504
+ """
505
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
437
506
 
438
507
 
439
508
  Parameters
440
509
  ----------
441
510
  cpu : int, default 1
442
511
  Number of CPUs required for this step. If `@resources` is
443
- also present, the maximum value from all decorators is used.
444
- memory : int, default 4096
445
- Memory size (in MB) required for this step. If
446
- `@resources` is also present, the maximum value from all decorators is
447
- used.
448
- disk : int, default 10240
449
- Disk size (in MB) required for this step. If
450
- `@resources` is also present, the maximum value from all decorators is
451
- used.
452
- image : str, optional, default None
453
- Docker image to use when launching on Kubernetes. If not specified, and
454
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
455
- not, a default Docker image mapping to the current version of Python is used.
456
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
457
- If given, the imagePullPolicy to be applied to the Docker image of the step.
458
- image_pull_secrets: List[str], default []
459
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
460
- Kubernetes image pull secrets to use when pulling container images
461
- in Kubernetes.
462
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
463
- Kubernetes service account to use when launching pod in Kubernetes.
464
- secrets : List[str], optional, default None
465
- Kubernetes secrets to use when launching pod in Kubernetes. These
466
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
467
- in Metaflow configuration.
468
- node_selector: Union[Dict[str,str], str], optional, default None
469
- Kubernetes node selector(s) to apply to the pod running the task.
470
- Can be passed in as a comma separated string of values e.g.
471
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
472
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
473
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
474
- Kubernetes namespace to use when launching pod in Kubernetes.
475
- gpu : int, optional, default None
476
- Number of GPUs required for this step. A value of zero implies that
477
- the scheduled node should not have GPUs.
478
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
479
- The vendor of the GPUs to be used for this step.
480
- tolerations : List[Dict[str,str]], default []
481
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
482
- Kubernetes tolerations to use when launching pod in Kubernetes.
483
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
484
- Kubernetes labels to use when launching pod in Kubernetes.
485
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
486
- Kubernetes annotations to use when launching pod in Kubernetes.
487
- use_tmpfs : bool, default False
488
- This enables an explicit tmpfs mount for this step.
489
- tmpfs_tempdir : bool, default True
490
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
491
- tmpfs_size : int, optional, default: None
492
- The value for the size (in MiB) of the tmpfs mount for this step.
493
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
494
- memory allocated for this step.
495
- tmpfs_path : str, optional, default /metaflow_temp
496
- Path to tmpfs mount for this step.
497
- persistent_volume_claims : Dict[str, str], optional, default None
498
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
499
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
500
- shared_memory: int, optional
501
- Shared memory size (in MiB) required for this step
502
- port: int, optional
503
- Port number to specify in the Kubernetes job object
504
- compute_pool : str, optional, default None
505
- Compute pool to be used for for this step.
506
- If not specified, any accessible compute pool within the perimeter is used.
507
- hostname_resolution_timeout: int, default 10 * 60
508
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
509
- Only applicable when @parallel is used.
510
- qos: str, default: Burstable
511
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
512
-
513
- security_context: Dict[str, Any], optional, default None
514
- Container security context. Applies to the task container. Allows the following keys:
515
- - privileged: bool, optional, default None
516
- - allow_privilege_escalation: bool, optional, default None
517
- - run_as_user: int, optional, default None
518
- - run_as_group: int, optional, default None
519
- - run_as_non_root: bool, optional, default None
520
- """
521
- ...
522
-
523
- @typing.overload
524
- def pypi(*, packages: typing.Dict[str, str] = {}, python: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
525
- """
526
- Specifies the PyPI packages for the step.
527
-
528
- Information in this decorator will augment any
529
- attributes set in the `@pyi_base` flow-level decorator. Hence,
530
- you can use `@pypi_base` to set packages required by all
531
- steps and use `@pypi` to specify step-specific overrides.
532
-
533
-
534
- Parameters
535
- ----------
536
- packages : Dict[str, str], default: {}
537
- Packages to use for this step. The key is the name of the package
538
- and the value is the version to use.
539
- python : str, optional, default: None
540
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
541
- that the version used will correspond to the version of the Python interpreter used to start the run.
542
- """
543
- ...
544
-
545
- @typing.overload
546
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
547
- ...
548
-
549
- @typing.overload
550
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
551
- ...
552
-
553
- def pypi(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, python: str | None = None):
554
- """
555
- Specifies the PyPI packages for the step.
556
-
557
- Information in this decorator will augment any
558
- attributes set in the `@pyi_base` flow-level decorator. Hence,
559
- you can use `@pypi_base` to set packages required by all
560
- steps and use `@pypi` to specify step-specific overrides.
561
-
562
-
563
- Parameters
564
- ----------
565
- packages : Dict[str, str], default: {}
566
- Packages to use for this step. The key is the name of the package
567
- and the value is the version to use.
568
- python : str, optional, default: None
569
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
570
- that the version used will correspond to the version of the Python interpreter used to start the run.
571
- """
572
- ...
573
-
574
- @typing.overload
575
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: str | None = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: int | None = None, max_swap: int | None = None, swappiness: int | None = None, aws_batch_tags: typing.Dict[str, str] | None = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: str | None = None, log_options: typing.List[str] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
576
- """
577
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
578
-
579
-
580
- Parameters
581
- ----------
582
- cpu : int, default 1
583
- Number of CPUs required for this step. If `@resources` is
584
- also present, the maximum value from all decorators is used.
585
- gpu : int, default 0
586
- Number of GPUs required for this step. If `@resources` is
512
+ also present, the maximum value from all decorators is used.
513
+ gpu : int, default 0
514
+ Number of GPUs required for this step. If `@resources` is
587
515
  also present, the maximum value from all decorators is used.
588
516
  memory : int, default 4096
589
517
  Memory size (in MB) required for this step. If
@@ -726,270 +654,463 @@ def batch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_genera
726
654
  """
727
655
  ...
728
656
 
729
- @typing.overload
730
- def secrets(*, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
657
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: str | None = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.List[str] | None = None, node_selector: typing.Dict[str, str] | str | None = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: int | None = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: int | None = None, tmpfs_path: str | None = '/metaflow_temp', persistent_volume_claims: typing.Dict[str, str] | None = None, shared_memory: int | None = None, port: int | None = None, compute_pool: str | None = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Dict[str, typing.Any] | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
731
658
  """
732
- Specifies secrets to be retrieved and injected as environment variables prior to
733
- the execution of a step.
659
+ Specifies that this step should execute on Kubernetes.
734
660
 
735
661
 
736
662
  Parameters
737
663
  ----------
738
- sources : List[Union[str, Dict[str, Any]]], default: []
739
- List of secret specs, defining how the secrets are to be retrieved
740
- role : str, optional, default: None
741
- Role to use for fetching secrets
664
+ cpu : int, default 1
665
+ Number of CPUs required for this step. If `@resources` is
666
+ also present, the maximum value from all decorators is used.
667
+ memory : int, default 4096
668
+ Memory size (in MB) required for this step. If
669
+ `@resources` is also present, the maximum value from all decorators is
670
+ used.
671
+ disk : int, default 10240
672
+ Disk size (in MB) required for this step. If
673
+ `@resources` is also present, the maximum value from all decorators is
674
+ used.
675
+ image : str, optional, default None
676
+ Docker image to use when launching on Kubernetes. If not specified, and
677
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
678
+ not, a default Docker image mapping to the current version of Python is used.
679
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
680
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
681
+ image_pull_secrets: List[str], default []
682
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
683
+ Kubernetes image pull secrets to use when pulling container images
684
+ in Kubernetes.
685
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
686
+ Kubernetes service account to use when launching pod in Kubernetes.
687
+ secrets : List[str], optional, default None
688
+ Kubernetes secrets to use when launching pod in Kubernetes. These
689
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
690
+ in Metaflow configuration.
691
+ node_selector: Union[Dict[str,str], str], optional, default None
692
+ Kubernetes node selector(s) to apply to the pod running the task.
693
+ Can be passed in as a comma separated string of values e.g.
694
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
695
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
696
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
697
+ Kubernetes namespace to use when launching pod in Kubernetes.
698
+ gpu : int, optional, default None
699
+ Number of GPUs required for this step. A value of zero implies that
700
+ the scheduled node should not have GPUs.
701
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
702
+ The vendor of the GPUs to be used for this step.
703
+ tolerations : List[Dict[str,str]], default []
704
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
705
+ Kubernetes tolerations to use when launching pod in Kubernetes.
706
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
707
+ Kubernetes labels to use when launching pod in Kubernetes.
708
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
709
+ Kubernetes annotations to use when launching pod in Kubernetes.
710
+ use_tmpfs : bool, default False
711
+ This enables an explicit tmpfs mount for this step.
712
+ tmpfs_tempdir : bool, default True
713
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
714
+ tmpfs_size : int, optional, default: None
715
+ The value for the size (in MiB) of the tmpfs mount for this step.
716
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
717
+ memory allocated for this step.
718
+ tmpfs_path : str, optional, default /metaflow_temp
719
+ Path to tmpfs mount for this step.
720
+ persistent_volume_claims : Dict[str, str], optional, default None
721
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
722
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
723
+ shared_memory: int, optional
724
+ Shared memory size (in MiB) required for this step
725
+ port: int, optional
726
+ Port number to specify in the Kubernetes job object
727
+ compute_pool : str, optional, default None
728
+ Compute pool to be used for for this step.
729
+ If not specified, any accessible compute pool within the perimeter is used.
730
+ hostname_resolution_timeout: int, default 10 * 60
731
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
732
+ Only applicable when @parallel is used.
733
+ qos: str, default: Burstable
734
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
735
+
736
+ security_context: Dict[str, Any], optional, default None
737
+ Container security context. Applies to the task container. Allows the following keys:
738
+ - privileged: bool, optional, default None
739
+ - allow_privilege_escalation: bool, optional, default None
740
+ - run_as_user: int, optional, default None
741
+ - run_as_group: int, optional, default None
742
+ - run_as_non_root: bool, optional, default None
742
743
  """
743
744
  ...
744
745
 
745
746
  @typing.overload
746
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
747
- ...
748
-
749
- @typing.overload
750
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
751
- ...
752
-
753
- def secrets(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, sources: typing.List[str | typing.Dict[str, typing.Any]] = [], role: str | None = None):
747
+ def resources(*, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
754
748
  """
755
- Specifies secrets to be retrieved and injected as environment variables prior to
756
- the execution of a step.
749
+ Specifies the resources needed when executing this step.
757
750
 
751
+ Use `@resources` to specify the resource requirements
752
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
758
753
 
759
- Parameters
760
- ----------
761
- sources : List[Union[str, Dict[str, Any]]], default: []
762
- List of secret specs, defining how the secrets are to be retrieved
763
- role : str, optional, default: None
764
- Role to use for fetching secrets
765
- """
766
- ...
767
-
768
- @typing.overload
769
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
770
- """
771
- Specifies the Conda environment for the step.
772
-
773
- Information in this decorator will augment any
774
- attributes set in the `@conda_base` flow-level decorator. Hence,
775
- you can use `@conda_base` to set packages required by all
776
- steps and use `@conda` to specify step-specific overrides.
754
+ You can choose the compute layer on the command line by executing e.g.
755
+ ```
756
+ python myflow.py run --with batch
757
+ ```
758
+ or
759
+ ```
760
+ python myflow.py run --with kubernetes
761
+ ```
762
+ which executes the flow on the desired system using the
763
+ requirements specified in `@resources`.
777
764
 
778
765
 
779
766
  Parameters
780
767
  ----------
781
- packages : Dict[str, str], default {}
782
- Packages to use for this step. The key is the name of the package
783
- and the value is the version to use.
784
- libraries : Dict[str, str], default {}
785
- Supported for backward compatibility. When used with packages, packages will take precedence.
786
- python : str, optional, default None
787
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
788
- that the version used will correspond to the version of the Python interpreter used to start the run.
789
- disabled : bool, default False
790
- If set to True, disables @conda.
768
+ cpu : int, default 1
769
+ Number of CPUs required for this step.
770
+ gpu : int, optional, default None
771
+ Number of GPUs required for this step.
772
+ disk : int, optional, default None
773
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
774
+ memory : int, default 4096
775
+ Memory size (in MB) required for this step.
776
+ shared_memory : int, optional, default None
777
+ The value for the size (in MiB) of the /dev/shm volume for this step.
778
+ This parameter maps to the `--shm-size` option in Docker.
791
779
  """
792
780
  ...
793
781
 
794
782
  @typing.overload
795
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
783
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
796
784
  ...
797
785
 
798
786
  @typing.overload
799
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
787
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
800
788
  ...
801
789
 
802
- def conda(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
790
+ def resources(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, cpu: int = 1, gpu: int | None = None, disk: int | None = None, memory: int = 4096, shared_memory: int | None = None):
803
791
  """
804
- Specifies the Conda environment for the step.
792
+ Specifies the resources needed when executing this step.
805
793
 
806
- Information in this decorator will augment any
807
- attributes set in the `@conda_base` flow-level decorator. Hence,
808
- you can use `@conda_base` to set packages required by all
809
- steps and use `@conda` to specify step-specific overrides.
794
+ Use `@resources` to specify the resource requirements
795
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
796
+
797
+ You can choose the compute layer on the command line by executing e.g.
798
+ ```
799
+ python myflow.py run --with batch
800
+ ```
801
+ or
802
+ ```
803
+ python myflow.py run --with kubernetes
804
+ ```
805
+ which executes the flow on the desired system using the
806
+ requirements specified in `@resources`.
810
807
 
811
808
 
812
809
  Parameters
813
810
  ----------
814
- packages : Dict[str, str], default {}
815
- Packages to use for this step. The key is the name of the package
816
- and the value is the version to use.
817
- libraries : Dict[str, str], default {}
818
- Supported for backward compatibility. When used with packages, packages will take precedence.
819
- python : str, optional, default None
820
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
821
- that the version used will correspond to the version of the Python interpreter used to start the run.
822
- disabled : bool, default False
823
- If set to True, disables @conda.
811
+ cpu : int, default 1
812
+ Number of CPUs required for this step.
813
+ gpu : int, optional, default None
814
+ Number of GPUs required for this step.
815
+ disk : int, optional, default None
816
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
817
+ memory : int, default 4096
818
+ Memory size (in MB) required for this step.
819
+ shared_memory : int, optional, default None
820
+ The value for the size (in MiB) of the /dev/shm volume for this step.
821
+ This parameter maps to the `--shm-size` option in Docker.
824
822
  """
825
823
  ...
826
824
 
827
825
  @typing.overload
828
- def card(*, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
826
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
829
827
  """
830
- Creates a human-readable report, a Metaflow Card, after this step completes.
831
-
832
- Note that you may add multiple `@card` decorators in a step with different parameters.
833
-
834
-
835
- Parameters
836
- ----------
837
- type : str, default 'default'
838
- Card type.
839
- id : str, optional, default None
840
- If multiple cards are present, use this id to identify this card.
841
- options : Dict[str, Any], default {}
842
- Options passed to the card. The contents depend on the card type.
843
- timeout : int, default 45
844
- Interrupt reporting if it takes more than this many seconds.
828
+ Decorator prototype for all step decorators. This function gets specialized
829
+ and imported for all decorators types by _import_plugin_decorators().
845
830
  """
846
831
  ...
847
832
 
848
833
  @typing.overload
849
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
835
  ...
851
836
 
852
- @typing.overload
853
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
837
+ def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
838
+ """
839
+ Decorator prototype for all step decorators. This function gets specialized
840
+ and imported for all decorators types by _import_plugin_decorators().
841
+ """
854
842
  ...
855
843
 
856
- def card(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, type: str = 'default', id: str | None = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
844
+ @typing.overload
845
+ def catch(*, var: str | None = None, print_exception: bool = True) -> typing.Callable[[typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]], typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType]]:
857
846
  """
858
- Creates a human-readable report, a Metaflow Card, after this step completes.
847
+ Specifies that the step will success under all circumstances.
859
848
 
860
- Note that you may add multiple `@card` decorators in a step with different parameters.
849
+ The decorator will create an optional artifact, specified by `var`, which
850
+ contains the exception raised. You can use it to detect the presence
851
+ of errors, indicating that all happy-path artifacts produced by the step
852
+ are missing.
861
853
 
862
854
 
863
855
  Parameters
864
856
  ----------
865
- type : str, default 'default'
866
- Card type.
867
- id : str, optional, default None
868
- If multiple cards are present, use this id to identify this card.
869
- options : Dict[str, Any], default {}
870
- Options passed to the card. The contents depend on the card type.
871
- timeout : int, default 45
872
- Interrupt reporting if it takes more than this many seconds.
857
+ var : str, optional, default None
858
+ Name of the artifact in which to store the caught exception.
859
+ If not specified, the exception is not stored.
860
+ print_exception : bool, default True
861
+ Determines whether or not the exception is printed to
862
+ stdout when caught.
873
863
  """
874
864
  ...
875
865
 
876
866
  @typing.overload
877
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
878
- """
879
- Decorator prototype for all step decorators. This function gets specialized
880
- and imported for all decorators types by _import_plugin_decorators().
881
- """
867
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
882
868
  ...
883
869
 
884
870
  @typing.overload
885
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
871
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
886
872
  ...
887
873
 
888
- def parallel(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None):
874
+ def catch(f: typing.Callable[[~FlowSpecDerived, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | typing.Callable[[~FlowSpecDerived, typing.Any, metaflow.cmd.develop.stub_generator.StepFlag], NoneType] | None = None, *, var: str | None = None, print_exception: bool = True):
889
875
  """
890
- Decorator prototype for all step decorators. This function gets specialized
891
- and imported for all decorators types by _import_plugin_decorators().
876
+ Specifies that the step will success under all circumstances.
877
+
878
+ The decorator will create an optional artifact, specified by `var`, which
879
+ contains the exception raised. You can use it to detect the presence
880
+ of errors, indicating that all happy-path artifacts produced by the step
881
+ are missing.
882
+
883
+
884
+ Parameters
885
+ ----------
886
+ var : str, optional, default None
887
+ Name of the artifact in which to store the caught exception.
888
+ If not specified, the exception is not stored.
889
+ print_exception : bool, default True
890
+ Determines whether or not the exception is printed to
891
+ stdout when caught.
892
892
  """
893
893
  ...
894
894
 
895
895
  @typing.overload
896
- def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
896
+ def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
897
897
  """
898
- Specifies the flow(s) that this flow depends on.
898
+ Specifies the event(s) that this flow depends on.
899
899
 
900
900
  ```
901
- @trigger_on_finish(flow='FooFlow')
901
+ @trigger(event='foo')
902
902
  ```
903
903
  or
904
904
  ```
905
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
905
+ @trigger(events=['foo', 'bar'])
906
906
  ```
907
- This decorator respects the @project decorator and triggers the flow
908
- when upstream runs within the same namespace complete successfully
909
907
 
910
- Additionally, you can specify project aware upstream flow dependencies
911
- by specifying the fully qualified project_flow_name.
908
+ Additionally, you can specify the parameter mappings
909
+ to map event payload to Metaflow parameters for the flow.
912
910
  ```
913
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
911
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
914
912
  ```
915
913
  or
916
914
  ```
917
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
915
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
916
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
918
917
  ```
919
918
 
920
- You can also specify just the project or project branch (other values will be
921
- inferred from the current project or project branch):
919
+ 'parameters' can also be a list of strings and tuples like so:
922
920
  ```
923
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
921
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
922
+ ```
923
+ This is equivalent to:
924
+ ```
925
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
924
926
  ```
925
-
926
- Note that `branch` is typically one of:
927
- - `prod`
928
- - `user.bob`
929
- - `test.my_experiment`
930
- - `prod.staging`
931
927
 
932
928
 
933
929
  Parameters
934
930
  ----------
935
- flow : Union[str, Dict[str, str]], optional, default None
936
- Upstream flow dependency for this flow.
937
- flows : List[Union[str, Dict[str, str]]], default []
938
- Upstream flow dependencies for this flow.
931
+ event : Union[str, Dict[str, Any]], optional, default None
932
+ Event dependency for this flow.
933
+ events : List[Union[str, Dict[str, Any]]], default []
934
+ Events dependency for this flow.
939
935
  options : Dict[str, Any], default {}
940
936
  Backend-specific configuration for tuning eventing behavior.
941
937
  """
942
938
  ...
943
939
 
944
940
  @typing.overload
945
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
941
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
946
942
  ...
947
943
 
948
- def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
944
+ def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
949
945
  """
950
- Specifies the flow(s) that this flow depends on.
946
+ Specifies the event(s) that this flow depends on.
951
947
 
952
948
  ```
953
- @trigger_on_finish(flow='FooFlow')
949
+ @trigger(event='foo')
954
950
  ```
955
951
  or
956
952
  ```
957
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
953
+ @trigger(events=['foo', 'bar'])
958
954
  ```
959
- This decorator respects the @project decorator and triggers the flow
960
- when upstream runs within the same namespace complete successfully
961
955
 
962
- Additionally, you can specify project aware upstream flow dependencies
963
- by specifying the fully qualified project_flow_name.
956
+ Additionally, you can specify the parameter mappings
957
+ to map event payload to Metaflow parameters for the flow.
964
958
  ```
965
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
959
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
966
960
  ```
967
961
  or
968
962
  ```
969
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
963
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
964
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
970
965
  ```
971
966
 
972
- You can also specify just the project or project branch (other values will be
973
- inferred from the current project or project branch):
967
+ 'parameters' can also be a list of strings and tuples like so:
974
968
  ```
975
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
969
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
970
+ ```
971
+ This is equivalent to:
976
972
  ```
973
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
974
+ ```
975
+
976
+
977
+ Parameters
978
+ ----------
979
+ event : Union[str, Dict[str, Any]], optional, default None
980
+ Event dependency for this flow.
981
+ events : List[Union[str, Dict[str, Any]]], default []
982
+ Events dependency for this flow.
983
+ options : Dict[str, Any], default {}
984
+ Backend-specific configuration for tuning eventing behavior.
985
+ """
986
+ ...
987
+
988
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
989
+ """
990
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
991
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
992
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
993
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
994
+ starts only after all sensors finish.
995
+
996
+
997
+ Parameters
998
+ ----------
999
+ timeout : int
1000
+ Time, in seconds before the task times out and fails. (Default: 3600)
1001
+ poke_interval : int
1002
+ Time in seconds that the job should wait in between each try. (Default: 60)
1003
+ mode : str
1004
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1005
+ exponential_backoff : bool
1006
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1007
+ pool : str
1008
+ the slot pool this task should run in,
1009
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1010
+ soft_fail : bool
1011
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1012
+ name : str
1013
+ Name of the sensor on Airflow
1014
+ description : str
1015
+ Description of sensor in the Airflow UI
1016
+ bucket_key : Union[str, List[str]]
1017
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1018
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1019
+ bucket_name : str
1020
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1021
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1022
+ wildcard_match : bool
1023
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1024
+ aws_conn_id : str
1025
+ a reference to the s3 connection on Airflow. (Default: None)
1026
+ verify : bool
1027
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1028
+ """
1029
+ ...
1030
+
1031
+ def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1032
+ """
1033
+ Specifies what flows belong to the same project.
1034
+
1035
+ A project-specific namespace is created for all flows that
1036
+ use the same `@project(name)`.
1037
+
1038
+
1039
+ Parameters
1040
+ ----------
1041
+ name : str
1042
+ Project name. Make sure that the name is unique amongst all
1043
+ projects that use the same production scheduler. The name may
1044
+ contain only lowercase alphanumeric characters and underscores.
1045
+
1046
+ branch : Optional[str], default None
1047
+ The branch to use. If not specified, the branch is set to
1048
+ `user.<username>` unless `production` is set to `True`. This can
1049
+ also be set on the command line using `--branch` as a top-level option.
1050
+ It is an error to specify `branch` in the decorator and on the command line.
1051
+
1052
+ production : bool, default False
1053
+ Whether or not the branch is the production branch. This can also be set on the
1054
+ command line using `--production` as a top-level option. It is an error to specify
1055
+ `production` in the decorator and on the command line.
1056
+ The project branch name will be:
1057
+ - if `branch` is specified:
1058
+ - if `production` is True: `prod.<branch>`
1059
+ - if `production` is False: `test.<branch>`
1060
+ - if `branch` is not specified:
1061
+ - if `production` is True: `prod`
1062
+ - if `production` is False: `user.<username>`
1063
+ """
1064
+ ...
1065
+
1066
+ @typing.overload
1067
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1068
+ """
1069
+ Specifies the Conda environment for all steps of the flow.
1070
+
1071
+ Use `@conda_base` to set common libraries required by all
1072
+ steps and use `@conda` to specify step-specific additions.
1073
+
1074
+
1075
+ Parameters
1076
+ ----------
1077
+ packages : Dict[str, str], default {}
1078
+ Packages to use for this flow. The key is the name of the package
1079
+ and the value is the version to use.
1080
+ libraries : Dict[str, str], default {}
1081
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1082
+ python : str, optional, default None
1083
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1084
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1085
+ disabled : bool, default False
1086
+ If set to True, disables Conda.
1087
+ """
1088
+ ...
1089
+
1090
+ @typing.overload
1091
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1092
+ ...
1093
+
1094
+ def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1095
+ """
1096
+ Specifies the Conda environment for all steps of the flow.
977
1097
 
978
- Note that `branch` is typically one of:
979
- - `prod`
980
- - `user.bob`
981
- - `test.my_experiment`
982
- - `prod.staging`
1098
+ Use `@conda_base` to set common libraries required by all
1099
+ steps and use `@conda` to specify step-specific additions.
983
1100
 
984
1101
 
985
1102
  Parameters
986
1103
  ----------
987
- flow : Union[str, Dict[str, str]], optional, default None
988
- Upstream flow dependency for this flow.
989
- flows : List[Union[str, Dict[str, str]]], default []
990
- Upstream flow dependencies for this flow.
991
- options : Dict[str, Any], default {}
992
- Backend-specific configuration for tuning eventing behavior.
1104
+ packages : Dict[str, str], default {}
1105
+ Packages to use for this flow. The key is the name of the package
1106
+ and the value is the version to use.
1107
+ libraries : Dict[str, str], default {}
1108
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1109
+ python : str, optional, default None
1110
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1111
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1112
+ disabled : bool, default False
1113
+ If set to True, disables Conda.
993
1114
  """
994
1115
  ...
995
1116
 
@@ -1035,133 +1156,106 @@ def pypi_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typin
1035
1156
  ...
1036
1157
 
1037
1158
  @typing.overload
1038
- def trigger(*, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1159
+ def trigger_on_finish(*, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1039
1160
  """
1040
- Specifies the event(s) that this flow depends on.
1161
+ Specifies the flow(s) that this flow depends on.
1041
1162
 
1042
1163
  ```
1043
- @trigger(event='foo')
1164
+ @trigger_on_finish(flow='FooFlow')
1044
1165
  ```
1045
1166
  or
1046
1167
  ```
1047
- @trigger(events=['foo', 'bar'])
1168
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1048
1169
  ```
1170
+ This decorator respects the @project decorator and triggers the flow
1171
+ when upstream runs within the same namespace complete successfully
1049
1172
 
1050
- Additionally, you can specify the parameter mappings
1051
- to map event payload to Metaflow parameters for the flow.
1173
+ Additionally, you can specify project aware upstream flow dependencies
1174
+ by specifying the fully qualified project_flow_name.
1052
1175
  ```
1053
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1176
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1054
1177
  ```
1055
1178
  or
1056
1179
  ```
1057
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1058
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1180
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1059
1181
  ```
1060
1182
 
1061
- 'parameters' can also be a list of strings and tuples like so:
1062
- ```
1063
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1064
- ```
1065
- This is equivalent to:
1183
+ You can also specify just the project or project branch (other values will be
1184
+ inferred from the current project or project branch):
1066
1185
  ```
1067
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1186
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1068
1187
  ```
1069
1188
 
1189
+ Note that `branch` is typically one of:
1190
+ - `prod`
1191
+ - `user.bob`
1192
+ - `test.my_experiment`
1193
+ - `prod.staging`
1194
+
1070
1195
 
1071
1196
  Parameters
1072
1197
  ----------
1073
- event : Union[str, Dict[str, Any]], optional, default None
1074
- Event dependency for this flow.
1075
- events : List[Union[str, Dict[str, Any]]], default []
1076
- Events dependency for this flow.
1198
+ flow : Union[str, Dict[str, str]], optional, default None
1199
+ Upstream flow dependency for this flow.
1200
+ flows : List[Union[str, Dict[str, str]]], default []
1201
+ Upstream flow dependencies for this flow.
1077
1202
  options : Dict[str, Any], default {}
1078
1203
  Backend-specific configuration for tuning eventing behavior.
1079
1204
  """
1080
1205
  ...
1081
1206
 
1082
1207
  @typing.overload
1083
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1208
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1084
1209
  ...
1085
1210
 
1086
- def trigger(f: typing.Type[~FlowSpecDerived] | None = None, *, event: str | typing.Dict[str, typing.Any] | None = None, events: typing.List[str | typing.Dict[str, typing.Any]] = [], options: typing.Dict[str, typing.Any] = {}):
1211
+ def trigger_on_finish(f: typing.Type[~FlowSpecDerived] | None = None, *, flow: typing.Dict[str, str] | str | None = None, flows: typing.List[str | typing.Dict[str, str]] = [], options: typing.Dict[str, typing.Any] = {}):
1087
1212
  """
1088
- Specifies the event(s) that this flow depends on.
1213
+ Specifies the flow(s) that this flow depends on.
1089
1214
 
1090
1215
  ```
1091
- @trigger(event='foo')
1216
+ @trigger_on_finish(flow='FooFlow')
1092
1217
  ```
1093
1218
  or
1094
1219
  ```
1095
- @trigger(events=['foo', 'bar'])
1220
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1096
1221
  ```
1222
+ This decorator respects the @project decorator and triggers the flow
1223
+ when upstream runs within the same namespace complete successfully
1097
1224
 
1098
- Additionally, you can specify the parameter mappings
1099
- to map event payload to Metaflow parameters for the flow.
1225
+ Additionally, you can specify project aware upstream flow dependencies
1226
+ by specifying the fully qualified project_flow_name.
1100
1227
  ```
1101
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1228
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1102
1229
  ```
1103
1230
  or
1104
1231
  ```
1105
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1106
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1232
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1107
1233
  ```
1108
1234
 
1109
- 'parameters' can also be a list of strings and tuples like so:
1110
- ```
1111
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1112
- ```
1113
- This is equivalent to:
1235
+ You can also specify just the project or project branch (other values will be
1236
+ inferred from the current project or project branch):
1114
1237
  ```
1115
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1238
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1116
1239
  ```
1117
1240
 
1241
+ Note that `branch` is typically one of:
1242
+ - `prod`
1243
+ - `user.bob`
1244
+ - `test.my_experiment`
1245
+ - `prod.staging`
1246
+
1118
1247
 
1119
1248
  Parameters
1120
1249
  ----------
1121
- event : Union[str, Dict[str, Any]], optional, default None
1122
- Event dependency for this flow.
1123
- events : List[Union[str, Dict[str, Any]]], default []
1124
- Events dependency for this flow.
1250
+ flow : Union[str, Dict[str, str]], optional, default None
1251
+ Upstream flow dependency for this flow.
1252
+ flows : List[Union[str, Dict[str, str]]], default []
1253
+ Upstream flow dependencies for this flow.
1125
1254
  options : Dict[str, Any], default {}
1126
1255
  Backend-specific configuration for tuning eventing behavior.
1127
1256
  """
1128
1257
  ...
1129
1258
 
1130
- def project(*, name: str, branch: str | None = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1131
- """
1132
- Specifies what flows belong to the same project.
1133
-
1134
- A project-specific namespace is created for all flows that
1135
- use the same `@project(name)`.
1136
-
1137
-
1138
- Parameters
1139
- ----------
1140
- name : str
1141
- Project name. Make sure that the name is unique amongst all
1142
- projects that use the same production scheduler. The name may
1143
- contain only lowercase alphanumeric characters and underscores.
1144
-
1145
- branch : Optional[str], default None
1146
- The branch to use. If not specified, the branch is set to
1147
- `user.<username>` unless `production` is set to `True`. This can
1148
- also be set on the command line using `--branch` as a top-level option.
1149
- It is an error to specify `branch` in the decorator and on the command line.
1150
-
1151
- production : bool, default False
1152
- Whether or not the branch is the production branch. This can also be set on the
1153
- command line using `--production` as a top-level option. It is an error to specify
1154
- `production` in the decorator and on the command line.
1155
- The project branch name will be:
1156
- - if `branch` is specified:
1157
- - if `production` is True: `prod.<branch>`
1158
- - if `production` is False: `test.<branch>`
1159
- - if `branch` is not specified:
1160
- - if `production` is True: `prod`
1161
- - if `production` is False: `user.<username>`
1162
- """
1163
- ...
1164
-
1165
1259
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1166
1260
  """
1167
1261
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1256,97 +1350,3 @@ def schedule(f: typing.Type[~FlowSpecDerived] | None = None, *, hourly: bool = F
1256
1350
  """
1257
1351
  ...
1258
1352
 
1259
- @typing.overload
1260
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1261
- """
1262
- Specifies the Conda environment for all steps of the flow.
1263
-
1264
- Use `@conda_base` to set common libraries required by all
1265
- steps and use `@conda` to specify step-specific additions.
1266
-
1267
-
1268
- Parameters
1269
- ----------
1270
- packages : Dict[str, str], default {}
1271
- Packages to use for this flow. The key is the name of the package
1272
- and the value is the version to use.
1273
- libraries : Dict[str, str], default {}
1274
- Supported for backward compatibility. When used with packages, packages will take precedence.
1275
- python : str, optional, default None
1276
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1277
- that the version used will correspond to the version of the Python interpreter used to start the run.
1278
- disabled : bool, default False
1279
- If set to True, disables Conda.
1280
- """
1281
- ...
1282
-
1283
- @typing.overload
1284
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
- ...
1286
-
1287
- def conda_base(f: typing.Type[~FlowSpecDerived] | None = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: str | None = None, disabled: bool = False):
1288
- """
1289
- Specifies the Conda environment for all steps of the flow.
1290
-
1291
- Use `@conda_base` to set common libraries required by all
1292
- steps and use `@conda` to specify step-specific additions.
1293
-
1294
-
1295
- Parameters
1296
- ----------
1297
- packages : Dict[str, str], default {}
1298
- Packages to use for this flow. The key is the name of the package
1299
- and the value is the version to use.
1300
- libraries : Dict[str, str], default {}
1301
- Supported for backward compatibility. When used with packages, packages will take precedence.
1302
- python : str, optional, default None
1303
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1304
- that the version used will correspond to the version of the Python interpreter used to start the run.
1305
- disabled : bool, default False
1306
- If set to True, disables Conda.
1307
- """
1308
- ...
1309
-
1310
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: str | typing.List[str], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1311
- """
1312
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1313
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1314
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1315
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1316
- starts only after all sensors finish.
1317
-
1318
-
1319
- Parameters
1320
- ----------
1321
- timeout : int
1322
- Time, in seconds before the task times out and fails. (Default: 3600)
1323
- poke_interval : int
1324
- Time in seconds that the job should wait in between each try. (Default: 60)
1325
- mode : str
1326
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1327
- exponential_backoff : bool
1328
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1329
- pool : str
1330
- the slot pool this task should run in,
1331
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1332
- soft_fail : bool
1333
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1334
- name : str
1335
- Name of the sensor on Airflow
1336
- description : str
1337
- Description of sensor in the Airflow UI
1338
- bucket_key : Union[str, List[str]]
1339
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1340
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1341
- bucket_name : str
1342
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1343
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1344
- wildcard_match : bool
1345
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1346
- aws_conn_id : str
1347
- a reference to the s3 connection on Airflow. (Default: None)
1348
- verify : bool
1349
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1350
- """
1351
- ...
1352
-