metaflow-stubs 2.15.17__py2.py3-none-any.whl → 2.15.19__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic.

Files changed (155)
  1. metaflow-stubs/__init__.pyi +482 -478
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +2 -2
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -4
  20. metaflow-stubs/metaflow_current.pyi +28 -28
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +3 -3
  24. metaflow-stubs/plugins/__init__.pyi +16 -14
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -32
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +3 -3
  39. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +3 -3
  40. metaflow-stubs/plugins/argo/exit_hooks.pyi +48 -0
  41. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  42. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  43. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  47. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +5 -5
  50. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +5 -5
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_creator.pyi +4 -3
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -4
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +13 -2
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  75. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +10 -2
  78. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  79. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  81. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  82. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  86. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  88. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  89. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  90. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  91. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  92. metaflow-stubs/plugins/exit_hook/__init__.pyi +11 -0
  93. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +20 -0
  94. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +5 -5
  98. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  110. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  115. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  119. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/__init__.pyi +7 -3
  122. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +4 -4
  123. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +9 -49
  124. metaflow-stubs/plugins/secrets/secrets_func.pyi +32 -0
  125. metaflow-stubs/plugins/secrets/secrets_spec.pyi +42 -0
  126. metaflow-stubs/plugins/secrets/utils.pyi +28 -0
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  130. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  131. metaflow-stubs/plugins/uv/uv_environment.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +4 -4
  135. metaflow-stubs/runner/deployer_impl.pyi +2 -2
  136. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  137. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  138. metaflow-stubs/runner/nbrun.pyi +2 -2
  139. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  140. metaflow-stubs/runner/utils.pyi +2 -2
  141. metaflow-stubs/system/__init__.pyi +2 -2
  142. metaflow-stubs/system/system_logger.pyi +2 -2
  143. metaflow-stubs/system/system_monitor.pyi +2 -2
  144. metaflow-stubs/tagging_util.pyi +2 -2
  145. metaflow-stubs/tuple_util.pyi +2 -2
  146. metaflow-stubs/user_configs/__init__.pyi +2 -2
  147. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  148. metaflow-stubs/user_configs/config_options.pyi +3 -3
  149. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  150. metaflow-stubs/version.pyi +2 -2
  151. {metaflow_stubs-2.15.17.dist-info → metaflow_stubs-2.15.19.dist-info}/METADATA +2 -2
  152. metaflow_stubs-2.15.19.dist-info/RECORD +155 -0
  153. metaflow_stubs-2.15.17.dist-info/RECORD +0 -149
  154. {metaflow_stubs-2.15.17.dist-info → metaflow_stubs-2.15.19.dist-info}/WHEEL +0 -0
  155. {metaflow_stubs-2.15.17.dist-info → metaflow_stubs-2.15.19.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.15.17 #
- # Generated on 2025-06-13T18:00:28.046472 #
+ # MF version: 2.15.19 #
+ # Generated on 2025-07-10T01:18:40.943007 #
  ######################################################################################################

  from __future__ import annotations
@@ -36,16 +36,16 @@ from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
  from . import metaflow_git as metaflow_git
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -146,6 +146,155 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
  def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on Kubernetes.
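
The hunk above adds module-level stubs for `@parallel`, `@pypi`, and `@resources`. As a quick orientation (not part of the diff), the sketch below shows how the documented `@pypi` and `@resources` parameters combine on a single step; the package pins and resource sizes are illustrative assumptions, not values taken from the package.

```
from metaflow import FlowSpec, pypi, resources, step

class StubExampleFlow(FlowSpec):

    @resources(cpu=2, memory=8192)                         # resource hints; compute layer is chosen at run time
    @pypi(packages={"pandas": "2.2.2"}, python="3.11.9")   # step-specific PyPI environment (illustrative pins)
    @step
    def start(self):
        import pandas as pd                                # importable because @pypi provides it for this step
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)

if __name__ == "__main__":
    StubExampleFlow()
```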
@@ -235,106 +384,6 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
  """
  ...

- @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
- """
- ...
-

  @typing.overload
  def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -424,140 +473,7 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

@@ -764,6 +680,106 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ @typing.overload
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ """
+ Specifies that the step will success under all circumstances.
+
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.
+
+
+ Parameters
+ ----------
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
+
+ @typing.overload
+ def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ """
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.
+
+
+ Parameters
+ ----------
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+ """
+ ...
+

  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -824,189 +840,168 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

- @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies what flows belong to the same project.

- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.
+
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
+ """
+ ...
+
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.


  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

@@ -1111,38 +1106,96 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
 
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies what flows belong to the same project.
+ Specifies the event(s) that this flow depends on.
 
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
+
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
 
 
  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
+
+ @typing.overload
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the event(s) that this flow depends on.
 
- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```
 
- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...
 
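To ground the `@trigger` docstring added above, here is a short, hedged sketch of the dict form with a parameter mapping; the event name `data_updated` and the payload field `table_name` are hypothetical placeholders, not real events:

```
# Sketch based on the @trigger docstring above; 'data_updated' and
# 'table_name' are hypothetical placeholders.
from metaflow import FlowSpec, Parameter, step, trigger


@trigger(event={'name': 'data_updated',
                'parameters': {'table': 'table_name'}})  # flow param 'table' <- payload field 'table_name'
class TriggeredFlow(FlowSpec):
    table = Parameter('table', default='events')

    @step
    def start(self):
        print('triggered for table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    TriggeredFlow()
```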
@@ -1197,95 +1250,46 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
 
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
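Although the `@schedule` stub above only moves within the file in this diff, its docstring fully describes the parameters; a brief sketch of the cron form follows (the expression and timezone are arbitrary examples, and the decorator only takes effect on a production scheduler):

```
# Sketch only: run daily at 06:00 in an IANA timezone when deployed to a
# production scheduler (timezone support is Argo-only per the docstring above).
from metaflow import FlowSpec, schedule, step


@schedule(cron='0 6 * * *', timezone='Europe/Berlin')
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    NightlyFlow()
```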
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the PyPI packages for all steps of the flow.
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
+ starts only after all sensors finish.
 
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
 
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ timeout : int
+ Time, in seconds before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+ pool : str
+ the slot pool this task should run in,
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+ wildcard_match : bool
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ a reference to the s3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
  """
  ...
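Finally, a hedged sketch of the `@airflow_s3_key_sensor` decorator documented above, attached to a flow that would later be compiled with `airflow create`; the bucket and key are placeholders, and the remaining arguments are assumed to fall back to the defaults listed in the docstring:

```
# Illustrative sketch: gate the start step on an S3 key when the flow is
# scheduled on Airflow. Bucket/key names are hypothetical placeholders.
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    bucket_key='s3://example-bucket/exports/daily.csv',  # full s3:// URL, so bucket_name stays unset
)
class WaitForExportFlow(FlowSpec):

    @step
    def start(self):
        # Runs only after the sensor observes the key (when deployed via `airflow create`).
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    WaitForExportFlow()
```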