metaflow-stubs 2.18.3__py2.py3-none-any.whl → 2.18.4__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of metaflow-stubs might be problematic; see the registry page for details.

Files changed (166)
  1. metaflow-stubs/__init__.pyi +546 -546
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/meta_files.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +3 -3
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +17 -17
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/packaging_sys/__init__.pyi +6 -6
  24. metaflow-stubs/packaging_sys/backend.pyi +4 -4
  25. metaflow-stubs/packaging_sys/distribution_support.pyi +4 -4
  26. metaflow-stubs/packaging_sys/tar_backend.pyi +6 -6
  27. metaflow-stubs/packaging_sys/utils.pyi +2 -2
  28. metaflow-stubs/packaging_sys/v1.pyi +4 -4
  29. metaflow-stubs/parameters.pyi +4 -4
  30. metaflow-stubs/plugins/__init__.pyi +14 -14
  31. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  33. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  34. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  35. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  37. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  38. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  39. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  41. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  42. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  43. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  44. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -5
  45. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  46. metaflow-stubs/plugins/argo/exit_hooks.pyi +3 -3
  47. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  48. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  50. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  52. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  53. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  55. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  59. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  60. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  61. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  62. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +5 -5
  64. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +4 -4
  65. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  67. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  68. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  69. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  70. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  71. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  72. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  79. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/exit_hook/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/exit_hook/exit_hook_decorator.pyi +2 -2
  100. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  104. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  106. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  107. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  109. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +3 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  112. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  114. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  115. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +6 -6
  121. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  124. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  125. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  129. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  130. metaflow-stubs/plugins/secrets/secrets_func.pyi +2 -2
  131. metaflow-stubs/plugins/secrets/secrets_spec.pyi +2 -2
  132. metaflow-stubs/plugins/secrets/utils.pyi +2 -2
  133. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  134. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  135. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  136. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  137. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  138. metaflow-stubs/pylint_wrapper.pyi +2 -2
  139. metaflow-stubs/runner/__init__.pyi +2 -2
  140. metaflow-stubs/runner/deployer.pyi +33 -33
  141. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  142. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  143. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  144. metaflow-stubs/runner/nbrun.pyi +2 -2
  145. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  146. metaflow-stubs/runner/utils.pyi +4 -4
  147. metaflow-stubs/system/__init__.pyi +2 -2
  148. metaflow-stubs/system/system_logger.pyi +3 -3
  149. metaflow-stubs/system/system_monitor.pyi +2 -2
  150. metaflow-stubs/tagging_util.pyi +2 -2
  151. metaflow-stubs/tuple_util.pyi +2 -2
  152. metaflow-stubs/user_configs/__init__.pyi +2 -2
  153. metaflow-stubs/user_configs/config_options.pyi +4 -4
  154. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  155. metaflow-stubs/user_decorators/__init__.pyi +2 -2
  156. metaflow-stubs/user_decorators/common.pyi +2 -2
  157. metaflow-stubs/user_decorators/mutable_flow.pyi +5 -5
  158. metaflow-stubs/user_decorators/mutable_step.pyi +6 -6
  159. metaflow-stubs/user_decorators/user_flow_decorator.pyi +5 -5
  160. metaflow-stubs/user_decorators/user_step_decorator.pyi +6 -6
  161. metaflow-stubs/version.pyi +2 -2
  162. {metaflow_stubs-2.18.3.dist-info → metaflow_stubs-2.18.4.dist-info}/METADATA +2 -2
  163. metaflow_stubs-2.18.4.dist-info/RECORD +166 -0
  164. metaflow_stubs-2.18.3.dist-info/RECORD +0 -166
  165. {metaflow_stubs-2.18.3.dist-info → metaflow_stubs-2.18.4.dist-info}/WHEEL +0 -0
  166. {metaflow_stubs-2.18.3.dist-info → metaflow_stubs-2.18.4.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.18.3 #
- # Generated on 2025-09-08T23:52:16.137378 #
+ # MF version: 2.18.4 #
+ # Generated on 2025-09-12T00:00:14.759393 #
  ######################################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import datetime
  import typing
+ import datetime

  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -40,16 +40,16 @@ from .user_decorators.user_step_decorator import StepMutator as StepMutator
  from .user_decorators.user_step_decorator import user_step_decorator as user_step_decorator
  from .user_decorators.user_flow_decorator import FlowMutator as FlowMutator
  from . import metaflow_git as metaflow_git
- from . import events as events
  from . import tuple_util as tuple_util
+ from . import events as events
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
  from . import includefile as includefile
  from .includefile import IncludeFile as IncludeFile
  from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
- from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
+ from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
  from . import cards as cards
  from . import client as client
  from .client.core import namespace as namespace
@@ -153,53 +153,108 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Specifies the PyPI packages for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies that the step will success under all circumstances.
+ Specifies the PyPI packages for the step.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.


  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
+
+
+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
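
For orientation only, a minimal sketch of how the `@pypi` and `@retry` decorators documented in the stubs above are typically applied to a step. The flow name, step names, and package pins below are illustrative assumptions, not part of this release:

from metaflow import FlowSpec, step, pypi, retry


class ExamplePyPIFlow(FlowSpec):
    # Step-level pins augment any @pypi_base set at the flow level,
    # as the docstring above describes; the pins here are made up.
    @pypi(packages={"requests": "2.32.3"}, python="3.11.5")
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        import requests  # resolved inside the step's @pypi environment
        self.status = requests.codes.ok
        self.next(self.end)

    @step
    def end(self):
        print("final status:", self.status)


if __name__ == "__main__":
    ExamplePyPIFlow()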

@@ -222,6 +277,95 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ image_pull_secrets: List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
+ Kubernetes image pull secrets to use when pulling container images
+ in Kubernetes.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g.
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[Dict[str,str]], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
+ Kubernetes labels to use when launching pod in Kubernetes.
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
+ Kubernetes annotations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
+ compute_pool : str, optional, default None
+ Compute pool to be used for for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
+ Only applicable when @parallel is used.
+ qos: str, default: Burstable
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+
+ security_context: Dict[str, Any], optional, default None
+ Container security context. Applies to the task container. Allows the following keys:
+ - privileged: bool, optional, default None
+ - allow_privilege_escalation: bool, optional, default None
+ - run_as_user: int, optional, default None
+ - run_as_group: int, optional, default None
+ - run_as_non_root: bool, optional, default None
+ """
+ ...
+
  @typing.overload
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -272,79 +416,66 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies the Conda environment for the step.
+
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.


  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- role : str, optional, default: None
- Role to use for fetching secrets
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
  Specifies a timeout for your step.
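
For reference alongside the relocated `@conda` stub above, a minimal usage sketch under stated assumptions (flow name, step names, and the numpy pin are illustrative only):

from metaflow import FlowSpec, conda, step


class ExampleCondaFlow(FlowSpec):
    # packages/python follow the documented parameter shapes; step-level pins
    # augment any @conda_base set at the flow level.
    @conda(packages={"numpy": "1.26.4"}, python="3.11.5")
    @step
    def start(self):
        import numpy as np  # resolved inside the step's @conda environment
        self.total = float(np.arange(10).sum())
        self.next(self.end)

    @step
    def end(self):
        print("sum:", self.total)


if __name__ == "__main__":
    ExampleCondaFlow()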

@@ -402,144 +533,6 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
- """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
-
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, optional, default None
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
- ...
-
  @typing.overload
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, aws_batch_tags: typing.Optional[typing.Dict[str, str]] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -695,238 +688,258 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', image_pull_secrets: typing.List[str] = [], service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[typing.Dict[str, str]] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.


  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- image_pull_secrets: List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_IMAGE_PULL_SECRETS.
- Kubernetes image pull secrets to use when pulling container images
- in Kubernetes.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g.
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
+ Number of CPUs required for this step.
  gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[Dict[str,str]], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
- Kubernetes labels to use when launching pod in Kubernetes.
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
- Kubernetes annotations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
- hostname_resolution_timeout: int, default 10 * 60
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
- Only applicable when @parallel is used.
- qos: str, default: Burstable
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.

- security_context: Dict[str, Any], optional, default None
- Container security context. Applies to the task container. Allows the following keys:
- - privileged: bool, optional, default None
- - allow_privilege_escalation: bool, optional, default None
- - run_as_user: int, optional, default None
- - run_as_group: int, optional, default None
- - run_as_non_root: bool, optional, default None
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, optional, default None
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.


  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...
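
To illustrate the `@catch` behaviour documented just above (combined with `@timeout`, whose stub also appears in this diff), a hedged sketch; the flow, artifact name `compute_failed`, and the deliberate failure are illustrative assumptions:

from metaflow import FlowSpec, catch, step, timeout


class ExampleCatchFlow(FlowSpec):
    # @catch stores a raised exception in the optional `compute_failed`
    # artifact instead of failing the run; @timeout bounds the step's runtime.
    @catch(var="compute_failed", print_exception=True)
    @timeout(minutes=10)
    @step
    def start(self):
        self.result = 1 / 0  # deliberately raises; captured by @catch
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_failed", None):
            print("start failed with:", self.compute_failed)


if __name__ == "__main__":
    ExampleCatchFlow()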
837
820
 
  @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies environment variables to be set prior to the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies environment variables to be set prior to the execution of a step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Parameters
+ ----------
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
+ """
+ ...
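A minimal sketch of the `@environment` step decorator described above; the variable name and value are placeholders:

```python
import os

from metaflow import FlowSpec, environment, step


class EnvDemoFlow(FlowSpec):
    @environment(vars={"APP_LOG_LEVEL": "DEBUG"})  # hypothetical variable
    @step
    def start(self):
        # The variable is present in the environment before the step body runs.
        print("APP_LOG_LEVEL =", os.environ.get("APP_LOG_LEVEL"))
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    EnvDemoFlow()
```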
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
  """
  ...

- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], role: typing.Optional[str] = None):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.


  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ role : str, optional, default: None
+ Role to use for fetching secrets
+ """
+ ...
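A hedged sketch of the `@secrets` decorator added above; the secret id `analytics/db-credentials` and the `DB_PASSWORD` key are hypothetical and depend on the configured secrets backend (for example AWS Secrets Manager):

```python
import os

from metaflow import FlowSpec, secrets, step


class SecretsDemoFlow(FlowSpec):
    # The secret id below is a placeholder; each key of the fetched secret
    # is injected as an environment variable before the step executes.
    @secrets(sources=["analytics/db-credentials"])
    @step
    def start(self):
        password = os.environ.get("DB_PASSWORD")  # assumed key in the secret
        print("got credentials:", password is not None)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    SecretsDemoFlow()
```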
+
+ @typing.overload
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the event(s) that this flow depends on.

- branch : Optional[str], default None
- The branch to use. If not specified, the branch is set to
- `user.<username>` unless `production` is set to `True`. This can
- also be set on the command line using `--branch` as a top-level option.
- It is an error to specify `branch` in the decorator and on the command line.
+ ```
+ @trigger(event='foo')
+ ```
+ or
+ ```
+ @trigger(events=['foo', 'bar'])
+ ```

- production : bool, default False
- Whether or not the branch is the production branch. This can also be set on the
- command line using `--production` as a top-level option. It is an error to specify
- `production` in the decorator and on the command line.
- The project branch name will be:
- - if `branch` is specified:
- - if `production` is True: `prod.<branch>`
- - if `production` is False: `test.<branch>`
- - if `branch` is not specified:
- - if `production` is True: `prod`
- - if `production` is False: `user.<username>`
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ ```
+ or
+ ```
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ ```
+
+ 'parameters' can also be a list of strings and tuples like so:
+ ```
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ ```
+
+
+ Parameters
+ ----------
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
  """
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
  Specifies the event(s) that this flow depends on.

@@ -961,60 +974,104 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Events dependency for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+ """
+ ...
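To make the event examples in the docstring concrete, here is a minimal sketch of an event-triggered flow; the event name `data_ready` and the payload field `table` are hypothetical, and the trigger only takes effect once the flow is deployed to a production orchestrator such as Argo Workflows:

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Map the 'table' field of the hypothetical 'data_ready' event payload
# onto the flow parameter of the same name.
@trigger(event={"name": "data_ready", "parameters": {"table": "table"}})
class TriggeredFlow(FlowSpec):
    table = Parameter("table", default="raw_events")

    @step
    def start(self):
        print("triggered for table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TriggeredFlow()
```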
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables Conda.
+ """
+ ...
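A minimal sketch of the `@conda_base` flow decorator shown above; the package and Python version pins are illustrative, not recommendations:

```python
from metaflow import FlowSpec, conda_base, step


@conda_base(packages={"pandas": "2.2.2"}, python="3.11.9")  # illustrative pins
class CondaBaseDemoFlow(FlowSpec):
    @step
    def start(self):
        import pandas as pd  # resolved from the Conda environment

        print("pandas", pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    CondaBaseDemoFlow()
```

Depending on configuration, such a flow is typically launched with the top-level `--environment=conda` option so that the Conda environment is actually resolved; step-level `@conda` can then add per-step packages.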
+
+ @typing.overload
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the event(s) that this flow depends on.
-
- ```
- @trigger(event='foo')
- ```
- or
- ```
- @trigger(events=['foo', 'bar'])
- ```
-
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
- ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
- ```
- or
- ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
- ```
-
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
- ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
- ```
+ Specifies the PyPI packages for all steps of the flow.

+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...
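And the PyPI counterpart, mirroring the `@pypi_base` stub above; again, the version pins are placeholders:

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={"requests": "2.32.3"}, python="3.11.9")  # placeholder pins
class PypiBaseDemoFlow(FlowSpec):
    @step
    def start(self):
        import requests  # installed from PyPI for every step of the flow

        print("requests", requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    PypiBaseDemoFlow()
```

Step-level `@pypi` overrides can then add or re-pin packages for individual steps, as the docstring notes; runs are typically launched with `--environment=pypi`.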
 
@@ -1061,47 +1118,6 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
  """
  ...

- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
  @typing.overload
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1153,6 +1169,49 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  """
  ...
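The `@schedule` stub referenced in the hunk above is a flow-level decorator; a minimal sketch follows (the schedule only takes effect once the flow is deployed to a production scheduler):

```python
from metaflow import FlowSpec, schedule, step


@schedule(daily=True)  # a cron expression can be passed instead via cron=...
class NightlyFlow(FlowSpec):
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyFlow()
```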
 
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator will ensure that the `start` step
+ starts only after all sensors finish.
+
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
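A hedged sketch of the sensor decorator added above, assuming it is imported from the top-level `metaflow` package as these stubs suggest; the sensor name, bucket, and key are hypothetical, parameters not shown are assumed to fall back to the documented defaults, and the sensor only takes effect when the flow is compiled for Airflow with `airflow create`:

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    name="wait_for_daily_dump",                         # hypothetical sensor name
    description="Block start until the daily dump lands",
    bucket_key="s3://my-bucket/exports/daily.parquet",  # hypothetical S3 url
    timeout=3600,
    poke_interval=60,
    mode="poke",
    soft_fail=False,
)
class S3SensorDemoFlow(FlowSpec):
    @step
    def start(self):
        # Runs only after the Airflow sensor has observed the key.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3SensorDemoFlow()
```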
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1254,97 +1313,38 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
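For completeness, a minimal sketch of `@trigger_on_finish`, whose docstring body is elided in this hunk; `UpstreamFlow` is a hypothetical flow name, and like `@trigger` it only takes effect on a deployed flow:

```python
from metaflow import FlowSpec, step, trigger_on_finish


@trigger_on_finish(flow="UpstreamFlow")  # hypothetical upstream flow
class DownstreamFlow(FlowSpec):
    @step
    def start(self):
        # Starts automatically when a deployed run of UpstreamFlow succeeds.
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()
```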
 
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the Conda environment for all steps of the flow.
+ Specifies what flows belong to the same project.

- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.


  Parameters
  ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.

+ branch : Optional[str], default None
+ The branch to use. If not specified, the branch is set to
+ `user.<username>` unless `production` is set to `True`. This can
+ also be set on the command line using `--branch` as a top-level option.
+ It is an error to specify `branch` in the decorator and on the command line.

- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ production : bool, default False
+ Whether or not the branch is the production branch. This can also be set on the
+ command line using `--production` as a top-level option. It is an error to specify
+ `production` in the decorator and on the command line.
+ The project branch name will be:
+ - if `branch` is specified:
+ - if `production` is True: `prod.<branch>`
+ - if `production` is False: `test.<branch>`
+ - if `branch` is not specified:
+ - if `production` is True: `prod`
+ - if `production` is False: `user.<username>`
  """
  ...
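Finally, a minimal sketch of the `@project` decorator documented above; the project name is hypothetical, and the branch naming rules from the docstring are applied when the flow is run or deployed (for example with `--branch` or `--production`):

```python
from metaflow import FlowSpec, current, project, step


@project(name="demo_analytics")  # hypothetical project name
class ProjectDemoFlow(FlowSpec):
    @step
    def start(self):
        # @project adds project/branch attributes to the current singleton.
        print("project:", current.project_name, "branch:", current.branch_name)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ProjectDemoFlow()
```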