metaflow-stubs 2.15.9__py2.py3-none-any.whl → 2.15.10__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (149) hide show
  1. metaflow-stubs/__init__.pyi +615 -615
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +4 -4
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +3 -3
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +2 -2
  20. metaflow-stubs/metaflow_current.pyi +17 -17
  21. metaflow-stubs/metaflow_git.pyi +2 -2
  22. metaflow-stubs/multicore_utils.pyi +2 -2
  23. metaflow-stubs/parameters.pyi +3 -3
  24. metaflow-stubs/plugins/__init__.pyi +15 -15
  25. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  33. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  36. metaflow-stubs/plugins/argo/argo_workflows.pyi +2 -2
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -2
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  39. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +2 -2
  40. metaflow-stubs/plugins/aws/__init__.pyi +4 -4
  41. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  78. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  80. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  84. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -2
  85. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  87. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  88. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  89. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  92. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  101. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/spot_monitor_sidecar.pyi +2 -2
  107. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  110. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  112. metaflow-stubs/plugins/pypi/parsers.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  116. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/secrets/__init__.pyi +3 -3
  119. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  120. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  122. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/uv/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/uv/uv_environment.pyi +3 -3
  126. metaflow-stubs/pylint_wrapper.pyi +2 -2
  127. metaflow-stubs/runner/__init__.pyi +2 -2
  128. metaflow-stubs/runner/deployer.pyi +29 -29
  129. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  130. metaflow-stubs/runner/metaflow_runner.pyi +2 -2
  131. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  132. metaflow-stubs/runner/nbrun.pyi +2 -2
  133. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  134. metaflow-stubs/runner/utils.pyi +4 -4
  135. metaflow-stubs/system/__init__.pyi +2 -2
  136. metaflow-stubs/system/system_logger.pyi +3 -3
  137. metaflow-stubs/system/system_monitor.pyi +2 -2
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. metaflow-stubs/user_configs/__init__.pyi +2 -2
  141. metaflow-stubs/user_configs/config_decorators.pyi +4 -4
  142. metaflow-stubs/user_configs/config_options.pyi +2 -2
  143. metaflow-stubs/user_configs/config_parameters.pyi +5 -5
  144. metaflow-stubs/version.pyi +2 -2
  145. {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.10.dist-info}/METADATA +2 -2
  146. metaflow_stubs-2.15.10.dist-info/RECORD +149 -0
  147. {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.10.dist-info}/WHEEL +1 -1
  148. metaflow_stubs-2.15.9.dist-info/RECORD +0 -149
  149. {metaflow_stubs-2.15.9.dist-info → metaflow_stubs-2.15.10.dist-info}/top_level.txt +0 -0
@@ -1,15 +1,15 @@
1
1
  ######################################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.15.9 #
4
- # Generated on 2025-04-22T01:36:50.325150 #
3
+ # MF version: 2.15.10 #
4
+ # Generated on 2025-05-01T20:06:36.084744 #
5
5
  ######################################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import datetime
12
11
  import typing
12
+ import datetime
13
13
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
14
14
  StepFlag = typing.NewType("StepFlag", bool)
15
15
 
@@ -35,17 +35,17 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
35
35
  from .user_configs.config_parameters import config_expr as config_expr
36
36
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
37
37
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
38
- from . import tuple_util as tuple_util
39
- from . import metaflow_git as metaflow_git
40
38
  from . import events as events
39
+ from . import metaflow_git as metaflow_git
40
+ from . import tuple_util as tuple_util
41
41
  from . import runner as runner
42
42
  from . import plugins as plugins
43
43
  from .plugins.datatools.s3.s3 import S3 as S3
44
44
  from . import includefile as includefile
45
45
  from .includefile import IncludeFile as IncludeFile
46
+ from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
46
47
  from .plugins.pypi.parsers import requirements_txt_parser as requirements_txt_parser
47
48
  from .plugins.pypi.parsers import pyproject_toml_parser as pyproject_toml_parser
48
- from .plugins.pypi.parsers import conda_environment_yml_parser as conda_environment_yml_parser
49
49
  from . import cards as cards
50
50
  from . import client as client
51
51
  from .client.core import namespace as namespace
@@ -146,91 +146,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
146
146
  """
147
147
  ...
148
148
 
149
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
150
- """
151
- Specifies that this step should execute on Kubernetes.
152
-
153
-
154
- Parameters
155
- ----------
156
- cpu : int, default 1
157
- Number of CPUs required for this step. If `@resources` is
158
- also present, the maximum value from all decorators is used.
159
- memory : int, default 4096
160
- Memory size (in MB) required for this step. If
161
- `@resources` is also present, the maximum value from all decorators is
162
- used.
163
- disk : int, default 10240
164
- Disk size (in MB) required for this step. If
165
- `@resources` is also present, the maximum value from all decorators is
166
- used.
167
- image : str, optional, default None
168
- Docker image to use when launching on Kubernetes. If not specified, and
169
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
170
- not, a default Docker image mapping to the current version of Python is used.
171
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
172
- If given, the imagePullPolicy to be applied to the Docker image of the step.
173
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
174
- Kubernetes service account to use when launching pod in Kubernetes.
175
- secrets : List[str], optional, default None
176
- Kubernetes secrets to use when launching pod in Kubernetes. These
177
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
178
- in Metaflow configuration.
179
- node_selector: Union[Dict[str,str], str], optional, default None
180
- Kubernetes node selector(s) to apply to the pod running the task.
181
- Can be passed in as a comma separated string of values e.g.
182
- 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
183
- {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
184
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
185
- Kubernetes namespace to use when launching pod in Kubernetes.
186
- gpu : int, optional, default None
187
- Number of GPUs required for this step. A value of zero implies that
188
- the scheduled node should not have GPUs.
189
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
190
- The vendor of the GPUs to be used for this step.
191
- tolerations : List[str], default []
192
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
193
- Kubernetes tolerations to use when launching pod in Kubernetes.
194
- labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
195
- Kubernetes labels to use when launching pod in Kubernetes.
196
- annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
197
- Kubernetes annotations to use when launching pod in Kubernetes.
198
- use_tmpfs : bool, default False
199
- This enables an explicit tmpfs mount for this step.
200
- tmpfs_tempdir : bool, default True
201
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
202
- tmpfs_size : int, optional, default: None
203
- The value for the size (in MiB) of the tmpfs mount for this step.
204
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
205
- memory allocated for this step.
206
- tmpfs_path : str, optional, default /metaflow_temp
207
- Path to tmpfs mount for this step.
208
- persistent_volume_claims : Dict[str, str], optional, default None
209
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
210
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
211
- shared_memory: int, optional
212
- Shared memory size (in MiB) required for this step
213
- port: int, optional
214
- Port number to specify in the Kubernetes job object
215
- compute_pool : str, optional, default None
216
- Compute pool to be used for for this step.
217
- If not specified, any accessible compute pool within the perimeter is used.
218
- hostname_resolution_timeout: int, default 10 * 60
219
- Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
220
- Only applicable when @parallel is used.
221
- qos: str, default: Burstable
222
- Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
223
-
224
- security_context: Dict[str, Any], optional, default None
225
- Container security context. Applies to the task container. Allows the following keys:
226
- - privileged: bool, optional, default None
227
- - allow_privilege_escalation: bool, optional, default None
228
- - run_as_user: int, optional, default None
229
- - run_as_group: int, optional, default None
230
- - run_as_non_root: bool, optional, default None
231
- """
232
- ...
233
-
234
149
  @typing.overload
235
150
  def card(*, type: str = 'default', id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
236
151
  """
@@ -281,219 +196,102 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
281
196
  ...
282
197
 
283
198
  @typing.overload
284
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
199
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
285
200
  """
286
- Decorator prototype for all step decorators. This function gets specialized
287
- and imported for all decorators types by _import_plugin_decorators().
201
+ Specifies the PyPI packages for the step.
202
+
203
+ Information in this decorator will augment any
204
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
205
+ you can use `@pypi_base` to set packages required by all
206
+ steps and use `@pypi` to specify step-specific overrides.
207
+
208
+
209
+ Parameters
210
+ ----------
211
+ packages : Dict[str, str], default: {}
212
+ Packages to use for this step. The key is the name of the package
213
+ and the value is the version to use.
214
+ python : str, optional, default: None
215
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
216
+ that the version used will correspond to the version of the Python interpreter used to start the run.
288
217
  """
289
218
  ...
290
219
 
291
220
  @typing.overload
292
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
221
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
293
222
  ...
294
223
 
295
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
224
+ @typing.overload
225
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
226
+ ...
227
+
228
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
296
229
  """
297
- Decorator prototype for all step decorators. This function gets specialized
298
- and imported for all decorators types by _import_plugin_decorators().
230
+ Specifies the PyPI packages for the step.
231
+
232
+ Information in this decorator will augment any
233
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
234
+ you can use `@pypi_base` to set packages required by all
235
+ steps and use `@pypi` to specify step-specific overrides.
236
+
237
+
238
+ Parameters
239
+ ----------
240
+ packages : Dict[str, str], default: {}
241
+ Packages to use for this step. The key is the name of the package
242
+ and the value is the version to use.
243
+ python : str, optional, default: None
244
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
245
+ that the version used will correspond to the version of the Python interpreter used to start the run.
299
246
  """
300
247
  ...
301
248
 
302
249
  @typing.overload
303
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
250
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
304
251
  """
305
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
252
+ Specifies a timeout for your step.
253
+
254
+ This decorator is useful if this step may hang indefinitely.
255
+
256
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
257
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
258
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
259
+
260
+ Note that all the values specified in parameters are added together so if you specify
261
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
306
262
 
307
263
 
308
264
  Parameters
309
265
  ----------
310
- cpu : int, default 1
311
- Number of CPUs required for this step. If `@resources` is
312
- also present, the maximum value from all decorators is used.
313
- gpu : int, default 0
314
- Number of GPUs required for this step. If `@resources` is
315
- also present, the maximum value from all decorators is used.
316
- memory : int, default 4096
317
- Memory size (in MB) required for this step. If
318
- `@resources` is also present, the maximum value from all decorators is
319
- used.
320
- image : str, optional, default None
321
- Docker image to use when launching on AWS Batch. If not specified, and
322
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
323
- not, a default Docker image mapping to the current version of Python is used.
324
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
325
- AWS Batch Job Queue to submit the job to.
326
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
327
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
328
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
329
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
330
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
331
- shared_memory : int, optional, default None
332
- The value for the size (in MiB) of the /dev/shm volume for this step.
333
- This parameter maps to the `--shm-size` option in Docker.
334
- max_swap : int, optional, default None
335
- The total amount of swap memory (in MiB) a container can use for this
336
- step. This parameter is translated to the `--memory-swap` option in
337
- Docker where the value is the sum of the container memory plus the
338
- `max_swap` value.
339
- swappiness : int, optional, default None
340
- This allows you to tune memory swappiness behavior for this step.
341
- A swappiness value of 0 causes swapping not to happen unless absolutely
342
- necessary. A swappiness value of 100 causes pages to be swapped very
343
- aggressively. Accepted values are whole numbers between 0 and 100.
344
- use_tmpfs : bool, default False
345
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
346
- not available on Fargate compute environments
347
- tmpfs_tempdir : bool, default True
348
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
349
- tmpfs_size : int, optional, default None
350
- The value for the size (in MiB) of the tmpfs mount for this step.
351
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
352
- memory allocated for this step.
353
- tmpfs_path : str, optional, default None
354
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
355
- inferentia : int, default 0
356
- Number of Inferentia chips required for this step.
357
- trainium : int, default None
358
- Alias for inferentia. Use only one of the two.
359
- efa : int, default 0
360
- Number of elastic fabric adapter network devices to attach to container
361
- ephemeral_storage : int, default None
362
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
363
- This is only relevant for Fargate compute environments
364
- log_driver: str, optional, default None
365
- The log driver to use for the Amazon ECS container.
366
- log_options: List[str], optional, default None
367
- List of strings containing options for the chosen log driver. The configurable values
368
- depend on the `log driver` chosen. Validation of these options is not supported yet.
369
- Example: [`awslogs-group:aws/batch/job`]
266
+ seconds : int, default 0
267
+ Number of seconds to wait prior to timing out.
268
+ minutes : int, default 0
269
+ Number of minutes to wait prior to timing out.
270
+ hours : int, default 0
271
+ Number of hours to wait prior to timing out.
370
272
  """
371
273
  ...
372
274
 
373
275
  @typing.overload
374
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
276
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
375
277
  ...
376
278
 
377
279
  @typing.overload
378
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
280
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
379
281
  ...
380
282
 
381
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
283
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
382
284
  """
383
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
384
-
385
-
386
- Parameters
387
- ----------
388
- cpu : int, default 1
389
- Number of CPUs required for this step. If `@resources` is
390
- also present, the maximum value from all decorators is used.
391
- gpu : int, default 0
392
- Number of GPUs required for this step. If `@resources` is
393
- also present, the maximum value from all decorators is used.
394
- memory : int, default 4096
395
- Memory size (in MB) required for this step. If
396
- `@resources` is also present, the maximum value from all decorators is
397
- used.
398
- image : str, optional, default None
399
- Docker image to use when launching on AWS Batch. If not specified, and
400
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
401
- not, a default Docker image mapping to the current version of Python is used.
402
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
403
- AWS Batch Job Queue to submit the job to.
404
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
405
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
406
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
407
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
408
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
409
- shared_memory : int, optional, default None
410
- The value for the size (in MiB) of the /dev/shm volume for this step.
411
- This parameter maps to the `--shm-size` option in Docker.
412
- max_swap : int, optional, default None
413
- The total amount of swap memory (in MiB) a container can use for this
414
- step. This parameter is translated to the `--memory-swap` option in
415
- Docker where the value is the sum of the container memory plus the
416
- `max_swap` value.
417
- swappiness : int, optional, default None
418
- This allows you to tune memory swappiness behavior for this step.
419
- A swappiness value of 0 causes swapping not to happen unless absolutely
420
- necessary. A swappiness value of 100 causes pages to be swapped very
421
- aggressively. Accepted values are whole numbers between 0 and 100.
422
- use_tmpfs : bool, default False
423
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
424
- not available on Fargate compute environments
425
- tmpfs_tempdir : bool, default True
426
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
427
- tmpfs_size : int, optional, default None
428
- The value for the size (in MiB) of the tmpfs mount for this step.
429
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
430
- memory allocated for this step.
431
- tmpfs_path : str, optional, default None
432
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
433
- inferentia : int, default 0
434
- Number of Inferentia chips required for this step.
435
- trainium : int, default None
436
- Alias for inferentia. Use only one of the two.
437
- efa : int, default 0
438
- Number of elastic fabric adapter network devices to attach to container
439
- ephemeral_storage : int, default None
440
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
441
- This is only relevant for Fargate compute environments
442
- log_driver: str, optional, default None
443
- The log driver to use for the Amazon ECS container.
444
- log_options: List[str], optional, default None
445
- List of strings containing options for the chosen log driver. The configurable values
446
- depend on the `log driver` chosen. Validation of these options is not supported yet.
447
- Example: [`awslogs-group:aws/batch/job`]
448
- """
449
- ...
450
-
451
- @typing.overload
452
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
453
- """
454
- Specifies a timeout for your step.
455
-
456
- This decorator is useful if this step may hang indefinitely.
457
-
458
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
459
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
460
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
461
-
462
- Note that all the values specified in parameters are added together so if you specify
463
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
464
-
465
-
466
- Parameters
467
- ----------
468
- seconds : int, default 0
469
- Number of seconds to wait prior to timing out.
470
- minutes : int, default 0
471
- Number of minutes to wait prior to timing out.
472
- hours : int, default 0
473
- Number of hours to wait prior to timing out.
474
- """
475
- ...
476
-
477
- @typing.overload
478
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
479
- ...
480
-
481
- @typing.overload
482
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
483
- ...
484
-
485
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
486
- """
487
- Specifies a timeout for your step.
488
-
489
- This decorator is useful if this step may hang indefinitely.
490
-
491
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
492
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
493
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
494
-
495
- Note that all the values specified in parameters are added together so if you specify
496
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
285
+ Specifies a timeout for your step.
286
+
287
+ This decorator is useful if this step may hang indefinitely.
288
+
289
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
290
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
291
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
292
+
293
+ Note that all the values specified in parameters are added together so if you specify
294
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
497
295
 
498
296
 
499
297
  Parameters
@@ -508,90 +306,80 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
508
306
  ...
509
307
 
510
308
  @typing.overload
511
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
309
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
512
310
  """
513
- Specifies the number of times the task corresponding
514
- to a step needs to be retried.
515
-
516
- This decorator is useful for handling transient errors, such as networking issues.
517
- If your task contains operations that can't be retried safely, e.g. database updates,
518
- it is advisable to annotate it with `@retry(times=0)`.
519
-
520
- This can be used in conjunction with the `@catch` decorator. The `@catch`
521
- decorator will execute a no-op task after all retries have been exhausted,
522
- ensuring that the flow execution can continue.
523
-
524
-
525
- Parameters
526
- ----------
527
- times : int, default 3
528
- Number of times to retry this task.
529
- minutes_between_retries : int, default 2
530
- Number of minutes between retries.
311
+ Decorator prototype for all step decorators. This function gets specialized
312
+ and imported for all decorators types by _import_plugin_decorators().
531
313
  """
532
314
  ...
533
315
 
534
316
  @typing.overload
535
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
536
- ...
537
-
538
- @typing.overload
539
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
317
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
540
318
  ...
541
319
 
542
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
320
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
543
321
  """
544
- Specifies the number of times the task corresponding
545
- to a step needs to be retried.
546
-
547
- This decorator is useful for handling transient errors, such as networking issues.
548
- If your task contains operations that can't be retried safely, e.g. database updates,
549
- it is advisable to annotate it with `@retry(times=0)`.
550
-
551
- This can be used in conjunction with the `@catch` decorator. The `@catch`
552
- decorator will execute a no-op task after all retries have been exhausted,
553
- ensuring that the flow execution can continue.
554
-
555
-
556
- Parameters
557
- ----------
558
- times : int, default 3
559
- Number of times to retry this task.
560
- minutes_between_retries : int, default 2
561
- Number of minutes between retries.
322
+ Decorator prototype for all step decorators. This function gets specialized
323
+ and imported for all decorators types by _import_plugin_decorators().
562
324
  """
563
325
  ...
564
326
 
565
327
  @typing.overload
566
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
328
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
567
329
  """
568
- Specifies environment variables to be set prior to the execution of a step.
330
+ Specifies the Conda environment for the step.
331
+
332
+ Information in this decorator will augment any
333
+ attributes set in the `@conda_base` flow-level decorator. Hence,
334
+ you can use `@conda_base` to set packages required by all
335
+ steps and use `@conda` to specify step-specific overrides.
569
336
 
570
337
 
571
338
  Parameters
572
339
  ----------
573
- vars : Dict[str, str], default {}
574
- Dictionary of environment variables to set.
340
+ packages : Dict[str, str], default {}
341
+ Packages to use for this step. The key is the name of the package
342
+ and the value is the version to use.
343
+ libraries : Dict[str, str], default {}
344
+ Supported for backward compatibility. When used with packages, packages will take precedence.
345
+ python : str, optional, default None
346
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
347
+ that the version used will correspond to the version of the Python interpreter used to start the run.
348
+ disabled : bool, default False
349
+ If set to True, disables @conda.
575
350
  """
576
351
  ...
577
352
 
578
353
  @typing.overload
579
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
354
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
580
355
  ...
581
356
 
582
357
  @typing.overload
583
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
358
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
584
359
  ...
585
360
 
586
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
361
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
587
362
  """
588
- Specifies environment variables to be set prior to the execution of a step.
363
+ Specifies the Conda environment for the step.
364
+
365
+ Information in this decorator will augment any
366
+ attributes set in the `@conda_base` flow-level decorator. Hence,
367
+ you can use `@conda_base` to set packages required by all
368
+ steps and use `@conda` to specify step-specific overrides.
589
369
 
590
370
 
591
371
  Parameters
592
372
  ----------
593
- vars : Dict[str, str], default {}
594
- Dictionary of environment variables to set.
373
+ packages : Dict[str, str], default {}
374
+ Packages to use for this step. The key is the name of the package
375
+ and the value is the version to use.
376
+ libraries : Dict[str, str], default {}
377
+ Supported for backward compatibility. When used with packages, packages will take precedence.
378
+ python : str, optional, default None
379
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
380
+ that the version used will correspond to the version of the Python interpreter used to start the run.
381
+ disabled : bool, default False
382
+ If set to True, disables @conda.
595
383
  """
596
384
  ...
597
385
 
@@ -675,411 +463,410 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
675
463
  ...
676
464
 
677
465
  @typing.overload
678
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
466
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
679
467
  """
680
- Specifies the PyPI packages for the step.
468
+ Specifies that the step will success under all circumstances.
681
469
 
682
- Information in this decorator will augment any
683
- attributes set in the `@pyi_base` flow-level decorator. Hence,
684
- you can use `@pypi_base` to set packages required by all
685
- steps and use `@pypi` to specify step-specific overrides.
470
+ The decorator will create an optional artifact, specified by `var`, which
471
+ contains the exception raised. You can use it to detect the presence
472
+ of errors, indicating that all happy-path artifacts produced by the step
473
+ are missing.
686
474
 
687
475
 
688
476
  Parameters
689
477
  ----------
690
- packages : Dict[str, str], default: {}
691
- Packages to use for this step. The key is the name of the package
692
- and the value is the version to use.
693
- python : str, optional, default: None
694
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
695
- that the version used will correspond to the version of the Python interpreter used to start the run.
478
+ var : str, optional, default None
479
+ Name of the artifact in which to store the caught exception.
480
+ If not specified, the exception is not stored.
481
+ print_exception : bool, default True
482
+ Determines whether or not the exception is printed to
483
+ stdout when caught.
696
484
  """
697
485
  ...
698
486
 
699
487
  @typing.overload
700
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
488
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
701
489
  ...
702
490
 
703
491
  @typing.overload
704
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
492
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
705
493
  ...
706
494
 
707
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
495
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
708
496
  """
709
- Specifies the PyPI packages for the step.
497
+ Specifies that the step will success under all circumstances.
710
498
 
711
- Information in this decorator will augment any
712
- attributes set in the `@pyi_base` flow-level decorator. Hence,
713
- you can use `@pypi_base` to set packages required by all
714
- steps and use `@pypi` to specify step-specific overrides.
499
+ The decorator will create an optional artifact, specified by `var`, which
500
+ contains the exception raised. You can use it to detect the presence
501
+ of errors, indicating that all happy-path artifacts produced by the step
502
+ are missing.
715
503
 
716
504
 
717
505
  Parameters
718
506
  ----------
719
- packages : Dict[str, str], default: {}
720
- Packages to use for this step. The key is the name of the package
721
- and the value is the version to use.
722
- python : str, optional, default: None
723
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
724
- that the version used will correspond to the version of the Python interpreter used to start the run.
507
+ var : str, optional, default None
508
+ Name of the artifact in which to store the caught exception.
509
+ If not specified, the exception is not stored.
510
+ print_exception : bool, default True
511
+ Determines whether or not the exception is printed to
512
+ stdout when caught.
725
513
  """
726
514
  ...
727
515
 
728
516
  @typing.overload
729
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
517
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
518
  """
731
- Specifies secrets to be retrieved and injected as environment variables prior to
732
- the execution of a step.
519
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
733
520
 
734
521
 
735
522
  Parameters
736
523
  ----------
737
- sources : List[Union[str, Dict[str, Any]]], default: []
738
- List of secret specs, defining how the secrets are to be retrieved
739
- """
740
- ...
741
-
742
- @typing.overload
743
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
744
- ...
745
-
746
- @typing.overload
747
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
748
- ...
749
-
750
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
751
- """
752
- Specifies secrets to be retrieved and injected as environment variables prior to
753
- the execution of a step.
754
-
755
-
756
- Parameters
757
- ----------
758
- sources : List[Union[str, Dict[str, Any]]], default: []
759
- List of secret specs, defining how the secrets are to be retrieved
760
- """
761
- ...
762
-
763
- @typing.overload
764
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
765
- """
766
- Specifies the Conda environment for the step.
767
-
768
- Information in this decorator will augment any
769
- attributes set in the `@conda_base` flow-level decorator. Hence,
770
- you can use `@conda_base` to set packages required by all
771
- steps and use `@conda` to specify step-specific overrides.
772
-
773
-
774
- Parameters
775
- ----------
776
- packages : Dict[str, str], default {}
777
- Packages to use for this step. The key is the name of the package
778
- and the value is the version to use.
779
- libraries : Dict[str, str], default {}
780
- Supported for backward compatibility. When used with packages, packages will take precedence.
781
- python : str, optional, default None
782
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
783
- that the version used will correspond to the version of the Python interpreter used to start the run.
784
- disabled : bool, default False
785
- If set to True, disables @conda.
524
+ cpu : int, default 1
525
+ Number of CPUs required for this step. If `@resources` is
526
+ also present, the maximum value from all decorators is used.
527
+ gpu : int, default 0
528
+ Number of GPUs required for this step. If `@resources` is
529
+ also present, the maximum value from all decorators is used.
530
+ memory : int, default 4096
531
+ Memory size (in MB) required for this step. If
532
+ `@resources` is also present, the maximum value from all decorators is
533
+ used.
534
+ image : str, optional, default None
535
+ Docker image to use when launching on AWS Batch. If not specified, and
536
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
537
+ not, a default Docker image mapping to the current version of Python is used.
538
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
539
+ AWS Batch Job Queue to submit the job to.
540
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
541
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
542
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
543
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
544
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
545
+ shared_memory : int, optional, default None
546
+ The value for the size (in MiB) of the /dev/shm volume for this step.
547
+ This parameter maps to the `--shm-size` option in Docker.
548
+ max_swap : int, optional, default None
549
+ The total amount of swap memory (in MiB) a container can use for this
550
+ step. This parameter is translated to the `--memory-swap` option in
551
+ Docker where the value is the sum of the container memory plus the
552
+ `max_swap` value.
553
+ swappiness : int, optional, default None
554
+ This allows you to tune memory swappiness behavior for this step.
555
+ A swappiness value of 0 causes swapping not to happen unless absolutely
556
+ necessary. A swappiness value of 100 causes pages to be swapped very
557
+ aggressively. Accepted values are whole numbers between 0 and 100.
558
+ use_tmpfs : bool, default False
559
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
560
+ not available on Fargate compute environments
561
+ tmpfs_tempdir : bool, default True
562
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
563
+ tmpfs_size : int, optional, default None
564
+ The value for the size (in MiB) of the tmpfs mount for this step.
565
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
566
+ memory allocated for this step.
567
+ tmpfs_path : str, optional, default None
568
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
569
+ inferentia : int, default 0
570
+ Number of Inferentia chips required for this step.
571
+ trainium : int, default None
572
+ Alias for inferentia. Use only one of the two.
573
+ efa : int, default 0
574
+ Number of elastic fabric adapter network devices to attach to container
575
+ ephemeral_storage : int, default None
576
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
577
+ This is only relevant for Fargate compute environments
578
+ log_driver: str, optional, default None
579
+ The log driver to use for the Amazon ECS container.
580
+ log_options: List[str], optional, default None
581
+ List of strings containing options for the chosen log driver. The configurable values
582
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
583
+ Example: [`awslogs-group:aws/batch/job`]
786
584
  """
787
585
  ...
788
586
 
789
587
  @typing.overload
790
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
588
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
791
589
  ...
792
590
 
793
591
  @typing.overload
794
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
592
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
795
593
  ...
796
594
 
797
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
595
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
798
596
  """
799
- Specifies the Conda environment for the step.
800
-
801
- Information in this decorator will augment any
802
- attributes set in the `@conda_base` flow-level decorator. Hence,
803
- you can use `@conda_base` to set packages required by all
804
- steps and use `@conda` to specify step-specific overrides.
597
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
805
598
 
806
599
 
807
600
  Parameters
808
601
  ----------
809
- packages : Dict[str, str], default {}
810
- Packages to use for this step. The key is the name of the package
811
- and the value is the version to use.
812
- libraries : Dict[str, str], default {}
813
- Supported for backward compatibility. When used with packages, packages will take precedence.
814
- python : str, optional, default None
815
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
816
- that the version used will correspond to the version of the Python interpreter used to start the run.
817
- disabled : bool, default False
818
- If set to True, disables @conda.
602
+ cpu : int, default 1
603
+ Number of CPUs required for this step. If `@resources` is
604
+ also present, the maximum value from all decorators is used.
605
+ gpu : int, default 0
606
+ Number of GPUs required for this step. If `@resources` is
607
+ also present, the maximum value from all decorators is used.
608
+ memory : int, default 4096
609
+ Memory size (in MB) required for this step. If
610
+ `@resources` is also present, the maximum value from all decorators is
611
+ used.
612
+ image : str, optional, default None
613
+ Docker image to use when launching on AWS Batch. If not specified, and
614
+ METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
615
+ not, a default Docker image mapping to the current version of Python is used.
616
+ queue : str, default METAFLOW_BATCH_JOB_QUEUE
617
+ AWS Batch Job Queue to submit the job to.
618
+ iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
619
+ AWS IAM role that AWS Batch container uses to access AWS cloud resources.
620
+ execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
621
+ AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
622
+ (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
623
+ shared_memory : int, optional, default None
624
+ The value for the size (in MiB) of the /dev/shm volume for this step.
625
+ This parameter maps to the `--shm-size` option in Docker.
626
+ max_swap : int, optional, default None
627
+ The total amount of swap memory (in MiB) a container can use for this
628
+ step. This parameter is translated to the `--memory-swap` option in
629
+ Docker where the value is the sum of the container memory plus the
630
+ `max_swap` value.
631
+ swappiness : int, optional, default None
632
+ This allows you to tune memory swappiness behavior for this step.
633
+ A swappiness value of 0 causes swapping not to happen unless absolutely
634
+ necessary. A swappiness value of 100 causes pages to be swapped very
635
+ aggressively. Accepted values are whole numbers between 0 and 100.
636
+ use_tmpfs : bool, default False
637
+ This enables an explicit tmpfs mount for this step. Note that tmpfs is
638
+ not available on Fargate compute environments
639
+ tmpfs_tempdir : bool, default True
640
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
641
+ tmpfs_size : int, optional, default None
642
+ The value for the size (in MiB) of the tmpfs mount for this step.
643
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
644
+ memory allocated for this step.
645
+ tmpfs_path : str, optional, default None
646
+ Path to tmpfs mount for this step. Defaults to /metaflow_temp.
647
+ inferentia : int, default 0
648
+ Number of Inferentia chips required for this step.
649
+ trainium : int, default None
650
+ Alias for inferentia. Use only one of the two.
651
+ efa : int, default 0
652
+ Number of elastic fabric adapter network devices to attach to container
653
+ ephemeral_storage : int, default None
654
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
655
+ This is only relevant for Fargate compute environments
656
+ log_driver: str, optional, default None
657
+ The log driver to use for the Amazon ECS container.
658
+ log_options: List[str], optional, default None
659
+ List of strings containing options for the chosen log driver. The configurable values
660
+ depend on the `log driver` chosen. Validation of these options is not supported yet.
661
+ Example: [`awslogs-group:aws/batch/job`]
819
662
  """
820
663
  ...
821
664
 
822
665
  @typing.overload
823
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
666
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
824
667
  """
825
- Specifies that the step will success under all circumstances.
668
+ Specifies the number of times the task corresponding
669
+ to a step needs to be retried.
826
670
 
827
- The decorator will create an optional artifact, specified by `var`, which
828
- contains the exception raised. You can use it to detect the presence
829
- of errors, indicating that all happy-path artifacts produced by the step
830
- are missing.
671
+ This decorator is useful for handling transient errors, such as networking issues.
672
+ If your task contains operations that can't be retried safely, e.g. database updates,
673
+ it is advisable to annotate it with `@retry(times=0)`.
674
+
675
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
676
+ decorator will execute a no-op task after all retries have been exhausted,
677
+ ensuring that the flow execution can continue.
831
678
 
832
679
 
833
680
  Parameters
834
681
  ----------
835
- var : str, optional, default None
836
- Name of the artifact in which to store the caught exception.
837
- If not specified, the exception is not stored.
838
- print_exception : bool, default True
839
- Determines whether or not the exception is printed to
840
- stdout when caught.
682
+ times : int, default 3
683
+ Number of times to retry this task.
684
+ minutes_between_retries : int, default 2
685
+ Number of minutes between retries.
841
686
  """
842
687
  ...
843
688
 
844
689
  @typing.overload
845
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
690
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
846
691
  ...
847
692
 
848
693
  @typing.overload
849
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
694
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
850
695
  ...
851
696
 
852
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
697
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
853
698
  """
854
- Specifies that the step will success under all circumstances.
699
+ Specifies the number of times the task corresponding
700
+ to a step needs to be retried.
855
701
 
856
- The decorator will create an optional artifact, specified by `var`, which
857
- contains the exception raised. You can use it to detect the presence
858
- of errors, indicating that all happy-path artifacts produced by the step
859
- are missing.
702
+ This decorator is useful for handling transient errors, such as networking issues.
703
+ If your task contains operations that can't be retried safely, e.g. database updates,
704
+ it is advisable to annotate it with `@retry(times=0)`.
705
+
706
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
707
+ decorator will execute a no-op task after all retries have been exhausted,
708
+ ensuring that the flow execution can continue.
860
709
 
861
710
 
862
711
  Parameters
863
712
  ----------
864
- var : str, optional, default None
865
- Name of the artifact in which to store the caught exception.
866
- If not specified, the exception is not stored.
867
- print_exception : bool, default True
868
- Determines whether or not the exception is printed to
869
- stdout when caught.
713
+ times : int, default 3
714
+ Number of times to retry this task.
715
+ minutes_between_retries : int, default 2
716
+ Number of minutes between retries.
870
717
  """
871
718
  ...
872
719
 
873
- def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
720
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable', security_context: typing.Optional[typing.Dict[str, typing.Any]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
874
721
  """
875
- Specifies what flows belong to the same project.
876
-
877
- A project-specific namespace is created for all flows that
878
- use the same `@project(name)`.
722
+ Specifies that this step should execute on Kubernetes.
879
723
 
880
724
 
881
725
  Parameters
882
726
  ----------
883
- name : str
884
- Project name. Make sure that the name is unique amongst all
885
- projects that use the same production scheduler. The name may
886
- contain only lowercase alphanumeric characters and underscores.
887
-
888
- branch : Optional[str], default None
889
- The branch to use. If not specified, the branch is set to
890
- `user.<username>` unless `production` is set to `True`. This can
891
- also be set on the command line using `--branch` as a top-level option.
892
- It is an error to specify `branch` in the decorator and on the command line.
727
+ cpu : int, default 1
728
+ Number of CPUs required for this step. If `@resources` is
729
+ also present, the maximum value from all decorators is used.
730
+ memory : int, default 4096
731
+ Memory size (in MB) required for this step. If
732
+ `@resources` is also present, the maximum value from all decorators is
733
+ used.
734
+ disk : int, default 10240
735
+ Disk size (in MB) required for this step. If
736
+ `@resources` is also present, the maximum value from all decorators is
737
+ used.
738
+ image : str, optional, default None
739
+ Docker image to use when launching on Kubernetes. If not specified, and
740
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
741
+ not, a default Docker image mapping to the current version of Python is used.
742
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
743
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
744
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
745
+ Kubernetes service account to use when launching pod in Kubernetes.
746
+ secrets : List[str], optional, default None
747
+ Kubernetes secrets to use when launching pod in Kubernetes. These
748
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
749
+ in Metaflow configuration.
750
+ node_selector: Union[Dict[str,str], str], optional, default None
751
+ Kubernetes node selector(s) to apply to the pod running the task.
752
+ Can be passed in as a comma separated string of values e.g.
753
+ 'kubernetes.io/os=linux,kubernetes.io/arch=amd64' or as a dictionary
754
+ {'kubernetes.io/os': 'linux', 'kubernetes.io/arch': 'amd64'}
755
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
756
+ Kubernetes namespace to use when launching pod in Kubernetes.
757
+ gpu : int, optional, default None
758
+ Number of GPUs required for this step. A value of zero implies that
759
+ the scheduled node should not have GPUs.
760
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
761
+ The vendor of the GPUs to be used for this step.
762
+ tolerations : List[str], default []
763
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
764
+ Kubernetes tolerations to use when launching pod in Kubernetes.
765
+ labels: Dict[str, str], default: METAFLOW_KUBERNETES_LABELS
766
+ Kubernetes labels to use when launching pod in Kubernetes.
767
+ annotations: Dict[str, str], default: METAFLOW_KUBERNETES_ANNOTATIONS
768
+ Kubernetes annotations to use when launching pod in Kubernetes.
769
+ use_tmpfs : bool, default False
770
+ This enables an explicit tmpfs mount for this step.
771
+ tmpfs_tempdir : bool, default True
772
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
773
+ tmpfs_size : int, optional, default: None
774
+ The value for the size (in MiB) of the tmpfs mount for this step.
775
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
776
+ memory allocated for this step.
777
+ tmpfs_path : str, optional, default /metaflow_temp
778
+ Path to tmpfs mount for this step.
779
+ persistent_volume_claims : Dict[str, str], optional, default None
780
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
781
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
782
+ shared_memory: int, optional
783
+ Shared memory size (in MiB) required for this step
784
+ port: int, optional
785
+ Port number to specify in the Kubernetes job object
786
+ compute_pool : str, optional, default None
787
+ Compute pool to be used for for this step.
788
+ If not specified, any accessible compute pool within the perimeter is used.
789
+ hostname_resolution_timeout: int, default 10 * 60
790
+ Timeout in seconds for the workers tasks in the gang scheduled cluster to resolve the hostname of control task.
791
+ Only applicable when @parallel is used.
792
+ qos: str, default: Burstable
793
+ Quality of Service class to assign to the pod. Supported values are: Guaranteed, Burstable, BestEffort
893
794
 
894
- production : bool, default False
895
- Whether or not the branch is the production branch. This can also be set on the
896
- command line using `--production` as a top-level option. It is an error to specify
897
- `production` in the decorator and on the command line.
898
- The project branch name will be:
899
- - if `branch` is specified:
900
- - if `production` is True: `prod.<branch>`
901
- - if `production` is False: `test.<branch>`
902
- - if `branch` is not specified:
903
- - if `production` is True: `prod`
904
- - if `production` is False: `user.<username>`
795
+ security_context: Dict[str, Any], optional, default None
796
+ Container security context. Applies to the task container. Allows the following keys:
797
+ - privileged: bool, optional, default None
798
+ - allow_privilege_escalation: bool, optional, default None
799
+ - run_as_user: int, optional, default None
800
+ - run_as_group: int, optional, default None
801
+ - run_as_non_root: bool, optional, default None
905
802
  """
906
803
  ...
907
804
 
908
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
805
+ @typing.overload
806
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
909
807
  """
910
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
911
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
808
+ Specifies secrets to be retrieved and injected as environment variables prior to
809
+ the execution of a step.
912
810
 
913
811
 
914
812
  Parameters
915
813
  ----------
916
- timeout : int
917
- Time, in seconds before the task times out and fails. (Default: 3600)
918
- poke_interval : int
919
- Time in seconds that the job should wait in between each try. (Default: 60)
920
- mode : str
921
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
922
- exponential_backoff : bool
923
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
924
- pool : str
925
- the slot pool this task should run in,
926
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
927
- soft_fail : bool
928
- Set to true to mark the task as SKIPPED on failure. (Default: False)
929
- name : str
930
- Name of the sensor on Airflow
931
- description : str
932
- Description of sensor in the Airflow UI
933
- external_dag_id : str
934
- The dag_id that contains the task you want to wait for.
935
- external_task_ids : List[str]
936
- The list of task_ids that you want to wait for.
937
- If None (default value) the sensor waits for the DAG. (Default: None)
938
- allowed_states : List[str]
939
- Iterable of allowed states, (Default: ['success'])
940
- failed_states : List[str]
941
- Iterable of failed or dis-allowed states. (Default: None)
942
- execution_delta : datetime.timedelta
943
- time difference with the previous execution to look at,
944
- the default is the same logical date as the current task or DAG. (Default: None)
945
- check_existence: bool
946
- Set to True to check if the external task exists or check if
947
- the DAG to wait for exists. (Default: True)
814
+ sources : List[Union[str, Dict[str, Any]]], default: []
815
+ List of secret specs, defining how the secrets are to be retrieved
948
816
  """
949
817
  ...
950
818
 
951
819
  @typing.overload
952
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
953
- """
954
- Specifies the PyPI packages for all steps of the flow.
955
-
956
- Use `@pypi_base` to set common packages required by all
957
- steps and use `@pypi` to specify step-specific overrides.
958
-
959
- Parameters
960
- ----------
961
- packages : Dict[str, str], default: {}
962
- Packages to use for this flow. The key is the name of the package
963
- and the value is the version to use.
964
- python : str, optional, default: None
965
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
966
- that the version used will correspond to the version of the Python interpreter used to start the run.
967
- """
820
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
968
821
  ...
969
822
 
970
823
  @typing.overload
971
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
824
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
972
825
  ...
973
826
 
974
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
827
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
975
828
  """
976
- Specifies the PyPI packages for all steps of the flow.
829
+ Specifies secrets to be retrieved and injected as environment variables prior to
830
+ the execution of a step.
977
831
 
978
- Use `@pypi_base` to set common packages required by all
979
- steps and use `@pypi` to specify step-specific overrides.
980
832
 
981
833
  Parameters
982
834
  ----------
983
- packages : Dict[str, str], default: {}
984
- Packages to use for this flow. The key is the name of the package
985
- and the value is the version to use.
986
- python : str, optional, default: None
987
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
988
- that the version used will correspond to the version of the Python interpreter used to start the run.
835
+ sources : List[Union[str, Dict[str, Any]]], default: []
836
+ List of secret specs, defining how the secrets are to be retrieved
989
837
  """
990
838
  ...
991
839
 
992
840
  @typing.overload
993
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
841
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
994
842
  """
995
- Specifies the Conda environment for all steps of the flow.
996
-
997
- Use `@conda_base` to set common libraries required by all
998
- steps and use `@conda` to specify step-specific additions.
843
+ Specifies environment variables to be set prior to the execution of a step.
999
844
 
1000
845
 
1001
846
  Parameters
1002
847
  ----------
1003
- packages : Dict[str, str], default {}
1004
- Packages to use for this flow. The key is the name of the package
1005
- and the value is the version to use.
1006
- libraries : Dict[str, str], default {}
1007
- Supported for backward compatibility. When used with packages, packages will take precedence.
1008
- python : str, optional, default None
1009
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1010
- that the version used will correspond to the version of the Python interpreter used to start the run.
1011
- disabled : bool, default False
1012
- If set to True, disables Conda.
848
+ vars : Dict[str, str], default {}
849
+ Dictionary of environment variables to set.
1013
850
  """
1014
851
  ...
1015
852
 
1016
853
  @typing.overload
1017
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1018
- ...
1019
-
1020
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1021
- """
1022
- Specifies the Conda environment for all steps of the flow.
1023
-
1024
- Use `@conda_base` to set common libraries required by all
1025
- steps and use `@conda` to specify step-specific additions.
1026
-
1027
-
1028
- Parameters
1029
- ----------
1030
- packages : Dict[str, str], default {}
1031
- Packages to use for this flow. The key is the name of the package
1032
- and the value is the version to use.
1033
- libraries : Dict[str, str], default {}
1034
- Supported for backward compatibility. When used with packages, packages will take precedence.
1035
- python : str, optional, default None
1036
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1037
- that the version used will correspond to the version of the Python interpreter used to start the run.
1038
- disabled : bool, default False
1039
- If set to True, disables Conda.
1040
- """
854
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1041
855
  ...
1042
856
 
1043
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
857
+ @typing.overload
858
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
859
+ ...
860
+
861
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1044
862
  """
1045
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1046
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1047
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1048
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1049
- starts only after all sensors finish.
863
+ Specifies environment variables to be set prior to the execution of a step.
1050
864
 
1051
865
 
1052
866
  Parameters
1053
867
  ----------
1054
- timeout : int
1055
- Time, in seconds before the task times out and fails. (Default: 3600)
1056
- poke_interval : int
1057
- Time in seconds that the job should wait in between each try. (Default: 60)
1058
- mode : str
1059
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1060
- exponential_backoff : bool
1061
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1062
- pool : str
1063
- the slot pool this task should run in,
1064
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1065
- soft_fail : bool
1066
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1067
- name : str
1068
- Name of the sensor on Airflow
1069
- description : str
1070
- Description of sensor in the Airflow UI
1071
- bucket_key : Union[str, List[str]]
1072
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1073
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1074
- bucket_name : str
1075
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1076
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1077
- wildcard_match : bool
1078
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1079
- aws_conn_id : str
1080
- a reference to the s3 connection on Airflow. (Default: None)
1081
- verify : bool
1082
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
868
+ vars : Dict[str, str], default {}
869
+ Dictionary of environment variables to set.
1083
870
  """
1084
871
  ...
1085
872
 
@@ -1176,6 +963,57 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1176
963
  """
1177
964
  ...
1178
965
 
966
+ @typing.overload
967
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
968
+ """
969
+ Specifies the Conda environment for all steps of the flow.
970
+
971
+ Use `@conda_base` to set common libraries required by all
972
+ steps and use `@conda` to specify step-specific additions.
973
+
974
+
975
+ Parameters
976
+ ----------
977
+ packages : Dict[str, str], default {}
978
+ Packages to use for this flow. The key is the name of the package
979
+ and the value is the version to use.
980
+ libraries : Dict[str, str], default {}
981
+ Supported for backward compatibility. When used with packages, packages will take precedence.
982
+ python : str, optional, default None
983
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
984
+ that the version used will correspond to the version of the Python interpreter used to start the run.
985
+ disabled : bool, default False
986
+ If set to True, disables Conda.
987
+ """
988
+ ...
989
+
990
+ @typing.overload
991
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
992
+ ...
993
+
994
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
995
+ """
996
+ Specifies the Conda environment for all steps of the flow.
997
+
998
+ Use `@conda_base` to set common libraries required by all
999
+ steps and use `@conda` to specify step-specific additions.
1000
+
1001
+
1002
+ Parameters
1003
+ ----------
1004
+ packages : Dict[str, str], default {}
1005
+ Packages to use for this flow. The key is the name of the package
1006
+ and the value is the version to use.
1007
+ libraries : Dict[str, str], default {}
1008
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1009
+ python : str, optional, default None
1010
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1011
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1012
+ disabled : bool, default False
1013
+ If set to True, disables Conda.
1014
+ """
1015
+ ...
1016
+
1179
1017
  @typing.overload
1180
1018
  def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1181
1019
  """
@@ -1277,6 +1115,168 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1277
1115
  """
1278
1116
  ...
1279
1117
 
1118
def project(*, name: str, branch: typing.Optional[str] = None, production: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
    """
    Specifies what flows belong to the same project.

    A project-specific namespace is created for all flows that
    use the same `@project(name)`.

    Parameters
    ----------
    name : str
        Project name. Make sure that the name is unique amongst all
        projects that use the same production scheduler. The name may
        contain only lowercase alphanumeric characters and underscores.

    branch : Optional[str], default None
        The branch to use. If not specified, the branch is set to
        `user.<username>` unless `production` is set to `True`. This can
        also be set on the command line using `--branch` as a top-level option.
        It is an error to specify `branch` both in the decorator and on the command line.

    production : bool, default False
        Whether or not the branch is the production branch. This can also be set on the
        command line using `--production` as a top-level option. It is an error to specify
        `production` both in the decorator and on the command line.
        The project branch name will be:
        - if `branch` is specified:
            - if `production` is True: `prod.<branch>`
            - if `production` is False: `test.<branch>`
        - if `branch` is not specified:
            - if `production` is True: `prod`
            - if `production` is False: `user.<username>`
    """
    # Stub declaration only; the decorator's real behavior is implemented in
    # the metaflow package.
    ...
1152
+
1153
def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
    """
    The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
    This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.

    Parameters
    ----------
    timeout : int
        Time, in seconds, before the task times out and fails. (Default: 3600)
    poke_interval : int
        Time in seconds that the job should wait in between each try. (Default: 60)
    mode : str
        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
    exponential_backoff : bool
        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
    pool : str
        The slot pool this task should run in;
        slot pools are a way to limit concurrency for certain tasks. (Default: None)
    soft_fail : bool
        Set to true to mark the task as SKIPPED on failure. (Default: False)
    name : str
        Name of the sensor on Airflow
    description : str
        Description of the sensor in the Airflow UI
    external_dag_id : str
        The dag_id that contains the task you want to wait for.
    external_task_ids : List[str]
        The list of task_ids that you want to wait for.
        If None (default value) the sensor waits for the DAG. (Default: None)
    allowed_states : List[str]
        Iterable of allowed states. (Default: ['success'])
    failed_states : List[str]
        Iterable of failed or dis-allowed states. (Default: None)
    execution_delta : datetime.timedelta
        Time difference with the previous execution to look at;
        the default is the same logical date as the current task or DAG. (Default: None)
    check_existence : bool
        Set to True to check if the external task exists or check if
        the DAG to wait for exists. (Default: True)
    """
    # Stub declaration only; compiled into an Airflow sensor by `airflow create`
    # in the metaflow package.
    ...
1195
+
1196
def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
    """
    The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
    before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
    and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
    added as a flow decorator. Adding more than one decorator will ensure that the `start` step
    starts only after all sensors finish.

    Parameters
    ----------
    timeout : int
        Time, in seconds, before the task times out and fails. (Default: 3600)
    poke_interval : int
        Time in seconds that the job should wait in between each try. (Default: 60)
    mode : str
        How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
    exponential_backoff : bool
        Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
    pool : str
        The slot pool this task should run in;
        slot pools are a way to limit concurrency for certain tasks. (Default: None)
    soft_fail : bool
        Set to true to mark the task as SKIPPED on failure. (Default: False)
    name : str
        Name of the sensor on Airflow
    description : str
        Description of the sensor in the Airflow UI
    bucket_key : Union[str, List[str]]
        The key(s) being waited on. Supports a full s3:// style url or a relative path from the root level.
        When it's specified as a full s3:// url, please leave `bucket_name` as None.
    bucket_name : str
        Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
        When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
    wildcard_match : bool
        Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
    aws_conn_id : str
        A reference to the s3 connection on Airflow. (Default: None)
    verify : bool
        Whether or not to verify SSL certificates for the S3 connection. (Default: None)
    """
    # Stub declaration only; compiled into an Airflow sensor by `airflow create`
    # in the metaflow package.
    ...
1238
+
1239
@typing.overload
def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
    """
    Specifies the PyPI packages for all steps of the flow.

    Use `@pypi_base` to set common packages required by all
    steps and use `@pypi` to specify step-specific overrides.

    Parameters
    ----------
    packages : Dict[str, str], default: {}
        Packages to use for this flow. The key is the name of the package
        and the value is the version to use.
    python : str, optional, default: None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
    """
    # Keyword-argument overload: `@pypi_base(...)` returns a class decorator.
    ...
1257
+
1258
@typing.overload
def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
    # Bare-decorator overload: `@pypi_base` applied directly to a flow class
    # (no arguments, no parentheses) returns the class unchanged, typing-wise.
    ...
1261
+
1262
def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
    """
    Specifies the PyPI packages for all steps of the flow.

    Use `@pypi_base` to set common packages required by all
    steps and use `@pypi` to specify step-specific overrides.

    Parameters
    ----------
    packages : Dict[str, str], default: {}
        Packages to use for this flow. The key is the name of the package
        and the value is the version to use.
    python : str, optional, default: None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
    """
    # Stub declaration only: the runtime implementation lives in the metaflow
    # package itself; this body is intentionally empty.
    ...
1279
+
1280
1280
  @typing.overload
1281
1281
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1282
1282
  """