metaflow-stubs 2.13__py2.py3-none-any.whl → 2.13.1__py2.py3-none-any.whl

This diff compares the contents of two publicly available package versions as released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their public registries.
Files changed (144)
  1. metaflow-stubs/__init__.pyi +192 -192
  2. metaflow-stubs/cards.pyi +2 -2
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/cli_components/__init__.pyi +2 -2
  5. metaflow-stubs/cli_components/utils.pyi +2 -2
  6. metaflow-stubs/client/__init__.pyi +2 -2
  7. metaflow-stubs/client/core.pyi +5 -5
  8. metaflow-stubs/client/filecache.pyi +3 -3
  9. metaflow-stubs/events.pyi +2 -2
  10. metaflow-stubs/exception.pyi +2 -2
  11. metaflow-stubs/flowspec.pyi +5 -5
  12. metaflow-stubs/generated_for.txt +1 -1
  13. metaflow-stubs/includefile.pyi +4 -4
  14. metaflow-stubs/info_file.pyi +2 -2
  15. metaflow-stubs/metadata_provider/__init__.pyi +2 -2
  16. metaflow-stubs/metadata_provider/heartbeat.pyi +2 -2
  17. metaflow-stubs/metadata_provider/metadata.pyi +2 -2
  18. metaflow-stubs/metadata_provider/util.pyi +2 -2
  19. metaflow-stubs/metaflow_config.pyi +4 -2
  20. metaflow-stubs/metaflow_current.pyi +18 -18
  21. metaflow-stubs/multicore_utils.pyi +2 -2
  22. metaflow-stubs/parameters.pyi +4 -4
  23. metaflow-stubs/plugins/__init__.pyi +13 -13
  24. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +2 -2
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +2 -2
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -9
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +2 -4
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -5
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer_objects.pyi +4 -4
  39. metaflow-stubs/plugins/aws/__init__.pyi +3 -3
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +9 -10
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +4 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer_objects.pyi +3 -3
  57. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  61. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  62. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  63. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  64. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/components.pyi +5 -5
  73. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  77. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  79. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  80. metaflow-stubs/plugins/datatools/__init__.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  82. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  83. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  84. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  86. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  87. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  88. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  89. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  90. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  91. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  94. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  95. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  96. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +25 -2
  100. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +5 -22
  101. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +12 -3
  103. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +3 -7
  104. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +10 -4
  105. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  110. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/utils.pyi +6 -2
  113. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  117. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +4 -4
  124. metaflow-stubs/runner/deployer_impl.pyi +3 -3
  125. metaflow-stubs/runner/metaflow_runner.pyi +3 -3
  126. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  127. metaflow-stubs/runner/nbrun.pyi +2 -2
  128. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  129. metaflow-stubs/runner/utils.pyi +3 -3
  130. metaflow-stubs/system/__init__.pyi +2 -2
  131. metaflow-stubs/system/system_logger.pyi +3 -3
  132. metaflow-stubs/system/system_monitor.pyi +2 -2
  133. metaflow-stubs/tagging_util.pyi +2 -2
  134. metaflow-stubs/tuple_util.pyi +2 -2
  135. metaflow-stubs/user_configs/__init__.pyi +2 -2
  136. metaflow-stubs/user_configs/config_decorators.pyi +6 -6
  137. metaflow-stubs/user_configs/config_options.pyi +4 -4
  138. metaflow-stubs/user_configs/config_parameters.pyi +6 -6
  139. metaflow-stubs/version.pyi +2 -2
  140. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.1.dist-info}/METADATA +2 -2
  141. metaflow_stubs-2.13.1.dist-info/RECORD +144 -0
  142. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.1.dist-info}/WHEEL +1 -1
  143. metaflow_stubs-2.13.dist-info/RECORD +0 -144
  144. {metaflow_stubs-2.13.dist-info → metaflow_stubs-2.13.1.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.145823 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.375037 #
  ######################################################################################################

  from __future__ import annotations
@@ -35,8 +35,8 @@ from .user_configs.config_parameters import ConfigValue as ConfigValue
  from .user_configs.config_parameters import config_expr as config_expr
  from .user_configs.config_decorators import CustomFlowDecorator as CustomFlowDecorator
  from .user_configs.config_decorators import CustomStepDecorator as CustomStepDecorator
- from . import tuple_util as tuple_util
  from . import events as events
+ from . import tuple_util as tuple_util
  from . import runner as runner
  from . import plugins as plugins
  from .plugins.datatools.s3.s3 import S3 as S3
@@ -143,62 +143,41 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
- """
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.
  """
  ...
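As a reading aid for the `@retry` docstring shown in this hunk, the following sketch (not part of the diff) illustrates how a step-level retry is typically applied; it assumes Metaflow is installed, and the flow and step names are invented for illustration.

```python
from metaflow import FlowSpec, step, retry


class RetryDemoFlow(FlowSpec):

    # Retries transient failures; values mirror the stub's defaults shown above.
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        # A transient error raised here (e.g. a flaky network call) would cause
        # the task to be retried up to three times before failing the run.
        self.result = "fetched"
        self.next(self.end)

    @step
    def end(self):
        print(self.result)


if __name__ == "__main__":
    RetryDemoFlow()
```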

@@ -254,45 +233,6 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
  """
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
- """
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
- """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
- """
- ...
-
  @typing.overload
  def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
@@ -313,80 +253,41 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies a timeout for your step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful if this step may hang indefinitely.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies the resources needed when executing this step.
+ Specifies a timeout for your step.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful if this step may hang indefinitely.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
  """
  ...
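The `@timeout` docstring moved in this hunk notes that the duration arguments are additive and that a timeout surfaces as an exception handled by `@retry` and `@catch`. A minimal sketch of that combination (illustrative only, not part of the diff; names are invented):

```python
from metaflow import FlowSpec, step, timeout, retry, catch


class TimeoutDemoFlow(FlowSpec):

    @catch(var="timeout_error")    # after retries are exhausted, store the exception
    @retry(times=1)                # the timeout exception triggers one retry
    @timeout(hours=1, minutes=1)   # durations add up: effective limit is 1h 1m
    @step
    def start(self):
        # Long-running work would go here; exceeding the limit raises an exception.
        self.next(self.end)

    @step
    def end(self):
        # getattr keeps this safe whether or not the step actually timed out.
        err = getattr(self, "timeout_error", None)
        if err:
            print("start step timed out:", err)


if __name__ == "__main__":
    TimeoutDemoFlow()
```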

@@ -446,6 +347,35 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
  """
  ...

+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = 'KUBERNETES_IMAGE_PULL_POLICY', service_account: str = 'METAFLOW_KUBERNETES_SERVICE_ACCOUNT', secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = 'METAFLOW_KUBERNETES_NAMESPACE', gpu: typing.Optional[int] = None, gpu_vendor: str = 'KUBERNETES_GPU_VENDOR', tolerations: typing.List[str] = [], labels: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_LABELS', annotations: typing.Dict[str, str] = 'METAFLOW_KUBERNETES_ANNOTATIONS', use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = '/metaflow_temp', persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600, qos: str = 'Burstable') -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on Kubernetes.
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+ """
+ ...
+
  @typing.overload
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -477,6 +407,115 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+ """
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: typing.Optional[int] = None, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ """
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+ """
+ ...
+
+ @typing.overload
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ """
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = 'METAFLOW_BATCH_JOB_QUEUE', iam_role: str = 'METAFLOW_ECS_S3_ACCESS_IAM_ROLE', execution_role: str = 'METAFLOW_ECS_FARGATE_EXECUTION_ROLE', shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+ """
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ """
+ ...
+
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+ """
+ ...
+
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ """
+ Specifies the Conda environment for all steps of the flow.
+
+ Use `@conda_base` to set common libraries required by all
+ steps and use `@conda` to specify step-specific additions.
+ """
+ ...
+
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ """
+ ...
+
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+ """
+ ...
+
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -631,48 +670,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...

- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
- """
- ...
-
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -706,19 +703,22 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
  """
  ...
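The `@schedule` overloads that now occupy this position describe flow-level scheduling that takes effect on a production scheduler. A minimal hedged sketch (the cron expression and flow name are examples, not defaults):

```python
from metaflow import FlowSpec, step, schedule


@schedule(cron="0 6 * * *", timezone="UTC")   # illustrative: every day at 06:00 UTC
class NightlyDemoFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    NightlyDemoFlow()
```

Per the docstring above, the schedule applies when the flow is deployed to a production scheduler, not when it is run locally.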

metaflow-stubs/cards.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.066911 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.297521 #
  ######################################################################################################

  from __future__ import annotations
metaflow-stubs/cli.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.075830 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.306401 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.074464 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.305100 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.096971 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.327193 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.067212 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.297819 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,19 +1,19 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.071925 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.302604 #
  ######################################################################################################

  from __future__ import annotations

- import typing
  import metaflow
+ import typing
  if typing.TYPE_CHECKING:
-     import metaflow.client.core
+     import datetime
      import typing
      import metaflow.events
+     import metaflow.client.core
      import tarfile
-     import datetime

  from ..metaflow_current import current as current
  from ..events import Trigger as Trigger
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.096585 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.326833 #
  ######################################################################################################

  from __future__ import annotations
@@ -9,8 +9,8 @@ from __future__ import annotations
  import metaflow
  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.datastore.content_addressed_store
      import metaflow.exception
+     import metaflow.datastore.content_addressed_store

  from ..exception import MetaflowException as MetaflowException

metaflow-stubs/events.pyi CHANGED
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.061317 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.291818 #
  ######################################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ######################################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.13 #
- # Generated on 2024-12-20T07:38:30.052567 #
+ # MF version: 2.13.1 #
+ # Generated on 2025-01-06T13:29:15.282936 #
  ######################################################################################################

  from __future__ import annotations