metaflow-stubs 2.12.22__py2.py3-none-any.whl → 2.12.24__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (152) hide show
  1. metaflow-stubs/__init__.pyi +805 -414
  2. metaflow-stubs/cards.pyi +211 -5
  3. metaflow-stubs/cli.pyi +23 -3
  4. metaflow-stubs/client/__init__.pyi +128 -3
  5. metaflow-stubs/client/core.pyi +226 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +5 -2
  8. metaflow-stubs/events.pyi +21 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +71 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +158 -5
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +9 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +26 -23
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +70 -4
  21. metaflow-stubs/plugins/__init__.pyi +14 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +15 -3
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +66 -3
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +5 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +80 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +6 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +46 -4
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +46 -4
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +16 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +9 -5
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +98 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +34 -6
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +49 -4
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +103 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +16 -4
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +21 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +70 -4
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +5 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +49 -4
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +22 -4
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +61 -3
  68. metaflow-stubs/plugins/cards/card_client.pyi +34 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +5 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +8 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +52 -2
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +28 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +42 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +28 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +183 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +5 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +35 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +57 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +8 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +20 -3
  88. metaflow-stubs/plugins/datatools/__init__.pyi +63 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +16 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +73 -4
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +82 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +10 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +106 -2
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +24 -3
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +16 -4
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +66 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +100 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +5 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +29 -2
  117. metaflow-stubs/plugins/project_decorator.pyi +59 -2
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +45 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +33 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +33 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +21 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +11 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +6 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +36 -5
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +10 -3
  132. metaflow-stubs/plugins/timeout_decorator.pyi +24 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +70 -131
  137. metaflow-stubs/runner/metaflow_runner.pyi +118 -10
  138. metaflow-stubs/runner/nbdeploy.pyi +66 -2
  139. metaflow-stubs/runner/nbrun.pyi +79 -2
  140. metaflow-stubs/runner/subprocess_manager.pyi +16 -4
  141. metaflow-stubs/runner/utils.pyi +32 -2
  142. metaflow-stubs/system/__init__.pyi +3 -3
  143. metaflow-stubs/system/system_logger.pyi +3 -3
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.22.dist-info → metaflow_stubs-2.12.24.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.24.dist-info/RECORD +152 -0
  150. metaflow_stubs-2.12.22.dist-info/RECORD +0 -152
  151. {metaflow_stubs-2.12.22.dist-info → metaflow_stubs-2.12.24.dist-info}/WHEEL +0 -0
  152. {metaflow_stubs-2.12.22.dist-info → metaflow_stubs-2.12.24.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.22 #
4
- # Generated on 2024-09-20T00:45:49.586219 #
3
+ # MF version: 2.12.24 #
4
+ # Generated on 2024-10-04T11:37:46.949506 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.flowspec
12
- import metaflow.runner.metaflow_runner
13
- import metaflow.events
14
- import metaflow.client.core
15
- import metaflow.plugins.datatools.s3.s3
16
11
  import metaflow.metaflow_current
17
- import datetime
12
+ import metaflow._vendor.click.types
18
13
  import metaflow.datastore.inputs
14
+ import metaflow.client.core
19
15
  import typing
20
- import io
21
- import metaflow._vendor.click.types
16
+ import metaflow.events
22
17
  import metaflow.parameters
18
+ import metaflow.flowspec
19
+ import metaflow.plugins.datatools.s3.s3
20
+ import io
21
+ import datetime
22
+ import metaflow.runner.metaflow_runner
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
@@ -103,6 +103,14 @@ def metadata(ms: str) -> str:
103
103
  ...
104
104
 
105
105
  class FlowSpec(object, metaclass=metaflow.flowspec._FlowSpecMeta):
106
+ """
107
+ Main class from which all Flows should inherit.
108
+
109
+ Attributes
110
+ ----------
111
+ index
112
+ input
113
+ """
106
114
  def __init__(self, use_cli = True):
107
115
  """
108
116
  Construct a FlowSpec
@@ -324,6 +332,49 @@ class FlowSpec(object, metaclass=metaflow.flowspec._FlowSpecMeta):
324
332
  ...
325
333
 
326
334
  class Parameter(object, metaclass=type):
335
+ """
336
+ Defines a parameter for a flow.
337
+
338
+ Parameters must be instantiated as class variables in flow classes, e.g.
339
+ ```
340
+ class MyFlow(FlowSpec):
341
+ param = Parameter('myparam')
342
+ ```
343
+ in this case, the parameter is specified on the command line as
344
+ ```
345
+ python myflow.py run --myparam=5
346
+ ```
347
+ and its value is accessible through a read-only artifact like this:
348
+ ```
349
+ print(self.param == 5)
350
+ ```
351
+ Note that the user-visible parameter name, `myparam` above, can be
352
+ different from the artifact name, `param` above.
353
+
354
+ The parameter value is converted to a Python type based on the `type`
355
+ argument or to match the type of `default`, if it is set.
356
+
357
+ Parameters
358
+ ----------
359
+ name : str
360
+ User-visible parameter name.
361
+ default : str or float or int or bool or `JSONType` or a function.
362
+ Default value for the parameter. Use a special `JSONType` class to
363
+ indicate that the value must be a valid JSON object. A function
364
+ implies that the parameter corresponds to a *deploy-time parameter*.
365
+ The type of the default value is used as the parameter `type`.
366
+ type : Type, default None
367
+ If `default` is not specified, define the parameter type. Specify
368
+ one of `str`, `float`, `int`, `bool`, or `JSONType`. If None, defaults
369
+ to the type of `default` or `str` if none specified.
370
+ help : str, optional
371
+ Help text to show in `run --help`.
372
+ required : bool, default False
373
+ Require that the user specified a value for the parameter.
374
+ `required=True` implies that the `default` is not used.
375
+ show_default : bool, default True
376
+ If True, show the default value in the help text.
377
+ """
327
378
  def __init__(self, name: str, default: typing.Union[str, float, int, bool, typing.Dict[str, typing.Any], typing.Callable[[], typing.Union[str, float, int, bool, typing.Dict[str, typing.Any]]], None] = None, type: typing.Union[typing.Type[str], typing.Type[float], typing.Type[int], typing.Type[bool], metaflow.parameters.JSONTypeClass, None] = None, help: typing.Optional[str] = None, required: bool = False, show_default: bool = True, **kwargs: typing.Dict[str, typing.Any]):
328
379
  ...
329
380
  def __repr__(self):
@@ -353,6 +404,59 @@ class JSONTypeClass(metaflow._vendor.click.types.ParamType, metaclass=type):
353
404
  JSONType: metaflow.parameters.JSONTypeClass
354
405
 
355
406
  class S3(object, metaclass=type):
407
+ """
408
+ The Metaflow S3 client.
409
+
410
+ This object manages the connection to S3 and a temporary diretory that is used
411
+ to download objects. Note that in most cases when the data fits in memory, no local
412
+ disk IO is needed as operations are cached by the operating system, which makes
413
+ operations fast as long as there is enough memory available.
414
+
415
+ The easiest way is to use this object as a context manager:
416
+ ```
417
+ with S3() as s3:
418
+ data = [obj.blob for obj in s3.get_many(urls)]
419
+ print(data)
420
+ ```
421
+ The context manager takes care of creating and deleting a temporary directory
422
+ automatically. Without a context manager, you must call `.close()` to delete
423
+ the directory explicitly:
424
+ ```
425
+ s3 = S3()
426
+ data = [obj.blob for obj in s3.get_many(urls)]
427
+ s3.close()
428
+ ```
429
+ You can customize the location of the temporary directory with `tmproot`. It
430
+ defaults to the current working directory.
431
+
432
+ To make it easier to deal with object locations, the client can be initialized
433
+ with an S3 path prefix. There are three ways to handle locations:
434
+
435
+ 1. Use a `metaflow.Run` object or `self`, e.g. `S3(run=self)` which
436
+ initializes the prefix with the global `DATATOOLS_S3ROOT` path, combined
437
+ with the current run ID. This mode makes it easy to version data based
438
+ on the run ID consistently. You can use the `bucket` and `prefix` to
439
+ override parts of `DATATOOLS_S3ROOT`.
440
+
441
+ 2. Specify an S3 prefix explicitly with `s3root`,
442
+ e.g. `S3(s3root='s3://mybucket/some/path')`.
443
+
444
+ 3. Specify nothing, i.e. `S3()`, in which case all operations require
445
+ a full S3 url prefixed with `s3://`.
446
+
447
+ Parameters
448
+ ----------
449
+ tmproot : str, default: '.'
450
+ Where to store the temporary directory.
451
+ bucket : str, optional
452
+ Override the bucket from `DATATOOLS_S3ROOT` when `run` is specified.
453
+ prefix : str, optional
454
+ Override the path from `DATATOOLS_S3ROOT` when `run` is specified.
455
+ run : FlowSpec or Run, optional
456
+ Derive path prefix from the current or a past run ID, e.g. S3(run=self).
457
+ s3root : str, optional
458
+ If `run` is not specified, use this as the S3 prefix.
459
+ """
356
460
  @classmethod
357
461
  def get_root_from_config(cls, echo, create_on_absent = True):
358
462
  ...
@@ -644,6 +748,33 @@ class S3(object, metaclass=type):
644
748
  ...
645
749
 
646
750
  class IncludeFile(metaflow.parameters.Parameter, metaclass=type):
751
+ """
752
+ Includes a local file as a parameter for the flow.
753
+
754
+ `IncludeFile` behaves like `Parameter` except that it reads its value from a file instead of
755
+ the command line. The user provides a path to a file on the command line. The file contents
756
+ are saved as a read-only artifact which is available in all steps of the flow.
757
+
758
+ Parameters
759
+ ----------
760
+ name : str
761
+ User-visible parameter name.
762
+ default : Union[str, Callable[ParameterContext, str]]
763
+ Default path to a local file. A function
764
+ implies that the parameter corresponds to a *deploy-time parameter*.
765
+ is_text : bool, default True
766
+ Convert the file contents to a string using the provided `encoding`.
767
+ If False, the artifact is stored in `bytes`.
768
+ encoding : str, optional, default 'utf-8'
769
+ Use this encoding to decode the file contexts if `is_text=True`.
770
+ required : bool, default False
771
+ Require that the user specified a value for the parameter.
772
+ `required=True` implies that the `default` is not used.
773
+ help : str, optional
774
+ Help text to show in `run --help`.
775
+ show_default : bool, default True
776
+ If True, show the default value in the help text.
777
+ """
647
778
  def __init__(self, name: str, required: bool = False, is_text: bool = True, encoding: str = "utf-8", help: typing.Optional[str] = None, **kwargs: typing.Dict[str, str]):
648
779
  ...
649
780
  def load_parameter(self, v):
@@ -800,6 +931,113 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
800
931
  """
801
932
  ...
802
933
 
934
+ @typing.overload
935
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
936
+ """
937
+ Specifies a timeout for your step.
938
+
939
+ This decorator is useful if this step may hang indefinitely.
940
+
941
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
942
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
943
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
944
+
945
+ Note that all the values specified in parameters are added together so if you specify
946
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
947
+
948
+ Parameters
949
+ ----------
950
+ seconds : int, default 0
951
+ Number of seconds to wait prior to timing out.
952
+ minutes : int, default 0
953
+ Number of minutes to wait prior to timing out.
954
+ hours : int, default 0
955
+ Number of hours to wait prior to timing out.
956
+ """
957
+ ...
958
+
959
+ @typing.overload
960
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
961
+ ...
962
+
963
+ @typing.overload
964
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
965
+ ...
966
+
967
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
968
+ """
969
+ Specifies a timeout for your step.
970
+
971
+ This decorator is useful if this step may hang indefinitely.
972
+
973
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
974
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
975
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
976
+
977
+ Note that all the values specified in parameters are added together so if you specify
978
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
979
+
980
+ Parameters
981
+ ----------
982
+ seconds : int, default 0
983
+ Number of seconds to wait prior to timing out.
984
+ minutes : int, default 0
985
+ Number of minutes to wait prior to timing out.
986
+ hours : int, default 0
987
+ Number of hours to wait prior to timing out.
988
+ """
989
+ ...
990
+
991
+ @typing.overload
992
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
993
+ """
994
+ Specifies environment variables to be set prior to the execution of a step.
995
+
996
+ Parameters
997
+ ----------
998
+ vars : Dict[str, str], default {}
999
+ Dictionary of environment variables to set.
1000
+ """
1001
+ ...
1002
+
1003
+ @typing.overload
1004
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1005
+ ...
1006
+
1007
+ @typing.overload
1008
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1009
+ ...
1010
+
1011
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1012
+ """
1013
+ Specifies environment variables to be set prior to the execution of a step.
1014
+
1015
+ Parameters
1016
+ ----------
1017
+ vars : Dict[str, str], default {}
1018
+ Dictionary of environment variables to set.
1019
+ """
1020
+ ...
1021
+
1022
+ @typing.overload
1023
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1024
+ """
1025
+ Decorator prototype for all step decorators. This function gets specialized
1026
+ and imported for all decorators types by _import_plugin_decorators().
1027
+ """
1028
+ ...
1029
+
1030
+ @typing.overload
1031
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1032
+ ...
1033
+
1034
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1035
+ """
1036
+ Decorator prototype for all step decorators. This function gets specialized
1037
+ and imported for all decorators types by _import_plugin_decorators().
1038
+ """
1039
+ ...
1040
+
803
1041
  @typing.overload
804
1042
  def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
805
1043
  """
@@ -850,88 +1088,53 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
850
1088
  ...
851
1089
 
852
1090
  @typing.overload
853
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1091
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
854
1092
  """
855
- Specifies the number of times the task corresponding
856
- to a step needs to be retried.
857
-
858
- This decorator is useful for handling transient errors, such as networking issues.
859
- If your task contains operations that can't be retried safely, e.g. database updates,
860
- it is advisable to annotate it with `@retry(times=0)`.
1093
+ Creates a human-readable report, a Metaflow Card, after this step completes.
861
1094
 
862
- This can be used in conjunction with the `@catch` decorator. The `@catch`
863
- decorator will execute a no-op task after all retries have been exhausted,
864
- ensuring that the flow execution can continue.
1095
+ Note that you may add multiple `@card` decorators in a step with different parameters.
865
1096
 
866
1097
  Parameters
867
1098
  ----------
868
- times : int, default 3
869
- Number of times to retry this task.
870
- minutes_between_retries : int, default 2
871
- Number of minutes between retries.
1099
+ type : str, default 'default'
1100
+ Card type.
1101
+ id : str, optional, default None
1102
+ If multiple cards are present, use this id to identify this card.
1103
+ options : Dict[str, Any], default {}
1104
+ Options passed to the card. The contents depend on the card type.
1105
+ timeout : int, default 45
1106
+ Interrupt reporting if it takes more than this many seconds.
1107
+
1108
+
872
1109
  """
873
1110
  ...
874
1111
 
875
1112
  @typing.overload
876
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1113
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
877
1114
  ...
878
1115
 
879
1116
  @typing.overload
880
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1117
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
881
1118
  ...
882
1119
 
883
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1120
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
884
1121
  """
885
- Specifies the number of times the task corresponding
886
- to a step needs to be retried.
887
-
888
- This decorator is useful for handling transient errors, such as networking issues.
889
- If your task contains operations that can't be retried safely, e.g. database updates,
890
- it is advisable to annotate it with `@retry(times=0)`.
1122
+ Creates a human-readable report, a Metaflow Card, after this step completes.
891
1123
 
892
- This can be used in conjunction with the `@catch` decorator. The `@catch`
893
- decorator will execute a no-op task after all retries have been exhausted,
894
- ensuring that the flow execution can continue.
1124
+ Note that you may add multiple `@card` decorators in a step with different parameters.
895
1125
 
896
1126
  Parameters
897
1127
  ----------
898
- times : int, default 3
899
- Number of times to retry this task.
900
- minutes_between_retries : int, default 2
901
- Number of minutes between retries.
902
- """
903
- ...
904
-
905
- @typing.overload
906
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
907
- """
908
- Specifies secrets to be retrieved and injected as environment variables prior to
909
- the execution of a step.
1128
+ type : str, default 'default'
1129
+ Card type.
1130
+ id : str, optional, default None
1131
+ If multiple cards are present, use this id to identify this card.
1132
+ options : Dict[str, Any], default {}
1133
+ Options passed to the card. The contents depend on the card type.
1134
+ timeout : int, default 45
1135
+ Interrupt reporting if it takes more than this many seconds.
910
1136
 
911
- Parameters
912
- ----------
913
- sources : List[Union[str, Dict[str, Any]]], default: []
914
- List of secret specs, defining how the secrets are to be retrieved
915
- """
916
- ...
917
-
918
- @typing.overload
919
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
920
- ...
921
-
922
- @typing.overload
923
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
924
- ...
925
-
926
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
927
- """
928
- Specifies secrets to be retrieved and injected as environment variables prior to
929
- the execution of a step.
930
1137
 
931
- Parameters
932
- ----------
933
- sources : List[Union[str, Dict[str, Any]]], default: []
934
- List of secret specs, defining how the secrets are to be retrieved
935
1138
  """
936
1139
  ...
937
1140
 
@@ -985,66 +1188,9 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
985
1188
  ...
986
1189
 
987
1190
  @typing.overload
988
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1191
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
989
1192
  """
990
- Specifies the Conda environment for the step.
991
-
992
- Information in this decorator will augment any
993
- attributes set in the `@conda_base` flow-level decorator. Hence,
994
- you can use `@conda_base` to set packages required by all
995
- steps and use `@conda` to specify step-specific overrides.
996
-
997
- Parameters
998
- ----------
999
- packages : Dict[str, str], default {}
1000
- Packages to use for this step. The key is the name of the package
1001
- and the value is the version to use.
1002
- libraries : Dict[str, str], default {}
1003
- Supported for backward compatibility. When used with packages, packages will take precedence.
1004
- python : str, optional, default None
1005
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1006
- that the version used will correspond to the version of the Python interpreter used to start the run.
1007
- disabled : bool, default False
1008
- If set to True, disables @conda.
1009
- """
1010
- ...
1011
-
1012
- @typing.overload
1013
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1014
- ...
1015
-
1016
- @typing.overload
1017
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1018
- ...
1019
-
1020
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1021
- """
1022
- Specifies the Conda environment for the step.
1023
-
1024
- Information in this decorator will augment any
1025
- attributes set in the `@conda_base` flow-level decorator. Hence,
1026
- you can use `@conda_base` to set packages required by all
1027
- steps and use `@conda` to specify step-specific overrides.
1028
-
1029
- Parameters
1030
- ----------
1031
- packages : Dict[str, str], default {}
1032
- Packages to use for this step. The key is the name of the package
1033
- and the value is the version to use.
1034
- libraries : Dict[str, str], default {}
1035
- Supported for backward compatibility. When used with packages, packages will take precedence.
1036
- python : str, optional, default None
1037
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1038
- that the version used will correspond to the version of the Python interpreter used to start the run.
1039
- disabled : bool, default False
1040
- If set to True, disables @conda.
1041
- """
1042
- ...
1043
-
1044
- @typing.overload
1045
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1046
- """
1047
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1193
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1048
1194
 
1049
1195
  Parameters
1050
1196
  ----------
@@ -1189,141 +1335,92 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1189
1335
  ...
1190
1336
 
1191
1337
  @typing.overload
1192
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1193
- """
1194
- Specifies environment variables to be set prior to the execution of a step.
1195
-
1196
- Parameters
1197
- ----------
1198
- vars : Dict[str, str], default {}
1199
- Dictionary of environment variables to set.
1200
- """
1201
- ...
1202
-
1203
- @typing.overload
1204
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1205
- ...
1206
-
1207
- @typing.overload
1208
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1209
- ...
1210
-
1211
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1212
- """
1213
- Specifies environment variables to be set prior to the execution of a step.
1214
-
1215
- Parameters
1216
- ----------
1217
- vars : Dict[str, str], default {}
1218
- Dictionary of environment variables to set.
1219
- """
1220
- ...
1221
-
1222
- @typing.overload
1223
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1338
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1224
1339
  """
1225
- Specifies a timeout for your step.
1226
-
1227
- This decorator is useful if this step may hang indefinitely.
1228
-
1229
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1230
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1231
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1340
+ Specifies the Conda environment for the step.
1232
1341
 
1233
- Note that all the values specified in parameters are added together so if you specify
1234
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1342
+ Information in this decorator will augment any
1343
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1344
+ you can use `@conda_base` to set packages required by all
1345
+ steps and use `@conda` to specify step-specific overrides.
1235
1346
 
1236
1347
  Parameters
1237
1348
  ----------
1238
- seconds : int, default 0
1239
- Number of seconds to wait prior to timing out.
1240
- minutes : int, default 0
1241
- Number of minutes to wait prior to timing out.
1242
- hours : int, default 0
1243
- Number of hours to wait prior to timing out.
1349
+ packages : Dict[str, str], default {}
1350
+ Packages to use for this step. The key is the name of the package
1351
+ and the value is the version to use.
1352
+ libraries : Dict[str, str], default {}
1353
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1354
+ python : str, optional, default None
1355
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1356
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1357
+ disabled : bool, default False
1358
+ If set to True, disables @conda.
1244
1359
  """
1245
1360
  ...
1246
1361
 
1247
1362
  @typing.overload
1248
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1363
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1249
1364
  ...
1250
1365
 
1251
1366
  @typing.overload
1252
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1367
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1253
1368
  ...
1254
1369
 
1255
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1370
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1256
1371
  """
1257
- Specifies a timeout for your step.
1258
-
1259
- This decorator is useful if this step may hang indefinitely.
1260
-
1261
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1262
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1263
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1372
+ Specifies the Conda environment for the step.
1264
1373
 
1265
- Note that all the values specified in parameters are added together so if you specify
1266
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1374
+ Information in this decorator will augment any
1375
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1376
+ you can use `@conda_base` to set packages required by all
1377
+ steps and use `@conda` to specify step-specific overrides.
1267
1378
 
1268
1379
  Parameters
1269
1380
  ----------
1270
- seconds : int, default 0
1271
- Number of seconds to wait prior to timing out.
1272
- minutes : int, default 0
1273
- Number of minutes to wait prior to timing out.
1274
- hours : int, default 0
1275
- Number of hours to wait prior to timing out.
1381
+ packages : Dict[str, str], default {}
1382
+ Packages to use for this step. The key is the name of the package
1383
+ and the value is the version to use.
1384
+ libraries : Dict[str, str], default {}
1385
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1386
+ python : str, optional, default None
1387
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1388
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1389
+ disabled : bool, default False
1390
+ If set to True, disables @conda.
1276
1391
  """
1277
1392
  ...
1278
1393
 
1279
1394
  @typing.overload
1280
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1395
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1281
1396
  """
1282
- Creates a human-readable report, a Metaflow Card, after this step completes.
1283
-
1284
- Note that you may add multiple `@card` decorators in a step with different parameters.
1397
+ Specifies secrets to be retrieved and injected as environment variables prior to
1398
+ the execution of a step.
1285
1399
 
1286
1400
  Parameters
1287
1401
  ----------
1288
- type : str, default 'default'
1289
- Card type.
1290
- id : str, optional, default None
1291
- If multiple cards are present, use this id to identify this card.
1292
- options : Dict[str, Any], default {}
1293
- Options passed to the card. The contents depend on the card type.
1294
- timeout : int, default 45
1295
- Interrupt reporting if it takes more than this many seconds.
1296
-
1297
-
1402
+ sources : List[Union[str, Dict[str, Any]]], default: []
1403
+ List of secret specs, defining how the secrets are to be retrieved
1298
1404
  """
1299
1405
  ...
1300
1406
 
1301
1407
  @typing.overload
1302
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1408
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1303
1409
  ...
1304
1410
 
1305
1411
  @typing.overload
1306
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1412
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1307
1413
  ...
1308
1414
 
1309
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1415
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1310
1416
  """
1311
- Creates a human-readable report, a Metaflow Card, after this step completes.
1312
-
1313
- Note that you may add multiple `@card` decorators in a step with different parameters.
1417
+ Specifies secrets to be retrieved and injected as environment variables prior to
1418
+ the execution of a step.
1314
1419
 
1315
1420
  Parameters
1316
1421
  ----------
1317
- type : str, default 'default'
1318
- Card type.
1319
- id : str, optional, default None
1320
- If multiple cards are present, use this id to identify this card.
1321
- options : Dict[str, Any], default {}
1322
- Options passed to the card. The contents depend on the card type.
1323
- timeout : int, default 45
1324
- Interrupt reporting if it takes more than this many seconds.
1325
-
1326
-
1422
+ sources : List[Union[str, Dict[str, Any]]], default: []
1423
+ List of secret specs, defining how the secrets are to be retrieved
1327
1424
  """
1328
1425
  ...
1329
1426
 
@@ -1394,21 +1491,227 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1394
1491
  ...
1395
1492
 
1396
1493
  @typing.overload
1397
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1494
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1398
1495
  """
1399
- Decorator prototype for all step decorators. This function gets specialized
1400
- and imported for all decorators types by _import_plugin_decorators().
1496
+ Specifies the number of times the task corresponding
1497
+ to a step needs to be retried.
1498
+
1499
+ This decorator is useful for handling transient errors, such as networking issues.
1500
+ If your task contains operations that can't be retried safely, e.g. database updates,
1501
+ it is advisable to annotate it with `@retry(times=0)`.
1502
+
1503
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1504
+ decorator will execute a no-op task after all retries have been exhausted,
1505
+ ensuring that the flow execution can continue.
1506
+
1507
+ Parameters
1508
+ ----------
1509
+ times : int, default 3
1510
+ Number of times to retry this task.
1511
+ minutes_between_retries : int, default 2
1512
+ Number of minutes between retries.
1401
1513
  """
1402
1514
  ...
1403
1515
 
1404
1516
  @typing.overload
1405
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1517
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1406
1518
  ...
1407
1519
 
1408
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1520
+ @typing.overload
1521
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1522
+ ...
1523
+
1524
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1409
1525
  """
1410
- Decorator prototype for all step decorators. This function gets specialized
1411
- and imported for all decorators types by _import_plugin_decorators().
1526
+ Specifies the number of times the task corresponding
1527
+ to a step needs to be retried.
1528
+
1529
+ This decorator is useful for handling transient errors, such as networking issues.
1530
+ If your task contains operations that can't be retried safely, e.g. database updates,
1531
+ it is advisable to annotate it with `@retry(times=0)`.
1532
+
1533
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1534
+ decorator will execute a no-op task after all retries have been exhausted,
1535
+ ensuring that the flow execution can continue.
1536
+
1537
+ Parameters
1538
+ ----------
1539
+ times : int, default 3
1540
+ Number of times to retry this task.
1541
+ minutes_between_retries : int, default 2
1542
+ Number of minutes between retries.
1543
+ """
1544
+ ...
1545
+
1546
+ @typing.overload
1547
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1548
+ """
1549
+ Specifies the Conda environment for all steps of the flow.
1550
+
1551
+ Use `@conda_base` to set common libraries required by all
1552
+ steps and use `@conda` to specify step-specific additions.
1553
+
1554
+ Parameters
1555
+ ----------
1556
+ packages : Dict[str, str], default {}
1557
+ Packages to use for this flow. The key is the name of the package
1558
+ and the value is the version to use.
1559
+ libraries : Dict[str, str], default {}
1560
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1561
+ python : str, optional, default None
1562
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1563
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1564
+ disabled : bool, default False
1565
+ If set to True, disables Conda.
1566
+ """
1567
+ ...
1568
+
1569
+ @typing.overload
1570
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1571
+ ...
1572
+
1573
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1574
+ """
1575
+ Specifies the Conda environment for all steps of the flow.
1576
+
1577
+ Use `@conda_base` to set common libraries required by all
1578
+ steps and use `@conda` to specify step-specific additions.
1579
+
1580
+ Parameters
1581
+ ----------
1582
+ packages : Dict[str, str], default {}
1583
+ Packages to use for this flow. The key is the name of the package
1584
+ and the value is the version to use.
1585
+ libraries : Dict[str, str], default {}
1586
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1587
+ python : str, optional, default None
1588
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1589
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1590
+ disabled : bool, default False
1591
+ If set to True, disables Conda.
1592
+ """
1593
+ ...
1594
+
1595
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1596
+ """
1597
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1598
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1599
+
1600
+ Parameters
1601
+ ----------
1602
+ timeout : int
1603
+ Time, in seconds before the task times out and fails. (Default: 3600)
1604
+ poke_interval : int
1605
+ Time in seconds that the job should wait in between each try. (Default: 60)
1606
+ mode : str
1607
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1608
+ exponential_backoff : bool
1609
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1610
+ pool : str
1611
+ the slot pool this task should run in,
1612
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1613
+ soft_fail : bool
1614
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1615
+ name : str
1616
+ Name of the sensor on Airflow
1617
+ description : str
1618
+ Description of sensor in the Airflow UI
1619
+ external_dag_id : str
1620
+ The dag_id that contains the task you want to wait for.
1621
+ external_task_ids : List[str]
1622
+ The list of task_ids that you want to wait for.
1623
+ If None (default value) the sensor waits for the DAG. (Default: None)
1624
+ allowed_states : List[str]
1625
+ Iterable of allowed states, (Default: ['success'])
1626
+ failed_states : List[str]
1627
+ Iterable of failed or dis-allowed states. (Default: None)
1628
+ execution_delta : datetime.timedelta
1629
+ time difference with the previous execution to look at,
1630
+ the default is the same logical date as the current task or DAG. (Default: None)
1631
+ check_existence: bool
1632
+ Set to True to check if the external task exists or check if
1633
+ the DAG to wait for exists. (Default: True)
1634
+ """
1635
+ ...
1636
+
1637
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1638
+ """
1639
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1640
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1641
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1642
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1643
+ starts only after all sensors finish.
1644
+
1645
+ Parameters
1646
+ ----------
1647
+ timeout : int
1648
+ Time, in seconds before the task times out and fails. (Default: 3600)
1649
+ poke_interval : int
1650
+ Time in seconds that the job should wait in between each try. (Default: 60)
1651
+ mode : str
1652
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1653
+ exponential_backoff : bool
1654
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1655
+ pool : str
1656
+ the slot pool this task should run in,
1657
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1658
+ soft_fail : bool
1659
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1660
+ name : str
1661
+ Name of the sensor on Airflow
1662
+ description : str
1663
+ Description of sensor in the Airflow UI
1664
+ bucket_key : Union[str, List[str]]
1665
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1666
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1667
+ bucket_name : str
1668
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1669
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1670
+ wildcard_match : bool
1671
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1672
+ aws_conn_id : str
1673
+ a reference to the s3 connection on Airflow. (Default: None)
1674
+ verify : bool
1675
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1676
+ """
1677
+ ...
1678
+
1679
+ @typing.overload
1680
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1681
+ """
1682
+ Specifies the PyPI packages for all steps of the flow.
1683
+
1684
+ Use `@pypi_base` to set common packages required by all
1685
+ steps and use `@pypi` to specify step-specific overrides.
1686
+ Parameters
1687
+ ----------
1688
+ packages : Dict[str, str], default: {}
1689
+ Packages to use for this flow. The key is the name of the package
1690
+ and the value is the version to use.
1691
+ python : str, optional, default: None
1692
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1693
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1694
+ """
1695
+ ...
1696
+
1697
+ @typing.overload
1698
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1699
+ ...
1700
+
1701
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1702
+ """
1703
+ Specifies the PyPI packages for all steps of the flow.
1704
+
1705
+ Use `@pypi_base` to set common packages required by all
1706
+ steps and use `@pypi` to specify step-specific overrides.
1707
+ Parameters
1708
+ ----------
1709
+ packages : Dict[str, str], default: {}
1710
+ Packages to use for this flow. The key is the name of the package
1711
+ and the value is the version to use.
1712
+ python : str, optional, default: None
1713
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1714
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1412
1715
  """
1413
1716
  ...
1414
1717
 
@@ -1515,87 +1818,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1515
1818
  """
1516
1819
  ...
1517
1820
 
1518
- @typing.overload
1519
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1520
- """
1521
- Specifies the PyPI packages for all steps of the flow.
1522
-
1523
- Use `@pypi_base` to set common packages required by all
1524
- steps and use `@pypi` to specify step-specific overrides.
1525
- Parameters
1526
- ----------
1527
- packages : Dict[str, str], default: {}
1528
- Packages to use for this flow. The key is the name of the package
1529
- and the value is the version to use.
1530
- python : str, optional, default: None
1531
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1532
- that the version used will correspond to the version of the Python interpreter used to start the run.
1533
- """
1534
- ...
1535
-
1536
- @typing.overload
1537
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1538
- ...
1539
-
1540
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1541
- """
1542
- Specifies the PyPI packages for all steps of the flow.
1543
-
1544
- Use `@pypi_base` to set common packages required by all
1545
- steps and use `@pypi` to specify step-specific overrides.
1546
- Parameters
1547
- ----------
1548
- packages : Dict[str, str], default: {}
1549
- Packages to use for this flow. The key is the name of the package
1550
- and the value is the version to use.
1551
- python : str, optional, default: None
1552
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1553
- that the version used will correspond to the version of the Python interpreter used to start the run.
1554
- """
1555
- ...
1556
-
1557
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1558
- """
1559
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1560
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1561
-
1562
- Parameters
1563
- ----------
1564
- timeout : int
1565
- Time, in seconds before the task times out and fails. (Default: 3600)
1566
- poke_interval : int
1567
- Time in seconds that the job should wait in between each try. (Default: 60)
1568
- mode : str
1569
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1570
- exponential_backoff : bool
1571
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1572
- pool : str
1573
- the slot pool this task should run in,
1574
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1575
- soft_fail : bool
1576
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1577
- name : str
1578
- Name of the sensor on Airflow
1579
- description : str
1580
- Description of sensor in the Airflow UI
1581
- external_dag_id : str
1582
- The dag_id that contains the task you want to wait for.
1583
- external_task_ids : List[str]
1584
- The list of task_ids that you want to wait for.
1585
- If None (default value) the sensor waits for the DAG. (Default: None)
1586
- allowed_states : List[str]
1587
- Iterable of allowed states, (Default: ['success'])
1588
- failed_states : List[str]
1589
- Iterable of failed or dis-allowed states. (Default: None)
1590
- execution_delta : datetime.timedelta
1591
- time difference with the previous execution to look at,
1592
- the default is the same logical date as the current task or DAG. (Default: None)
1593
- check_existence: bool
1594
- Set to True to check if the external task exists or check if
1595
- the DAG to wait for exists. (Default: True)
1596
- """
1597
- ...
1598
-
1599
1821
  @typing.overload
1600
1822
  def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1601
1823
  """
@@ -1645,45 +1867,21 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1645
1867
  """
1646
1868
  ...
1647
1869
 
1648
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1870
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1649
1871
  """
1650
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1651
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1652
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1653
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1654
- starts only after all sensors finish.
1872
+ Specifies what flows belong to the same project.
1873
+
1874
+ A project-specific namespace is created for all flows that
1875
+ use the same `@project(name)`.
1655
1876
 
1656
1877
  Parameters
1657
1878
  ----------
1658
- timeout : int
1659
- Time, in seconds before the task times out and fails. (Default: 3600)
1660
- poke_interval : int
1661
- Time in seconds that the job should wait in between each try. (Default: 60)
1662
- mode : str
1663
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1664
- exponential_backoff : bool
1665
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1666
- pool : str
1667
- the slot pool this task should run in,
1668
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1669
- soft_fail : bool
1670
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1671
1879
  name : str
1672
- Name of the sensor on Airflow
1673
- description : str
1674
- Description of sensor in the Airflow UI
1675
- bucket_key : Union[str, List[str]]
1676
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1677
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1678
- bucket_name : str
1679
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1680
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1681
- wildcard_match : bool
1682
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1683
- aws_conn_id : str
1684
- a reference to the s3 connection on Airflow. (Default: None)
1685
- verify : bool
1686
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1880
+ Project name. Make sure that the name is unique amongst all
1881
+ projects that use the same production scheduler. The name may
1882
+ contain only lowercase alphanumeric characters and underscores.
1883
+
1884
+
1687
1885
  """
1688
1886
  ...
1689
1887
 
@@ -1782,73 +1980,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1782
1980
  """
1783
1981
  ...
1784
1982
 
1785
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1786
- """
1787
- Specifies what flows belong to the same project.
1788
-
1789
- A project-specific namespace is created for all flows that
1790
- use the same `@project(name)`.
1791
-
1792
- Parameters
1793
- ----------
1794
- name : str
1795
- Project name. Make sure that the name is unique amongst all
1796
- projects that use the same production scheduler. The name may
1797
- contain only lowercase alphanumeric characters and underscores.
1798
-
1799
-
1800
- """
1801
- ...
1802
-
1803
- @typing.overload
1804
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1805
- """
1806
- Specifies the Conda environment for all steps of the flow.
1807
-
1808
- Use `@conda_base` to set common libraries required by all
1809
- steps and use `@conda` to specify step-specific additions.
1810
-
1811
- Parameters
1812
- ----------
1813
- packages : Dict[str, str], default {}
1814
- Packages to use for this flow. The key is the name of the package
1815
- and the value is the version to use.
1816
- libraries : Dict[str, str], default {}
1817
- Supported for backward compatibility. When used with packages, packages will take precedence.
1818
- python : str, optional, default None
1819
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1820
- that the version used will correspond to the version of the Python interpreter used to start the run.
1821
- disabled : bool, default False
1822
- If set to True, disables Conda.
1823
- """
1824
- ...
1825
-
1826
- @typing.overload
1827
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1828
- ...
1829
-
1830
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1831
- """
1832
- Specifies the Conda environment for all steps of the flow.
1833
-
1834
- Use `@conda_base` to set common libraries required by all
1835
- steps and use `@conda` to specify step-specific additions.
1836
-
1837
- Parameters
1838
- ----------
1839
- packages : Dict[str, str], default {}
1840
- Packages to use for this flow. The key is the name of the package
1841
- and the value is the version to use.
1842
- libraries : Dict[str, str], default {}
1843
- Supported for backward compatibility. When used with packages, packages will take precedence.
1844
- python : str, optional, default None
1845
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1846
- that the version used will correspond to the version of the Python interpreter used to start the run.
1847
- disabled : bool, default False
1848
- If set to True, disables Conda.
1849
- """
1850
- ...
1851
-
1852
1983
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1853
1984
  """
1854
1985
  Switch namespace to the one provided.
@@ -1927,6 +2058,19 @@ def default_metadata() -> str:
1927
2058
  ...
1928
2059
 
1929
2060
  class Metaflow(object, metaclass=type):
2061
+ """
2062
+ Entry point to all objects in the Metaflow universe.
2063
+
2064
+ This object can be used to list all the flows present either through the explicit property
2065
+ or by iterating over this object.
2066
+
2067
+ Attributes
2068
+ ----------
2069
+ flows : List[Flow]
2070
+ Returns the list of all `Flow` objects known to this metadata provider. Note that only
2071
+ flows present in the current namespace will be returned. A `Flow` is present in a namespace
2072
+ if it has at least one run in the namespace.
2073
+ """
1930
2074
  def __init__(self):
1931
2075
  ...
1932
2076
  @property
@@ -1978,6 +2122,17 @@ class Metaflow(object, metaclass=type):
1978
2122
  ...
1979
2123
 
1980
2124
  class Flow(metaflow.client.core.MetaflowObject, metaclass=type):
2125
+ """
2126
+ A Flow represents all existing flows with a certain name, in other words,
2127
+ classes derived from `FlowSpec`. A container of `Run` objects.
2128
+
2129
+ Attributes
2130
+ ----------
2131
+ latest_run : Run
2132
+ Latest `Run` (in progress or completed, successfully or not) of this flow.
2133
+ latest_successful_run : Run
2134
+ Latest successfully completed `Run` of this flow.
2135
+ """
1981
2136
  def __init__(self, *args, **kwargs):
1982
2137
  ...
1983
2138
  @property
@@ -2064,6 +2219,26 @@ class Flow(metaflow.client.core.MetaflowObject, metaclass=type):
2064
2219
  ...
2065
2220
 
2066
2221
  class Run(metaflow.client.core.MetaflowObject, metaclass=type):
2222
+ """
2223
+ A `Run` represents an execution of a `Flow`. It is a container of `Step`s.
2224
+
2225
+ Attributes
2226
+ ----------
2227
+ data : MetaflowData
2228
+ a shortcut to run['end'].task.data, i.e. data produced by this run.
2229
+ successful : bool
2230
+ True if the run completed successfully.
2231
+ finished : bool
2232
+ True if the run completed.
2233
+ finished_at : datetime
2234
+ Time this run finished.
2235
+ code : MetaflowCode
2236
+ Code package for this run (if present). See `MetaflowCode`.
2237
+ trigger : MetaflowTrigger
2238
+ Information about event(s) that triggered this run (if present). See `MetaflowTrigger`.
2239
+ end_task : Task
2240
+ `Task` for the end step (if it is present already).
2241
+ """
2067
2242
  def steps(self, *tags: str) -> typing.Iterator[metaflow.client.core.Step]:
2068
2243
  """
2069
2244
  [Legacy function - do not use]
@@ -2296,6 +2471,23 @@ class Run(metaflow.client.core.MetaflowObject, metaclass=type):
2296
2471
  ...
2297
2472
 
2298
2473
  class Step(metaflow.client.core.MetaflowObject, metaclass=type):
2474
+ """
2475
+ A `Step` represents a user-defined step, that is, a method annotated with the `@step` decorator.
2476
+
2477
+ It contains `Task` objects associated with the step, that is, all executions of the
2478
+ `Step`. The step may contain multiple `Task`s in the case of a foreach step.
2479
+
2480
+ Attributes
2481
+ ----------
2482
+ task : Task
2483
+ The first `Task` object in this step. This is a shortcut for retrieving the only
2484
+ task contained in a non-foreach step.
2485
+ finished_at : datetime
2486
+ Time when the latest `Task` of this step finished. Note that in the case of foreaches,
2487
+ this time may change during execution of the step.
2488
+ environment_info : Dict[str, Any]
2489
+ Information about the execution environment.
2490
+ """
2299
2491
  @property
2300
2492
  def task(self) -> typing.Optional[metaflow.client.core.Task]:
2301
2493
  """
@@ -2430,6 +2622,55 @@ class Step(metaflow.client.core.MetaflowObject, metaclass=type):
2430
2622
  ...
2431
2623
 
2432
2624
  class Task(metaflow.client.core.MetaflowObject, metaclass=type):
2625
+ """
2626
+ A `Task` represents an execution of a `Step`.
2627
+
2628
+ It contains all `DataArtifact` objects produced by the task as
2629
+ well as metadata related to execution.
2630
+
2631
+ Note that the `@retry` decorator may cause multiple attempts of
2632
+ the task to be present. Usually you want the latest attempt, which
2633
+ is what instantiating a `Task` object returns by default. If
2634
+ you need to e.g. retrieve logs from a failed attempt, you can
2635
+ explicitly get information about a specific attempt by using the
2636
+ following syntax when creating a task:
2637
+
2638
+ `Task('flow/run/step/task', attempt=<attempt>)`
2639
+
2640
+ where `attempt=0` corresponds to the first attempt etc.
2641
+
2642
+ Attributes
2643
+ ----------
2644
+ metadata : List[Metadata]
2645
+ List of all metadata events associated with the task.
2646
+ metadata_dict : Dict[str, str]
2647
+ A condensed version of `metadata`: A dictionary where keys
2648
+ are names of metadata events and values the latest corresponding event.
2649
+ data : MetaflowData
2650
+ Container of all data artifacts produced by this task. Note that this
2651
+ call downloads all data locally, so it can be slower than accessing
2652
+ artifacts individually. See `MetaflowData` for more information.
2653
+ artifacts : MetaflowArtifacts
2654
+ Container of `DataArtifact` objects produced by this task.
2655
+ successful : bool
2656
+ True if the task completed successfully.
2657
+ finished : bool
2658
+ True if the task completed.
2659
+ exception : object
2660
+ Exception raised by this task if there was one.
2661
+ finished_at : datetime
2662
+ Time this task finished.
2663
+ runtime_name : str
2664
+ Runtime this task was executed on.
2665
+ stdout : str
2666
+ Standard output for the task execution.
2667
+ stderr : str
2668
+ Standard error output for the task execution.
2669
+ code : MetaflowCode
2670
+ Code package for this task (if present). See `MetaflowCode`.
2671
+ environment_info : Dict[str, str]
2672
+ Information about the execution environment.
2673
+ """
2433
2674
  def __init__(self, *args, **kwargs):
2434
2675
  ...
2435
2676
  @property
@@ -2744,6 +2985,21 @@ class Task(metaflow.client.core.MetaflowObject, metaclass=type):
2744
2985
  ...
2745
2986
 
2746
2987
  class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
2988
+ """
2989
+ A single data artifact and associated metadata. Note that this object does
2990
+ not contain other objects as it is the leaf object in the hierarchy.
2991
+
2992
+ Attributes
2993
+ ----------
2994
+ data : object
2995
+ The data contained in this artifact, that is, the object produced during
2996
+ execution of this run.
2997
+ sha : string
2998
+ A unique ID of this artifact.
2999
+ finished_at : datetime
3000
+ Corresponds roughly to the `Task.finished_at` time of the parent `Task`.
3001
+ An alias for `DataArtifact.created_at`.
3002
+ """
2747
3003
  @property
2748
3004
  def data(self) -> typing.Any:
2749
3005
  """
@@ -2800,6 +3056,44 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
2800
3056
  ...
2801
3057
 
2802
3058
  class Runner(object, metaclass=type):
3059
+ """
3060
+ Metaflow's Runner API that presents a programmatic interface
3061
+ to run flows and perform other operations either synchronously or asynchronously.
3062
+ The class expects a path to the flow file along with optional arguments
3063
+ that match top-level options on the command-line.
3064
+
3065
+ This class works as a context manager, calling `cleanup()` to remove
3066
+ temporary files at exit.
3067
+
3068
+ Example:
3069
+ ```python
3070
+ with Runner('slowflow.py', pylint=False) as runner:
3071
+ result = runner.run(alpha=5, tags=["abc", "def"], max_workers=5)
3072
+ print(result.run.finished)
3073
+ ```
3074
+
3075
+ Parameters
3076
+ ----------
3077
+ flow_file : str
3078
+ Path to the flow file to run
3079
+ show_output : bool, default True
3080
+ Show the 'stdout' and 'stderr' to the console by default,
3081
+ Only applicable for synchronous 'run' and 'resume' functions.
3082
+ profile : Optional[str], default None
3083
+ Metaflow profile to use to run this run. If not specified, the default
3084
+ profile is used (or the one already set using `METAFLOW_PROFILE`)
3085
+ env : Optional[Dict], default None
3086
+ Additional environment variables to set for the Run. This overrides the
3087
+ environment set for this process.
3088
+ cwd : Optional[str], default None
3089
+ The directory to run the subprocess in; if not specified, the current
3090
+ directory is used.
3091
+ file_read_timeout : int, default 3600
3092
+ The timeout until which we try to read the runner attribute file.
3093
+ **kwargs : Any
3094
+ Additional arguments that you would pass to `python myflow.py` before
3095
+ the `run` command.
3096
+ """
2803
3097
  def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
2804
3098
  ...
2805
3099
  def __enter__(self) -> metaflow.runner.metaflow_runner.Runner:
@@ -2892,6 +3186,42 @@ class Runner(object, metaclass=type):
2892
3186
  ...
2893
3187
 
2894
3188
  class NBRunner(object, metaclass=type):
3189
+ """
3190
+ A wrapper over `Runner` for executing flows defined in a Jupyter
3191
+ notebook cell.
3192
+
3193
+ Instantiate this class on the last line of a notebook cell where
3194
+ a `flow` is defined. In contrast to `Runner`, this class is not
3195
+ meant to be used in a context manager. Instead, use a blocking helper
3196
+ function like `nbrun` (which calls `cleanup()` internally) or call
3197
+ `cleanup()` explicitly when using non-blocking APIs.
3198
+
3199
+ ```python
3200
+ run = NBRunner(FlowName).nbrun()
3201
+ ```
3202
+
3203
+ Parameters
3204
+ ----------
3205
+ flow : FlowSpec
3206
+ Flow defined in the same cell
3207
+ show_output : bool, default True
3208
+ Show the 'stdout' and 'stderr' to the console by default,
3209
+ Only applicable for synchronous 'run' and 'resume' functions.
3210
+ profile : Optional[str], default None
3211
+ Metaflow profile to use to run this run. If not specified, the default
3212
+ profile is used (or the one already set using `METAFLOW_PROFILE`)
3213
+ env : Optional[Dict], default None
3214
+ Additional environment variables to set for the Run. This overrides the
3215
+ environment set for this process.
3216
+ base_dir : Optional[str], default None
3217
+ The directory to run the subprocess in; if not specified, a temporary
3218
+ directory is used.
3219
+ file_read_timeout : int, default 3600
3220
+ The timeout until which we try to read the runner attribute file.
3221
+ **kwargs : Any
3222
+ Additional arguments that you would pass to `python myflow.py` before
3223
+ the `run` command.
3224
+ """
2895
3225
  def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
2896
3226
  ...
2897
3227
  def nbrun(self, **kwargs):
@@ -2998,6 +3328,30 @@ class NBRunner(object, metaclass=type):
2998
3328
  ...
2999
3329
 
3000
3330
  class Deployer(object, metaclass=type):
3331
+ """
3332
+ Use the `Deployer` class to configure and access one of the production
3333
+ orchestrators supported by Metaflow.
3334
+
3335
+ Parameters
3336
+ ----------
3337
+ flow_file : str
3338
+ Path to the flow file to deploy.
3339
+ show_output : bool, default True
3340
+ Show the 'stdout' and 'stderr' to the console by default.
3341
+ profile : Optional[str], default None
3342
+ Metaflow profile to use for the deployment. If not specified, the default
3343
+ profile is used.
3344
+ env : Optional[Dict[str, str]], default None
3345
+ Additional environment variables to set for the deployment.
3346
+ cwd : Optional[str], default None
3347
+ The directory to run the subprocess in; if not specified, the current
3348
+ directory is used.
3349
+ file_read_timeout : int, default 3600
3350
+ The timeout until which we try to read the deployer attribute file.
3351
+ **kwargs : Any
3352
+ Additional arguments that you would pass to `python myflow.py` before
3353
+ the deployment command.
3354
+ """
3001
3355
  def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
3002
3356
  ...
3003
3357
  def _Deployer__make_function(self, deployer_class):
@@ -3018,6 +3372,43 @@ class Deployer(object, metaclass=type):
3018
3372
  ...
3019
3373
 
3020
3374
  class NBDeployer(object, metaclass=type):
3375
+ """
3376
+ A wrapper over `Deployer` for deploying flows defined in a Jupyter
3377
+ notebook cell.
3378
+
3379
+ Instantiate this class on the last line of a notebook cell where
3380
+ a `flow` is defined. In contrast to `Deployer`, this class is not
3381
+ meant to be used in a context manager.
3382
+
3383
+ ```python
3384
+ deployer = NBDeployer(FlowName)
3385
+ ar = deployer.argo_workflows(name="madhur")
3386
+ ar_obj = ar.create()
3387
+ result = ar_obj.trigger(alpha=300)
3388
+ print(result.status)
3389
+ print(result.run)
3390
+ result.terminate()
3391
+ ```
3392
+
3393
+ Parameters
3394
+ ----------
3395
+ flow : FlowSpec
3396
+ Flow defined in the same cell
3397
+ show_output : bool, default True
3398
+ Show the 'stdout' and 'stderr' to the console by default,
3399
+ profile : Optional[str], default None
3400
+ Metaflow profile to use to deploy this run. If not specified, the default
3401
+ profile is used (or the one already set using `METAFLOW_PROFILE`)
3402
+ env : Optional[Dict[str, str]], default None
3403
+ Additional environment variables to set. This overrides the
3404
+ environment set for this process.
3405
+ base_dir : Optional[str], default None
3406
+ The directory to run the subprocess in; if not specified, a temporary
3407
+ directory is used.
3408
+ **kwargs : Any
3409
+ Additional arguments that you would pass to `python myflow.py` i.e. options
3410
+ listed in `python myflow.py --help`
3411
+ """
3021
3412
  def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
3022
3413
  ...
3023
3414
  def cleanup(self):