ob-metaflow-stubs 5.1__py2.py3-none-any.whl → 5.3__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (138)
  1. metaflow-stubs/__init__.pyi +393 -373
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +8 -8
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +22 -6
  16. metaflow-stubs/metaflow_current.pyi +5 -5
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -7
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +8 -6
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +20 -5
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +5 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +145 -0
  42. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  44. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +20 -5
  51. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  55. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  56. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  57. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  58. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  60. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  61. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  65. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  68. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  72. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  73. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  74. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  75. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  76. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  78. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  79. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  80. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  81. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  82. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  83. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  84. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  85. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  86. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  87. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  89. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +6 -2
  95. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  97. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  98. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  99. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  100. metaflow-stubs/plugins/package_cli.pyi +2 -2
  101. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  102. metaflow-stubs/plugins/perimeters.pyi +2 -2
  103. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  104. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  105. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  106. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  107. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +6 -2
  108. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  109. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  110. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  114. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  116. metaflow-stubs/plugins/tag_cli.pyi +6 -6
  117. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  119. metaflow-stubs/procpoll.pyi +2 -2
  120. metaflow-stubs/profilers/__init__.pyi +2 -2
  121. metaflow-stubs/pylint_wrapper.pyi +2 -2
  122. metaflow-stubs/runner/__init__.pyi +2 -2
  123. metaflow-stubs/runner/deployer.pyi +21 -6
  124. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  125. metaflow-stubs/runner/nbdeploy.pyi +4 -4
  126. metaflow-stubs/runner/nbrun.pyi +4 -4
  127. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  128. metaflow-stubs/runner/utils.pyi +2 -2
  129. metaflow-stubs/system/__init__.pyi +3 -3
  130. metaflow-stubs/system/system_logger.pyi +3 -3
  131. metaflow-stubs/system/system_monitor.pyi +2 -2
  132. metaflow-stubs/tagging_util.pyi +2 -2
  133. metaflow-stubs/tuple_util.pyi +2 -2
  134. {ob_metaflow_stubs-5.1.dist-info → ob_metaflow_stubs-5.3.dist-info}/METADATA +1 -1
  135. ob_metaflow_stubs-5.3.dist-info/RECORD +138 -0
  136. ob_metaflow_stubs-5.1.dist-info/RECORD +0 -137
  137. {ob_metaflow_stubs-5.1.dist-info → ob_metaflow_stubs-5.3.dist-info}/WHEEL +0 -0
  138. {ob_metaflow_stubs-5.1.dist-info → ob_metaflow_stubs-5.3.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.11.0+ob(v1) #
- # Generated on 2024-08-08T23:50:17.875353 #
+ # MF version: 2.12.15.1+ob(v1) #
+ # Generated on 2024-08-22T21:03:01.630360 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
+ import metaflow.flowspec
  import metaflow.parameters
- import typing
  import metaflow.plugins.datatools.s3.s3
- import metaflow.client.core
- import metaflow.metaflow_current
- import metaflow.flowspec
- import metaflow._vendor.click.types
  import io
  import datetime
- import metaflow.datastore.inputs
  import metaflow.events
+ import metaflow.datastore.inputs
+ import metaflow.metaflow_current
+ import metaflow.client.core
  import metaflow.runner.metaflow_runner
+ import metaflow._vendor.click.types
+ import typing
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -776,6 +776,63 @@ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

+ @typing.overload
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
+ """
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+
+ Parameters
+ ----------
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
+ """
+ ...
+
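The `@timeout` stub added above documents that `seconds`, `minutes`, and `hours` are summed into one limit and that a timeout surfaces as an exception which a surrounding `@retry` or `@catch` can handle. A minimal sketch of a step using it; the flow and step names are illustrative:

```python
from metaflow import FlowSpec, step, timeout, retry

class TimeoutDemoFlow(FlowSpec):

    @retry(times=2)
    @timeout(minutes=5, seconds=30)  # effective limit: 5 minutes + 30 seconds
    @step
    def start(self):
        # potentially long-running work; on timeout an exception is raised
        # and @retry re-runs the task
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    TimeoutDemoFlow()
```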
  @typing.overload
  def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -853,60 +910,174 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
853
910
  """
854
911
  ...
855
912
 
913
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
914
+ """
915
+ Specifies that this step should execute on Kubernetes.
916
+
917
+ Parameters
918
+ ----------
919
+ cpu : int, default 1
920
+ Number of CPUs required for this step. If `@resources` is
921
+ also present, the maximum value from all decorators is used.
922
+ memory : int, default 4096
923
+ Memory size (in MB) required for this step. If
924
+ `@resources` is also present, the maximum value from all decorators is
925
+ used.
926
+ disk : int, default 10240
927
+ Disk size (in MB) required for this step. If
928
+ `@resources` is also present, the maximum value from all decorators is
929
+ used.
930
+ image : str, optional, default None
931
+ Docker image to use when launching on Kubernetes. If not specified, and
932
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
933
+ not, a default Docker image mapping to the current version of Python is used.
934
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
935
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
936
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
937
+ Kubernetes service account to use when launching pod in Kubernetes.
938
+ secrets : List[str], optional, default None
939
+ Kubernetes secrets to use when launching pod in Kubernetes. These
940
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
941
+ in Metaflow configuration.
942
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
943
+ Kubernetes namespace to use when launching pod in Kubernetes.
944
+ gpu : int, optional, default None
945
+ Number of GPUs required for this step. A value of zero implies that
946
+ the scheduled node should not have GPUs.
947
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
948
+ The vendor of the GPUs to be used for this step.
949
+ tolerations : List[str], default []
950
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
951
+ Kubernetes tolerations to use when launching pod in Kubernetes.
952
+ use_tmpfs : bool, default False
953
+ This enables an explicit tmpfs mount for this step.
954
+ tmpfs_tempdir : bool, default True
955
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
956
+ tmpfs_size : int, optional, default: None
957
+ The value for the size (in MiB) of the tmpfs mount for this step.
958
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
959
+ memory allocated for this step.
960
+ tmpfs_path : str, optional, default /metaflow_temp
961
+ Path to tmpfs mount for this step.
962
+ persistent_volume_claims : Dict[str, str], optional, default None
963
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
964
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
965
+ shared_memory: int, optional
966
+ Shared memory size (in MiB) required for this step
967
+ port: int, optional
968
+ Port number to specify in the Kubernetes job object
969
+ compute_pool : str, optional, default None
970
+ Compute pool to be used for for this step.
971
+ If not specified, any accessible compute pool within the perimeter is used.
972
+ """
973
+ ...
974
+
856
975
  @typing.overload
857
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
976
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
858
977
  """
859
- Specifies a timeout for your step.
978
+ Decorator prototype for all step decorators. This function gets specialized
979
+ and imported for all decorators types by _import_plugin_decorators().
980
+ """
981
+ ...
982
+
983
+ @typing.overload
984
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
985
+ ...
986
+
987
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
988
+ """
989
+ Decorator prototype for all step decorators. This function gets specialized
990
+ and imported for all decorators types by _import_plugin_decorators().
991
+ """
992
+ ...
993
+
994
+ @typing.overload
995
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
996
+ """
997
+ Specifies secrets to be retrieved and injected as environment variables prior to
998
+ the execution of a step.
860
999
 
861
- This decorator is useful if this step may hang indefinitely.
1000
+ Parameters
1001
+ ----------
1002
+ sources : List[Union[str, Dict[str, Any]]], default: []
1003
+ List of secret specs, defining how the secrets are to be retrieved
1004
+ """
1005
+ ...
1006
+
1007
+ @typing.overload
1008
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1009
+ ...
1010
+
1011
+ @typing.overload
1012
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1013
+ ...
1014
+
1015
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1016
+ """
1017
+ Specifies secrets to be retrieved and injected as environment variables prior to
1018
+ the execution of a step.
862
1019
 
863
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
864
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
865
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1020
+ Parameters
1021
+ ----------
1022
+ sources : List[Union[str, Dict[str, Any]]], default: []
1023
+ List of secret specs, defining how the secrets are to be retrieved
1024
+ """
1025
+ ...
1026
+
1027
+ @typing.overload
1028
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1029
+ """
1030
+ Specifies the Conda environment for the step.
866
1031
 
867
- Note that all the values specified in parameters are added together so if you specify
868
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1032
+ Information in this decorator will augment any
1033
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1034
+ you can use `@conda_base` to set packages required by all
1035
+ steps and use `@conda` to specify step-specific overrides.
869
1036
 
870
1037
  Parameters
871
1038
  ----------
872
- seconds : int, default 0
873
- Number of seconds to wait prior to timing out.
874
- minutes : int, default 0
875
- Number of minutes to wait prior to timing out.
876
- hours : int, default 0
877
- Number of hours to wait prior to timing out.
1039
+ packages : Dict[str, str], default {}
1040
+ Packages to use for this step. The key is the name of the package
1041
+ and the value is the version to use.
1042
+ libraries : Dict[str, str], default {}
1043
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1044
+ python : str, optional, default None
1045
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1046
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1047
+ disabled : bool, default False
1048
+ If set to True, disables @conda.
878
1049
  """
879
1050
  ...
880
1051
 
881
1052
  @typing.overload
882
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1053
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
883
1054
  ...
884
1055
 
885
1056
  @typing.overload
886
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1057
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
887
1058
  ...
888
1059
 
889
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1060
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
890
1061
  """
891
- Specifies a timeout for your step.
892
-
893
- This decorator is useful if this step may hang indefinitely.
894
-
895
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
896
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
897
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1062
+ Specifies the Conda environment for the step.
898
1063
 
899
- Note that all the values specified in parameters are added together so if you specify
900
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1064
+ Information in this decorator will augment any
1065
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1066
+ you can use `@conda_base` to set packages required by all
1067
+ steps and use `@conda` to specify step-specific overrides.
901
1068
 
902
1069
  Parameters
903
1070
  ----------
904
- seconds : int, default 0
905
- Number of seconds to wait prior to timing out.
906
- minutes : int, default 0
907
- Number of minutes to wait prior to timing out.
908
- hours : int, default 0
909
- Number of hours to wait prior to timing out.
1071
+ packages : Dict[str, str], default {}
1072
+ Packages to use for this step. The key is the name of the package
1073
+ and the value is the version to use.
1074
+ libraries : Dict[str, str], default {}
1075
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1076
+ python : str, optional, default None
1077
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1078
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1079
+ disabled : bool, default False
1080
+ If set to True, disables @conda.
910
1081
  """
911
1082
  ...
912
1083
 
@@ -991,88 +1162,27 @@ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
991
1162
  ...
992
1163
 
993
1164
  @typing.overload
994
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1165
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
995
1166
  """
996
- Decorator prototype for all step decorators. This function gets specialized
997
- and imported for all decorators types by _import_plugin_decorators().
1167
+ Internal decorator to support Fast bakery
998
1168
  """
999
1169
  ...
1000
1170
 
1001
1171
  @typing.overload
1002
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1172
+ def fast_bakery_internal(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1003
1173
  ...
1004
1174
 
1005
- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1175
+ def fast_bakery_internal(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1006
1176
  """
1007
- Decorator prototype for all step decorators. This function gets specialized
1008
- and imported for all decorators types by _import_plugin_decorators().
1177
+ Internal decorator to support Fast bakery
1009
1178
  """
1010
1179
  ...
1011
1180
 
1012
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1181
+ @typing.overload
1182
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1013
1183
  """
1014
- Specifies that this step should execute on Kubernetes.
1015
-
1016
- Parameters
1017
- ----------
1018
- cpu : int, default 1
1019
- Number of CPUs required for this step. If `@resources` is
1020
- also present, the maximum value from all decorators is used.
1021
- memory : int, default 4096
1022
- Memory size (in MB) required for this step. If
1023
- `@resources` is also present, the maximum value from all decorators is
1024
- used.
1025
- disk : int, default 10240
1026
- Disk size (in MB) required for this step. If
1027
- `@resources` is also present, the maximum value from all decorators is
1028
- used.
1029
- image : str, optional, default None
1030
- Docker image to use when launching on Kubernetes. If not specified, and
1031
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1032
- not, a default Docker image mapping to the current version of Python is used.
1033
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1034
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1035
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1036
- Kubernetes service account to use when launching pod in Kubernetes.
1037
- secrets : List[str], optional, default None
1038
- Kubernetes secrets to use when launching pod in Kubernetes. These
1039
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1040
- in Metaflow configuration.
1041
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1042
- Kubernetes namespace to use when launching pod in Kubernetes.
1043
- gpu : int, optional, default None
1044
- Number of GPUs required for this step. A value of zero implies that
1045
- the scheduled node should not have GPUs.
1046
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1047
- The vendor of the GPUs to be used for this step.
1048
- tolerations : List[str], default []
1049
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1050
- Kubernetes tolerations to use when launching pod in Kubernetes.
1051
- use_tmpfs : bool, default False
1052
- This enables an explicit tmpfs mount for this step.
1053
- tmpfs_tempdir : bool, default True
1054
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1055
- tmpfs_size : int, optional, default: None
1056
- The value for the size (in MiB) of the tmpfs mount for this step.
1057
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1058
- memory allocated for this step.
1059
- tmpfs_path : str, optional, default /metaflow_temp
1060
- Path to tmpfs mount for this step.
1061
- persistent_volume_claims : Dict[str, str], optional, default None
1062
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1063
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1064
- shared_memory: int, optional
1065
- Shared memory size (in MiB) required for this step
1066
- port: int, optional
1067
- Port number to specify in the Kubernetes job object
1068
- """
1069
- ...
1070
-
1071
- @typing.overload
1072
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1073
- """
1074
- Specifies the number of times the task corresponding
1075
- to a step needs to be retried.
1184
+ Specifies the number of times the task corresponding
1185
+ to a step needs to be retried.
1076
1186
 
1077
1187
  This decorator is useful for handling transient errors, such as networking issues.
1078
1188
  If your task contains operations that can't be retried safely, e.g. database updates,
@@ -1121,63 +1231,6 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1121
1231
  """
1122
1232
  ...
1123
1233
 
1124
- @typing.overload
1125
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1126
- """
1127
- Specifies the Conda environment for the step.
1128
-
1129
- Information in this decorator will augment any
1130
- attributes set in the `@conda_base` flow-level decorator. Hence,
1131
- you can use `@conda_base` to set packages required by all
1132
- steps and use `@conda` to specify step-specific overrides.
1133
-
1134
- Parameters
1135
- ----------
1136
- packages : Dict[str, str], default {}
1137
- Packages to use for this step. The key is the name of the package
1138
- and the value is the version to use.
1139
- libraries : Dict[str, str], default {}
1140
- Supported for backward compatibility. When used with packages, packages will take precedence.
1141
- python : str, optional, default None
1142
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1143
- that the version used will correspond to the version of the Python interpreter used to start the run.
1144
- disabled : bool, default False
1145
- If set to True, disables @conda.
1146
- """
1147
- ...
1148
-
1149
- @typing.overload
1150
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1151
- ...
1152
-
1153
- @typing.overload
1154
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1155
- ...
1156
-
1157
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1158
- """
1159
- Specifies the Conda environment for the step.
1160
-
1161
- Information in this decorator will augment any
1162
- attributes set in the `@conda_base` flow-level decorator. Hence,
1163
- you can use `@conda_base` to set packages required by all
1164
- steps and use `@conda` to specify step-specific overrides.
1165
-
1166
- Parameters
1167
- ----------
1168
- packages : Dict[str, str], default {}
1169
- Packages to use for this step. The key is the name of the package
1170
- and the value is the version to use.
1171
- libraries : Dict[str, str], default {}
1172
- Supported for backward compatibility. When used with packages, packages will take precedence.
1173
- python : str, optional, default None
1174
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1175
- that the version used will correspond to the version of the Python interpreter used to start the run.
1176
- disabled : bool, default False
1177
- If set to True, disables @conda.
1178
- """
1179
- ...
1180
-
1181
1234
  @typing.overload
1182
1235
  def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1183
1236
  """
@@ -1229,151 +1282,6 @@ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typ
1229
1282
  """
1230
1283
  ...
1231
1284
 
1232
- @typing.overload
1233
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1234
- """
1235
- Specifies secrets to be retrieved and injected as environment variables prior to
1236
- the execution of a step.
1237
-
1238
- Parameters
1239
- ----------
1240
- sources : List[Union[str, Dict[str, Any]]], default: []
1241
- List of secret specs, defining how the secrets are to be retrieved
1242
- """
1243
- ...
1244
-
1245
- @typing.overload
1246
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1247
- ...
1248
-
1249
- @typing.overload
1250
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1251
- ...
1252
-
1253
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1254
- """
1255
- Specifies secrets to be retrieved and injected as environment variables prior to
1256
- the execution of a step.
1257
-
1258
- Parameters
1259
- ----------
1260
- sources : List[Union[str, Dict[str, Any]]], default: []
1261
- List of secret specs, defining how the secrets are to be retrieved
1262
- """
1263
- ...
1264
-
1265
- @typing.overload
1266
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1267
- """
1268
- Specifies the PyPI packages for all steps of the flow.
1269
-
1270
- Use `@pypi_base` to set common packages required by all
1271
- steps and use `@pypi` to specify step-specific overrides.
1272
- Parameters
1273
- ----------
1274
- packages : Dict[str, str], default: {}
1275
- Packages to use for this flow. The key is the name of the package
1276
- and the value is the version to use.
1277
- python : str, optional, default: None
1278
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1279
- that the version used will correspond to the version of the Python interpreter used to start the run.
1280
- """
1281
- ...
1282
-
1283
- @typing.overload
1284
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1285
- ...
1286
-
1287
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1288
- """
1289
- Specifies the PyPI packages for all steps of the flow.
1290
-
1291
- Use `@pypi_base` to set common packages required by all
1292
- steps and use `@pypi` to specify step-specific overrides.
1293
- Parameters
1294
- ----------
1295
- packages : Dict[str, str], default: {}
1296
- Packages to use for this flow. The key is the name of the package
1297
- and the value is the version to use.
1298
- python : str, optional, default: None
1299
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1300
- that the version used will correspond to the version of the Python interpreter used to start the run.
1301
- """
1302
- ...
1303
-
1304
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1305
- """
1306
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1307
-
1308
- User code call
1309
- -----------
1310
- @nim(
1311
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1312
- backend='managed'
1313
- )
1314
-
1315
- Valid backend options
1316
- ---------------------
1317
- - 'managed': Outerbounds selects a compute provider based on the model.
1318
- - 🚧 'dataplane': Run in your account.
1319
-
1320
- Valid model options
1321
- ----------------
1322
- - 'meta/llama3-8b-instruct': 8B parameter model
1323
- - 'meta/llama3-70b-instruct': 70B parameter model
1324
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1325
-
1326
- Parameters
1327
- ----------
1328
- models: list[NIM]
1329
- List of NIM containers running models in sidecars.
1330
- backend: str
1331
- Compute provider to run the NIM container.
1332
- """
1333
- ...
1334
-
1335
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1336
- """
1337
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1338
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1339
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1340
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1341
- starts only after all sensors finish.
1342
-
1343
- Parameters
1344
- ----------
1345
- timeout : int
1346
- Time, in seconds before the task times out and fails. (Default: 3600)
1347
- poke_interval : int
1348
- Time in seconds that the job should wait in between each try. (Default: 60)
1349
- mode : str
1350
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1351
- exponential_backoff : bool
1352
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1353
- pool : str
1354
- the slot pool this task should run in,
1355
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1356
- soft_fail : bool
1357
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1358
- name : str
1359
- Name of the sensor on Airflow
1360
- description : str
1361
- Description of sensor in the Airflow UI
1362
- bucket_key : Union[str, List[str]]
1363
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1364
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1365
- bucket_name : str
1366
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1367
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1368
- wildcard_match : bool
1369
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1370
- aws_conn_id : str
1371
- a reference to the s3 connection on Airflow. (Default: None)
1372
- verify : bool
1373
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1374
- """
1375
- ...
1376
-
1377
1285
  @typing.overload
1378
1286
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1379
1287
  """
@@ -1477,10 +1385,44 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1477
1385
  """
1478
1386
  ...
1479
1387
 
1480
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1388
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1481
1389
  """
1482
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1483
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1390
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1391
+
1392
+ User code call
1393
+ -----------
1394
+ @nim(
1395
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1396
+ backend='managed'
1397
+ )
1398
+
1399
+ Valid backend options
1400
+ ---------------------
1401
+ - 'managed': Outerbounds selects a compute provider based on the model.
1402
+ - 🚧 'dataplane': Run in your account.
1403
+
1404
+ Valid model options
1405
+ ----------------
1406
+ - 'meta/llama3-8b-instruct': 8B parameter model
1407
+ - 'meta/llama3-70b-instruct': 70B parameter model
1408
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1409
+
1410
+ Parameters
1411
+ ----------
1412
+ models: list[NIM]
1413
+ List of NIM containers running models in sidecars.
1414
+ backend: str
1415
+ Compute provider to run the NIM container.
1416
+ """
1417
+ ...
1418
+
1419
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1420
+ """
1421
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1422
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1423
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1424
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1425
+ starts only after all sensors finish.
1484
1426
 
1485
1427
  Parameters
1486
1428
  ----------
@@ -1501,21 +1443,67 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1501
1443
  Name of the sensor on Airflow
1502
1444
  description : str
1503
1445
  Description of sensor in the Airflow UI
1504
- external_dag_id : str
1505
- The dag_id that contains the task you want to wait for.
1506
- external_task_ids : List[str]
1507
- The list of task_ids that you want to wait for.
1508
- If None (default value) the sensor waits for the DAG. (Default: None)
1509
- allowed_states : List[str]
1510
- Iterable of allowed states, (Default: ['success'])
1511
- failed_states : List[str]
1512
- Iterable of failed or dis-allowed states. (Default: None)
1513
- execution_delta : datetime.timedelta
1514
- time difference with the previous execution to look at,
1515
- the default is the same logical date as the current task or DAG. (Default: None)
1516
- check_existence: bool
1517
- Set to True to check if the external task exists or check if
1518
- the DAG to wait for exists. (Default: True)
1446
+ bucket_key : Union[str, List[str]]
1447
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1448
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1449
+ bucket_name : str
1450
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1451
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1452
+ wildcard_match : bool
1453
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1454
+ aws_conn_id : str
1455
+ a reference to the s3 connection on Airflow. (Default: None)
1456
+ verify : bool
1457
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1458
+ """
1459
+ ...
1460
+
1461
+ @typing.overload
1462
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1463
+ """
1464
+ Specifies the Conda environment for all steps of the flow.
1465
+
1466
+ Use `@conda_base` to set common libraries required by all
1467
+ steps and use `@conda` to specify step-specific additions.
1468
+
1469
+ Parameters
1470
+ ----------
1471
+ packages : Dict[str, str], default {}
1472
+ Packages to use for this flow. The key is the name of the package
1473
+ and the value is the version to use.
1474
+ libraries : Dict[str, str], default {}
1475
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1476
+ python : str, optional, default None
1477
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1478
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1479
+ disabled : bool, default False
1480
+ If set to True, disables Conda.
1481
+ """
1482
+ ...
1483
+
1484
+ @typing.overload
1485
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1486
+ ...
1487
+
1488
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1489
+ """
1490
+ Specifies the Conda environment for all steps of the flow.
1491
+
1492
+ Use `@conda_base` to set common libraries required by all
1493
+ steps and use `@conda` to specify step-specific additions.
1494
+
1495
+ Parameters
1496
+ ----------
1497
+ packages : Dict[str, str], default {}
1498
+ Packages to use for this flow. The key is the name of the package
1499
+ and the value is the version to use.
1500
+ libraries : Dict[str, str], default {}
1501
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1502
+ python : str, optional, default None
1503
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1504
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1505
+ disabled : bool, default False
1506
+ If set to True, disables Conda.
1519
1507
  """
1520
1508
  ...
1521
1509
 
@@ -1663,70 +1651,102 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
1663
1651
  """
1664
1652
  ...
1665
1653
 
1666
- @typing.overload
1667
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1654
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1668
1655
  """
1669
- Specifies the Conda environment for all steps of the flow.
1656
+ Specifies what flows belong to the same project.
1670
1657
 
1671
- Use `@conda_base` to set common libraries required by all
1672
- steps and use `@conda` to specify step-specific additions.
1658
+ A project-specific namespace is created for all flows that
1659
+ use the same `@project(name)`.
1673
1660
 
1674
1661
  Parameters
1675
1662
  ----------
1676
- packages : Dict[str, str], default {}
1677
- Packages to use for this flow. The key is the name of the package
1678
- and the value is the version to use.
1679
- libraries : Dict[str, str], default {}
1680
- Supported for backward compatibility. When used with packages, packages will take precedence.
1681
- python : str, optional, default None
1682
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1683
- that the version used will correspond to the version of the Python interpreter used to start the run.
1684
- disabled : bool, default False
1685
- If set to True, disables Conda.
1663
+ name : str
1664
+ Project name. Make sure that the name is unique amongst all
1665
+ projects that use the same production scheduler. The name may
1666
+ contain only lowercase alphanumeric characters and underscores.
1667
+
1668
+
1686
1669
  """
1687
1670
  ...
1688
1671
 
1689
- @typing.overload
1690
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1672
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1673
+ """
1674
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1675
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1676
+
1677
+ Parameters
1678
+ ----------
1679
+ timeout : int
1680
+ Time, in seconds before the task times out and fails. (Default: 3600)
1681
+ poke_interval : int
1682
+ Time in seconds that the job should wait in between each try. (Default: 60)
1683
+ mode : str
1684
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1685
+ exponential_backoff : bool
1686
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1687
+ pool : str
1688
+ the slot pool this task should run in,
1689
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1690
+ soft_fail : bool
1691
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1692
+ name : str
1693
+ Name of the sensor on Airflow
1694
+ description : str
1695
+ Description of sensor in the Airflow UI
1696
+ external_dag_id : str
1697
+ The dag_id that contains the task you want to wait for.
1698
+ external_task_ids : List[str]
1699
+ The list of task_ids that you want to wait for.
1700
+ If None (default value) the sensor waits for the DAG. (Default: None)
1701
+ allowed_states : List[str]
1702
+ Iterable of allowed states, (Default: ['success'])
1703
+ failed_states : List[str]
1704
+ Iterable of failed or dis-allowed states. (Default: None)
1705
+ execution_delta : datetime.timedelta
1706
+ time difference with the previous execution to look at,
1707
+ the default is the same logical date as the current task or DAG. (Default: None)
1708
+ check_existence: bool
1709
+ Set to True to check if the external task exists or check if
1710
+ the DAG to wait for exists. (Default: True)
1711
+ """
1691
1712
  ...
1692
1713
 
1693
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1714
+ @typing.overload
1715
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1694
1716
  """
1695
- Specifies the Conda environment for all steps of the flow.
1696
-
1697
- Use `@conda_base` to set common libraries required by all
1698
- steps and use `@conda` to specify step-specific additions.
1717
+ Specifies the PyPI packages for all steps of the flow.
1699
1718
 
1719
+ Use `@pypi_base` to set common packages required by all
1720
+ steps and use `@pypi` to specify step-specific overrides.
1700
1721
  Parameters
1701
1722
  ----------
1702
- packages : Dict[str, str], default {}
1723
+ packages : Dict[str, str], default: {}
1703
1724
  Packages to use for this flow. The key is the name of the package
1704
1725
  and the value is the version to use.
1705
- libraries : Dict[str, str], default {}
1706
- Supported for backward compatibility. When used with packages, packages will take precedence.
1707
- python : str, optional, default None
1726
+ python : str, optional, default: None
1708
1727
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1709
1728
  that the version used will correspond to the version of the Python interpreter used to start the run.
1710
- disabled : bool, default False
1711
- If set to True, disables Conda.
1712
1729
  """
1713
1730
  ...
1714
1731
 
1715
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1732
+ @typing.overload
1733
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1734
+ ...
1735
+
1736
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1716
1737
  """
1717
- Specifies what flows belong to the same project.
1718
-
1719
- A project-specific namespace is created for all flows that
1720
- use the same `@project(name)`.
1738
+ Specifies the PyPI packages for all steps of the flow.
1721
1739
 
1740
+ Use `@pypi_base` to set common packages required by all
1741
+ steps and use `@pypi` to specify step-specific overrides.
1722
1742
  Parameters
1723
1743
  ----------
1724
- name : str
1725
- Project name. Make sure that the name is unique amongst all
1726
- projects that use the same production scheduler. The name may
1727
- contain only lowercase alphanumeric characters and underscores.
1728
-
1729
-
1744
+ packages : Dict[str, str], default: {}
1745
+ Packages to use for this flow. The key is the name of the package
1746
+ and the value is the version to use.
1747
+ python : str, optional, default: None
1748
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1749
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1730
1750
  """
1731
1751
  ...
1732
1752
 
@@ -2681,7 +2701,7 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
  ...

  class Runner(object, metaclass=type):
- def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
  def __enter__(self) -> metaflow.runner.metaflow_runner.Runner:
  ...
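`Runner` (and, below, `NBRunner`, `Deployer`, and `NBDeployer`) gains a `file_read_timeout` argument, defaulting to 3600 seconds; it appears to bound how long the runner waits when reading the files that the launched flow process writes back. A hedged sketch of passing it explicitly; the flow file name and the `alpha` parameter are illustrative:

```python
from metaflow import Runner

# "hello_flow.py" and its `alpha` parameter are illustrative
with Runner("hello_flow.py", show_output=True, file_read_timeout=600) as runner:
    result = runner.run(alpha=0.5)  # blocks until the run finishes
    print(result.status)
```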
@@ -2773,7 +2793,7 @@ class Runner(object, metaclass=type):
  ...

  class NBRunner(object, metaclass=type):
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
  ...
  def nbrun(self, **kwargs):
  """
@@ -2879,7 +2899,7 @@ class NBRunner(object, metaclass=type):
  ...

  class Deployer(object, metaclass=type):
- def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
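`Deployer` (and `NBDeployer` below) takes the same `file_read_timeout` knob. A hedged sketch of deploying a flow to Argo Workflows through this interface, assuming the Argo Workflows deployer from the plugins listed above; the flow file name is illustrative:

```python
from metaflow import Deployer

deployer = Deployer("hello_flow.py", file_read_timeout=600)
# argo_workflows() is one of the provider methods generated per deployer
# implementation; create() registers the workflow template
deployed_flow = deployer.argo_workflows().create()
print(deployed_flow.production_token)
```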
  def _Deployer__make_function(self, deployer_class):
  """
@@ -2899,7 +2919,7 @@ class Deployer(object, metaclass=type):
  ...

  class NBDeployer(object, metaclass=type):
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
  ...
  def cleanup(self):
  """