metaflow-stubs 2.11.15__py2.py3-none-any.whl → 2.12.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (142) hide show
  1. metaflow-stubs/__init__.pyi +606 -407
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +42 -22
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +3 -3
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +14 -2
  16. metaflow-stubs/metaflow_current.pyi +5 -5
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +10 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +12 -4
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +4 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +7 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +5 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +28 -0
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +58 -0
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +5 -5
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +5 -5
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +5 -5
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +8 -5
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +5 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +73 -0
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +16 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +15 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +16 -5
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +13 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +21 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +15 -2
  109. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +127 -0
  110. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  111. metaflow-stubs/plugins/package_cli.pyi +2 -2
  112. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  117. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  119. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  120. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  126. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  127. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  128. metaflow-stubs/plugins/timeout_decorator.pyi +5 -3
  129. metaflow-stubs/procpoll.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +9 -0
  132. metaflow-stubs/runner/metaflow_runner.pyi +696 -0
  133. metaflow-stubs/runner/nbrun.pyi +224 -0
  134. metaflow-stubs/runner/subprocess_manager.pyi +221 -0
  135. metaflow-stubs/tagging_util.pyi +2 -2
  136. metaflow-stubs/tuple_util.pyi +14 -0
  137. metaflow-stubs/version.pyi +2 -2
  138. {metaflow_stubs-2.11.15.dist-info → metaflow_stubs-2.12.0.dist-info}/METADATA +2 -2
  139. metaflow_stubs-2.12.0.dist-info/RECORD +142 -0
  140. metaflow_stubs-2.11.15.dist-info/RECORD +0 -133
  141. {metaflow_stubs-2.11.15.dist-info → metaflow_stubs-2.12.0.dist-info}/WHEEL +0 -0
  142. {metaflow_stubs-2.11.15.dist-info → metaflow_stubs-2.12.0.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,24 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.11.15 #
4
- # Generated on 2024-05-08T22:46:02.315605 #
3
+ # MF version: 2.12.0 #
4
+ # Generated on 2024-05-28T09:55:27.155006 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.events
12
- import datetime
13
11
  import metaflow.parameters
14
- import io
15
- import metaflow.client.core
12
+ import metaflow._vendor.click.types
16
13
  import metaflow.datastore.inputs
14
+ import datetime
15
+ import metaflow.client.core
17
16
  import metaflow.plugins.datatools.s3.s3
18
- import metaflow.metaflow_current
19
- import metaflow._vendor.click.types
20
17
  import typing
18
+ import metaflow.metaflow_current
19
+ import io
20
+ import metaflow.events
21
+ import metaflow.runner.metaflow_runner
21
22
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
22
23
  StepFlag = typing.NewType("StepFlag", bool)
23
24
 
@@ -802,6 +803,65 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
802
803
  """
803
804
  ...
804
805
 
806
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
807
+ """
808
+ Specifies that this step should execute on Kubernetes.
809
+
810
+ Parameters
811
+ ----------
812
+ cpu : int, default 1
813
+ Number of CPUs required for this step. If `@resources` is
814
+ also present, the maximum value from all decorators is used.
815
+ memory : int, default 4096
816
+ Memory size (in MB) required for this step. If
817
+ `@resources` is also present, the maximum value from all decorators is
818
+ used.
819
+ disk : int, default 10240
820
+ Disk size (in MB) required for this step. If
821
+ `@resources` is also present, the maximum value from all decorators is
822
+ used.
823
+ image : str, optional, default None
824
+ Docker image to use when launching on Kubernetes. If not specified, and
825
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
826
+ not, a default Docker image mapping to the current version of Python is used.
827
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
828
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
829
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
830
+ Kubernetes service account to use when launching pod in Kubernetes.
831
+ secrets : List[str], optional, default None
832
+ Kubernetes secrets to use when launching pod in Kubernetes. These
833
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
834
+ in Metaflow configuration.
835
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
836
+ Kubernetes namespace to use when launching pod in Kubernetes.
837
+ gpu : int, optional, default None
838
+ Number of GPUs required for this step. A value of zero implies that
839
+ the scheduled node should not have GPUs.
840
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
841
+ The vendor of the GPUs to be used for this step.
842
+ tolerations : List[str], default []
843
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
844
+ Kubernetes tolerations to use when launching pod in Kubernetes.
845
+ use_tmpfs : bool, default False
846
+ This enables an explicit tmpfs mount for this step.
847
+ tmpfs_tempdir : bool, default True
848
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
849
+ tmpfs_size : int, optional, default: None
850
+ The value for the size (in MiB) of the tmpfs mount for this step.
851
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
852
+ memory allocated for this step.
853
+ tmpfs_path : str, optional, default /metaflow_temp
854
+ Path to tmpfs mount for this step.
855
+ persistent_volume_claims : Dict[str, str], optional, default None
856
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
857
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
858
+ shared_memory: int, optional
859
+ Shared memory size (in MiB) required for this step
860
+ port: int, optional
861
+ Port number to specify in the Kubernetes job object
862
+ """
863
+ ...
864
+
805
865
  @typing.overload
806
866
  def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
807
867
  """
@@ -860,15 +920,15 @@ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optio
860
920
  Alias for inferentia. Use only one of the two.
861
921
  efa : int, default 0
862
922
  Number of elastic fabric adapter network devices to attach to container
863
- ephemeral_storage: int, default None
864
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
923
+ ephemeral_storage : int, default None
924
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
865
925
  This is only relevant for Fargate compute environments
866
926
  log_driver: str, optional, default None
867
927
  The log driver to use for the Amazon ECS container.
868
928
  log_options: List[str], optional, default None
869
929
  List of strings containing options for the chosen log driver. The configurable values
870
930
  depend on the `log driver` chosen. Validation of these options is not supported yet.
871
- Example usage: ["awslogs-group:aws/batch/job"]
931
+ Example: [`awslogs-group:aws/batch/job`]
872
932
  """
873
933
  ...
874
934
 
@@ -937,174 +997,15 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
937
997
  Alias for inferentia. Use only one of the two.
938
998
  efa : int, default 0
939
999
  Number of elastic fabric adapter network devices to attach to container
940
- ephemeral_storage: int, default None
941
- The total amount, in GiB, of ephemeral storage to set for the task (21-200)
1000
+ ephemeral_storage : int, default None
1001
+ The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
942
1002
  This is only relevant for Fargate compute environments
943
1003
  log_driver: str, optional, default None
944
1004
  The log driver to use for the Amazon ECS container.
945
1005
  log_options: List[str], optional, default None
946
1006
  List of strings containing options for the chosen log driver. The configurable values
947
1007
  depend on the `log driver` chosen. Validation of these options is not supported yet.
948
- Example usage: ["awslogs-group:aws/batch/job"]
949
- """
950
- ...
951
-
952
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
953
- """
954
- Specifies that this step should execute on Kubernetes.
955
-
956
- Parameters
957
- ----------
958
- cpu : int, default 1
959
- Number of CPUs required for this step. If `@resources` is
960
- also present, the maximum value from all decorators is used.
961
- memory : int, default 4096
962
- Memory size (in MB) required for this step. If
963
- `@resources` is also present, the maximum value from all decorators is
964
- used.
965
- disk : int, default 10240
966
- Disk size (in MB) required for this step. If
967
- `@resources` is also present, the maximum value from all decorators is
968
- used.
969
- image : str, optional, default None
970
- Docker image to use when launching on Kubernetes. If not specified, and
971
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
972
- not, a default Docker image mapping to the current version of Python is used.
973
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
974
- If given, the imagePullPolicy to be applied to the Docker image of the step.
975
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
976
- Kubernetes service account to use when launching pod in Kubernetes.
977
- secrets : List[str], optional, default None
978
- Kubernetes secrets to use when launching pod in Kubernetes. These
979
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
980
- in Metaflow configuration.
981
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
982
- Kubernetes namespace to use when launching pod in Kubernetes.
983
- gpu : int, optional, default None
984
- Number of GPUs required for this step. A value of zero implies that
985
- the scheduled node should not have GPUs.
986
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
987
- The vendor of the GPUs to be used for this step.
988
- tolerations : List[str], default []
989
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
990
- Kubernetes tolerations to use when launching pod in Kubernetes.
991
- use_tmpfs : bool, default False
992
- This enables an explicit tmpfs mount for this step.
993
- tmpfs_tempdir : bool, default True
994
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
995
- tmpfs_size : int, optional, default: None
996
- The value for the size (in MiB) of the tmpfs mount for this step.
997
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
998
- memory allocated for this step.
999
- tmpfs_path : str, optional, default /metaflow_temp
1000
- Path to tmpfs mount for this step.
1001
- persistent_volume_claims : Dict[str, str], optional, default None
1002
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1003
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1004
- shared_memory: int, optional
1005
- Shared memory size (in MiB) required for this step
1006
- port: int, optional
1007
- Port number to specify in the Kubernetes job object
1008
- """
1009
- ...
1010
-
1011
- @typing.overload
1012
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1013
- """
1014
- Creates a human-readable report, a Metaflow Card, after this step completes.
1015
-
1016
- Note that you may add multiple `@card` decorators in a step with different parameters.
1017
-
1018
- Parameters
1019
- ----------
1020
- type : str, default 'default'
1021
- Card type.
1022
- id : str, optional, default None
1023
- If multiple cards are present, use this id to identify this card.
1024
- options : Dict[str, Any], default {}
1025
- Options passed to the card. The contents depend on the card type.
1026
- timeout : int, default 45
1027
- Interrupt reporting if it takes more than this many seconds.
1028
-
1029
-
1030
- """
1031
- ...
1032
-
1033
- @typing.overload
1034
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1035
- ...
1036
-
1037
- @typing.overload
1038
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1039
- ...
1040
-
1041
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1042
- """
1043
- Creates a human-readable report, a Metaflow Card, after this step completes.
1044
-
1045
- Note that you may add multiple `@card` decorators in a step with different parameters.
1046
-
1047
- Parameters
1048
- ----------
1049
- type : str, default 'default'
1050
- Card type.
1051
- id : str, optional, default None
1052
- If multiple cards are present, use this id to identify this card.
1053
- options : Dict[str, Any], default {}
1054
- Options passed to the card. The contents depend on the card type.
1055
- timeout : int, default 45
1056
- Interrupt reporting if it takes more than this many seconds.
1057
-
1058
-
1059
- """
1060
- ...
1061
-
1062
- @typing.overload
1063
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1064
- """
1065
- Specifies the PyPI packages for the step.
1066
-
1067
- Information in this decorator will augment any
1068
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1069
- you can use `@pypi_base` to set packages required by all
1070
- steps and use `@pypi` to specify step-specific overrides.
1071
-
1072
- Parameters
1073
- ----------
1074
- packages : Dict[str, str], default: {}
1075
- Packages to use for this step. The key is the name of the package
1076
- and the value is the version to use.
1077
- python : str, optional, default: None
1078
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1079
- that the version used will correspond to the version of the Python interpreter used to start the run.
1080
- """
1081
- ...
1082
-
1083
- @typing.overload
1084
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1085
- ...
1086
-
1087
- @typing.overload
1088
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1089
- ...
1090
-
1091
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1092
- """
1093
- Specifies the PyPI packages for the step.
1094
-
1095
- Information in this decorator will augment any
1096
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1097
- you can use `@pypi_base` to set packages required by all
1098
- steps and use `@pypi` to specify step-specific overrides.
1099
-
1100
- Parameters
1101
- ----------
1102
- packages : Dict[str, str], default: {}
1103
- Packages to use for this step. The key is the name of the package
1104
- and the value is the version to use.
1105
- python : str, optional, default: None
1106
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1107
- that the version used will correspond to the version of the Python interpreter used to start the run.
1008
+ Example: [`awslogs-group:aws/batch/job`]
1108
1009
  """
1109
1010
  ...
1110
1011
 
@@ -1166,51 +1067,33 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1166
1067
  ...
1167
1068
 
1168
1069
  @typing.overload
1169
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1070
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1170
1071
  """
1171
- Specifies that the step will success under all circumstances.
1172
-
1173
- The decorator will create an optional artifact, specified by `var`, which
1174
- contains the exception raised. You can use it to detect the presence
1175
- of errors, indicating that all happy-path artifacts produced by the step
1176
- are missing.
1072
+ Specifies environment variables to be set prior to the execution of a step.
1177
1073
 
1178
1074
  Parameters
1179
1075
  ----------
1180
- var : str, optional, default None
1181
- Name of the artifact in which to store the caught exception.
1182
- If not specified, the exception is not stored.
1183
- print_exception : bool, default True
1184
- Determines whether or not the exception is printed to
1185
- stdout when caught.
1076
+ vars : Dict[str, str], default {}
1077
+ Dictionary of environment variables to set.
1186
1078
  """
1187
1079
  ...
1188
1080
 
1189
1081
  @typing.overload
1190
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1082
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1191
1083
  ...
1192
1084
 
1193
1085
  @typing.overload
1194
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1086
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1195
1087
  ...
1196
1088
 
1197
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1089
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1198
1090
  """
1199
- Specifies that the step will success under all circumstances.
1200
-
1201
- The decorator will create an optional artifact, specified by `var`, which
1202
- contains the exception raised. You can use it to detect the presence
1203
- of errors, indicating that all happy-path artifacts produced by the step
1204
- are missing.
1091
+ Specifies environment variables to be set prior to the execution of a step.
1205
1092
 
1206
1093
  Parameters
1207
1094
  ----------
1208
- var : str, optional, default None
1209
- Name of the artifact in which to store the caught exception.
1210
- If not specified, the exception is not stored.
1211
- print_exception : bool, default True
1212
- Determines whether or not the exception is printed to
1213
- stdout when caught.
1095
+ vars : Dict[str, str], default {}
1096
+ Dictionary of environment variables to set.
1214
1097
  """
1215
1098
  ...
1216
1099
 
@@ -1248,158 +1131,221 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1248
1131
  ...
1249
1132
 
1250
1133
  @typing.overload
1251
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1134
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1252
1135
  """
1253
- Specifies the Conda environment for the step.
1136
+ Specifies the number of times the task corresponding
1137
+ to a step needs to be retried.
1254
1138
 
1255
- Information in this decorator will augment any
1256
- attributes set in the `@conda_base` flow-level decorator. Hence,
1257
- you can use `@conda_base` to set packages required by all
1258
- steps and use `@conda` to specify step-specific overrides.
1139
+ This decorator is useful for handling transient errors, such as networking issues.
1140
+ If your task contains operations that can't be retried safely, e.g. database updates,
1141
+ it is advisable to annotate it with `@retry(times=0)`.
1142
+
1143
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1144
+ decorator will execute a no-op task after all retries have been exhausted,
1145
+ ensuring that the flow execution can continue.
1259
1146
 
1260
1147
  Parameters
1261
1148
  ----------
1262
- packages : Dict[str, str], default {}
1263
- Packages to use for this step. The key is the name of the package
1264
- and the value is the version to use.
1265
- libraries : Dict[str, str], default {}
1266
- Supported for backward compatibility. When used with packages, packages will take precedence.
1267
- python : str, optional, default None
1268
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1269
- that the version used will correspond to the version of the Python interpreter used to start the run.
1270
- disabled : bool, default False
1271
- If set to True, disables @conda.
1149
+ times : int, default 3
1150
+ Number of times to retry this task.
1151
+ minutes_between_retries : int, default 2
1152
+ Number of minutes between retries.
1272
1153
  """
1273
1154
  ...
1274
1155
 
1275
1156
  @typing.overload
1276
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1157
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1277
1158
  ...
1278
1159
 
1279
1160
  @typing.overload
1280
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1161
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1281
1162
  ...
1282
1163
 
1283
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1164
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1284
1165
  """
1285
- Specifies the Conda environment for the step.
1166
+ Specifies the number of times the task corresponding
1167
+ to a step needs to be retried.
1286
1168
 
1287
- Information in this decorator will augment any
1288
- attributes set in the `@conda_base` flow-level decorator. Hence,
1289
- you can use `@conda_base` to set packages required by all
1290
- steps and use `@conda` to specify step-specific overrides.
1169
+ This decorator is useful for handling transient errors, such as networking issues.
1170
+ If your task contains operations that can't be retried safely, e.g. database updates,
1171
+ it is advisable to annotate it with `@retry(times=0)`.
1172
+
1173
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1174
+ decorator will execute a no-op task after all retries have been exhausted,
1175
+ ensuring that the flow execution can continue.
1291
1176
 
1292
1177
  Parameters
1293
1178
  ----------
1294
- packages : Dict[str, str], default {}
1295
- Packages to use for this step. The key is the name of the package
1296
- and the value is the version to use.
1297
- libraries : Dict[str, str], default {}
1298
- Supported for backward compatibility. When used with packages, packages will take precedence.
1299
- python : str, optional, default None
1300
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1301
- that the version used will correspond to the version of the Python interpreter used to start the run.
1302
- disabled : bool, default False
1303
- If set to True, disables @conda.
1179
+ times : int, default 3
1180
+ Number of times to retry this task.
1181
+ minutes_between_retries : int, default 2
1182
+ Number of minutes between retries.
1304
1183
  """
1305
1184
  ...
1306
1185
 
1307
1186
  @typing.overload
1308
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1187
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1309
1188
  """
1310
- Specifies environment variables to be set prior to the execution of a step.
1189
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1190
+
1191
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1311
1192
 
1312
1193
  Parameters
1313
1194
  ----------
1314
- vars : Dict[str, str], default {}
1315
- Dictionary of environment variables to set.
1195
+ type : str, default 'default'
1196
+ Card type.
1197
+ id : str, optional, default None
1198
+ If multiple cards are present, use this id to identify this card.
1199
+ options : Dict[str, Any], default {}
1200
+ Options passed to the card. The contents depend on the card type.
1201
+ timeout : int, default 45
1202
+ Interrupt reporting if it takes more than this many seconds.
1203
+
1204
+
1316
1205
  """
1317
1206
  ...
1318
1207
 
1319
1208
  @typing.overload
1320
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1209
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1321
1210
  ...
1322
1211
 
1323
1212
  @typing.overload
1324
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1213
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1325
1214
  ...
1326
1215
 
1327
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1216
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1328
1217
  """
1329
- Specifies environment variables to be set prior to the execution of a step.
1218
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1219
+
1220
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1330
1221
 
1331
1222
  Parameters
1332
1223
  ----------
1333
- vars : Dict[str, str], default {}
1334
- Dictionary of environment variables to set.
1224
+ type : str, default 'default'
1225
+ Card type.
1226
+ id : str, optional, default None
1227
+ If multiple cards are present, use this id to identify this card.
1228
+ options : Dict[str, Any], default {}
1229
+ Options passed to the card. The contents depend on the card type.
1230
+ timeout : int, default 45
1231
+ Interrupt reporting if it takes more than this many seconds.
1232
+
1233
+
1335
1234
  """
1336
1235
  ...
1337
1236
 
1338
1237
  @typing.overload
1339
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1238
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1340
1239
  """
1341
- Specifies the number of times the task corresponding
1342
- to a step needs to be retried.
1343
-
1344
- This decorator is useful for handling transient errors, such as networking issues.
1345
- If your task contains operations that can't be retried safely, e.g. database updates,
1346
- it is advisable to annotate it with `@retry(times=0)`.
1240
+ Specifies that the step will success under all circumstances.
1347
1241
 
1348
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1349
- decorator will execute a no-op task after all retries have been exhausted,
1350
- ensuring that the flow execution can continue.
1242
+ The decorator will create an optional artifact, specified by `var`, which
1243
+ contains the exception raised. You can use it to detect the presence
1244
+ of errors, indicating that all happy-path artifacts produced by the step
1245
+ are missing.
1351
1246
 
1352
1247
  Parameters
1353
1248
  ----------
1354
- times : int, default 3
1355
- Number of times to retry this task.
1356
- minutes_between_retries : int, default 2
1357
- Number of minutes between retries.
1249
+ var : str, optional, default None
1250
+ Name of the artifact in which to store the caught exception.
1251
+ If not specified, the exception is not stored.
1252
+ print_exception : bool, default True
1253
+ Determines whether or not the exception is printed to
1254
+ stdout when caught.
1358
1255
  """
1359
1256
  ...
1360
1257
 
1361
1258
  @typing.overload
1362
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1259
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1363
1260
  ...
1364
1261
 
1365
1262
  @typing.overload
1366
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1263
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1367
1264
  ...
1368
1265
 
1369
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1266
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1370
1267
  """
1371
- Specifies the number of times the task corresponding
1372
- to a step needs to be retried.
1268
+ Specifies that the step will succeed under all circumstances.
1373
1269
 
1374
- This decorator is useful for handling transient errors, such as networking issues.
1375
- If your task contains operations that can't be retried safely, e.g. database updates,
1376
- it is advisable to annotate it with `@retry(times=0)`.
1270
+ The decorator will create an optional artifact, specified by `var`, which
1271
+ contains the exception raised. You can use it to detect the presence
1272
+ of errors, indicating that all happy-path artifacts produced by the step
1273
+ are missing.
1377
1274
 
1378
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1379
- decorator will execute a no-op task after all retries have been exhausted,
1380
- ensuring that the flow execution can continue.
1275
+ Parameters
1276
+ ----------
1277
+ var : str, optional, default None
1278
+ Name of the artifact in which to store the caught exception.
1279
+ If not specified, the exception is not stored.
1280
+ print_exception : bool, default True
1281
+ Determines whether or not the exception is printed to
1282
+ stdout when caught.
1283
+ """
1284
+ ...
1285
+
1286
+ @typing.overload
1287
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1288
+ """
1289
+ Specifies the PyPI packages for the step.
1290
+
1291
+ Information in this decorator will augment any
1292
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1293
+ you can use `@pypi_base` to set packages required by all
1294
+ steps and use `@pypi` to specify step-specific overrides.
1381
1295
 
1382
1296
  Parameters
1383
1297
  ----------
1384
- times : int, default 3
1385
- Number of times to retry this task.
1386
- minutes_between_retries : int, default 2
1387
- Number of minutes between retries.
1298
+ packages : Dict[str, str], default: {}
1299
+ Packages to use for this step. The key is the name of the package
1300
+ and the value is the version to use.
1301
+ python : str, optional, default: None
1302
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1303
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1388
1304
  """
1389
1305
  ...
1390
1306
 
1391
1307
  @typing.overload
1392
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1308
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1309
+ ...
1310
+
1311
+ @typing.overload
1312
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1313
+ ...
1314
+
1315
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1393
1316
  """
1394
- Specifies the Conda environment for all steps of the flow.
1317
+ Specifies the PyPI packages for the step.
1395
1318
 
1396
- Use `@conda_base` to set common libraries required by all
1397
- steps and use `@conda` to specify step-specific additions.
1319
+ Information in this decorator will augment any
1320
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1321
+ you can use `@pypi_base` to set packages required by all
1322
+ steps and use `@pypi` to specify step-specific overrides.
1323
+
1324
+ Parameters
1325
+ ----------
1326
+ packages : Dict[str, str], default: {}
1327
+ Packages to use for this step. The key is the name of the package
1328
+ and the value is the version to use.
1329
+ python : str, optional, default: None
1330
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1331
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1332
+ """
1333
+ ...
1334
+
1335
+ @typing.overload
1336
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1337
+ """
1338
+ Specifies the Conda environment for the step.
1339
+
1340
+ Information in this decorator will augment any
1341
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1342
+ you can use `@conda_base` to set packages required by all
1343
+ steps and use `@conda` to specify step-specific overrides.
1398
1344
 
1399
1345
  Parameters
1400
1346
  ----------
1401
1347
  packages : Dict[str, str], default {}
1402
- Packages to use for this flow. The key is the name of the package
1348
+ Packages to use for this step. The key is the name of the package
1403
1349
  and the value is the version to use.
1404
1350
  libraries : Dict[str, str], default {}
1405
1351
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1407,25 +1353,31 @@ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[s
1407
1353
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1408
1354
  that the version used will correspond to the version of the Python interpreter used to start the run.
1409
1355
  disabled : bool, default False
1410
- If set to True, disables Conda.
1356
+ If set to True, disables @conda.
1411
1357
  """
1412
1358
  ...
1413
1359
 
1414
1360
  @typing.overload
1415
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1361
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1416
1362
  ...
1417
1363
 
1418
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1364
+ @typing.overload
1365
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1366
+ ...
1367
+
1368
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1419
1369
  """
1420
- Specifies the Conda environment for all steps of the flow.
1370
+ Specifies the Conda environment for the step.
1421
1371
 
1422
- Use `@conda_base` to set common libraries required by all
1423
- steps and use `@conda` to specify step-specific additions.
1372
+ Information in this decorator will augment any
1373
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1374
+ you can use `@conda_base` to set packages required by all
1375
+ steps and use `@conda` to specify step-specific overrides.
1424
1376
 
1425
1377
  Parameters
1426
1378
  ----------
1427
1379
  packages : Dict[str, str], default {}
1428
- Packages to use for this flow. The key is the name of the package
1380
+ Packages to use for this step. The key is the name of the package
1429
1381
  and the value is the version to use.
1430
1382
  libraries : Dict[str, str], default {}
1431
1383
  Supported for backward compatibility. When used with packages, packages will take precedence.
@@ -1433,7 +1385,7 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1433
1385
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1434
1386
  that the version used will correspond to the version of the Python interpreter used to start the run.
1435
1387
  disabled : bool, default False
1436
- If set to True, disables Conda.
1388
+ If set to True, disables @conda.
1437
1389
  """
1438
1390
  ...
1439
1391
 
@@ -1532,6 +1484,24 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1532
1484
  """
1533
1485
  ...
1534
1486
 
1487
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1488
+ """
1489
+ Specifies what flows belong to the same project.
1490
+
1491
+ A project-specific namespace is created for all flows that
1492
+ use the same `@project(name)`.
1493
+
1494
+ Parameters
1495
+ ----------
1496
+ name : str
1497
+ Project name. Make sure that the name is unique amongst all
1498
+ projects that use the same production scheduler. The name may
1499
+ contain only lowercase alphanumeric characters and underscores.
1500
+
1501
+
1502
+ """
1503
+ ...
1504
+
1535
1505
  @typing.overload
1536
1506
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1537
1507
  """
@@ -1571,48 +1541,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1571
1541
  """
1572
1542
  ...
1573
1543
 
1574
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1575
- """
1576
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1577
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1578
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1579
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1580
- starts only after all sensors finish.
1581
-
1582
- Parameters
1583
- ----------
1584
- timeout : int
1585
- Time, in seconds before the task times out and fails. (Default: 3600)
1586
- poke_interval : int
1587
- Time in seconds that the job should wait in between each try. (Default: 60)
1588
- mode : str
1589
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1590
- exponential_backoff : bool
1591
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1592
- pool : str
1593
- the slot pool this task should run in,
1594
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1595
- soft_fail : bool
1596
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1597
- name : str
1598
- Name of the sensor on Airflow
1599
- description : str
1600
- Description of sensor in the Airflow UI
1601
- bucket_key : Union[str, List[str]]
1602
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1603
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1604
- bucket_name : str
1605
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1606
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1607
- wildcard_match : bool
1608
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1609
- aws_conn_id : str
1610
- a reference to the s3 connection on Airflow. (Default: None)
1611
- verify : bool
1612
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1613
- """
1614
- ...
1615
-
1616
1544
  def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1617
1545
  """
1618
1546
  The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
@@ -1641,84 +1569,17 @@ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str,
1641
1569
  The dag_id that contains the task you want to wait for.
1642
1570
  external_task_ids : List[str]
1643
1571
  The list of task_ids that you want to wait for.
1644
- If None (default value) the sensor waits for the DAG. (Default: None)
1645
- allowed_states : List[str]
1646
- Iterable of allowed states, (Default: ['success'])
1647
- failed_states : List[str]
1648
- Iterable of failed or dis-allowed states. (Default: None)
1649
- execution_delta : datetime.timedelta
1650
- time difference with the previous execution to look at,
1651
- the default is the same logical date as the current task or DAG. (Default: None)
1652
- check_existence: bool
1653
- Set to True to check if the external task exists or check if
1654
- the DAG to wait for exists. (Default: True)
1655
- """
1656
- ...
1657
-
1658
- @typing.overload
1659
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1660
- """
1661
- Specifies the times when the flow should be run when running on a
1662
- production scheduler.
1663
-
1664
- Parameters
1665
- ----------
1666
- hourly : bool, default False
1667
- Run the workflow hourly.
1668
- daily : bool, default True
1669
- Run the workflow daily.
1670
- weekly : bool, default False
1671
- Run the workflow weekly.
1672
- cron : str, optional, default None
1673
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1674
- specified by this expression.
1675
- timezone : str, optional, default None
1676
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1677
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1678
- """
1679
- ...
1680
-
1681
- @typing.overload
1682
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1683
- ...
1684
-
1685
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1686
- """
1687
- Specifies the times when the flow should be run when running on a
1688
- production scheduler.
1689
-
1690
- Parameters
1691
- ----------
1692
- hourly : bool, default False
1693
- Run the workflow hourly.
1694
- daily : bool, default True
1695
- Run the workflow daily.
1696
- weekly : bool, default False
1697
- Run the workflow weekly.
1698
- cron : str, optional, default None
1699
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1700
- specified by this expression.
1701
- timezone : str, optional, default None
1702
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1703
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1704
- """
1705
- ...
1706
-
1707
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1708
- """
1709
- Specifies what flows belong to the same project.
1710
-
1711
- A project-specific namespace is created for all flows that
1712
- use the same `@project(name)`.
1713
-
1714
- Parameters
1715
- ----------
1716
- name : str
1717
- Project name. Make sure that the name is unique amongst all
1718
- projects that use the same production scheduler. The name may
1719
- contain only lowercase alphanumeric characters and underscores.
1720
-
1721
-
1572
+ If None (default value) the sensor waits for the DAG. (Default: None)
1573
+ allowed_states : List[str]
1574
+ Iterable of allowed states, (Default: ['success'])
1575
+ failed_states : List[str]
1576
+ Iterable of failed or dis-allowed states. (Default: None)
1577
+ execution_delta : datetime.timedelta
1578
+ time difference with the previous execution to look at,
1579
+ the default is the same logical date as the current task or DAG. (Default: None)
1580
+ check_existence : bool
1581
+ Set to True to check if the external task exists or check if
1582
+ the DAG to wait for exists. (Default: True)
1722
1583
  """
1723
1584
  ...
1724
1585
 
@@ -1825,6 +1686,146 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1825
1686
  """
1826
1687
  ...
1827
1688
 
1689
+ @typing.overload
1690
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1691
+ """
1692
+ Specifies the times when the flow should be run when running on a
1693
+ production scheduler.
1694
+
1695
+ Parameters
1696
+ ----------
1697
+ hourly : bool, default False
1698
+ Run the workflow hourly.
1699
+ daily : bool, default True
1700
+ Run the workflow daily.
1701
+ weekly : bool, default False
1702
+ Run the workflow weekly.
1703
+ cron : str, optional, default None
1704
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1705
+ specified by this expression.
1706
+ timezone : str, optional, default None
1707
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1708
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1709
+ """
1710
+ ...
1711
+
1712
+ @typing.overload
1713
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1714
+ ...
1715
+
1716
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1717
+ """
1718
+ Specifies the times when the flow should be run when running on a
1719
+ production scheduler.
1720
+
1721
+ Parameters
1722
+ ----------
1723
+ hourly : bool, default False
1724
+ Run the workflow hourly.
1725
+ daily : bool, default True
1726
+ Run the workflow daily.
1727
+ weekly : bool, default False
1728
+ Run the workflow weekly.
1729
+ cron : str, optional, default None
1730
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1731
+ specified by this expression.
1732
+ timezone : str, optional, default None
1733
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1734
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1735
+ """
1736
+ ...
1737
+
1738
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1739
+ """
1740
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1741
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1742
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1743
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1744
+ starts only after all sensors finish.
1745
+
1746
+ Parameters
1747
+ ----------
1748
+ timeout : int
1749
+ Time, in seconds before the task times out and fails. (Default: 3600)
1750
+ poke_interval : int
1751
+ Time in seconds that the job should wait in between each try. (Default: 60)
1752
+ mode : str
1753
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1754
+ exponential_backoff : bool
1755
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1756
+ pool : str
1757
+ the slot pool this task should run in,
1758
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1759
+ soft_fail : bool
1760
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1761
+ name : str
1762
+ Name of the sensor on Airflow
1763
+ description : str
1764
+ Description of sensor in the Airflow UI
1765
+ bucket_key : Union[str, List[str]]
1766
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1767
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1768
+ bucket_name : str
1769
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1770
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1771
+ wildcard_match : bool
1772
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1773
+ aws_conn_id : str
1774
+ a reference to the s3 connection on Airflow. (Default: None)
1775
+ verify : bool
1776
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1777
+ """
1778
+ ...
1779
+
1780
+ @typing.overload
1781
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1782
+ """
1783
+ Specifies the Conda environment for all steps of the flow.
1784
+
1785
+ Use `@conda_base` to set common libraries required by all
1786
+ steps and use `@conda` to specify step-specific additions.
1787
+
1788
+ Parameters
1789
+ ----------
1790
+ packages : Dict[str, str], default {}
1791
+ Packages to use for this flow. The key is the name of the package
1792
+ and the value is the version to use.
1793
+ libraries : Dict[str, str], default {}
1794
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1795
+ python : str, optional, default None
1796
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1797
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1798
+ disabled : bool, default False
1799
+ If set to True, disables Conda.
1800
+ """
1801
+ ...
1802
+
1803
+ @typing.overload
1804
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1805
+ ...
1806
+
1807
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1808
+ """
1809
+ Specifies the Conda environment for all steps of the flow.
1810
+
1811
+ Use `@conda_base` to set common libraries required by all
1812
+ steps and use `@conda` to specify step-specific additions.
1813
+
1814
+ Parameters
1815
+ ----------
1816
+ packages : Dict[str, str], default {}
1817
+ Packages to use for this flow. The key is the name of the package
1818
+ and the value is the version to use.
1819
+ libraries : Dict[str, str], default {}
1820
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1821
+ python : str, optional, default None
1822
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1823
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1824
+ disabled : bool, default False
1825
+ If set to True, disables Conda.
1826
+ """
1827
+ ...
1828
+
1828
1829
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1829
1830
  """
1830
1831
  Switch namespace to the one provided.
@@ -2775,3 +2776,201 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
2775
2776
  ...
2776
2777
  ...
2777
2778
 
2779
+ class Runner(object, metaclass=type):
2780
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
2781
+ ...
2782
+ def __enter__(self) -> metaflow.runner.metaflow_runner.Runner:
2783
+ ...
2784
+ def __aenter__(self) -> metaflow.runner.metaflow_runner.Runner:
2785
+ ...
2786
+ def _Runner__get_executing_run(self, tfp_runner_attribute, command_obj):
2787
+ ...
2788
+ def run(self, **kwargs) -> metaflow.runner.metaflow_runner.ExecutingRun:
2789
+ """
2790
+ Blocking execution of the run. This method will wait until
2791
+ the run has completed execution.
2792
+
2793
+ Parameters
2794
+ ----------
2795
+ **kwargs : Any
2796
+ Additional arguments that you would pass to `python myflow.py` after
2797
+ the `run` command, in particular, any parameters accepted by the flow.
2798
+
2799
+ Returns
2800
+ -------
2801
+ ExecutingRun
2802
+ ExecutingRun containing the results of the run.
2803
+ """
2804
+ ...
2805
+ def resume(self, **kwargs):
2806
+ """
2807
+ Blocking resume execution of the run.
2808
+ This method will wait until the resumed run has completed execution.
2809
+
2810
+ Parameters
2811
+ ----------
2812
+ **kwargs : Any
2813
+ Additional arguments that you would pass to `python ./myflow.py` after
2814
+ the `resume` command.
2815
+
2816
+ Returns
2817
+ -------
2818
+ ExecutingRun
2819
+ ExecutingRun containing the results of the resumed run.
2820
+ """
2821
+ ...
2822
+ def async_run(self, **kwargs) -> metaflow.runner.metaflow_runner.ExecutingRun:
2823
+ """
2824
+ Non-blocking execution of the run. This method will return as soon as the
2825
+ run has launched.
2826
+
2827
+ Note that this method is asynchronous and needs to be `await`ed.
2828
+
2829
+ Parameters
2830
+ ----------
2831
+ **kwargs : Any
2832
+ Additional arguments that you would pass to `python myflow.py` after
2833
+ the `run` command, in particular, any parameters accepted by the flow.
2834
+
2835
+ Returns
2836
+ -------
2837
+ ExecutingRun
2838
+ ExecutingRun representing the run that was started.
2839
+ """
2840
+ ...
2841
+ def async_resume(self, **kwargs):
2842
+ """
2843
+ Non-blocking resume execution of the run.
2844
+ This method will return as soon as the resume has launched.
2845
+
2846
+ Note that this method is asynchronous and needs to be `await`ed.
2847
+
2848
+ Parameters
2849
+ ----------
2850
+ **kwargs : Any
2851
+ Additional arguments that you would pass to `python myflow.py` after
2852
+ the `resume` command.
2853
+
2854
+ Returns
2855
+ -------
2856
+ ExecutingRun
2857
+ ExecutingRun representing the resumed run that was started.
2858
+ """
2859
+ ...
2860
+ def __exit__(self, exc_type, exc_value, traceback):
2861
+ ...
2862
+ def __aexit__(self, exc_type, exc_value, traceback):
2863
+ ...
2864
+ def cleanup(self):
2865
+ """
2866
+ Delete any temporary files created during execution.
2867
+ """
2868
+ ...
2869
+ ...
2870
+
2871
+ class NBRunner(object, metaclass=type):
2872
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
2873
+ ...
2874
+ def nbrun(self, **kwargs):
2875
+ """
2876
+ Blocking execution of the run. This method will wait until
2877
+ the run has completed execution.
2878
+
2879
+ Note that in contrast to `run`, this method returns a
2880
+ `metaflow.Run` object directly and calls `cleanup()` internally
2881
+ to support a common notebook pattern of executing a flow and
2882
+ retrieving its results immediately.
2883
+
2884
+ Parameters
2885
+ ----------
2886
+ **kwargs : Any
2887
+ Additional arguments that you would pass to `python myflow.py` after
2888
+ the `run` command, in particular, any parameters accepted by the flow.
2889
+
2890
+ Returns
2891
+ -------
2892
+ Run
2893
+ A `metaflow.Run` object representing the finished run.
2894
+ """
2895
+ ...
2896
+ def nbresume(self, **kwargs):
2897
+ """
2898
+ Blocking resuming of a run. This method will wait until
2899
+ the resumed run has completed execution.
2900
+
2901
+ Note that in contrast to `resume`, this method returns a
2902
+ `metaflow.Run` object directly and calls `cleanup()` internally
2903
+ to support a common notebook pattern of executing a flow and
2904
+ retrieving its results immediately.
2905
+
2906
+ Parameters
2907
+ ----------
2908
+ **kwargs : Any
2909
+ Additional arguments that you would pass to `python myflow.py` after
2910
+ the `resume` command.
2911
+
2912
+ Returns
2913
+ -------
2914
+ Run
2915
+ A `metaflow.Run` object representing the resumed run.
2916
+ """
2917
+ ...
2918
+ def run(self, **kwargs):
2919
+ """
2920
+ Runs the flow.
2921
+ """
2922
+ ...
2923
+ def resume(self, **kwargs):
2924
+ """
2925
+ Resumes the flow.
2926
+ """
2927
+ ...
2928
+ def async_run(self, **kwargs):
2929
+ """
2930
+ Non-blocking execution of the run. This method will return as soon as the
2931
+ run has launched. This method is equivalent to `Runner.async_run`.
2932
+
2933
+ Note that this method is asynchronous and needs to be `await`ed.
2934
+
2935
+
2936
+ Parameters
2937
+ ----------
2938
+ **kwargs : Any
2939
+ Additional arguments that you would pass to `python myflow.py` after
2940
+ the `run` command, in particular, any parameters accepted by the flow.
2941
+
2942
+ Returns
2943
+ -------
2944
+ ExecutingRun
2945
+ ExecutingRun representing the run that was started.
2946
+ """
2947
+ ...
2948
+ def async_resume(self, **kwargs):
2949
+ """
2950
+ Non-blocking execution of the run. This method will return as soon as the
2951
+ run has launched. This method is equivalent to `Runner.async_resume`.
2952
+
2953
+ Note that this method is asynchronous and needs to be `await`ed.
2954
+
2955
+ Parameters
2956
+ ----------
2957
+ **kwargs : Any
2958
+ Additional arguments that you would pass to `python myflow.py` after
2959
+ the `run` command, in particular, any parameters accepted by the flow.
2960
+
2961
+ Returns
2962
+ -------
2963
+ ExecutingRun
2964
+ ExecutingRun representing the run that was started.
2965
+ """
2966
+ ...
2967
+ def cleanup(self):
2968
+ """
2969
+ Delete any temporary files created during execution.
2970
+
2971
+ Call this method after using `async_run` or `async_resume`. You don't
2972
+ have to call this after `nbrun` or `nbresume`.
2973
+ """
2974
+ ...
2975
+ ...
2976
+