ob-metaflow-stubs 2.11.10.3__py2.py3-none-any.whl → 3.2__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as published to a supported registry. It is provided for informational purposes only and reflects the package versions exactly as they appear in their respective public registries.
Files changed (135)
  1. metaflow-stubs/__init__.pyi +492 -489
  2. metaflow-stubs/cards.pyi +4 -4
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +12 -2
  16. metaflow-stubs/metaflow_current.pyi +5 -5
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +4 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  60. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  61. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  62. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  64. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  70. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  77. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +3 -3
  80. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  81. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  82. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  83. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  84. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  85. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  86. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  87. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  88. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  90. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  91. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  92. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  93. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  96. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  97. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  98. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  99. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  102. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +5 -3
  104. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +4 -31
  106. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +9 -3
  107. metaflow-stubs/plugins/package_cli.pyi +2 -2
  108. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  109. metaflow-stubs/plugins/perimeters.pyi +24 -0
  110. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  114. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  116. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  117. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  119. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  120. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  123. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  124. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  125. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  126. metaflow-stubs/procpoll.pyi +2 -2
  127. metaflow-stubs/profilers/__init__.pyi +16 -0
  128. metaflow-stubs/pylint_wrapper.pyi +2 -2
  129. metaflow-stubs/tagging_util.pyi +2 -2
  130. {ob_metaflow_stubs-2.11.10.3.dist-info → ob_metaflow_stubs-3.2.dist-info}/METADATA +1 -2
  131. ob_metaflow_stubs-3.2.dist-info/RECORD +134 -0
  132. {ob_metaflow_stubs-2.11.10.3.dist-info → ob_metaflow_stubs-3.2.dist-info}/WHEEL +1 -1
  133. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +0 -100
  134. ob_metaflow_stubs-2.11.10.3.dist-info/RECORD +0 -133
  135. {ob_metaflow_stubs-2.11.10.3.dist-info → ob_metaflow_stubs-3.2.dist-info}/top_level.txt +0 -0
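Most of the churn is in `metaflow-stubs/__init__.pyi` (+492 -489), whose decorator stubs were regenerated and reordered; the diff for that file follows. For orientation, here is a minimal sketch of the kind of user code these stubs type-check. It is not part of the package: the flow and step names are made up, but the decorators and parameters (`@resources`, `@retry`, `@kubernetes`) follow the signatures shown in the diff below.

```python
# Illustrative sketch only -- not shipped with ob-metaflow-stubs. Flow and
# step names are hypothetical; the decorator parameters mirror the stub
# signatures in the __init__.pyi diff below.
from metaflow import FlowSpec, kubernetes, resources, retry, step


class ExampleTrainingFlow(FlowSpec):

    @resources(cpu=2, memory=8192)               # honored by `--with batch` or `--with kubernetes`
    @retry(times=3, minutes_between_retries=2)   # retry transient failures
    @step
    def start(self):
        self.message = "hello"
        self.next(self.train)

    @kubernetes(cpu=4, memory=16384, use_tmpfs=True)  # pin this step to Kubernetes
    @step
    def train(self):
        print(self.message)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ExampleTrainingFlow()
```

With the stub package installed, a type checker such as mypy or pyright resolves these decorators against `metaflow-stubs/__init__.pyi` (per PEP 561); at runtime the flow behaves the same with or without the stubs.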
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.10.3 #
- # Generated on 2024-04-12T18:07:07.339961 #
+ # MF version: 2.11.14.1+ob(v1) #
+ # Generated on 2024-05-07T00:05:40.513974 #
  ##################################################################################

  from __future__ import annotations
@@ -9,15 +9,15 @@ from __future__ import annotations
  import typing
  if typing.TYPE_CHECKING:
  import io
- import metaflow.metaflow_current
- import metaflow._vendor.click.types
+ import metaflow.parameters
+ import datetime
  import typing
+ import metaflow.events
+ import metaflow.plugins.datatools.s3.s3
  import metaflow.datastore.inputs
+ import metaflow.metaflow_current
  import metaflow.client.core
- import metaflow.parameters
- import metaflow.plugins.datatools.s3.s3
- import metaflow.events
- import datetime
+ import metaflow._vendor.click.types
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -725,55 +725,6 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  """
  ...

- @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
  @typing.overload
  def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
@@ -831,325 +782,321 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
  """
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ @typing.overload
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
  cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
  memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the resources needed when executing this step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
+ """
+ ...
+
+ @typing.overload
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.
+
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

+ Parameters
+ ----------
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.
+
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
+ Specifies that the step will success under all circumstances.

- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies that the step will success under all circumstances.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies the PyPI packages for the step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Specifies the PyPI packages for the step.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Information in this decorator will augment any
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ packages : Dict[str, str], default: {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
  """
  ...

@@ -1211,33 +1158,86 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies environment variables to be set prior to the execution of a step.
+ Creates a human-readable report, a Metaflow Card, after this step completes.
+
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
+ """
+ ...
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.
+
+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

@@ -1388,146 +1388,6 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
- @typing.overload
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for all steps of the flow.
-
- Use `@conda_base` to set common libraries required by all
- steps and use `@conda` to specify step-specific additions.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables Conda.
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
  @typing.overload
  def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1620,14 +1480,63 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
 
      Parameters
      ----------
-     flow : Union[str, Dict[str, str]], optional, default None
-         Upstream flow dependency for this flow.
-     flows : List[Union[str, Dict[str, str]]], default []
-         Upstream flow dependencies for this flow.
-     options : Dict[str, Any], default {}
-         Backend-specific configuration for tuning eventing behavior.
- 
- 
+     flow : Union[str, Dict[str, str]], optional, default None
+         Upstream flow dependency for this flow.
+     flows : List[Union[str, Dict[str, str]]], default []
+         Upstream flow dependencies for this flow.
+     options : Dict[str, Any], default {}
+         Backend-specific configuration for tuning eventing behavior.
+ 
+ 
+     """
+     ...
+ 
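Editor's note: a minimal usage sketch of the `@trigger_on_finish` decorator documented above. The flow names are hypothetical, and triggering only takes effect once both flows are deployed to an event-capable production scheduler such as Argo Workflows.

    from metaflow import FlowSpec, step, trigger_on_finish

    # Start this flow whenever the (hypothetical) UpstreamFlow completes successfully.
    @trigger_on_finish(flow="UpstreamFlow")
    class DownstreamFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        DownstreamFlow()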
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the times when the flow should be run when running on a
+     production scheduler.
+ 
+     Parameters
+     ----------
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+     """
+     ...
+ 
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+ 
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+     """
+     Specifies the times when the flow should be run when running on a
+     production scheduler.
+ 
+     Parameters
+     ----------
+     hourly : bool, default False
+         Run the workflow hourly.
+     daily : bool, default True
+         Run the workflow daily.
+     weekly : bool, default False
+         Run the workflow weekly.
+     cron : str, optional, default None
+         Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+         specified by this expression.
+     timezone : str, optional, default None
+         Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+         which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
      """
      ...
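Editor's note: a sketch of `@schedule` as documented above. The cron expression and timezone are illustrative, and the exact cron syntax accepted depends on the production scheduler (AWS EventBridge and Argo Workflows differ).

    from metaflow import FlowSpec, schedule, step

    # Run nightly at 02:30 in the given IANA timezone once deployed to a
    # production scheduler (timezone support is Argo Workflows only).
    @schedule(cron="30 2 * * *", timezone="America/Los_Angeles")
    class NightlyFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        NightlyFlow()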
 
@@ -1670,66 +1579,6 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
      """
      ...
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
-     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
- 
-     Parameters
-     ----------
-     timeout : int
-         Time, in seconds, before the task times out and fails. (Default: 3600)
-     poke_interval : int
-         Time in seconds that the job should wait in between each try. (Default: 60)
-     mode : str
-         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
-     exponential_backoff : bool
-         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
-     pool : str
-         The slot pool this task should run in;
-         slot pools are a way to limit concurrency for certain tasks. (Default: None)
-     soft_fail : bool
-         Set to True to mark the task as SKIPPED on failure. (Default: False)
-     name : str
-         Name of the sensor on Airflow.
-     description : str
-         Description of the sensor in the Airflow UI.
-     external_dag_id : str
-         The dag_id that contains the task you want to wait for.
-     external_task_ids : List[str]
-         The list of task_ids that you want to wait for.
-         If None (the default), the sensor waits for the DAG. (Default: None)
-     allowed_states : List[str]
-         Iterable of allowed states. (Default: ['success'])
-     failed_states : List[str]
-         Iterable of failed or disallowed states. (Default: None)
-     execution_delta : datetime.timedelta
-         Time difference with the previous execution to look at;
-         the default is the same logical date as the current task or DAG. (Default: None)
-     check_existence : bool
-         Set to True to check if the external task exists or check if
-         the DAG to wait for exists. (Default: True)
-     """
-     ...
- 
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
-     """
-     Specifies which flows belong to the same project.
- 
-     A project-specific namespace is created for all flows that
-     use the same `@project(name)`.
- 
-     Parameters
-     ----------
-     name : str
-         Project name. Make sure that the name is unique amongst all
-         projects that use the same production scheduler. The name may
-         contain only lowercase alphanumeric characters and underscores.
- 
- 
-     """
-     ...
- 
  @typing.overload
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
      """
@@ -1825,6 +1674,157 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
      """
      ...
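Editor's note: for completeness, a sketch of the `@trigger` decorator whose signature appears in the context lines above. The event name is hypothetical and event delivery assumes an Argo Events style backend.

    from metaflow import FlowSpec, step, trigger

    # Run this flow whenever an external event named "data_updated" is published.
    @trigger(event="data_updated")
    class EventDrivenFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        EventDrivenFlow()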
 
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
+ 
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in;
+         slot pools are a way to limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (the default), the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states. (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or disallowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         Time difference with the previous execution to look at;
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence : bool
+         Set to True to check if the external task exists or check if
+         the DAG to wait for exists. (Default: True)
+     """
+     ...
+ 
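Editor's note: a usage sketch of the sensor decorator added above. The DAG and task ids are hypothetical, only a few keyword arguments are set (the rest fall back to the defaults listed in the docstring), and the sensor only takes effect when the flow is compiled with `airflow create`.

    from metaflow import FlowSpec, step, airflow_external_task_sensor

    # Gate the `start` step on a task in another (hypothetical) Airflow DAG.
    @airflow_external_task_sensor(
        name="wait_for_upstream_etl",
        external_dag_id="upstream_etl",
        external_task_ids=["publish_table"],
    )
    class SensorGatedFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        SensorGatedFlow()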
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies which flows belong to the same project.
+ 
+     A project-specific namespace is created for all flows that
+     use the same `@project(name)`.
+ 
+     Parameters
+     ----------
+     name : str
+         Project name. Make sure that the name is unique amongst all
+         projects that use the same production scheduler. The name may
+         contain only lowercase alphanumeric characters and underscores.
+ 
+ 
+     """
+     ...
+ 
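Editor's note: a minimal sketch of `@project`; the project name is illustrative.

    from metaflow import FlowSpec, project, step

    # Flows deployed with the same project name share a project-specific namespace.
    @project(name="fraud_detection")
    class ScoringFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        ScoringFlow()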
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+     added as a flow decorator. Adding more than one decorator ensures that the `start` step
+     starts only after all sensors finish.
+ 
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds, before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+     pool : str
+         The slot pool this task should run in;
+         slot pools are a way to limit concurrency for certain tasks. (Default: None)
+     soft_fail : bool
+         Set to True to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow.
+     description : str
+         Description of the sensor in the Airflow UI.
+     bucket_key : Union[str, List[str]]
+         The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+         When specified as a full s3:// URL, leave `bucket_name` as None.
+     bucket_name : str
+         Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+         When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+     wildcard_match : bool
+         Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+     aws_conn_id : str
+         A reference to the S3 connection on Airflow. (Default: None)
+     verify : bool
+         Whether or not to verify SSL certificates for S3 connection. (Default: None)
+     """
+     ...
+ 
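Editor's note: a usage sketch of `@airflow_s3_key_sensor`. The bucket and key are hypothetical, unspecified parameters fall back to the documented defaults, and the sensor only materializes when the flow is compiled with `airflow create`.

    from metaflow import FlowSpec, step, airflow_s3_key_sensor

    # Hold the `start` step until the daily export lands in S3.
    @airflow_s3_key_sensor(
        name="wait_for_daily_export",
        bucket_key="s3://example-bucket/exports/daily.parquet",
    )
    class S3GatedFlow(FlowSpec):

        @step
        def start(self):
            self.next(self.end)

        @step
        def end(self):
            pass

    if __name__ == "__main__":
        S3GatedFlow()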
+ @typing.overload
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+     """
+     Specifies the Conda environment for all steps of the flow.
+ 
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.
+ 
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
+     """
+     ...
+ 
+ @typing.overload
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+     ...
+ 
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+     """
+     Specifies the Conda environment for all steps of the flow.
+ 
+     Use `@conda_base` to set common libraries required by all
+     steps and use `@conda` to specify step-specific additions.
+ 
+     Parameters
+     ----------
+     packages : Dict[str, str], default {}
+         Packages to use for this flow. The key is the name of the package
+         and the value is the version to use.
+     libraries : Dict[str, str], default {}
+         Supported for backward compatibility. When used with packages, packages will take precedence.
+     python : str, optional, default None
+         Version of Python to use, e.g. '3.7.4'. A default value of None implies
+         that the version used will correspond to the version of the Python interpreter used to start the run.
+     disabled : bool, default False
+         If set to True, disables Conda.
+     """
+     ...
+ 
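Editor's note: a sketch of `@conda_base` with illustrative package pins; any versions available in your Conda channels would work.

    from metaflow import FlowSpec, conda_base, step

    # Pin one environment for every step of the flow; step-level @conda can add to it.
    @conda_base(packages={"pandas": "2.1.4"}, python="3.10.12")
    class TrainingFlow(FlowSpec):

        @step
        def start(self):
            import pandas as pd  # resolved from the pinned environment
            self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
            self.next(self.end)

        @step
        def end(self):
            print("rows:", self.rows)

    if __name__ == "__main__":
        TrainingFlow()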
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
      """
      Switch namespace to the one provided.
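Editor's note: a client-side sketch of `namespace()`. The flow name and user are hypothetical, and the snippet assumes a metadata service with existing runs.

    from metaflow import Flow, namespace

    # Inspect runs recorded under a specific user's namespace.
    namespace("user:alice")
    run = Flow("TrainingFlow").latest_successful_run
    print(run.pathspec)

    # Passing None switches to the global namespace, making all runs visible.
    namespace(None)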
@@ -2775,3 +2775,6 @@ class DataArtifact(metaflow.client.core.MetaflowObject, metaclass=type):
  ...
  ...
 
+ def get_aws_client(module, with_error = False, role_arn = None, session_vars = None, client_params = None):
+     ...
+ 
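Editor's note: `get_aws_client` is an internal helper. The sketch below assumes it returns a boto3-style client for the named service, that AWS credentials are already configured, and that the import path matches where the helper lives in the Metaflow source tree.

    from metaflow.plugins.aws.aws_client import get_aws_client  # assumed import path

    # Obtain an S3 client through Metaflow's helper so that any configured
    # role_arn / session settings are honored instead of calling boto3 directly.
    s3 = get_aws_client("s3")
    print([b["Name"] for b in s3.list_buckets()["Buckets"]])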