ob-metaflow-stubs 3.3__py2.py3-none-any.whl → 3.4__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (136)
  1. metaflow-stubs/__init__.pyi +610 -610
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +3 -3
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +4 -4
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +5 -3
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +4 -4
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +4 -4
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +4 -4
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +6 -4
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +4 -4
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +5 -5
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +6 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +4 -4
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +4 -4
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +58 -0
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +4 -4
  65. metaflow-stubs/plugins/cards/card_client.pyi +4 -4
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +4 -4
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +4 -4
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +5 -3
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +4 -4
  108. metaflow-stubs/plugins/logs_cli.pyi +45 -0
  109. metaflow-stubs/plugins/package_cli.pyi +2 -2
  110. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/perimeters.pyi +2 -2
  112. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  113. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  116. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  119. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  125. metaflow-stubs/plugins/tag_cli.pyi +5 -5
  126. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  127. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  128. metaflow-stubs/procpoll.pyi +2 -2
  129. metaflow-stubs/profilers/__init__.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/tagging_util.pyi +2 -2
  132. {ob_metaflow_stubs-3.3.dist-info → ob_metaflow_stubs-3.4.dist-info}/METADATA +1 -1
  133. ob_metaflow_stubs-3.4.dist-info/RECORD +136 -0
  134. ob_metaflow_stubs-3.3.dist-info/RECORD +0 -134
  135. {ob_metaflow_stubs-3.3.dist-info → ob_metaflow_stubs-3.4.dist-info}/WHEEL +0 -0
  136. {ob_metaflow_stubs-3.3.dist-info → ob_metaflow_stubs-3.4.dist-info}/top_level.txt +0 -0
@@ -1,23 +1,23 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.11.14.1+ob(v1) #
- # Generated on 2024-05-13T17:42:14.215304 #
+ # MF version: 2.11.15.2+ob(v1) #
+ # Generated on 2024-05-17T19:44:44.623630 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
  import datetime
- import metaflow._vendor.click.types
+ import io
+ import metaflow.events
  import metaflow.parameters
+ import typing
+ import metaflow.metaflow_current
  import metaflow.client.core
+ import metaflow._vendor.click.types
  import metaflow.plugins.datatools.s3.s3
- import metaflow.metaflow_current
- import metaflow.events
  import metaflow.datastore.inputs
- import io
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)

@@ -726,155 +726,118 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
- """
- Specifies that the step will success under all circumstances.
-
- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
-
- Parameters
- ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
- """
- ...
-
- @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies the Conda environment for the step.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
- """
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Specifies the Conda environment for the step.

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Information in this decorator will augment any
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ packages : Dict[str, str], default {}
+ Packages to use for this step. The key is the name of the package
+ and the value is the version to use.
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step
+ port: int, optional
+ Port number to specify in the Kubernetes job object
  """
  ...
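As a reading aid only: the stubs added above document the step-level @conda and @kubernetes decorators exported by the metaflow package. The following minimal sketch shows how a step might combine them, using parameters taken from the docstrings shown in this hunk; the flow name, package pin, and resource sizes are hypothetical.

# Hypothetical usage sketch based on the @conda and @kubernetes docstrings above.
from metaflow import FlowSpec, step, conda, kubernetes

class HelloFlow(FlowSpec):

    # Pin a step-specific package and Python version (values are illustrative).
    @conda(packages={"pandas": "2.2.2"}, python="3.11.0")
    # Request CPU/memory/disk for the pod that runs this step.
    @kubernetes(cpu=2, memory=8192, disk=10240)
    @step
    def start(self):
        import pandas as pd
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print("rows:", self.rows)

if __name__ == "__main__":
    HelloFlow()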

@@ -956,295 +919,9 @@ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None]
  ...

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the PyPI packages for the step.
-
- Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on Kubernetes.
-
- Parameters
- ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- """
- ...
-
- @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
- """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
-
- Parameters
- ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
- """
- ...
-
- @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
- """
- Specifies the Conda environment for the step.
-
- Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
-
- Parameters
- ----------
- packages : Dict[str, str], default {}
- Packages to use for this step. The key is the name of the package
- and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
- """
- ...
-
- @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
- ...
-
- @typing.overload
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+ Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).

  Parameters
  ----------
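As a reading aid only: the hunk above moves the step-level @batch stub, whose signature lists parameters such as cpu, gpu, memory, queue, and use_tmpfs. The sketch below is a hypothetical illustration of that decorator combined with @retry (documented elsewhere in this diff); the flow name and values are made up.

# Hypothetical usage sketch based on the @batch signature and @retry docstring in this diff.
from metaflow import FlowSpec, step, batch, retry

class BatchFlow(FlowSpec):

    # Retry transient failures, and run the step on AWS Batch with illustrative resources.
    @retry(times=2)
    @batch(cpu=4, memory=16384)
    @step
    def start(self):
        self.result = sum(range(10))
        self.next(self.end)

    @step
    def end(self):
        print(self.result)

if __name__ == "__main__":
    BatchFlow()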
@@ -1389,51 +1066,364 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1389
1066
  ...
1390
1067
 
1391
1068
  @typing.overload
1392
- def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1069
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1393
1070
  """
1394
- Specifies the Conda environment for all steps of the flow.
1071
+ Specifies that the step will success under all circumstances.
1395
1072
 
1396
- Use `@conda_base` to set common libraries required by all
1397
- steps and use `@conda` to specify step-specific additions.
1073
+ The decorator will create an optional artifact, specified by `var`, which
1074
+ contains the exception raised. You can use it to detect the presence
1075
+ of errors, indicating that all happy-path artifacts produced by the step
1076
+ are missing.
1398
1077
 
1399
1078
  Parameters
1400
1079
  ----------
1401
- packages : Dict[str, str], default {}
1402
- Packages to use for this flow. The key is the name of the package
1080
+ var : str, optional, default None
1081
+ Name of the artifact in which to store the caught exception.
1082
+ If not specified, the exception is not stored.
1083
+ print_exception : bool, default True
1084
+ Determines whether or not the exception is printed to
1085
+ stdout when caught.
1086
+ """
1087
+ ...
1088
+
1089
+ @typing.overload
1090
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1091
+ ...
1092
+
1093
+ @typing.overload
1094
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1095
+ ...
1096
+
1097
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1098
+ """
1099
+ Specifies that the step will success under all circumstances.
1100
+
1101
+ The decorator will create an optional artifact, specified by `var`, which
1102
+ contains the exception raised. You can use it to detect the presence
1103
+ of errors, indicating that all happy-path artifacts produced by the step
1104
+ are missing.
1105
+
1106
+ Parameters
1107
+ ----------
1108
+ var : str, optional, default None
1109
+ Name of the artifact in which to store the caught exception.
1110
+ If not specified, the exception is not stored.
1111
+ print_exception : bool, default True
1112
+ Determines whether or not the exception is printed to
1113
+ stdout when caught.
1114
+ """
1115
+ ...
1116
+
1117
+ @typing.overload
1118
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1119
+ """
1120
+ Specifies environment variables to be set prior to the execution of a step.
1121
+
1122
+ Parameters
1123
+ ----------
1124
+ vars : Dict[str, str], default {}
1125
+ Dictionary of environment variables to set.
1126
+ """
1127
+ ...
1128
+
1129
+ @typing.overload
1130
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1131
+ ...
1132
+
1133
+ @typing.overload
1134
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1135
+ ...
1136
+
1137
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1138
+ """
1139
+ Specifies environment variables to be set prior to the execution of a step.
1140
+
1141
+ Parameters
1142
+ ----------
1143
+ vars : Dict[str, str], default {}
1144
+ Dictionary of environment variables to set.
1145
+ """
1146
+ ...
1147
+
1148
+ @typing.overload
1149
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1150
+ """
1151
+ Specifies the number of times the task corresponding
1152
+ to a step needs to be retried.
1153
+
1154
+ This decorator is useful for handling transient errors, such as networking issues.
1155
+ If your task contains operations that can't be retried safely, e.g. database updates,
1156
+ it is advisable to annotate it with `@retry(times=0)`.
1157
+
1158
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1159
+ decorator will execute a no-op task after all retries have been exhausted,
1160
+ ensuring that the flow execution can continue.
1161
+
1162
+ Parameters
1163
+ ----------
1164
+ times : int, default 3
1165
+ Number of times to retry this task.
1166
+ minutes_between_retries : int, default 2
1167
+ Number of minutes between retries.
1168
+ """
1169
+ ...
1170
+
1171
+ @typing.overload
1172
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1173
+ ...
1174
+
1175
+ @typing.overload
1176
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1177
+ ...
1178
+
1179
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1180
+ """
1181
+ Specifies the number of times the task corresponding
1182
+ to a step needs to be retried.
1183
+
1184
+ This decorator is useful for handling transient errors, such as networking issues.
1185
+ If your task contains operations that can't be retried safely, e.g. database updates,
1186
+ it is advisable to annotate it with `@retry(times=0)`.
1187
+
1188
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1189
+ decorator will execute a no-op task after all retries have been exhausted,
1190
+ ensuring that the flow execution can continue.
1191
+
1192
+ Parameters
1193
+ ----------
1194
+ times : int, default 3
1195
+ Number of times to retry this task.
1196
+ minutes_between_retries : int, default 2
1197
+ Number of minutes between retries.
1198
+ """
1199
+ ...
1200
+
1201
+ @typing.overload
1202
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1203
+ """
1204
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1205
+
1206
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1207
+
1208
+ Parameters
1209
+ ----------
1210
+ type : str, default 'default'
1211
+ Card type.
1212
+ id : str, optional, default None
1213
+ If multiple cards are present, use this id to identify this card.
1214
+ options : Dict[str, Any], default {}
1215
+ Options passed to the card. The contents depend on the card type.
1216
+ timeout : int, default 45
1217
+ Interrupt reporting if it takes more than this many seconds.
1218
+
1219
+
1220
+ """
1221
+ ...
1222
+
1223
+ @typing.overload
1224
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1225
+ ...
1226
+
1227
+ @typing.overload
1228
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1229
+ ...
1230
+
1231
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1232
+ """
1233
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1234
+
1235
+ Note that you may add multiple `@card` decorators in a step with different parameters.
1236
+
1237
+ Parameters
1238
+ ----------
1239
+ type : str, default 'default'
1240
+ Card type.
1241
+ id : str, optional, default None
1242
+ If multiple cards are present, use this id to identify this card.
1243
+ options : Dict[str, Any], default {}
1244
+ Options passed to the card. The contents depend on the card type.
1245
+ timeout : int, default 45
1246
+ Interrupt reporting if it takes more than this many seconds.
1247
+
1248
+
1249
+ """
1250
+ ...
1251
+
1252
+ @typing.overload
1253
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1254
+ """
1255
+ Specifies a timeout for your step.
1256
+
1257
+ This decorator is useful if this step may hang indefinitely.
1258
+
1259
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1260
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1261
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1262
+
1263
+ Note that all the values specified in parameters are added together so if you specify
1264
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1265
+
1266
+ Parameters
1267
+ ----------
1268
+ seconds : int, default 0
1269
+ Number of seconds to wait prior to timing out.
1270
+ minutes : int, default 0
1271
+ Number of minutes to wait prior to timing out.
1272
+ hours : int, default 0
1273
+ Number of hours to wait prior to timing out.
1274
+ """
1275
+ ...
1276
+
1277
+ @typing.overload
1278
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1279
+ ...
1280
+
1281
+ @typing.overload
1282
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1283
+ ...
1284
+
1285
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1286
+ """
1287
+ Specifies a timeout for your step.
1288
+
1289
+ This decorator is useful if this step may hang indefinitely.
1290
+
1291
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1292
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1293
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1294
+
1295
+ Note that all the values specified in parameters are added together so if you specify
1296
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1297
+
1298
+ Parameters
1299
+ ----------
1300
+ seconds : int, default 0
1301
+ Number of seconds to wait prior to timing out.
1302
+ minutes : int, default 0
1303
+ Number of minutes to wait prior to timing out.
1304
+ hours : int, default 0
1305
+ Number of hours to wait prior to timing out.
1306
+ """
1307
+ ...
1308
+
1309
+ @typing.overload
1310
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1311
+ """
1312
+ Specifies the PyPI packages for the step.
1313
+
1314
+ Information in this decorator will augment any
1315
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1316
+ you can use `@pypi_base` to set packages required by all
1317
+ steps and use `@pypi` to specify step-specific overrides.
1318
+
1319
+ Parameters
1320
+ ----------
1321
+ packages : Dict[str, str], default: {}
1322
+ Packages to use for this step. The key is the name of the package
1403
1323
  and the value is the version to use.
1404
- libraries : Dict[str, str], default {}
1405
- Supported for backward compatibility. When used with packages, packages will take precedence.
1406
- python : str, optional, default None
1324
+ python : str, optional, default: None
1407
1325
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1408
1326
  that the version used will correspond to the version of the Python interpreter used to start the run.
1409
- disabled : bool, default False
1410
- If set to True, disables Conda.
1411
1327
  """
1412
1328
  ...
1413
1329
 
1414
1330
  @typing.overload
1415
- def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1331
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1416
1332
  ...
1417
1333
 
1418
- def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1334
+ @typing.overload
1335
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1336
+ ...
1337
+
1338
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1419
1339
  """
1420
- Specifies the Conda environment for all steps of the flow.
1340
+ Specifies the PyPI packages for the step.
1421
1341
 
1422
- Use `@conda_base` to set common libraries required by all
1423
- steps and use `@conda` to specify step-specific additions.
1342
+ Information in this decorator will augment any
1343
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
1344
+ you can use `@pypi_base` to set packages required by all
1345
+ steps and use `@pypi` to specify step-specific overrides.
1424
1346
 
1425
1347
  Parameters
1426
1348
  ----------
1427
- packages : Dict[str, str], default {}
1349
+ packages : Dict[str, str], default: {}
1350
+ Packages to use for this step. The key is the name of the package
1351
+ and the value is the version to use.
1352
+ python : str, optional, default: None
1353
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1354
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1355
+ """
1356
+ ...
1357
+
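A usage sketch for the step-level `@pypi` decorator; the package and Python versions below are examples only:

```python
from metaflow import FlowSpec, step, pypi

class PypiStepFlow(FlowSpec):  # hypothetical flow name

    @pypi(packages={"pandas": "2.1.4"}, python="3.10.9")  # step-specific dependencies
    @step
    def start(self):
        import pandas as pd  # resolved in an isolated environment for this step only
        print(pd.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiStepFlow()
```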
1358
+ @typing.overload
1359
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1360
+ """
1361
+ Specifies secrets to be retrieved and injected as environment variables prior to
1362
+ the execution of a step.
1363
+
1364
+ Parameters
1365
+ ----------
1366
+ sources : List[Union[str, Dict[str, Any]]], default: []
1367
+ List of secret specs, defining how the secrets are to be retrieved
1368
+ """
1369
+ ...
1370
+
1371
+ @typing.overload
1372
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1373
+ ...
1374
+
1375
+ @typing.overload
1376
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1377
+ ...
1378
+
1379
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1380
+ """
1381
+ Specifies secrets to be retrieved and injected as environment variables prior to
1382
+ the execution of a step.
1383
+
1384
+ Parameters
1385
+ ----------
1386
+ sources : List[Union[str, Dict[str, Any]]], default: []
1387
+ List of secret specs, defining how the secrets are to be retrieved
1388
+ """
1389
+ ...
1390
+
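A hedged sketch of `@secrets`; the secret id and the environment variable name are hypothetical and depend on the configured secrets backend:

```python
import os
from metaflow import FlowSpec, step, secrets

class SecretsDemoFlow(FlowSpec):  # hypothetical flow name

    @secrets(sources=["my-secret-id"])  # spec format depends on the secrets provider in use
    @step
    def start(self):
        # keys of the retrieved secret are injected as environment variables
        token = os.environ.get("MY_API_TOKEN")  # hypothetical key name
        print("token present:", token is not None)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```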
1391
+ @typing.overload
1392
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1393
+ """
1394
+ Specifies the PyPI packages for all steps of the flow.
1395
+
1396
+ Use `@pypi_base` to set common packages required by all
1397
+ steps and use `@pypi` to specify step-specific overrides.
1398
+ Parameters
1399
+ ----------
1400
+ packages : Dict[str, str], default: {}
1428
1401
  Packages to use for this flow. The key is the name of the package
1429
1402
  and the value is the version to use.
1430
- libraries : Dict[str, str], default {}
1431
- Supported for backward compatibility. When used with packages, packages will take precedence.
1432
- python : str, optional, default None
1403
+ python : str, optional, default: None
1404
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1405
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1406
+ """
1407
+ ...
1408
+
1409
+ @typing.overload
1410
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1411
+ ...
1412
+
1413
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1414
+ """
1415
+ Specifies the PyPI packages for all steps of the flow.
1416
+
1417
+ Use `@pypi_base` to set common packages required by all
1418
+ steps and use `@pypi` to specify step-specific overrides.
1419
+ Parameters
1420
+ ----------
1421
+ packages : Dict[str, str], default: {}
1422
+ Packages to use for this flow. The key is the name of the package
1423
+ and the value is the version to use.
1424
+ python : str, optional, default: None
1433
1425
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1434
1426
  that the version used will correspond to the version of the Python interpreter used to start the run.
1435
- disabled : bool, default False
1436
- If set to True, disables Conda.
1437
1427
  """
1438
1428
  ...
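A flow-level counterpart to the previous sketch: `@pypi_base` supplies packages to every step, which `@pypi` can then override per step. Names and versions are illustrative:

```python
from metaflow import FlowSpec, step, pypi_base

@pypi_base(packages={"requests": "2.31.0"}, python="3.10.9")  # shared by every step
class PypiBaseFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import requests  # available here and in all other steps
        print(requests.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PypiBaseFlow()
```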
1439
1429
 
@@ -1521,212 +1511,89 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1521
1511
  @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1522
1512
  ```
1523
1513
 
1524
- Note that `branch` is typically one of:
1525
- - `prod`
1526
- - `user.bob`
1527
- - `test.my_experiment`
1528
- - `prod.staging`
1529
-
1530
- Parameters
1531
- ----------
1532
- flow : Union[str, Dict[str, str]], optional, default None
1533
- Upstream flow dependency for this flow.
1534
- flows : List[Union[str, Dict[str, str]]], default []
1535
- Upstream flow dependencies for this flow.
1536
- options : Dict[str, Any], default {}
1537
- Backend-specific configuration for tuning eventing behavior.
1538
-
1539
-
1540
- """
1541
- ...
1542
-
1543
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1544
- """
1545
- Specifies what flows belong to the same project.
1546
-
1547
- A project-specific namespace is created for all flows that
1548
- use the same `@project(name)`.
1549
-
1550
- Parameters
1551
- ----------
1552
- name : str
1553
- Project name. Make sure that the name is unique amongst all
1554
- projects that use the same production scheduler. The name may
1555
- contain only lowercase alphanumeric characters and underscores.
1556
-
1557
-
1558
- """
1559
- ...
1560
-
1561
- @typing.overload
1562
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1563
- """
1564
- Specifies the times when the flow should be run when running on a
1565
- production scheduler.
1566
-
1567
- Parameters
1568
- ----------
1569
- hourly : bool, default False
1570
- Run the workflow hourly.
1571
- daily : bool, default True
1572
- Run the workflow daily.
1573
- weekly : bool, default False
1574
- Run the workflow weekly.
1575
- cron : str, optional, default None
1576
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1577
- specified by this expression.
1578
- timezone : str, optional, default None
1579
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1580
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1581
- """
1582
- ...
1583
-
1584
- @typing.overload
1585
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1586
- ...
1587
-
1588
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1589
- """
1590
- Specifies the times when the flow should be run when running on a
1591
- production scheduler.
1592
-
1593
- Parameters
1594
- ----------
1595
- hourly : bool, default False
1596
- Run the workflow hourly.
1597
- daily : bool, default True
1598
- Run the workflow daily.
1599
- weekly : bool, default False
1600
- Run the workflow weekly.
1601
- cron : str, optional, default None
1602
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1603
- specified by this expression.
1604
- timezone : str, optional, default None
1605
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1606
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1607
- """
1608
- ...
1609
-
1610
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1611
- """
1612
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1613
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1614
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1615
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1616
- starts only after all sensors finish.
1617
-
1514
+ Note that `branch` is typically one of:
1515
+ - `prod`
1516
+ - `user.bob`
1517
+ - `test.my_experiment`
1518
+ - `prod.staging`
1519
+
1618
1520
  Parameters
1619
1521
  ----------
1620
- timeout : int
1621
- Time, in seconds before the task times out and fails. (Default: 3600)
1622
- poke_interval : int
1623
- Time in seconds that the job should wait in between each try. (Default: 60)
1624
- mode : str
1625
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1626
- exponential_backoff : bool
1627
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1628
- pool : str
1629
- the slot pool this task should run in,
1630
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1631
- soft_fail : bool
1632
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1633
- name : str
1634
- Name of the sensor on Airflow
1635
- description : str
1636
- Description of sensor in the Airflow UI
1637
- bucket_key : Union[str, List[str]]
1638
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1639
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1640
- bucket_name : str
1641
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1642
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1643
- wildcard_match : bool
1644
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1645
- aws_conn_id : str
1646
- a reference to the s3 connection on Airflow. (Default: None)
1647
- verify : bool
1648
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1522
+ flow : Union[str, Dict[str, str]], optional, default None
1523
+ Upstream flow dependency for this flow.
1524
+ flows : List[Union[str, Dict[str, str]]], default []
1525
+ Upstream flow dependencies for this flow.
1526
+ options : Dict[str, Any], default {}
1527
+ Backend-specific configuration for tuning eventing behavior.
1528
+
1529
+
1649
1530
  """
1650
1531
  ...
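A minimal sketch of `@trigger_on_finish`; `FooFlow` is the hypothetical upstream flow named in the docstring's own example:

```python
from metaflow import FlowSpec, step, trigger_on_finish

@trigger_on_finish(flow="FooFlow")  # runs after FooFlow finishes, once deployed to a scheduler
class DownstreamFlow(FlowSpec):     # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```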
1651
1532
 
1652
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1533
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1653
1534
  """
1654
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1655
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1535
+ Specifies what flows belong to the same project.
1536
+
1537
+ A project-specific namespace is created for all flows that
1538
+ use the same `@project(name)`.
1656
1539
 
1657
1540
  Parameters
1658
1541
  ----------
1659
- timeout : int
1660
- Time, in seconds before the task times out and fails. (Default: 3600)
1661
- poke_interval : int
1662
- Time in seconds that the job should wait in between each try. (Default: 60)
1663
- mode : str
1664
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1665
- exponential_backoff : bool
1666
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1667
- pool : str
1668
- the slot pool this task should run in,
1669
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1670
- soft_fail : bool
1671
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1672
1542
  name : str
1673
- Name of the sensor on Airflow
1674
- description : str
1675
- Description of sensor in the Airflow UI
1676
- external_dag_id : str
1677
- The dag_id that contains the task you want to wait for.
1678
- external_task_ids : List[str]
1679
- The list of task_ids that you want to wait for.
1680
- If None (default value) the sensor waits for the DAG. (Default: None)
1681
- allowed_states : List[str]
1682
- Iterable of allowed states, (Default: ['success'])
1683
- failed_states : List[str]
1684
- Iterable of failed or dis-allowed states. (Default: None)
1685
- execution_delta : datetime.timedelta
1686
- time difference with the previous execution to look at,
1687
- the default is the same logical date as the current task or DAG. (Default: None)
1688
- check_existence: bool
1689
- Set to True to check if the external task exists or check if
1690
- the DAG to wait for exists. (Default: True)
1543
+ Project name. Make sure that the name is unique amongst all
1544
+ projects that use the same production scheduler. The name may
1545
+ contain only lowercase alphanumeric characters and underscores.
1546
+
1547
+
1691
1548
  """
1692
1549
  ...
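A minimal sketch of `@project`; the project name is illustrative and must stay within lowercase alphanumerics and underscores, as noted above:

```python
from metaflow import FlowSpec, step, project

@project(name="my_project")         # all flows sharing this name share a project namespace
class ProjectScopedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ProjectScopedFlow()
```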
1693
1550
 
1694
1551
  @typing.overload
1695
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1552
+ def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1696
1553
  """
1697
- Specifies the PyPI packages for all steps of the flow.
1554
+ Specifies the Conda environment for all steps of the flow.
1555
+
1556
+ Use `@conda_base` to set common libraries required by all
1557
+ steps and use `@conda` to specify step-specific additions.
1698
1558
 
1699
- Use `@pypi_base` to set common packages required by all
1700
- steps and use `@pypi` to specify step-specific overrides.
1701
1559
  Parameters
1702
1560
  ----------
1703
- packages : Dict[str, str], default: {}
1561
+ packages : Dict[str, str], default {}
1704
1562
  Packages to use for this flow. The key is the name of the package
1705
1563
  and the value is the version to use.
1706
- python : str, optional, default: None
1564
+ libraries : Dict[str, str], default {}
1565
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1566
+ python : str, optional, default None
1707
1567
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1708
1568
  that the version used will correspond to the version of the Python interpreter used to start the run.
1569
+ disabled : bool, default False
1570
+ If set to True, disables Conda.
1709
1571
  """
1710
1572
  ...
1711
1573
 
1712
1574
  @typing.overload
1713
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1575
+ def conda_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1714
1576
  ...
1715
1577
 
1716
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1578
+ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1717
1579
  """
1718
- Specifies the PyPI packages for all steps of the flow.
1580
+ Specifies the Conda environment for all steps of the flow.
1581
+
1582
+ Use `@conda_base` to set common libraries required by all
1583
+ steps and use `@conda` to specify step-specific additions.
1719
1584
 
1720
- Use `@pypi_base` to set common packages required by all
1721
- steps and use `@pypi` to specify step-specific overrides.
1722
1585
  Parameters
1723
1586
  ----------
1724
- packages : Dict[str, str], default: {}
1587
+ packages : Dict[str, str], default {}
1725
1588
  Packages to use for this flow. The key is the name of the package
1726
1589
  and the value is the version to use.
1727
- python : str, optional, default: None
1590
+ libraries : Dict[str, str], default {}
1591
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1592
+ python : str, optional, default None
1728
1593
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1729
1594
  that the version used will correspond to the version of the Python interpreter used to start the run.
1595
+ disabled : bool, default False
1596
+ If set to True, disables Conda.
1730
1597
  """
1731
1598
  ...
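A minimal sketch of `@conda_base` at the flow level; the package and Python versions are examples only:

```python
from metaflow import FlowSpec, step, conda_base

@conda_base(packages={"numpy": "1.26.4"}, python="3.10.12")  # Conda environment shared by all steps
class CondaBaseFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        import numpy as np  # provided by the flow-level Conda environment
        print(np.__version__)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CondaBaseFlow()
```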
1732
1599
 
@@ -1825,6 +1692,139 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1825
1692
  """
1826
1693
  ...
1827
1694
 
1695
+ @typing.overload
1696
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1697
+ """
1698
+ Specifies the times when the flow should be run when running on a
1699
+ production scheduler.
1700
+
1701
+ Parameters
1702
+ ----------
1703
+ hourly : bool, default False
1704
+ Run the workflow hourly.
1705
+ daily : bool, default True
1706
+ Run the workflow daily.
1707
+ weekly : bool, default False
1708
+ Run the workflow weekly.
1709
+ cron : str, optional, default None
1710
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1711
+ specified by this expression.
1712
+ timezone : str, optional, default None
1713
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1714
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1715
+ """
1716
+ ...
1717
+
1718
+ @typing.overload
1719
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1720
+ ...
1721
+
1722
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1723
+ """
1724
+ Specifies the times when the flow should be run when running on a
1725
+ production scheduler.
1726
+
1727
+ Parameters
1728
+ ----------
1729
+ hourly : bool, default False
1730
+ Run the workflow hourly.
1731
+ daily : bool, default True
1732
+ Run the workflow daily.
1733
+ weekly : bool, default False
1734
+ Run the workflow weekly.
1735
+ cron : str, optional, default None
1736
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1737
+ specified by this expression.
1738
+ timezone : str, optional, default None
1739
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1740
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1741
+ """
1742
+ ...
1743
+
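A minimal sketch of `@schedule`; the cron expression and timezone in the comment are examples only and take effect only on a production scheduler:

```python
from metaflow import FlowSpec, step, schedule

@schedule(daily=True)         # alternatively, e.g.: @schedule(cron="0 6 * * *", timezone="UTC")
class NightlyFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```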
1744
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1745
+ """
1746
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1747
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorator. Adding more than one decorator ensures that the `start` step starts only after all sensors finish.
1748
+
1749
+ Parameters
1750
+ ----------
1751
+ timeout : int
1752
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1753
+ poke_interval : int
1754
+ Time in seconds that the job should wait in between each try. (Default: 60)
1755
+ mode : str
1756
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1757
+ exponential_backoff : bool
1758
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1759
+ pool : str
1760
+ The slot pool this task should run in;
1761
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1762
+ soft_fail : bool
1763
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1764
+ name : str
1765
+ Name of the sensor on Airflow
1766
+ description : str
1767
+ Description of sensor in the Airflow UI
1768
+ external_dag_id : str
1769
+ The dag_id that contains the task you want to wait for.
1770
+ external_task_ids : List[str]
1771
+ The list of task_ids that you want to wait for.
1772
+ If None (the default value), the sensor waits for the DAG. (Default: None)
1773
+ allowed_states : List[str]
1774
+ Iterable of allowed states. (Default: ['success'])
1775
+ failed_states : List[str]
1776
+ Iterable of failed or dis-allowed states. (Default: None)
1777
+ execution_delta : datetime.timedelta
1778
+ Time difference with the previous execution to look at;
1779
+ the default is the same logical date as the current task or DAG. (Default: None)
1780
+ check_existence : bool
1781
+ Set to True to check if the external task exists or check if
1782
+ the DAG to wait for exists. (Default: True)
1783
+ """
1784
+ ...
1785
+
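A hedged sketch of `@airflow_external_task_sensor`; the DAG and task ids are hypothetical, and arguments not passed here fall back to the defaults listed in the docstring above:

```python
from metaflow import FlowSpec, step, airflow_external_task_sensor

@airflow_external_task_sensor(
    name="wait_for_upstream_dag",         # sensor name shown in Airflow
    external_dag_id="nightly_etl",        # hypothetical upstream DAG
    external_task_ids=["publish_table"],  # hypothetical task to wait for
)
class SensorGatedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        # reached only after the sensor succeeds, when compiled with `airflow create`
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SensorGatedFlow()
```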
1786
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1787
+ """
1788
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1789
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1790
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1791
+ added as a flow decorator. Adding more than one decorator ensures that the `start` step
1792
+ starts only after all sensors finish.
1793
+
1794
+ Parameters
1795
+ ----------
1796
+ timeout : int
1797
+ Time, in seconds, before the task times out and fails. (Default: 3600)
1798
+ poke_interval : int
1799
+ Time in seconds that the job should wait in between each try. (Default: 60)
1800
+ mode : str
1801
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1802
+ exponential_backoff : bool
1803
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
1804
+ pool : str
1805
+ The slot pool this task should run in;
1806
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
1807
+ soft_fail : bool
1808
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
1809
+ name : str
1810
+ Name of the sensor on Airflow
1811
+ description : str
1812
+ Description of sensor in the Airflow UI
1813
+ bucket_key : Union[str, List[str]]
1814
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
1815
+ When specified as a full s3:// URL, leave `bucket_name` as None.
1816
+ bucket_name : str
1817
+ Name of the S3 bucket. Only needed when `bucket_key` is not provided as a full s3:// URL.
1818
+ When specified, all the keys passed to `bucket_key` refer to this bucket. (Default: None)
1819
+ wildcard_match : bool
1820
+ Whether `bucket_key` should be interpreted as a Unix wildcard pattern. (Default: False)
1821
+ aws_conn_id : str
1822
+ A reference to the S3 connection on Airflow. (Default: None)
1823
+ verify : bool
1824
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
1825
+ """
1826
+ ...
1827
+
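A hedged sketch of `@airflow_s3_key_sensor`; the bucket and key are placeholders, and omitted arguments rely on the defaults described above:

```python
from metaflow import FlowSpec, step, airflow_s3_key_sensor

@airflow_s3_key_sensor(
    name="wait_for_input_file",                       # sensor name shown in Airflow
    bucket_key="s3://example-bucket/input/data.csv",  # full s3:// URL, so bucket_name stays None
)
class S3GatedFlow(FlowSpec):  # hypothetical flow name

    @step
    def start(self):
        # the start step runs only once the key exists, when scheduled via `airflow create`
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3GatedFlow()
```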
1828
1828
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1829
1829
  """
1830
1830
  Switch namespace to the one provided.