ob-metaflow-stubs 4.0__py2.py3-none-any.whl → 4.2__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (145) hide show
  1. metaflow-stubs/__init__.pyi +588 -588
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +4 -4
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +7 -3
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +3 -3
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +3 -3
  37. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +2 -2
  45. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  47. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  56. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  57. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  58. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  60. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  61. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  62. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  63. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  65. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  66. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_modules/basic.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/components.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  82. metaflow-stubs/plugins/cards/component_serializer.pyi +2 -2
  83. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  84. metaflow-stubs/plugins/catch_decorator.pyi +2 -2
  85. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  86. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  87. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  89. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  91. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  92. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  93. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  94. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  95. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  96. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  97. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  99. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  103. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  106. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  109. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  110. metaflow-stubs/plugins/package_cli.pyi +2 -2
  111. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  112. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  113. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  114. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  115. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  116. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/pypi_environment.pyi +4 -4
  118. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  119. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  121. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  122. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  123. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  125. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  126. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  127. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  128. metaflow-stubs/procpoll.pyi +2 -2
  129. metaflow-stubs/profilers/__init__.pyi +2 -2
  130. metaflow-stubs/pylint_wrapper.pyi +2 -2
  131. metaflow-stubs/runner/__init__.pyi +2 -2
  132. metaflow-stubs/runner/metaflow_runner.pyi +5 -5
  133. metaflow-stubs/runner/nbrun.pyi +2 -2
  134. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  135. metaflow-stubs/system/__init__.pyi +112 -0
  136. metaflow-stubs/system/system_logger.pyi +51 -0
  137. metaflow-stubs/system/system_monitor.pyi +73 -0
  138. metaflow-stubs/tagging_util.pyi +2 -2
  139. metaflow-stubs/tuple_util.pyi +2 -2
  140. {ob_metaflow_stubs-4.0.dist-info → ob_metaflow_stubs-4.2.dist-info}/METADATA +1 -1
  141. ob_metaflow_stubs-4.2.dist-info/RECORD +144 -0
  142. metaflow-stubs/plugins/perimeters.pyi +0 -24
  143. ob_metaflow_stubs-4.0.dist-info/RECORD +0 -142
  144. {ob_metaflow_stubs-4.0.dist-info → ob_metaflow_stubs-4.2.dist-info}/WHEEL +0 -0
  145. {ob_metaflow_stubs-4.0.dist-info → ob_metaflow_stubs-4.2.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.5.2+ob(v1) #
4
- # Generated on 2024-06-24T23:02:43.033000 #
3
+ # MF version: 2.12.7.1+nim(0.0.1);ob(v1) #
4
+ # Generated on 2024-07-08T23:52:24.906750 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import io
12
- import metaflow.client.core
13
- import metaflow.parameters
11
+ import metaflow._vendor.click.types
12
+ import metaflow.events
14
13
  import metaflow.metaflow_current
14
+ import metaflow.parameters
15
+ import metaflow.datastore.inputs
16
+ import datetime
15
17
  import metaflow.flowspec
18
+ import metaflow.client.core
19
+ import metaflow.plugins.datatools.s3.s3
16
20
  import metaflow.runner.metaflow_runner
17
21
  import typing
18
- import datetime
19
- import metaflow.plugins.datatools.s3.s3
20
- import metaflow.datastore.inputs
21
- import metaflow.events
22
- import metaflow._vendor.click.types
22
+ import io
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
@@ -728,208 +728,438 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
728
728
  ...
729
729
 
730
730
  @typing.overload
731
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
731
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
732
  """
733
- Creates a human-readable report, a Metaflow Card, after this step completes.
733
+ Specifies that the step will success under all circumstances.
734
734
 
735
- Note that you may add multiple `@card` decorators in a step with different parameters.
735
+ The decorator will create an optional artifact, specified by `var`, which
736
+ contains the exception raised. You can use it to detect the presence
737
+ of errors, indicating that all happy-path artifacts produced by the step
738
+ are missing.
736
739
 
737
740
  Parameters
738
741
  ----------
739
- type : str, default 'default'
740
- Card type.
741
- id : str, optional, default None
742
- If multiple cards are present, use this id to identify this card.
743
- options : Dict[str, Any], default {}
744
- Options passed to the card. The contents depend on the card type.
745
- timeout : int, default 45
746
- Interrupt reporting if it takes more than this many seconds.
747
-
748
-
742
+ var : str, optional, default None
743
+ Name of the artifact in which to store the caught exception.
744
+ If not specified, the exception is not stored.
745
+ print_exception : bool, default True
746
+ Determines whether or not the exception is printed to
747
+ stdout when caught.
749
748
  """
750
749
  ...
751
750
 
752
751
  @typing.overload
753
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
752
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
754
753
  ...
755
754
 
756
755
  @typing.overload
757
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
756
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
758
757
  ...
759
758
 
760
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
759
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
761
760
  """
762
- Creates a human-readable report, a Metaflow Card, after this step completes.
761
+ Specifies that the step will success under all circumstances.
763
762
 
764
- Note that you may add multiple `@card` decorators in a step with different parameters.
763
+ The decorator will create an optional artifact, specified by `var`, which
764
+ contains the exception raised. You can use it to detect the presence
765
+ of errors, indicating that all happy-path artifacts produced by the step
766
+ are missing.
765
767
 
766
768
  Parameters
767
769
  ----------
768
- type : str, default 'default'
769
- Card type.
770
- id : str, optional, default None
771
- If multiple cards are present, use this id to identify this card.
772
- options : Dict[str, Any], default {}
773
- Options passed to the card. The contents depend on the card type.
774
- timeout : int, default 45
775
- Interrupt reporting if it takes more than this many seconds.
776
-
777
-
770
+ var : str, optional, default None
771
+ Name of the artifact in which to store the caught exception.
772
+ If not specified, the exception is not stored.
773
+ print_exception : bool, default True
774
+ Determines whether or not the exception is printed to
775
+ stdout when caught.
778
776
  """
779
777
  ...
780
778
 
781
779
  @typing.overload
782
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
780
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
783
781
  """
784
- Specifies the PyPI packages for the step.
782
+ Specifies the Conda environment for the step.
785
783
 
786
784
  Information in this decorator will augment any
787
- attributes set in the `@pyi_base` flow-level decorator. Hence,
788
- you can use `@pypi_base` to set packages required by all
789
- steps and use `@pypi` to specify step-specific overrides.
785
+ attributes set in the `@conda_base` flow-level decorator. Hence,
786
+ you can use `@conda_base` to set packages required by all
787
+ steps and use `@conda` to specify step-specific overrides.
790
788
 
791
789
  Parameters
792
790
  ----------
793
- packages : Dict[str, str], default: {}
791
+ packages : Dict[str, str], default {}
794
792
  Packages to use for this step. The key is the name of the package
795
793
  and the value is the version to use.
796
- python : str, optional, default: None
794
+ libraries : Dict[str, str], default {}
795
+ Supported for backward compatibility. When used with packages, packages will take precedence.
796
+ python : str, optional, default None
797
797
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
798
798
  that the version used will correspond to the version of the Python interpreter used to start the run.
799
+ disabled : bool, default False
800
+ If set to True, disables @conda.
799
801
  """
800
802
  ...
801
803
 
802
804
  @typing.overload
803
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
805
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
804
806
  ...
805
807
 
806
808
  @typing.overload
807
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
809
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
808
810
  ...
809
811
 
810
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
812
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
811
813
  """
812
- Specifies the PyPI packages for the step.
814
+ Specifies the Conda environment for the step.
813
815
 
814
816
  Information in this decorator will augment any
815
- attributes set in the `@pyi_base` flow-level decorator. Hence,
816
- you can use `@pypi_base` to set packages required by all
817
- steps and use `@pypi` to specify step-specific overrides.
817
+ attributes set in the `@conda_base` flow-level decorator. Hence,
818
+ you can use `@conda_base` to set packages required by all
819
+ steps and use `@conda` to specify step-specific overrides.
818
820
 
819
821
  Parameters
820
822
  ----------
821
- packages : Dict[str, str], default: {}
823
+ packages : Dict[str, str], default {}
822
824
  Packages to use for this step. The key is the name of the package
823
825
  and the value is the version to use.
824
- python : str, optional, default: None
826
+ libraries : Dict[str, str], default {}
827
+ Supported for backward compatibility. When used with packages, packages will take precedence.
828
+ python : str, optional, default None
825
829
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
826
830
  that the version used will correspond to the version of the Python interpreter used to start the run.
831
+ disabled : bool, default False
832
+ If set to True, disables @conda.
827
833
  """
828
834
  ...
829
835
 
830
- @typing.overload
831
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
836
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
832
837
  """
833
- Specifies a timeout for your step.
838
+ Specifies that this step should execute on Kubernetes.
834
839
 
835
- This decorator is useful if this step may hang indefinitely.
840
+ Parameters
841
+ ----------
842
+ cpu : int, default 1
843
+ Number of CPUs required for this step. If `@resources` is
844
+ also present, the maximum value from all decorators is used.
845
+ memory : int, default 4096
846
+ Memory size (in MB) required for this step. If
847
+ `@resources` is also present, the maximum value from all decorators is
848
+ used.
849
+ disk : int, default 10240
850
+ Disk size (in MB) required for this step. If
851
+ `@resources` is also present, the maximum value from all decorators is
852
+ used.
853
+ image : str, optional, default None
854
+ Docker image to use when launching on Kubernetes. If not specified, and
855
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
856
+ not, a default Docker image mapping to the current version of Python is used.
857
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
858
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
859
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
860
+ Kubernetes service account to use when launching pod in Kubernetes.
861
+ secrets : List[str], optional, default None
862
+ Kubernetes secrets to use when launching pod in Kubernetes. These
863
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
864
+ in Metaflow configuration.
865
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
866
+ Kubernetes namespace to use when launching pod in Kubernetes.
867
+ gpu : int, optional, default None
868
+ Number of GPUs required for this step. A value of zero implies that
869
+ the scheduled node should not have GPUs.
870
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
871
+ The vendor of the GPUs to be used for this step.
872
+ tolerations : List[str], default []
873
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
874
+ Kubernetes tolerations to use when launching pod in Kubernetes.
875
+ use_tmpfs : bool, default False
876
+ This enables an explicit tmpfs mount for this step.
877
+ tmpfs_tempdir : bool, default True
878
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
879
+ tmpfs_size : int, optional, default: None
880
+ The value for the size (in MiB) of the tmpfs mount for this step.
881
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
882
+ memory allocated for this step.
883
+ tmpfs_path : str, optional, default /metaflow_temp
884
+ Path to tmpfs mount for this step.
885
+ persistent_volume_claims : Dict[str, str], optional, default None
886
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
887
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
888
+ shared_memory: int, optional
889
+ Shared memory size (in MiB) required for this step
890
+ port: int, optional
891
+ Port number to specify in the Kubernetes job object
892
+ """
893
+ ...
894
+
895
+ @typing.overload
896
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
897
+ """
898
+ Specifies the number of times the task corresponding
899
+ to a step needs to be retried.
836
900
 
837
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
838
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
839
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
901
+ This decorator is useful for handling transient errors, such as networking issues.
902
+ If your task contains operations that can't be retried safely, e.g. database updates,
903
+ it is advisable to annotate it with `@retry(times=0)`.
840
904
 
841
- Note that all the values specified in parameters are added together so if you specify
842
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
905
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
906
+ decorator will execute a no-op task after all retries have been exhausted,
907
+ ensuring that the flow execution can continue.
843
908
 
844
909
  Parameters
845
910
  ----------
846
- seconds : int, default 0
847
- Number of seconds to wait prior to timing out.
848
- minutes : int, default 0
849
- Number of minutes to wait prior to timing out.
850
- hours : int, default 0
851
- Number of hours to wait prior to timing out.
911
+ times : int, default 3
912
+ Number of times to retry this task.
913
+ minutes_between_retries : int, default 2
914
+ Number of minutes between retries.
852
915
  """
853
916
  ...
854
917
 
855
918
  @typing.overload
856
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
919
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
857
920
  ...
858
921
 
859
922
  @typing.overload
860
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
923
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
861
924
  ...
862
925
 
863
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
926
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
864
927
  """
865
- Specifies a timeout for your step.
866
-
867
- This decorator is useful if this step may hang indefinitely.
928
+ Specifies the number of times the task corresponding
929
+ to a step needs to be retried.
868
930
 
869
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
870
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
871
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
931
+ This decorator is useful for handling transient errors, such as networking issues.
932
+ If your task contains operations that can't be retried safely, e.g. database updates,
933
+ it is advisable to annotate it with `@retry(times=0)`.
872
934
 
873
- Note that all the values specified in parameters are added together so if you specify
874
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
935
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
936
+ decorator will execute a no-op task after all retries have been exhausted,
937
+ ensuring that the flow execution can continue.
875
938
 
876
939
  Parameters
877
940
  ----------
878
- seconds : int, default 0
879
- Number of seconds to wait prior to timing out.
880
- minutes : int, default 0
881
- Number of minutes to wait prior to timing out.
882
- hours : int, default 0
883
- Number of hours to wait prior to timing out.
941
+ times : int, default 3
942
+ Number of times to retry this task.
943
+ minutes_between_retries : int, default 2
944
+ Number of minutes between retries.
884
945
  """
885
946
  ...
886
947
 
887
948
  @typing.overload
888
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
949
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
889
950
  """
890
- Specifies that the step will success under all circumstances.
951
+ Creates a human-readable report, a Metaflow Card, after this step completes.
891
952
 
892
- The decorator will create an optional artifact, specified by `var`, which
893
- contains the exception raised. You can use it to detect the presence
894
- of errors, indicating that all happy-path artifacts produced by the step
895
- are missing.
953
+ Note that you may add multiple `@card` decorators in a step with different parameters.
896
954
 
897
955
  Parameters
898
956
  ----------
899
- var : str, optional, default None
900
- Name of the artifact in which to store the caught exception.
901
- If not specified, the exception is not stored.
902
- print_exception : bool, default True
903
- Determines whether or not the exception is printed to
904
- stdout when caught.
957
+ type : str, default 'default'
958
+ Card type.
959
+ id : str, optional, default None
960
+ If multiple cards are present, use this id to identify this card.
961
+ options : Dict[str, Any], default {}
962
+ Options passed to the card. The contents depend on the card type.
963
+ timeout : int, default 45
964
+ Interrupt reporting if it takes more than this many seconds.
965
+
966
+
905
967
  """
906
968
  ...
907
969
 
908
970
  @typing.overload
909
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
971
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
910
972
  ...
911
973
 
912
974
  @typing.overload
913
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
975
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
914
976
  ...
915
977
 
916
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
978
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
917
979
  """
918
- Specifies that the step will success under all circumstances.
980
+ Creates a human-readable report, a Metaflow Card, after this step completes.
919
981
 
920
- The decorator will create an optional artifact, specified by `var`, which
921
- contains the exception raised. You can use it to detect the presence
922
- of errors, indicating that all happy-path artifacts produced by the step
923
- are missing.
982
+ Note that you may add multiple `@card` decorators in a step with different parameters.
924
983
 
925
984
  Parameters
926
985
  ----------
927
- var : str, optional, default None
928
- Name of the artifact in which to store the caught exception.
929
- If not specified, the exception is not stored.
930
- print_exception : bool, default True
931
- Determines whether or not the exception is printed to
932
- stdout when caught.
986
+ type : str, default 'default'
987
+ Card type.
988
+ id : str, optional, default None
989
+ If multiple cards are present, use this id to identify this card.
990
+ options : Dict[str, Any], default {}
991
+ Options passed to the card. The contents depend on the card type.
992
+ timeout : int, default 45
993
+ Interrupt reporting if it takes more than this many seconds.
994
+
995
+
996
+ """
997
+ ...
998
+
999
+ @typing.overload
1000
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1001
+ """
1002
+ Specifies secrets to be retrieved and injected as environment variables prior to
1003
+ the execution of a step.
1004
+
1005
+ Parameters
1006
+ ----------
1007
+ sources : List[Union[str, Dict[str, Any]]], default: []
1008
+ List of secret specs, defining how the secrets are to be retrieved
1009
+ """
1010
+ ...
1011
+
1012
+ @typing.overload
1013
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1014
+ ...
1015
+
1016
+ @typing.overload
1017
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1018
+ ...
1019
+
1020
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1021
+ """
1022
+ Specifies secrets to be retrieved and injected as environment variables prior to
1023
+ the execution of a step.
1024
+
1025
+ Parameters
1026
+ ----------
1027
+ sources : List[Union[str, Dict[str, Any]]], default: []
1028
+ List of secret specs, defining how the secrets are to be retrieved
1029
+ """
1030
+ ...
1031
+
1032
+ @typing.overload
1033
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1034
+ """
1035
+ Specifies the resources needed when executing this step.
1036
+
1037
+ Use `@resources` to specify the resource requirements
1038
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1039
+
1040
+ You can choose the compute layer on the command line by executing e.g.
1041
+ ```
1042
+ python myflow.py run --with batch
1043
+ ```
1044
+ or
1045
+ ```
1046
+ python myflow.py run --with kubernetes
1047
+ ```
1048
+ which executes the flow on the desired system using the
1049
+ requirements specified in `@resources`.
1050
+
1051
+ Parameters
1052
+ ----------
1053
+ cpu : int, default 1
1054
+ Number of CPUs required for this step.
1055
+ gpu : int, default 0
1056
+ Number of GPUs required for this step.
1057
+ disk : int, optional, default None
1058
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1059
+ memory : int, default 4096
1060
+ Memory size (in MB) required for this step.
1061
+ shared_memory : int, optional, default None
1062
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1063
+ This parameter maps to the `--shm-size` option in Docker.
1064
+ """
1065
+ ...
1066
+
1067
+ @typing.overload
1068
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1069
+ ...
1070
+
1071
+ @typing.overload
1072
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1073
+ ...
1074
+
1075
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1076
+ """
1077
+ Specifies the resources needed when executing this step.
1078
+
1079
+ Use `@resources` to specify the resource requirements
1080
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1081
+
1082
+ You can choose the compute layer on the command line by executing e.g.
1083
+ ```
1084
+ python myflow.py run --with batch
1085
+ ```
1086
+ or
1087
+ ```
1088
+ python myflow.py run --with kubernetes
1089
+ ```
1090
+ which executes the flow on the desired system using the
1091
+ requirements specified in `@resources`.
1092
+
1093
+ Parameters
1094
+ ----------
1095
+ cpu : int, default 1
1096
+ Number of CPUs required for this step.
1097
+ gpu : int, default 0
1098
+ Number of GPUs required for this step.
1099
+ disk : int, optional, default None
1100
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1101
+ memory : int, default 4096
1102
+ Memory size (in MB) required for this step.
1103
+ shared_memory : int, optional, default None
1104
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1105
+ This parameter maps to the `--shm-size` option in Docker.
1106
+ """
1107
+ ...
1108
+
1109
+ @typing.overload
1110
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1111
+ """
1112
+ Specifies a timeout for your step.
1113
+
1114
+ This decorator is useful if this step may hang indefinitely.
1115
+
1116
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1117
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1118
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1119
+
1120
+ Note that all the values specified in parameters are added together so if you specify
1121
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1122
+
1123
+ Parameters
1124
+ ----------
1125
+ seconds : int, default 0
1126
+ Number of seconds to wait prior to timing out.
1127
+ minutes : int, default 0
1128
+ Number of minutes to wait prior to timing out.
1129
+ hours : int, default 0
1130
+ Number of hours to wait prior to timing out.
1131
+ """
1132
+ ...
1133
+
1134
+ @typing.overload
1135
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1136
+ ...
1137
+
1138
+ @typing.overload
1139
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1140
+ ...
1141
+
1142
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1143
+ """
1144
+ Specifies a timeout for your step.
1145
+
1146
+ This decorator is useful if this step may hang indefinitely.
1147
+
1148
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1149
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1150
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1151
+
1152
+ Note that all the values specified in parameters are added together so if you specify
1153
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1154
+
1155
+ Parameters
1156
+ ----------
1157
+ seconds : int, default 0
1158
+ Number of seconds to wait prior to timing out.
1159
+ minutes : int, default 0
1160
+ Number of minutes to wait prior to timing out.
1161
+ hours : int, default 0
1162
+ Number of hours to wait prior to timing out.
933
1163
  """
934
1164
  ...
935
1165
 
@@ -1112,281 +1342,154 @@ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], Non
1112
1342
  ...
1113
1343
 
1114
1344
  @typing.overload
1115
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1345
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1116
1346
  """
1117
- Specifies the number of times the task corresponding
1118
- to a step needs to be retried.
1119
-
1120
- This decorator is useful for handling transient errors, such as networking issues.
1121
- If your task contains operations that can't be retried safely, e.g. database updates,
1122
- it is advisable to annotate it with `@retry(times=0)`.
1347
+ Specifies the PyPI packages for the step.
1123
1348
 
1124
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1125
- decorator will execute a no-op task after all retries have been exhausted,
1126
- ensuring that the flow execution can continue.
1349
+ Information in this decorator will augment any
1350
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1351
+ you can use `@pypi_base` to set packages required by all
1352
+ steps and use `@pypi` to specify step-specific overrides.
1127
1353
 
1128
1354
  Parameters
1129
1355
  ----------
1130
- times : int, default 3
1131
- Number of times to retry this task.
1132
- minutes_between_retries : int, default 2
1133
- Number of minutes between retries.
1356
+ packages : Dict[str, str], default: {}
1357
+ Packages to use for this step. The key is the name of the package
1358
+ and the value is the version to use.
1359
+ python : str, optional, default: None
1360
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1361
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1134
1362
  """
1135
1363
  ...
1136
1364
 
1137
1365
  @typing.overload
1138
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1366
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1139
1367
  ...
1140
1368
 
1141
1369
  @typing.overload
1142
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1370
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1143
1371
  ...
1144
1372
 
1145
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1373
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1146
1374
  """
1147
- Specifies the number of times the task corresponding
1148
- to a step needs to be retried.
1149
-
1150
- This decorator is useful for handling transient errors, such as networking issues.
1151
- If your task contains operations that can't be retried safely, e.g. database updates,
1152
- it is advisable to annotate it with `@retry(times=0)`.
1375
+ Specifies the PyPI packages for the step.
1153
1376
 
1154
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1155
- decorator will execute a no-op task after all retries have been exhausted,
1156
- ensuring that the flow execution can continue.
1377
+ Information in this decorator will augment any
1378
+ attributes set in the `@pypi_base` flow-level decorator. Hence,
1379
+ you can use `@pypi_base` to set packages required by all
1380
+ steps and use `@pypi` to specify step-specific overrides.
1157
1381
 
1158
1382
  Parameters
1159
1383
  ----------
1160
- times : int, default 3
1161
- Number of times to retry this task.
1162
- minutes_between_retries : int, default 2
1163
- Number of minutes between retries.
1384
+ packages : Dict[str, str], default: {}
1385
+ Packages to use for this step. The key is the name of the package
1386
+ and the value is the version to use.
1387
+ python : str, optional, default: None
1388
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1389
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1164
1390
  """
1165
1391
  ...
1166
1392
 
1167
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1393
+ @typing.overload
1394
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1168
1395
  """
1169
- Specifies that this step should execute on Kubernetes.
1396
+ Specifies the flow(s) that this flow depends on.
1170
1397
 
1171
- Parameters
1172
- ----------
1173
- cpu : int, default 1
1174
- Number of CPUs required for this step. If `@resources` is
1175
- also present, the maximum value from all decorators is used.
1176
- memory : int, default 4096
1177
- Memory size (in MB) required for this step. If
1178
- `@resources` is also present, the maximum value from all decorators is
1179
- used.
1180
- disk : int, default 10240
1181
- Disk size (in MB) required for this step. If
1182
- `@resources` is also present, the maximum value from all decorators is
1183
- used.
1184
- image : str, optional, default None
1185
- Docker image to use when launching on Kubernetes. If not specified, and
1186
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1187
- not, a default Docker image mapping to the current version of Python is used.
1188
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1189
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1190
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1191
- Kubernetes service account to use when launching pod in Kubernetes.
1192
- secrets : List[str], optional, default None
1193
- Kubernetes secrets to use when launching pod in Kubernetes. These
1194
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1195
- in Metaflow configuration.
1196
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1197
- Kubernetes namespace to use when launching pod in Kubernetes.
1198
- gpu : int, optional, default None
1199
- Number of GPUs required for this step. A value of zero implies that
1200
- the scheduled node should not have GPUs.
1201
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1202
- The vendor of the GPUs to be used for this step.
1203
- tolerations : List[str], default []
1204
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1205
- Kubernetes tolerations to use when launching pod in Kubernetes.
1206
- use_tmpfs : bool, default False
1207
- This enables an explicit tmpfs mount for this step.
1208
- tmpfs_tempdir : bool, default True
1209
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1210
- tmpfs_size : int, optional, default: None
1211
- The value for the size (in MiB) of the tmpfs mount for this step.
1212
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1213
- memory allocated for this step.
1214
- tmpfs_path : str, optional, default /metaflow_temp
1215
- Path to tmpfs mount for this step.
1216
- persistent_volume_claims : Dict[str, str], optional, default None
1217
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1218
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1219
- shared_memory: int, optional
1220
- Shared memory size (in MiB) required for this step
1221
- port: int, optional
1222
- Port number to specify in the Kubernetes job object
1223
- """
1224
- ...
1225
-
1226
- @typing.overload
1227
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1228
- """
1229
- Specifies the Conda environment for the step.
1398
+ ```
1399
+ @trigger_on_finish(flow='FooFlow')
1400
+ ```
1401
+ or
1402
+ ```
1403
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1404
+ ```
1405
+ This decorator respects the @project decorator and triggers the flow
1406
+ when upstream runs within the same namespace complete successfully
1230
1407
 
1231
- Information in this decorator will augment any
1232
- attributes set in the `@conda_base` flow-level decorator. Hence,
1233
- you can use `@conda_base` to set packages required by all
1234
- steps and use `@conda` to specify step-specific overrides.
1408
+ Additionally, you can specify project aware upstream flow dependencies
1409
+ by specifying the fully qualified project_flow_name.
1410
+ ```
1411
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1412
+ ```
1413
+ or
1414
+ ```
1415
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1416
+ ```
1235
1417
 
1236
- Parameters
1237
- ----------
1238
- packages : Dict[str, str], default {}
1239
- Packages to use for this step. The key is the name of the package
1240
- and the value is the version to use.
1241
- libraries : Dict[str, str], default {}
1242
- Supported for backward compatibility. When used with packages, packages will take precedence.
1243
- python : str, optional, default None
1244
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1245
- that the version used will correspond to the version of the Python interpreter used to start the run.
1246
- disabled : bool, default False
1247
- If set to True, disables @conda.
1248
- """
1249
- ...
1250
-
1251
- @typing.overload
1252
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1253
- ...
1254
-
1255
- @typing.overload
1256
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1257
- ...
1258
-
1259
- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1260
- """
1261
- Specifies the Conda environment for the step.
1418
+ You can also specify just the project or project branch (other values will be
1419
+ inferred from the current project or project branch):
1420
+ ```
1421
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1422
+ ```
1262
1423
 
1263
- Information in this decorator will augment any
1264
- attributes set in the `@conda_base` flow-level decorator. Hence,
1265
- you can use `@conda_base` to set packages required by all
1266
- steps and use `@conda` to specify step-specific overrides.
1424
+ Note that `branch` is typically one of:
1425
+ - `prod`
1426
+ - `user.bob`
1427
+ - `test.my_experiment`
1428
+ - `prod.staging`
1267
1429
 
1268
1430
  Parameters
1269
1431
  ----------
1270
- packages : Dict[str, str], default {}
1271
- Packages to use for this step. The key is the name of the package
1272
- and the value is the version to use.
1273
- libraries : Dict[str, str], default {}
1274
- Supported for backward compatibility. When used with packages, packages will take precedence.
1275
- python : str, optional, default None
1276
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1277
- that the version used will correspond to the version of the Python interpreter used to start the run.
1278
- disabled : bool, default False
1279
- If set to True, disables @conda.
1280
- """
1281
- ...
1282
-
1283
- @typing.overload
1284
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1285
- """
1286
- Specifies secrets to be retrieved and injected as environment variables prior to
1287
- the execution of a step.
1432
+ flow : Union[str, Dict[str, str]], optional, default None
1433
+ Upstream flow dependency for this flow.
1434
+ flows : List[Union[str, Dict[str, str]]], default []
1435
+ Upstream flow dependencies for this flow.
1436
+ options : Dict[str, Any], default {}
1437
+ Backend-specific configuration for tuning eventing behavior.
1438
+
1288
1439
 
1289
- Parameters
1290
- ----------
1291
- sources : List[Union[str, Dict[str, Any]]], default: []
1292
- List of secret specs, defining how the secrets are to be retrieved
1293
1440
  """
1294
1441
  ...
1295
1442
 
1296
1443
  @typing.overload
1297
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1298
- ...
1299
-
1300
- @typing.overload
1301
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1302
- ...
1303
-
1304
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1305
- """
1306
- Specifies secrets to be retrieved and injected as environment variables prior to
1307
- the execution of a step.
1308
-
1309
- Parameters
1310
- ----------
1311
- sources : List[Union[str, Dict[str, Any]]], default: []
1312
- List of secret specs, defining how the secrets are to be retrieved
1313
- """
1444
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1314
1445
  ...
1315
1446
 
1316
- @typing.overload
1317
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1447
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1318
1448
  """
1319
- Specifies the resources needed when executing this step.
1320
-
1321
- Use `@resources` to specify the resource requirements
1322
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1449
+ Specifies the flow(s) that this flow depends on.
1323
1450
 
1324
- You can choose the compute layer on the command line by executing e.g.
1325
1451
  ```
1326
- python myflow.py run --with batch
1452
+ @trigger_on_finish(flow='FooFlow')
1327
1453
  ```
1328
1454
  or
1329
1455
  ```
1330
- python myflow.py run --with kubernetes
1456
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1331
1457
  ```
1332
- which executes the flow on the desired system using the
1333
- requirements specified in `@resources`.
1334
-
1335
- Parameters
1336
- ----------
1337
- cpu : int, default 1
1338
- Number of CPUs required for this step.
1339
- gpu : int, default 0
1340
- Number of GPUs required for this step.
1341
- disk : int, optional, default None
1342
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1343
- memory : int, default 4096
1344
- Memory size (in MB) required for this step.
1345
- shared_memory : int, optional, default None
1346
- The value for the size (in MiB) of the /dev/shm volume for this step.
1347
- This parameter maps to the `--shm-size` option in Docker.
1348
- """
1349
- ...
1350
-
1351
- @typing.overload
1352
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1353
- ...
1354
-
1355
- @typing.overload
1356
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1357
- ...
1358
-
1359
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1360
- """
1361
- Specifies the resources needed when executing this step.
1362
-
1363
- Use `@resources` to specify the resource requirements
1364
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1458
+ This decorator respects the @project decorator and triggers the flow
1459
+ when upstream runs within the same namespace complete successfully
1365
1460
 
1366
- You can choose the compute layer on the command line by executing e.g.
1461
+ Additionally, you can specify project aware upstream flow dependencies
1462
+ by specifying the fully qualified project_flow_name.
1367
1463
  ```
1368
- python myflow.py run --with batch
1464
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1369
1465
  ```
1370
1466
  or
1371
1467
  ```
1372
- python myflow.py run --with kubernetes
1468
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1373
1469
  ```
1374
- which executes the flow on the desired system using the
1375
- requirements specified in `@resources`.
1470
+
1471
+ You can also specify just the project or project branch (other values will be
1472
+ inferred from the current project or project branch):
1473
+ ```
1474
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1475
+ ```
1476
+
1477
+ Note that `branch` is typically one of:
1478
+ - `prod`
1479
+ - `user.bob`
1480
+ - `test.my_experiment`
1481
+ - `prod.staging`
1376
1482
 
1377
1483
  Parameters
1378
1484
  ----------
1379
- cpu : int, default 1
1380
- Number of CPUs required for this step.
1381
- gpu : int, default 0
1382
- Number of GPUs required for this step.
1383
- disk : int, optional, default None
1384
- Disk size (in MB) required for this step. Only applies on Kubernetes.
1385
- memory : int, default 4096
1386
- Memory size (in MB) required for this step.
1387
- shared_memory : int, optional, default None
1388
- The value for the size (in MiB) of the /dev/shm volume for this step.
1389
- This parameter maps to the `--shm-size` option in Docker.
1485
+ flow : Union[str, Dict[str, str]], optional, default None
1486
+ Upstream flow dependency for this flow.
1487
+ flows : List[Union[str, Dict[str, str]]], default []
1488
+ Upstream flow dependencies for this flow.
1489
+ options : Dict[str, Any], default {}
1490
+ Backend-specific configuration for tuning eventing behavior.
1491
+
1492
+
1390
1493
  """
1391
1494
  ...
1392
1495
 
@@ -1429,6 +1532,157 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
1429
1532
  """
1430
1533
  ...
1431
1534
 
1535
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1536
+ """
1537
+ Specifies what flows belong to the same project.
1538
+
1539
+ A project-specific namespace is created for all flows that
1540
+ use the same `@project(name)`.
1541
+
1542
+ Parameters
1543
+ ----------
1544
+ name : str
1545
+ Project name. Make sure that the name is unique amongst all
1546
+ projects that use the same production scheduler. The name may
1547
+ contain only lowercase alphanumeric characters and underscores.
1548
+
1549
+
1550
+ """
1551
+ ...
1552
+
1553
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1554
+ """
1555
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1556
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1557
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1558
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1559
+ starts only after all sensors finish.
1560
+
1561
+ Parameters
1562
+ ----------
1563
+ timeout : int
1564
+ Time, in seconds before the task times out and fails. (Default: 3600)
1565
+ poke_interval : int
1566
+ Time in seconds that the job should wait in between each try. (Default: 60)
1567
+ mode : str
1568
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1569
+ exponential_backoff : bool
1570
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1571
+ pool : str
1572
+ the slot pool this task should run in,
1573
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1574
+ soft_fail : bool
1575
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1576
+ name : str
1577
+ Name of the sensor on Airflow
1578
+ description : str
1579
+ Description of sensor in the Airflow UI
1580
+ bucket_key : Union[str, List[str]]
1581
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1582
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1583
+ bucket_name : str
1584
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1585
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1586
+ wildcard_match : bool
1587
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1588
+ aws_conn_id : str
1589
+ a reference to the s3 connection on Airflow. (Default: None)
1590
+ verify : bool
1591
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1592
+ """
1593
+ ...
1594
+
1595
+ @typing.overload
1596
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1597
+ """
1598
+ Specifies the times when the flow should be run when running on a
1599
+ production scheduler.
1600
+
1601
+ Parameters
1602
+ ----------
1603
+ hourly : bool, default False
1604
+ Run the workflow hourly.
1605
+ daily : bool, default True
1606
+ Run the workflow daily.
1607
+ weekly : bool, default False
1608
+ Run the workflow weekly.
1609
+ cron : str, optional, default None
1610
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1611
+ specified by this expression.
1612
+ timezone : str, optional, default None
1613
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1614
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1615
+ """
1616
+ ...
1617
+
1618
+ @typing.overload
1619
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1620
+ ...
1621
+
1622
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1623
+ """
1624
+ Specifies the times when the flow should be run when running on a
1625
+ production scheduler.
1626
+
1627
+ Parameters
1628
+ ----------
1629
+ hourly : bool, default False
1630
+ Run the workflow hourly.
1631
+ daily : bool, default True
1632
+ Run the workflow daily.
1633
+ weekly : bool, default False
1634
+ Run the workflow weekly.
1635
+ cron : str, optional, default None
1636
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1637
+ specified by this expression.
1638
+ timezone : str, optional, default None
1639
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1640
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1641
+ """
1642
+ ...
1643
+
1644
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1645
+ """
1646
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1647
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1648
+
1649
+ Parameters
1650
+ ----------
1651
+ timeout : int
1652
+ Time, in seconds before the task times out and fails. (Default: 3600)
1653
+ poke_interval : int
1654
+ Time in seconds that the job should wait in between each try. (Default: 60)
1655
+ mode : str
1656
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1657
+ exponential_backoff : bool
1658
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1659
+ pool : str
1660
+ the slot pool this task should run in,
1661
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1662
+ soft_fail : bool
1663
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1664
+ name : str
1665
+ Name of the sensor on Airflow
1666
+ description : str
1667
+ Description of sensor in the Airflow UI
1668
+ external_dag_id : str
1669
+ The dag_id that contains the task you want to wait for.
1670
+ external_task_ids : List[str]
1671
+ The list of task_ids that you want to wait for.
1672
+ If None (default value) the sensor waits for the DAG. (Default: None)
1673
+ allowed_states : List[str]
1674
+ Iterable of allowed states, (Default: ['success'])
1675
+ failed_states : List[str]
1676
+ Iterable of failed or dis-allowed states. (Default: None)
1677
+ execution_delta : datetime.timedelta
1678
+ time difference with the previous execution to look at,
1679
+ the default is the same logical date as the current task or DAG. (Default: None)
1680
+ check_existence: bool
1681
+ Set to True to check if the external task exists or check if
1682
+ the DAG to wait for exists. (Default: True)
1683
+ """
1684
+ ...
1685
+
1432
1686
  @typing.overload
1433
1687
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1434
1688
  """
@@ -1478,90 +1732,6 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1478
1732
  """
1479
1733
  ...
1480
1734
 
1481
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1482
- """
1483
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1484
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1485
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1486
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
1487
- starts only after all sensors finish.
1488
-
1489
- Parameters
1490
- ----------
1491
- timeout : int
1492
- Time, in seconds before the task times out and fails. (Default: 3600)
1493
- poke_interval : int
1494
- Time in seconds that the job should wait in between each try. (Default: 60)
1495
- mode : str
1496
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1497
- exponential_backoff : bool
1498
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1499
- pool : str
1500
- the slot pool this task should run in,
1501
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1502
- soft_fail : bool
1503
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1504
- name : str
1505
- Name of the sensor on Airflow
1506
- description : str
1507
- Description of sensor in the Airflow UI
1508
- bucket_key : Union[str, List[str]]
1509
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1510
- When it's specified as a full s3:// url, please leave `bucket_name` as None
1511
- bucket_name : str
1512
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1513
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1514
- wildcard_match : bool
1515
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1516
- aws_conn_id : str
1517
- a reference to the s3 connection on Airflow. (Default: None)
1518
- verify : bool
1519
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
1520
- """
1521
- ...
1522
-
1523
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1524
- """
1525
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1526
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1527
-
1528
- Parameters
1529
- ----------
1530
- timeout : int
1531
- Time, in seconds before the task times out and fails. (Default: 3600)
1532
- poke_interval : int
1533
- Time in seconds that the job should wait in between each try. (Default: 60)
1534
- mode : str
1535
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1536
- exponential_backoff : bool
1537
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1538
- pool : str
1539
- the slot pool this task should run in,
1540
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1541
- soft_fail : bool
1542
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1543
- name : str
1544
- Name of the sensor on Airflow
1545
- description : str
1546
- Description of sensor in the Airflow UI
1547
- external_dag_id : str
1548
- The dag_id that contains the task you want to wait for.
1549
- external_task_ids : List[str]
1550
- The list of task_ids that you want to wait for.
1551
- If None (default value) the sensor waits for the DAG. (Default: None)
1552
- allowed_states : List[str]
1553
- Iterable of allowed states, (Default: ['success'])
1554
- failed_states : List[str]
1555
- Iterable of failed or dis-allowed states. (Default: None)
1556
- execution_delta : datetime.timedelta
1557
- time difference with the previous execution to look at,
1558
- the default is the same logical date as the current task or DAG. (Default: None)
1559
- check_existence: bool
1560
- Set to True to check if the external task exists or check if
1561
- the DAG to wait for exists. (Default: True)
1562
- """
1563
- ...
1564
-
1565
1735
  @typing.overload
1566
1736
  def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1567
1737
  """
@@ -1657,176 +1827,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
1657
1827
  """
1658
1828
  ...
1659
1829
 
1660
- @typing.overload
1661
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1662
- """
1663
- Specifies the flow(s) that this flow depends on.
1664
-
1665
- ```
1666
- @trigger_on_finish(flow='FooFlow')
1667
- ```
1668
- or
1669
- ```
1670
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1671
- ```
1672
- This decorator respects the @project decorator and triggers the flow
1673
- when upstream runs within the same namespace complete successfully
1674
-
1675
- Additionally, you can specify project aware upstream flow dependencies
1676
- by specifying the fully qualified project_flow_name.
1677
- ```
1678
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1679
- ```
1680
- or
1681
- ```
1682
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1683
- ```
1684
-
1685
- You can also specify just the project or project branch (other values will be
1686
- inferred from the current project or project branch):
1687
- ```
1688
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1689
- ```
1690
-
1691
- Note that `branch` is typically one of:
1692
- - `prod`
1693
- - `user.bob`
1694
- - `test.my_experiment`
1695
- - `prod.staging`
1696
-
1697
- Parameters
1698
- ----------
1699
- flow : Union[str, Dict[str, str]], optional, default None
1700
- Upstream flow dependency for this flow.
1701
- flows : List[Union[str, Dict[str, str]]], default []
1702
- Upstream flow dependencies for this flow.
1703
- options : Dict[str, Any], default {}
1704
- Backend-specific configuration for tuning eventing behavior.
1705
-
1706
-
1707
- """
1708
- ...
1709
-
1710
- @typing.overload
1711
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1712
- ...
1713
-
1714
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
1715
- """
1716
- Specifies the flow(s) that this flow depends on.
1717
-
1718
- ```
1719
- @trigger_on_finish(flow='FooFlow')
1720
- ```
1721
- or
1722
- ```
1723
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
1724
- ```
1725
- This decorator respects the @project decorator and triggers the flow
1726
- when upstream runs within the same namespace complete successfully
1727
-
1728
- Additionally, you can specify project aware upstream flow dependencies
1729
- by specifying the fully qualified project_flow_name.
1730
- ```
1731
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
1732
- ```
1733
- or
1734
- ```
1735
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
1736
- ```
1737
-
1738
- You can also specify just the project or project branch (other values will be
1739
- inferred from the current project or project branch):
1740
- ```
1741
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
1742
- ```
1743
-
1744
- Note that `branch` is typically one of:
1745
- - `prod`
1746
- - `user.bob`
1747
- - `test.my_experiment`
1748
- - `prod.staging`
1749
-
1750
- Parameters
1751
- ----------
1752
- flow : Union[str, Dict[str, str]], optional, default None
1753
- Upstream flow dependency for this flow.
1754
- flows : List[Union[str, Dict[str, str]]], default []
1755
- Upstream flow dependencies for this flow.
1756
- options : Dict[str, Any], default {}
1757
- Backend-specific configuration for tuning eventing behavior.
1758
-
1759
-
1760
- """
1761
- ...
1762
-
1763
- @typing.overload
1764
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1765
- """
1766
- Specifies the times when the flow should be run when running on a
1767
- production scheduler.
1768
-
1769
- Parameters
1770
- ----------
1771
- hourly : bool, default False
1772
- Run the workflow hourly.
1773
- daily : bool, default True
1774
- Run the workflow daily.
1775
- weekly : bool, default False
1776
- Run the workflow weekly.
1777
- cron : str, optional, default None
1778
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1779
- specified by this expression.
1780
- timezone : str, optional, default None
1781
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1782
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1783
- """
1784
- ...
1785
-
1786
- @typing.overload
1787
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1788
- ...
1789
-
1790
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1791
- """
1792
- Specifies the times when the flow should be run when running on a
1793
- production scheduler.
1794
-
1795
- Parameters
1796
- ----------
1797
- hourly : bool, default False
1798
- Run the workflow hourly.
1799
- daily : bool, default True
1800
- Run the workflow daily.
1801
- weekly : bool, default False
1802
- Run the workflow weekly.
1803
- cron : str, optional, default None
1804
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1805
- specified by this expression.
1806
- timezone : str, optional, default None
1807
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1808
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1809
- """
1810
- ...
1811
-
1812
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1813
- """
1814
- Specifies what flows belong to the same project.
1815
-
1816
- A project-specific namespace is created for all flows that
1817
- use the same `@project(name)`.
1818
-
1819
- Parameters
1820
- ----------
1821
- name : str
1822
- Project name. Make sure that the name is unique amongst all
1823
- projects that use the same production scheduler. The name may
1824
- contain only lowercase alphanumeric characters and underscores.
1825
-
1826
-
1827
- """
1828
- ...
1829
-
1830
1830
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
1831
1831
  """
1832
1832
  Switch namespace to the one provided.