ob-metaflow-stubs 4.9__py2.py3-none-any.whl → 5.1__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +471 -618
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +5 -5
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +4 -2
  16. metaflow-stubs/metaflow_current.pyi +35 -35
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +3 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +46 -12
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +13 -13
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +6 -6
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +5 -5
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +3 -3
  43. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  50. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +3 -3
  54. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  55. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  56. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  57. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  59. metaflow-stubs/plugins/cards/card_client.pyi +2 -2
  60. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  61. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  63. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  71. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  72. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  73. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  74. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  75. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  76. metaflow-stubs/plugins/datatools/s3/__init__.pyi +2 -2
  77. metaflow-stubs/plugins/datatools/s3/s3.pyi +7 -7
  78. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  80. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  81. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  82. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  83. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  84. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/frameworks/pytorch.pyi +3 -3
  86. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +3 -3
  88. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  92. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  94. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +4 -4
  95. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  97. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -4
  98. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  99. metaflow-stubs/plugins/package_cli.pyi +2 -2
  100. metaflow-stubs/plugins/parallel_decorator.pyi +3 -3
  101. metaflow-stubs/plugins/perimeters.pyi +2 -2
  102. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  103. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  105. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  106. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  108. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  109. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  115. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  116. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +2 -2
  117. metaflow-stubs/plugins/timeout_decorator.pyi +2 -2
  118. metaflow-stubs/procpoll.pyi +2 -2
  119. metaflow-stubs/profilers/__init__.pyi +2 -2
  120. metaflow-stubs/pylint_wrapper.pyi +2 -2
  121. metaflow-stubs/runner/__init__.pyi +2 -2
  122. metaflow-stubs/runner/deployer.pyi +3 -3
  123. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  124. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  125. metaflow-stubs/runner/nbrun.pyi +2 -2
  126. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  127. metaflow-stubs/runner/utils.pyi +2 -2
  128. metaflow-stubs/system/__init__.pyi +3 -3
  129. metaflow-stubs/system/system_logger.pyi +2 -2
  130. metaflow-stubs/system/system_monitor.pyi +3 -3
  131. metaflow-stubs/tagging_util.pyi +2 -2
  132. metaflow-stubs/tuple_util.pyi +2 -2
  133. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.1.dist-info}/METADATA +1 -1
  134. ob_metaflow_stubs-5.1.dist-info/RECORD +137 -0
  135. metaflow-stubs/plugins/airflow/airflow.pyi +0 -187
  136. metaflow-stubs/plugins/airflow/airflow_cli.pyi +0 -90
  137. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +0 -50
  138. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +0 -46
  139. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +0 -145
  140. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +0 -22
  141. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +0 -159
  142. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +0 -49
  143. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +0 -78
  144. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +0 -77
  145. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +0 -11
  146. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +0 -114
  147. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +0 -75
  148. ob_metaflow_stubs-4.9.dist-info/RECORD +0 -150
  149. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.1.dist-info}/WHEEL +0 -0
  150. {ob_metaflow_stubs-4.9.dist-info → ob_metaflow_stubs-5.1.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.10.1+ob(v1) #
-# Generated on 2024-07-31T05:43:20.962506 #
+# MF version: 2.12.11.0+ob(v1) #
+# Generated on 2024-08-08T23:50:17.875353 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import metaflow.events
-    import metaflow.flowspec
-    import metaflow.metaflow_current
-    import metaflow._vendor.click.types
+    import metaflow.parameters
+    import typing
     import metaflow.plugins.datatools.s3.s3
     import metaflow.client.core
-    import metaflow.datastore.inputs
+    import metaflow.metaflow_current
+    import metaflow.flowspec
+    import metaflow._vendor.click.types
+    import io
     import datetime
+    import metaflow.datastore.inputs
+    import metaflow.events
     import metaflow.runner.metaflow_runner
-    import typing
-    import metaflow.parameters
-    import io

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

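All of the reshuffled imports in this hunk live under `if typing.TYPE_CHECKING:`, so they are resolved only by type checkers and never executed at runtime; the reordering is stub-generator churn, not an API change. A minimal sketch of the same pattern, with an illustrative function that is not part of the stubs:

```python
from __future__ import annotations

import typing

if typing.TYPE_CHECKING:
    # Resolved by mypy/pyright only; never imported at runtime,
    # keeping heavy modules off the import path.
    import metaflow.client.core


def describe(flow: metaflow.client.core.Flow) -> str:
    # With `from __future__ import annotations`, annotations are lazy,
    # so the TYPE_CHECKING-only import above is sufficient.
    return str(flow.latest_run)
```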
@@ -728,141 +728,128 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
-    """
-    Specifies environment variables to be set prior to the execution of a step.
-
-    Parameters
-    ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
-    """
-    ...
-
-@typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies that the step will success under all circumstances.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies the Conda environment for the step.
+    Specifies that the step will success under all circumstances.

-    Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
-        Packages to use for this step. The key is the name of the package
-        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
-        Version of Python to use, e.g. '3.7.4'. A default value of None implies
-        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the resources needed when executing this step.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Creates a human-readable report, a Metaflow Card, after this step completes.
+    Specifies the resources needed when executing this step.

-    Note that you may add multiple `@card` decorators in a step with different parameters.
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
-    type : str, default 'default'
-        Card type.
-    id : str, optional, default None
-        If multiple cards are present, use this id to identify this card.
-    options : Dict[str, Any], default {}
-        Options passed to the card. The contents depend on the card type.
-    timeout : int, default 45
-        Interrupt reporting if it takes more than this many seconds.
-
-
+    cpu : int, default 1
+        Number of CPUs required for this step.
+    gpu : int, default 0
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
+    memory : int, default 4096
+        Memory size (in MB) required for this step.
+    shared_memory : int, optional, default None
+        The value for the size (in MiB) of the /dev/shm volume for this step.
+        This parameter maps to the `--shm-size` option in Docker.
     """
     ...

@@ -924,84 +911,82 @@ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     ...

 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies environment variables to be set prior to the execution of a step.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
     """
-    Specifies that the step will success under all circumstances.
-
-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    Specifies environment variables to be set prior to the execution of a step.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
     """
     ...

 @typing.overload
-def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    Specifies the PyPI packages for the step.
+
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

 @typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies secrets to be retrieved and injected as environment variables prior to
-    the execution of a step.
+    Specifies the PyPI packages for the step.

-    Parameters
-    ----------
-    sources : List[Union[str, Dict[str, Any]]], default: []
-        List of secret specs, defining how the secrets are to be retrieved
+    Information in this decorator will augment any
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.
+
+    Parameters
+    ----------
+    packages : Dict[str, str], default: {}
+        Packages to use for this step. The key is the name of the package
+        and the value is the version to use.
+    python : str, optional, default: None
+        Version of Python to use, e.g. '3.7.4'. A default value of None implies
+        that the version used will correspond to the version of the Python interpreter used to start the run.
     """
     ...

@@ -1024,59 +1009,6 @@ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
     """
     ...

-@typing.overload
-def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
-    ...
-
-@typing.overload
-def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
-    ...
-
-def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
-    """
-    Specifies the number of times the task corresponding
-    to a step needs to be retried.
-
-    This decorator is useful for handling transient errors, such as networking issues.
-    If your task contains operations that can't be retried safely, e.g. database updates,
-    it is advisable to annotate it with `@retry(times=0)`.
-
-    This can be used in conjunction with the `@catch` decorator. The `@catch`
-    decorator will execute a no-op task after all retries have been exhausted,
-    ensuring that the flow execution can continue.
-
-    Parameters
-    ----------
-    times : int, default 3
-        Number of times to retry this task.
-    minutes_between_retries : int, default 2
-        Number of minutes between retries.
-    """
-    ...
-
 def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
     Specifies that this step should execute on Kubernetes.
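The hunk above only deletes the `@retry` stubs from this position; the next hunk re-adds them further down, so the decorator itself survives unchanged in 5.1. Its docstring pairs naturally with `@catch`: retries absorb transient failures, and `@catch` runs a no-op task once retries are exhausted. A minimal sketch of that combination, assuming a standard Metaflow installation (the simulated failure is illustrative):

```python
import random

from metaflow import FlowSpec, catch, retry, step


class FlakyFlow(FlowSpec):

    @catch(var="error")                         # after retries run out, succeed and store the exception
    @retry(times=3, minutes_between_retries=0)  # retry transient failures first
    @step
    def start(self):
        if random.random() < 0.5:  # stand-in for a flaky network call
            raise RuntimeError("transient failure")
        self.data = "ok"
        self.next(self.end)

    @step
    def end(self):
        print("error artifact:", getattr(self, "error", None))


if __name__ == "__main__":
    FlakyFlow()
```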
@@ -1137,324 +1069,196 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
1137
1069
  ...
1138
1070
 
1139
1071
  @typing.overload
1140
- def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1072
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1141
1073
  """
1142
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1074
+ Specifies the number of times the task corresponding
1075
+ to a step needs to be retried.
1076
+
1077
+ This decorator is useful for handling transient errors, such as networking issues.
1078
+ If your task contains operations that can't be retried safely, e.g. database updates,
1079
+ it is advisable to annotate it with `@retry(times=0)`.
1080
+
1081
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1082
+ decorator will execute a no-op task after all retries have been exhausted,
1083
+ ensuring that the flow execution can continue.
1143
1084
 
1144
1085
  Parameters
1145
1086
  ----------
1146
- cpu : int, default 1
1147
- Number of CPUs required for this step. If `@resources` is
1148
- also present, the maximum value from all decorators is used.
1149
- gpu : int, default 0
1150
- Number of GPUs required for this step. If `@resources` is
1151
- also present, the maximum value from all decorators is used.
1152
- memory : int, default 4096
1153
- Memory size (in MB) required for this step. If
1154
- `@resources` is also present, the maximum value from all decorators is
1155
- used.
1156
- image : str, optional, default None
1157
- Docker image to use when launching on AWS Batch. If not specified, and
1158
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1159
- not, a default Docker image mapping to the current version of Python is used.
1160
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
1161
- AWS Batch Job Queue to submit the job to.
1162
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1163
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1164
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1165
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1166
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1167
- shared_memory : int, optional, default None
1168
- The value for the size (in MiB) of the /dev/shm volume for this step.
1169
- This parameter maps to the `--shm-size` option in Docker.
1170
- max_swap : int, optional, default None
1171
- The total amount of swap memory (in MiB) a container can use for this
1172
- step. This parameter is translated to the `--memory-swap` option in
1173
- Docker where the value is the sum of the container memory plus the
1174
- `max_swap` value.
1175
- swappiness : int, optional, default None
1176
- This allows you to tune memory swappiness behavior for this step.
1177
- A swappiness value of 0 causes swapping not to happen unless absolutely
1178
- necessary. A swappiness value of 100 causes pages to be swapped very
1179
- aggressively. Accepted values are whole numbers between 0 and 100.
1180
- use_tmpfs : bool, default False
1181
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
1182
- not available on Fargate compute environments
1183
- tmpfs_tempdir : bool, default True
1184
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1185
- tmpfs_size : int, optional, default None
1186
- The value for the size (in MiB) of the tmpfs mount for this step.
1187
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1188
- memory allocated for this step.
1189
- tmpfs_path : str, optional, default None
1190
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1191
- inferentia : int, default 0
1192
- Number of Inferentia chips required for this step.
1193
- trainium : int, default None
1194
- Alias for inferentia. Use only one of the two.
1195
- efa : int, default 0
1196
- Number of elastic fabric adapter network devices to attach to container
1197
- ephemeral_storage : int, default None
1198
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1199
- This is only relevant for Fargate compute environments
1200
- log_driver: str, optional, default None
1201
- The log driver to use for the Amazon ECS container.
1202
- log_options: List[str], optional, default None
1203
- List of strings containing options for the chosen log driver. The configurable values
1204
- depend on the `log driver` chosen. Validation of these options is not supported yet.
1205
- Example: [`awslogs-group:aws/batch/job`]
1087
+ times : int, default 3
1088
+ Number of times to retry this task.
1089
+ minutes_between_retries : int, default 2
1090
+ Number of minutes between retries.
1206
1091
  """
1207
1092
  ...
1208
1093
 
1209
1094
  @typing.overload
1210
- def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1095
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1211
1096
  ...
1212
1097
 
1213
1098
  @typing.overload
1214
- def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1099
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1215
1100
  ...
1216
1101
 
1217
- def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
1102
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1218
1103
  """
1219
- Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
1104
+ Specifies the number of times the task corresponding
1105
+ to a step needs to be retried.
1106
+
1107
+ This decorator is useful for handling transient errors, such as networking issues.
1108
+ If your task contains operations that can't be retried safely, e.g. database updates,
1109
+ it is advisable to annotate it with `@retry(times=0)`.
1110
+
1111
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1112
+ decorator will execute a no-op task after all retries have been exhausted,
1113
+ ensuring that the flow execution can continue.
1220
1114
 
1221
1115
  Parameters
1222
1116
  ----------
1223
- cpu : int, default 1
1224
- Number of CPUs required for this step. If `@resources` is
1225
- also present, the maximum value from all decorators is used.
1226
- gpu : int, default 0
1227
- Number of GPUs required for this step. If `@resources` is
1228
- also present, the maximum value from all decorators is used.
1229
- memory : int, default 4096
1230
- Memory size (in MB) required for this step. If
1231
- `@resources` is also present, the maximum value from all decorators is
1232
- used.
1233
- image : str, optional, default None
1234
- Docker image to use when launching on AWS Batch. If not specified, and
1235
- METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
1236
- not, a default Docker image mapping to the current version of Python is used.
1237
- queue : str, default METAFLOW_BATCH_JOB_QUEUE
1238
- AWS Batch Job Queue to submit the job to.
1239
- iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
1240
- AWS IAM role that AWS Batch container uses to access AWS cloud resources.
1241
- execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
1242
- AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
1243
- (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
1244
- shared_memory : int, optional, default None
1245
- The value for the size (in MiB) of the /dev/shm volume for this step.
1246
- This parameter maps to the `--shm-size` option in Docker.
1247
- max_swap : int, optional, default None
1248
- The total amount of swap memory (in MiB) a container can use for this
1249
- step. This parameter is translated to the `--memory-swap` option in
1250
- Docker where the value is the sum of the container memory plus the
1251
- `max_swap` value.
1252
- swappiness : int, optional, default None
1253
- This allows you to tune memory swappiness behavior for this step.
1254
- A swappiness value of 0 causes swapping not to happen unless absolutely
1255
- necessary. A swappiness value of 100 causes pages to be swapped very
1256
- aggressively. Accepted values are whole numbers between 0 and 100.
1257
- use_tmpfs : bool, default False
1258
- This enables an explicit tmpfs mount for this step. Note that tmpfs is
1259
- not available on Fargate compute environments
1260
- tmpfs_tempdir : bool, default True
1261
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1262
- tmpfs_size : int, optional, default None
1263
- The value for the size (in MiB) of the tmpfs mount for this step.
1264
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1265
- memory allocated for this step.
1266
- tmpfs_path : str, optional, default None
1267
- Path to tmpfs mount for this step. Defaults to /metaflow_temp.
1268
- inferentia : int, default 0
1269
- Number of Inferentia chips required for this step.
1270
- trainium : int, default None
1271
- Alias for inferentia. Use only one of the two.
1272
- efa : int, default 0
1273
- Number of elastic fabric adapter network devices to attach to container
1274
- ephemeral_storage : int, default None
1275
- The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
1276
- This is only relevant for Fargate compute environments
1277
- log_driver: str, optional, default None
1278
- The log driver to use for the Amazon ECS container.
1279
- log_options: List[str], optional, default None
1280
- List of strings containing options for the chosen log driver. The configurable values
1281
- depend on the `log driver` chosen. Validation of these options is not supported yet.
1282
- Example: [`awslogs-group:aws/batch/job`]
1117
+ times : int, default 3
1118
+ Number of times to retry this task.
1119
+ minutes_between_retries : int, default 2
1120
+ Number of minutes between retries.
1283
1121
  """
1284
1122
  ...
1285
1123
 
1286
1124
  @typing.overload
1287
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1125
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1288
1126
  """
1289
- Specifies the PyPI packages for the step.
1127
+ Specifies the Conda environment for the step.
1290
1128
 
1291
1129
  Information in this decorator will augment any
1292
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1293
- you can use `@pypi_base` to set packages required by all
1294
- steps and use `@pypi` to specify step-specific overrides.
1130
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1131
+ you can use `@conda_base` to set packages required by all
1132
+ steps and use `@conda` to specify step-specific overrides.
1295
1133
 
1296
1134
  Parameters
1297
1135
  ----------
1298
- packages : Dict[str, str], default: {}
1136
+ packages : Dict[str, str], default {}
1299
1137
  Packages to use for this step. The key is the name of the package
1300
1138
  and the value is the version to use.
1301
- python : str, optional, default: None
1139
+ libraries : Dict[str, str], default {}
1140
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1141
+ python : str, optional, default None
1302
1142
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1303
1143
  that the version used will correspond to the version of the Python interpreter used to start the run.
1144
+ disabled : bool, default False
1145
+ If set to True, disables @conda.
1304
1146
  """
1305
1147
  ...
1306
1148
 
1307
1149
  @typing.overload
1308
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1150
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1309
1151
  ...
1310
1152
 
1311
1153
  @typing.overload
1312
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1154
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1313
1155
  ...
1314
1156
 
1315
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1157
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
1316
1158
  """
1317
- Specifies the PyPI packages for the step.
1159
+ Specifies the Conda environment for the step.
1318
1160
 
1319
1161
  Information in this decorator will augment any
1320
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1321
- you can use `@pypi_base` to set packages required by all
1322
- steps and use `@pypi` to specify step-specific overrides.
1162
+ attributes set in the `@conda_base` flow-level decorator. Hence,
1163
+ you can use `@conda_base` to set packages required by all
1164
+ steps and use `@conda` to specify step-specific overrides.
1323
1165
 
1324
1166
  Parameters
1325
1167
  ----------
1326
- packages : Dict[str, str], default: {}
1168
+ packages : Dict[str, str], default {}
1327
1169
  Packages to use for this step. The key is the name of the package
1328
1170
  and the value is the version to use.
1329
- python : str, optional, default: None
1171
+ libraries : Dict[str, str], default {}
1172
+ Supported for backward compatibility. When used with packages, packages will take precedence.
1173
+ python : str, optional, default None
1330
1174
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
1331
1175
  that the version used will correspond to the version of the Python interpreter used to start the run.
1176
+ disabled : bool, default False
1177
+ If set to True, disables @conda.
1332
1178
  """
1333
1179
  ...
1334
1180
 
1335
1181
  @typing.overload
1336
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1182
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1337
1183
  """
1338
- Specifies the resources needed when executing this step.
1339
-
1340
- Use `@resources` to specify the resource requirements
1341
- independently of the specific compute layer (`@batch`, `@kubernetes`).
1184
+ Creates a human-readable report, a Metaflow Card, after this step completes.
1342
1185
 
1343
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...
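
For orientation, a minimal sketch of the `@card` decorator that replaces `@resources` at this position in the stubs. The flow, step, and card contents are illustrative; indexing `current.card` by `id` follows Metaflow's documented cards API:

```
from metaflow import FlowSpec, card, current, step
from metaflow.cards import Markdown

class CardDemoFlow(FlowSpec):

    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        # Components appended here are rendered into the card
        # after the step completes.
        current.card["summary"].append(Markdown("# Hello from a card"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardDemoFlow()
```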
 
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
-
- User code call
- -----------
- @nim(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
- )
-
- Valid backend options
- ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
- - 🚧 'dataplane': Run in your account.
-
- Valid model options
- ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
- backend: str
- Compute provider to run the NIM container.
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...

- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
  """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

  Parameters
  ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
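
A minimal sketch of the `@secrets` decorator added here. The secret id and the injected environment variable name are hypothetical; the spec format and resulting variables depend on the configured secrets backend (e.g. AWS Secrets Manager):

```
import os

from metaflow import FlowSpec, secrets, step

class SecretsDemoFlow(FlowSpec):

    # "db-credentials" is a hypothetical secret id for the configured backend.
    @secrets(sources=["db-credentials"])
    @step
    def start(self):
        # Keys stored in the secret are injected as environment variables
        # before this step's code runs.
        print(os.environ.get("DB_USER"))
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    SecretsDemoFlow()
```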
 
@@ -1497,6 +1301,37 @@ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packag
  """
  ...

+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
+
+ User code call
+ -----------
+ @nim(
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
+ backend='managed'
+ )
+
+ Valid backend options
+ ---------------------
+ - 'managed': Outerbounds selects a compute provider based on the model.
+ - 🚧 'dataplane': Run in your account.
+
+ Valid model options
+ ----------------
+ - 'meta/llama3-8b-instruct': 8B parameter model
+ - 'meta/llama3-70b-instruct': 70B parameter model
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
+
+ Parameters
+ ----------
+ models: list[NIM]
+ List of NIM containers running models in sidecars.
+ backend: str
+ Compute provider to run the NIM container.
+ """
+ ...
+
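A sketch of the `@nim` flow-level decorator moved here, assuming it is importable from the top-level `metaflow` namespace as these stubs suggest. Model ids and the backend value come from the docstring above; how a step consumes the sidecar endpoints is backend-specific and not covered by this stub:

```
from metaflow import FlowSpec, nim, step

# Model id and backend are taken from the docstring's
# "Valid ... options" lists; everything else is illustrative.
@nim(
    models=['meta/llama3-8b-instruct'],
    backend='managed'
)
class NIMDemoFlow(FlowSpec):

    @step
    def start(self):
        # The NIM sidecar would be queried here; the client API is
        # provided by the Outerbounds platform, not by this stub.
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NIMDemoFlow()
```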
  def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
  The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
@@ -1524,67 +1359,163 @@ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, expone
  Name of the sensor on Airflow
  description : str
  Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
+ When specified as a full s3:// URL, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for the S3 connection. (Default: None)
+ """
+ ...
+
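A sketch of `@airflow_s3_key_sensor`, assuming top-level import from `metaflow` as these stubs suggest. The bucket key, sensor name, and description are hypothetical; the other values mirror the documented defaults, and the sensor only takes effect when the flow is compiled with `airflow create`:

```
from metaflow import FlowSpec, airflow_s3_key_sensor, step

@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_input",                       # hypothetical sensor name
    description="Wait for the daily input file",
    bucket_key="s3://my-bucket/input/data.csv",  # hypothetical key
    bucket_name=None,                            # None because bucket_key is a full s3:// URL
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class S3SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    S3SensorFlow()
```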
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
+ @typing.overload
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ """
+ Specifies the flow(s) that this flow depends on.
+
+ ```
+ @trigger_on_finish(flow='FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
+
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
+ ```
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ ```
+ or
+ ```
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ ```
+
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
+ ```
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ ```
+
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
+ Parameters
+ ----------
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
+ """
+ ...
+
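A minimal sketch of `@trigger_on_finish` as documented above; `FooFlow` and the downstream flow name are illustrative:

```
from metaflow import FlowSpec, step, trigger_on_finish

# FooFlow is a hypothetical upstream flow. Once deployed (e.g. via
# `argo-workflows create`), this flow runs after a FooFlow run in the
# same namespace completes successfully.
@trigger_on_finish(flow='FooFlow')
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```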
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow
+ description : str
+ Description of sensor in the Airflow UI
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (default value) the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or dis-allowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence: bool
+ Set to True to check if the external task exists or check if
+ the DAG to wait for exists. (Default: True)
  """
  ...
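A sketch of `@airflow_external_task_sensor` under the same assumptions as the S3 sensor above: the DAG id and sensor name are hypothetical, the remaining values mirror the documented defaults, and the sensor only applies when the flow is compiled with `airflow create`:

```
from datetime import timedelta

from metaflow import FlowSpec, airflow_external_task_sensor, step

@airflow_external_task_sensor(
    timeout=3600,
    poke_interval=60,
    mode="poke",
    exponential_backoff=True,
    pool=None,
    soft_fail=False,
    name="wait_for_etl",                   # hypothetical sensor name
    description="Wait for the nightly ETL DAG",
    external_dag_id="nightly_etl",         # hypothetical DAG id
    external_task_ids=None,                # None: wait for the whole DAG
    allowed_states=["success"],
    failed_states=None,
    execution_delta=timedelta(hours=1),    # look at the run one hour earlier
    check_existence=True,
)
class EtlDownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EtlDownstreamFlow()
```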
 
@@ -1683,45 +1614,52 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ @typing.overload
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.

  Parameters
  ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (default value) the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or dis-allowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence: bool
- Set to True to check if the external task exists or check if
- the DAG to wait for exists. (Default: True)
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
+ """
+ ...
+
+ @typing.overload
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
+ """
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
+
+ Parameters
+ ----------
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
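A minimal sketch of `@schedule` as repositioned here. The cron expression and timezone are illustrative, and per the docstring the timezone is currently honored only on Argo Workflows:

```
from metaflow import FlowSpec, schedule, step

# Run every day at 06:00; takes effect when the flow is deployed
# to a production scheduler, not on local `run`.
@schedule(cron='0 6 * * *', timezone='Europe/London')
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```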
 
@@ -1774,104 +1712,19 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
- @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
-
- ```
- @trigger_on_finish(flow='FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
- ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
-
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
- ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
- ```
- or
- ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
- ```
-
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
- ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
- ```
+ Specifies what flows belong to the same project.

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.


  """