metaflow-stubs 2.12.8__py2.py3-none-any.whl → 2.12.9__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +535 -486
  2. metaflow-stubs/cards.pyi +6 -6
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +4 -4
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +5 -5
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +32 -9
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +3 -3
  20. metaflow-stubs/plugins/__init__.pyi +7 -3
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow.pyi +5 -5
  23. metaflow-stubs/plugins/airflow/airflow_cli.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  29. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  32. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  34. metaflow-stubs/plugins/argo/argo_workflows.pyi +38 -5
  35. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +270 -5
  36. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +228 -3
  37. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +292 -0
  38. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  46. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  47. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  48. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +24 -4
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +260 -0
  58. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  59. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  62. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  63. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  64. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  65. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_cli.pyi +3 -3
  67. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  68. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  71. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  72. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  73. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  74. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  80. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  81. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  84. metaflow-stubs/plugins/cards/component_serializer.pyi +3 -3
  85. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  86. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  87. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  88. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  89. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  90. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  91. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  92. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  93. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  94. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  95. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  96. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  97. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  98. metaflow-stubs/plugins/frameworks/pytorch.pyi +5 -3
  99. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  100. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  101. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  102. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  105. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +18 -10
  107. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +12 -12
  108. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +26 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +14 -31
  110. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +26 -3
  111. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +82 -28
  112. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  113. metaflow-stubs/plugins/package_cli.pyi +2 -2
  114. metaflow-stubs/plugins/parallel_decorator.pyi +47 -2
  115. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  116. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  117. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  118. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  119. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  121. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  122. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  123. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  124. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  125. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  126. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +3 -3
  127. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  128. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  129. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +26 -3
  130. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  131. metaflow-stubs/procpoll.pyi +2 -2
  132. metaflow-stubs/pylint_wrapper.pyi +2 -2
  133. metaflow-stubs/runner/__init__.pyi +2 -2
  134. metaflow-stubs/runner/deployer.pyi +354 -0
  135. metaflow-stubs/runner/metaflow_runner.pyi +10 -10
  136. metaflow-stubs/runner/nbdeploy.pyi +54 -0
  137. metaflow-stubs/runner/nbrun.pyi +7 -7
  138. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  139. metaflow-stubs/runner/utils.pyi +25 -0
  140. metaflow-stubs/system/__init__.pyi +2 -2
  141. metaflow-stubs/system/system_logger.pyi +2 -2
  142. metaflow-stubs/system/system_monitor.pyi +2 -2
  143. metaflow-stubs/tagging_util.pyi +2 -2
  144. metaflow-stubs/tuple_util.pyi +2 -2
  145. metaflow-stubs/version.pyi +2 -2
  146. {metaflow_stubs-2.12.8.dist-info → metaflow_stubs-2.12.9.dist-info}/METADATA +2 -2
  147. metaflow_stubs-2.12.9.dist-info/RECORD +150 -0
  148. {metaflow_stubs-2.12.8.dist-info → metaflow_stubs-2.12.9.dist-info}/WHEEL +1 -1
  149. metaflow_stubs-2.12.8.dist-info/RECORD +0 -145
  150. {metaflow_stubs-2.12.8.dist-info → metaflow_stubs-2.12.9.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
1
1
  ##################################################################################
2
2
  # Auto-generated Metaflow stub file #
3
- # MF version: 2.12.8 #
4
- # Generated on 2024-07-16T15:51:55.346854 #
3
+ # MF version: 2.12.9 #
4
+ # Generated on 2024-07-25T18:20:16.223359 #
5
5
  ##################################################################################
6
6
 
7
7
  from __future__ import annotations
8
8
 
9
9
  import typing
10
10
  if typing.TYPE_CHECKING:
11
- import metaflow.plugins.datatools.s3.s3
12
11
  import metaflow.runner.metaflow_runner
13
- import metaflow.datastore.inputs
12
+ import io
14
13
  import metaflow.flowspec
15
- import metaflow._vendor.click.types
16
- import datetime
17
- import metaflow.metaflow_current
18
- import metaflow.events
19
14
  import typing
15
+ import datetime
20
16
  import metaflow.client.core
21
17
  import metaflow.parameters
22
- import io
18
+ import metaflow.metaflow_current
19
+ import metaflow.datastore.inputs
20
+ import metaflow._vendor.click.types
21
+ import metaflow.events
22
+ import metaflow.plugins.datatools.s3.s3
23
23
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
24
24
  StepFlag = typing.NewType("StepFlag", bool)
25
25
 
@@ -727,186 +727,113 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
727
727
  """
728
728
  ...
729
729
 
730
- @typing.overload
731
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
730
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
732
731
  """
733
- Specifies the resources needed when executing this step.
734
-
735
- Use `@resources` to specify the resource requirements
736
- independently of the specific compute layer (`@batch`, `@kubernetes`).
737
-
738
- You can choose the compute layer on the command line by executing e.g.
739
- ```
740
- python myflow.py run --with batch
741
- ```
742
- or
743
- ```
744
- python myflow.py run --with kubernetes
745
- ```
746
- which executes the flow on the desired system using the
747
- requirements specified in `@resources`.
732
+ Specifies that this step should execute on Kubernetes.
748
733
 
749
734
  Parameters
750
735
  ----------
751
736
  cpu : int, default 1
752
- Number of CPUs required for this step.
753
- gpu : int, default 0
754
- Number of GPUs required for this step.
755
- disk : int, optional, default None
756
- Disk size (in MB) required for this step. Only applies on Kubernetes.
737
+ Number of CPUs required for this step. If `@resources` is
738
+ also present, the maximum value from all decorators is used.
757
739
  memory : int, default 4096
758
- Memory size (in MB) required for this step.
759
- shared_memory : int, optional, default None
760
- The value for the size (in MiB) of the /dev/shm volume for this step.
761
- This parameter maps to the `--shm-size` option in Docker.
740
+ Memory size (in MB) required for this step. If
741
+ `@resources` is also present, the maximum value from all decorators is
742
+ used.
743
+ disk : int, default 10240
744
+ Disk size (in MB) required for this step. If
745
+ `@resources` is also present, the maximum value from all decorators is
746
+ used.
747
+ image : str, optional, default None
748
+ Docker image to use when launching on Kubernetes. If not specified, and
749
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
750
+ not, a default Docker image mapping to the current version of Python is used.
751
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
752
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
753
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
754
+ Kubernetes service account to use when launching pod in Kubernetes.
755
+ secrets : List[str], optional, default None
756
+ Kubernetes secrets to use when launching pod in Kubernetes. These
757
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
758
+ in Metaflow configuration.
759
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
760
+ Kubernetes namespace to use when launching pod in Kubernetes.
761
+ gpu : int, optional, default None
762
+ Number of GPUs required for this step. A value of zero implies that
763
+ the scheduled node should not have GPUs.
764
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
765
+ The vendor of the GPUs to be used for this step.
766
+ tolerations : List[str], default []
767
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
768
+ Kubernetes tolerations to use when launching pod in Kubernetes.
769
+ use_tmpfs : bool, default False
770
+ This enables an explicit tmpfs mount for this step.
771
+ tmpfs_tempdir : bool, default True
772
+ sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
773
+ tmpfs_size : int, optional, default: None
774
+ The value for the size (in MiB) of the tmpfs mount for this step.
775
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
776
+ memory allocated for this step.
777
+ tmpfs_path : str, optional, default /metaflow_temp
778
+ Path to tmpfs mount for this step.
779
+ persistent_volume_claims : Dict[str, str], optional, default None
780
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
781
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
782
+ shared_memory: int, optional
783
+ Shared memory size (in MiB) required for this step
784
+ port: int, optional
785
+ Port number to specify in the Kubernetes job object
762
786
  """
763
787
  ...
764
788
 
765
789
  @typing.overload
766
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
767
- ...
768
-
769
- @typing.overload
770
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
771
- ...
772
-
773
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
790
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
774
791
  """
775
- Specifies the resources needed when executing this step.
776
-
777
- Use `@resources` to specify the resource requirements
778
- independently of the specific compute layer (`@batch`, `@kubernetes`).
792
+ Creates a human-readable report, a Metaflow Card, after this step completes.
779
793
 
780
- You can choose the compute layer on the command line by executing e.g.
781
- ```
782
- python myflow.py run --with batch
783
- ```
784
- or
785
- ```
786
- python myflow.py run --with kubernetes
787
- ```
788
- which executes the flow on the desired system using the
789
- requirements specified in `@resources`.
794
+ Note that you may add multiple `@card` decorators in a step with different parameters.
790
795
 
791
796
  Parameters
792
797
  ----------
793
- cpu : int, default 1
794
- Number of CPUs required for this step.
795
- gpu : int, default 0
796
- Number of GPUs required for this step.
797
- disk : int, optional, default None
798
- Disk size (in MB) required for this step. Only applies on Kubernetes.
799
- memory : int, default 4096
800
- Memory size (in MB) required for this step.
801
- shared_memory : int, optional, default None
802
- The value for the size (in MiB) of the /dev/shm volume for this step.
803
- This parameter maps to the `--shm-size` option in Docker.
804
- """
805
- ...
806
-
807
- @typing.overload
808
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
809
- """
810
- Specifies a timeout for your step.
811
-
812
- This decorator is useful if this step may hang indefinitely.
813
-
814
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
815
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
816
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
798
+ type : str, default 'default'
799
+ Card type.
800
+ id : str, optional, default None
801
+ If multiple cards are present, use this id to identify this card.
802
+ options : Dict[str, Any], default {}
803
+ Options passed to the card. The contents depend on the card type.
804
+ timeout : int, default 45
805
+ Interrupt reporting if it takes more than this many seconds.
817
806
 
818
- Note that all the values specified in parameters are added together so if you specify
819
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
820
807
 
821
- Parameters
822
- ----------
823
- seconds : int, default 0
824
- Number of seconds to wait prior to timing out.
825
- minutes : int, default 0
826
- Number of minutes to wait prior to timing out.
827
- hours : int, default 0
828
- Number of hours to wait prior to timing out.
829
808
  """
830
809
  ...
831
810
 
832
811
  @typing.overload
833
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
812
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
834
813
  ...
835
814
 
836
815
  @typing.overload
837
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
838
- ...
839
-
840
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
841
- """
842
- Specifies a timeout for your step.
843
-
844
- This decorator is useful if this step may hang indefinitely.
845
-
846
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
847
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
848
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
849
-
850
- Note that all the values specified in parameters are added together so if you specify
851
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
852
-
853
- Parameters
854
- ----------
855
- seconds : int, default 0
856
- Number of seconds to wait prior to timing out.
857
- minutes : int, default 0
858
- Number of minutes to wait prior to timing out.
859
- hours : int, default 0
860
- Number of hours to wait prior to timing out.
861
- """
816
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
862
817
  ...
863
818
 
864
- @typing.overload
865
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
819
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
866
820
  """
867
- Specifies that the step will success under all circumstances.
821
+ Creates a human-readable report, a Metaflow Card, after this step completes.
868
822
 
869
- The decorator will create an optional artifact, specified by `var`, which
870
- contains the exception raised. You can use it to detect the presence
871
- of errors, indicating that all happy-path artifacts produced by the step
872
- are missing.
823
+ Note that you may add multiple `@card` decorators in a step with different parameters.
873
824
 
874
825
  Parameters
875
826
  ----------
876
- var : str, optional, default None
877
- Name of the artifact in which to store the caught exception.
878
- If not specified, the exception is not stored.
879
- print_exception : bool, default True
880
- Determines whether or not the exception is printed to
881
- stdout when caught.
882
- """
883
- ...
884
-
885
- @typing.overload
886
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
887
- ...
888
-
889
- @typing.overload
890
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
891
- ...
892
-
893
- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
894
- """
895
- Specifies that the step will success under all circumstances.
827
+ type : str, default 'default'
828
+ Card type.
829
+ id : str, optional, default None
830
+ If multiple cards are present, use this id to identify this card.
831
+ options : Dict[str, Any], default {}
832
+ Options passed to the card. The contents depend on the card type.
833
+ timeout : int, default 45
834
+ Interrupt reporting if it takes more than this many seconds.
896
835
 
897
- The decorator will create an optional artifact, specified by `var`, which
898
- contains the exception raised. You can use it to detect the presence
899
- of errors, indicating that all happy-path artifacts produced by the step
900
- are missing.
901
836
 
902
- Parameters
903
- ----------
904
- var : str, optional, default None
905
- Name of the artifact in which to store the caught exception.
906
- If not specified, the exception is not stored.
907
- print_exception : bool, default True
908
- Determines whether or not the exception is printed to
909
- stdout when caught.
910
837
  """
911
838
  ...
912
839
 
@@ -1058,38 +985,35 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1058
985
  ...
1059
986
 
1060
987
  @typing.overload
1061
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
988
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1062
989
  """
1063
- Specifies environment variables to be set prior to the execution of a step.
990
+ Specifies the PyPI packages for the step.
991
+
992
+ Information in this decorator will augment any
993
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
994
+ you can use `@pypi_base` to set packages required by all
995
+ steps and use `@pypi` to specify step-specific overrides.
1064
996
 
1065
997
  Parameters
1066
998
  ----------
1067
- vars : Dict[str, str], default {}
1068
- Dictionary of environment variables to set.
999
+ packages : Dict[str, str], default: {}
1000
+ Packages to use for this step. The key is the name of the package
1001
+ and the value is the version to use.
1002
+ python : str, optional, default: None
1003
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1004
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1069
1005
  """
1070
1006
  ...
1071
1007
 
1072
1008
  @typing.overload
1073
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1009
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1074
1010
  ...
1075
1011
 
1076
1012
  @typing.overload
1077
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1013
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1078
1014
  ...
1079
1015
 
1080
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1081
- """
1082
- Specifies environment variables to be set prior to the execution of a step.
1083
-
1084
- Parameters
1085
- ----------
1086
- vars : Dict[str, str], default {}
1087
- Dictionary of environment variables to set.
1088
- """
1089
- ...
1090
-
1091
- @typing.overload
1092
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1016
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1093
1017
  """
1094
1018
  Specifies the PyPI packages for the step.
1095
1019
 
@@ -1110,30 +1034,137 @@ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] =
1110
1034
  ...
1111
1035
 
1112
1036
  @typing.overload
1113
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1037
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1038
+ """
1039
+ Specifies that the step will success under all circumstances.
1040
+
1041
+ The decorator will create an optional artifact, specified by `var`, which
1042
+ contains the exception raised. You can use it to detect the presence
1043
+ of errors, indicating that all happy-path artifacts produced by the step
1044
+ are missing.
1045
+
1046
+ Parameters
1047
+ ----------
1048
+ var : str, optional, default None
1049
+ Name of the artifact in which to store the caught exception.
1050
+ If not specified, the exception is not stored.
1051
+ print_exception : bool, default True
1052
+ Determines whether or not the exception is printed to
1053
+ stdout when caught.
1054
+ """
1114
1055
  ...
1115
1056
 
1116
1057
  @typing.overload
1117
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1058
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1118
1059
  ...
1119
1060
 
1120
- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1061
+ @typing.overload
1062
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1063
+ ...
1064
+
1065
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
1121
1066
  """
1122
- Specifies the PyPI packages for the step.
1067
+ Specifies that the step will success under all circumstances.
1123
1068
 
1124
- Information in this decorator will augment any
1125
- attributes set in the `@pyi_base` flow-level decorator. Hence,
1126
- you can use `@pypi_base` to set packages required by all
1127
- steps and use `@pypi` to specify step-specific overrides.
1069
+ The decorator will create an optional artifact, specified by `var`, which
1070
+ contains the exception raised. You can use it to detect the presence
1071
+ of errors, indicating that all happy-path artifacts produced by the step
1072
+ are missing.
1128
1073
 
1129
1074
  Parameters
1130
1075
  ----------
1131
- packages : Dict[str, str], default: {}
1132
- Packages to use for this step. The key is the name of the package
1133
- and the value is the version to use.
1134
- python : str, optional, default: None
1135
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1136
- that the version used will correspond to the version of the Python interpreter used to start the run.
1076
+ var : str, optional, default None
1077
+ Name of the artifact in which to store the caught exception.
1078
+ If not specified, the exception is not stored.
1079
+ print_exception : bool, default True
1080
+ Determines whether or not the exception is printed to
1081
+ stdout when caught.
1082
+ """
1083
+ ...
1084
+
1085
+ @typing.overload
1086
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1087
+ """
1088
+ Specifies secrets to be retrieved and injected as environment variables prior to
1089
+ the execution of a step.
1090
+
1091
+ Parameters
1092
+ ----------
1093
+ sources : List[Union[str, Dict[str, Any]]], default: []
1094
+ List of secret specs, defining how the secrets are to be retrieved
1095
+ """
1096
+ ...
1097
+
1098
+ @typing.overload
1099
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1100
+ ...
1101
+
1102
+ @typing.overload
1103
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1104
+ ...
1105
+
1106
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1107
+ """
1108
+ Specifies secrets to be retrieved and injected as environment variables prior to
1109
+ the execution of a step.
1110
+
1111
+ Parameters
1112
+ ----------
1113
+ sources : List[Union[str, Dict[str, Any]]], default: []
1114
+ List of secret specs, defining how the secrets are to be retrieved
1115
+ """
1116
+ ...
1117
+
1118
+ @typing.overload
1119
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1120
+ """
1121
+ Specifies the number of times the task corresponding
1122
+ to a step needs to be retried.
1123
+
1124
+ This decorator is useful for handling transient errors, such as networking issues.
1125
+ If your task contains operations that can't be retried safely, e.g. database updates,
1126
+ it is advisable to annotate it with `@retry(times=0)`.
1127
+
1128
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1129
+ decorator will execute a no-op task after all retries have been exhausted,
1130
+ ensuring that the flow execution can continue.
1131
+
1132
+ Parameters
1133
+ ----------
1134
+ times : int, default 3
1135
+ Number of times to retry this task.
1136
+ minutes_between_retries : int, default 2
1137
+ Number of minutes between retries.
1138
+ """
1139
+ ...
1140
+
1141
+ @typing.overload
1142
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1143
+ ...
1144
+
1145
+ @typing.overload
1146
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1147
+ ...
1148
+
1149
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1150
+ """
1151
+ Specifies the number of times the task corresponding
1152
+ to a step needs to be retried.
1153
+
1154
+ This decorator is useful for handling transient errors, such as networking issues.
1155
+ If your task contains operations that can't be retried safely, e.g. database updates,
1156
+ it is advisable to annotate it with `@retry(times=0)`.
1157
+
1158
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
1159
+ decorator will execute a no-op task after all retries have been exhausted,
1160
+ ensuring that the flow execution can continue.
1161
+
1162
+ Parameters
1163
+ ----------
1164
+ times : int, default 3
1165
+ Number of times to retry this task.
1166
+ minutes_between_retries : int, default 2
1167
+ Number of minutes between retries.
1137
1168
  """
1138
1169
  ...
1139
1170
 
@@ -1195,247 +1226,281 @@ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
1195
1226
  ...
1196
1227
 
1197
1228
  @typing.overload
1198
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1229
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1199
1230
  """
1200
- Creates a human-readable report, a Metaflow Card, after this step completes.
1201
-
1202
- Note that you may add multiple `@card` decorators in a step with different parameters.
1231
+ Specifies environment variables to be set prior to the execution of a step.
1203
1232
 
1204
1233
  Parameters
1205
1234
  ----------
1206
- type : str, default 'default'
1207
- Card type.
1208
- id : str, optional, default None
1209
- If multiple cards are present, use this id to identify this card.
1210
- options : Dict[str, Any], default {}
1211
- Options passed to the card. The contents depend on the card type.
1212
- timeout : int, default 45
1213
- Interrupt reporting if it takes more than this many seconds.
1214
-
1215
-
1235
+ vars : Dict[str, str], default {}
1236
+ Dictionary of environment variables to set.
1216
1237
  """
1217
1238
  ...
1218
1239
 
1219
1240
  @typing.overload
1220
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1241
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1221
1242
  ...
1222
1243
 
1223
1244
  @typing.overload
1224
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1245
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1225
1246
  ...
1226
1247
 
1227
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
1248
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
1228
1249
  """
1229
- Creates a human-readable report, a Metaflow Card, after this step completes.
1230
-
1231
- Note that you may add multiple `@card` decorators in a step with different parameters.
1250
+ Specifies environment variables to be set prior to the execution of a step.
1232
1251
 
1233
1252
  Parameters
1234
1253
  ----------
1235
- type : str, default 'default'
1236
- Card type.
1237
- id : str, optional, default None
1238
- If multiple cards are present, use this id to identify this card.
1239
- options : Dict[str, Any], default {}
1240
- Options passed to the card. The contents depend on the card type.
1241
- timeout : int, default 45
1242
- Interrupt reporting if it takes more than this many seconds.
1243
-
1244
-
1254
+ vars : Dict[str, str], default {}
1255
+ Dictionary of environment variables to set.
1245
1256
  """
1246
1257
  ...
1247
1258
 
1248
1259
  @typing.overload
1249
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1260
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1250
1261
  """
1251
- Specifies secrets to be retrieved and injected as environment variables prior to
1252
- the execution of a step.
1253
-
1254
- Parameters
1255
- ----------
1256
- sources : List[Union[str, Dict[str, Any]]], default: []
1257
- List of secret specs, defining how the secrets are to be retrieved
1262
+ Decorator prototype for all step decorators. This function gets specialized
1263
+ and imported for all decorators types by _import_plugin_decorators().
1258
1264
  """
1259
1265
  ...
1260
1266
 
1261
1267
  @typing.overload
1262
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1268
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1263
1269
  ...
1264
1270
 
1265
- @typing.overload
1266
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1271
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
1272
+ """
1273
+ Decorator prototype for all step decorators. This function gets specialized
1274
+ and imported for all decorators types by _import_plugin_decorators().
1275
+ """
1267
1276
  ...
1268
1277
 
1269
- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
1278
+ @typing.overload
1279
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1270
1280
  """
1271
- Specifies secrets to be retrieved and injected as environment variables prior to
1272
- the execution of a step.
1281
+ Specifies a timeout for your step.
1282
+
1283
+ This decorator is useful if this step may hang indefinitely.
1284
+
1285
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1286
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1287
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1288
+
1289
+ Note that all the values specified in parameters are added together so if you specify
1290
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1273
1291
 
1274
1292
  Parameters
1275
1293
  ----------
1276
- sources : List[Union[str, Dict[str, Any]]], default: []
1277
- List of secret specs, defining how the secrets are to be retrieved
1294
+ seconds : int, default 0
1295
+ Number of seconds to wait prior to timing out.
1296
+ minutes : int, default 0
1297
+ Number of minutes to wait prior to timing out.
1298
+ hours : int, default 0
1299
+ Number of hours to wait prior to timing out.
1278
1300
  """
1279
1301
  ...
1280
1302
 
1281
- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1303
+ @typing.overload
1304
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1305
+ ...
1306
+
1307
+ @typing.overload
1308
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1309
+ ...
1310
+
1311
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1282
1312
  """
1283
- Specifies that this step should execute on Kubernetes.
1313
+ Specifies a timeout for your step.
1314
+
1315
+ This decorator is useful if this step may hang indefinitely.
1316
+
1317
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1318
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
1319
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
1320
+
1321
+ Note that all the values specified in parameters are added together so if you specify
1322
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1284
1323
 
1285
1324
  Parameters
1286
1325
  ----------
1287
- cpu : int, default 1
1288
- Number of CPUs required for this step. If `@resources` is
1289
- also present, the maximum value from all decorators is used.
1290
- memory : int, default 4096
1291
- Memory size (in MB) required for this step. If
1292
- `@resources` is also present, the maximum value from all decorators is
1293
- used.
1294
- disk : int, default 10240
1295
- Disk size (in MB) required for this step. If
1296
- `@resources` is also present, the maximum value from all decorators is
1297
- used.
1298
- image : str, optional, default None
1299
- Docker image to use when launching on Kubernetes. If not specified, and
1300
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
1301
- not, a default Docker image mapping to the current version of Python is used.
1302
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
1303
- If given, the imagePullPolicy to be applied to the Docker image of the step.
1304
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
1305
- Kubernetes service account to use when launching pod in Kubernetes.
1306
- secrets : List[str], optional, default None
1307
- Kubernetes secrets to use when launching pod in Kubernetes. These
1308
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
1309
- in Metaflow configuration.
1310
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
1311
- Kubernetes namespace to use when launching pod in Kubernetes.
1312
- gpu : int, optional, default None
1313
- Number of GPUs required for this step. A value of zero implies that
1314
- the scheduled node should not have GPUs.
1315
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
1316
- The vendor of the GPUs to be used for this step.
1317
- tolerations : List[str], default []
1318
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
1319
- Kubernetes tolerations to use when launching pod in Kubernetes.
1320
- use_tmpfs : bool, default False
1321
- This enables an explicit tmpfs mount for this step.
1322
- tmpfs_tempdir : bool, default True
1323
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
1324
- tmpfs_size : int, optional, default: None
1325
- The value for the size (in MiB) of the tmpfs mount for this step.
1326
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
1327
- memory allocated for this step.
1328
- tmpfs_path : str, optional, default /metaflow_temp
1329
- Path to tmpfs mount for this step.
1330
- persistent_volume_claims : Dict[str, str], optional, default None
1331
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
1332
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
1333
- shared_memory: int, optional
1334
- Shared memory size (in MiB) required for this step
1335
- port: int, optional
1336
- Port number to specify in the Kubernetes job object
1326
+ seconds : int, default 0
1327
+ Number of seconds to wait prior to timing out.
1328
+ minutes : int, default 0
1329
+ Number of minutes to wait prior to timing out.
1330
+ hours : int, default 0
1331
+ Number of hours to wait prior to timing out.
1337
1332
  """
1338
1333
  ...
1339
1334
 
1340
1335
  @typing.overload
1341
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1336
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1342
1337
  """
1343
- Specifies the number of times the task corresponding
1344
- to a step needs to be retried.
1338
+ Specifies the resources needed when executing this step.
1345
1339
 
1346
- This decorator is useful for handling transient errors, such as networking issues.
1347
- If your task contains operations that can't be retried safely, e.g. database updates,
1348
- it is advisable to annotate it with `@retry(times=0)`.
1340
+ Use `@resources` to specify the resource requirements
1341
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1349
1342
 
1350
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1351
- decorator will execute a no-op task after all retries have been exhausted,
1352
- ensuring that the flow execution can continue.
1343
+ You can choose the compute layer on the command line by executing e.g.
1344
+ ```
1345
+ python myflow.py run --with batch
1346
+ ```
1347
+ or
1348
+ ```
1349
+ python myflow.py run --with kubernetes
1350
+ ```
1351
+ which executes the flow on the desired system using the
1352
+ requirements specified in `@resources`.
1353
1353
 
1354
1354
  Parameters
1355
1355
  ----------
1356
- times : int, default 3
1357
- Number of times to retry this task.
1358
- minutes_between_retries : int, default 2
1359
- Number of minutes between retries.
1356
+ cpu : int, default 1
1357
+ Number of CPUs required for this step.
1358
+ gpu : int, default 0
1359
+ Number of GPUs required for this step.
1360
+ disk : int, optional, default None
1361
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1362
+ memory : int, default 4096
1363
+ Memory size (in MB) required for this step.
1364
+ shared_memory : int, optional, default None
1365
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1366
+ This parameter maps to the `--shm-size` option in Docker.
1360
1367
  """
1361
1368
  ...
1362
1369
 
1363
1370
  @typing.overload
1364
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1371
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1365
1372
  ...
1366
1373
 
1367
1374
  @typing.overload
1368
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1375
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1369
1376
  ...
1370
1377
 
1371
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
1378
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
1372
1379
  """
1373
- Specifies the number of times the task corresponding
1374
- to a step needs to be retried.
1380
+ Specifies the resources needed when executing this step.
1375
1381
 
1376
- This decorator is useful for handling transient errors, such as networking issues.
1377
- If your task contains operations that can't be retried safely, e.g. database updates,
1378
- it is advisable to annotate it with `@retry(times=0)`.
1382
+ Use `@resources` to specify the resource requirements
1383
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
1379
1384
 
1380
- This can be used in conjunction with the `@catch` decorator. The `@catch`
1381
- decorator will execute a no-op task after all retries have been exhausted,
1382
- ensuring that the flow execution can continue.
1385
+ You can choose the compute layer on the command line by executing e.g.
1386
+ ```
1387
+ python myflow.py run --with batch
1388
+ ```
1389
+ or
1390
+ ```
1391
+ python myflow.py run --with kubernetes
1392
+ ```
1393
+ which executes the flow on the desired system using the
1394
+ requirements specified in `@resources`.
1383
1395
 
1384
1396
  Parameters
1385
1397
  ----------
1386
- times : int, default 3
1387
- Number of times to retry this task.
1388
- minutes_between_retries : int, default 2
1389
- Number of minutes between retries.
1398
+ cpu : int, default 1
1399
+ Number of CPUs required for this step.
1400
+ gpu : int, default 0
1401
+ Number of GPUs required for this step.
1402
+ disk : int, optional, default None
1403
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
1404
+ memory : int, default 4096
1405
+ Memory size (in MB) required for this step.
1406
+ shared_memory : int, optional, default None
1407
+ The value for the size (in MiB) of the /dev/shm volume for this step.
1408
+ This parameter maps to the `--shm-size` option in Docker.
1390
1409
  """
1391
1410
  ...
1392
1411
 
1393
1412
  @typing.overload
1394
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1413
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1395
1414
  """
1396
- Specifies the times when the flow should be run when running on a
1397
- production scheduler.
1415
+ Specifies the event(s) that this flow depends on.
1416
+
1417
+ ```
1418
+ @trigger(event='foo')
1419
+ ```
1420
+ or
1421
+ ```
1422
+ @trigger(events=['foo', 'bar'])
1423
+ ```
1424
+
1425
+ Additionally, you can specify the parameter mappings
1426
+ to map event payload to Metaflow parameters for the flow.
1427
+ ```
1428
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1429
+ ```
1430
+ or
1431
+ ```
1432
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1433
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1434
+ ```
1435
+
1436
+ 'parameters' can also be a list of strings and tuples like so:
1437
+ ```
1438
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1439
+ ```
1440
+ This is equivalent to:
1441
+ ```
1442
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1443
+ ```
1398
1444
 
1399
1445
  Parameters
1400
1446
  ----------
1401
- hourly : bool, default False
1402
- Run the workflow hourly.
1403
- daily : bool, default True
1404
- Run the workflow daily.
1405
- weekly : bool, default False
1406
- Run the workflow weekly.
1407
- cron : str, optional, default None
1408
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1409
- specified by this expression.
1410
- timezone : str, optional, default None
1411
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1412
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1447
+ event : Union[str, Dict[str, Any]], optional, default None
1448
+ Event dependency for this flow.
1449
+ events : List[Union[str, Dict[str, Any]]], default []
1450
+ Events dependency for this flow.
1451
+ options : Dict[str, Any], default {}
1452
+ Backend-specific configuration for tuning eventing behavior.
1453
+
1454
+
1413
1455
  """
1414
1456
  ...
1415
1457
 
1416
1458
  @typing.overload
1417
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1459
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1418
1460
  ...
1419
1461
 
1420
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1462
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1421
1463
  """
1422
- Specifies the times when the flow should be run when running on a
1423
- production scheduler.
1464
+ Specifies the event(s) that this flow depends on.
1465
+
1466
+ ```
1467
+ @trigger(event='foo')
1468
+ ```
1469
+ or
1470
+ ```
1471
+ @trigger(events=['foo', 'bar'])
1472
+ ```
1473
+
1474
+ Additionally, you can specify the parameter mappings
1475
+ to map event payload to Metaflow parameters for the flow.
1476
+ ```
1477
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1478
+ ```
1479
+ or
1480
+ ```
1481
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1482
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1483
+ ```
1484
+
1485
+ 'parameters' can also be a list of strings and tuples like so:
1486
+ ```
1487
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1488
+ ```
1489
+ This is equivalent to:
1490
+ ```
1491
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1492
+ ```
1424
1493
 
1425
1494
  Parameters
1426
1495
  ----------
1427
- hourly : bool, default False
1428
- Run the workflow hourly.
1429
- daily : bool, default True
1430
- Run the workflow daily.
1431
- weekly : bool, default False
1432
- Run the workflow weekly.
1433
- cron : str, optional, default None
1434
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1435
- specified by this expression.
1436
- timezone : str, optional, default None
1437
- Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1438
- which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1496
+ event : Union[str, Dict[str, Any]], optional, default None
1497
+ Event dependency for this flow.
1498
+ events : List[Union[str, Dict[str, Any]]], default []
1499
+ Events dependency for this flow.
1500
+ options : Dict[str, Any], default {}
1501
+ Backend-specific configuration for tuning eventing behavior.
1502
+
1503
+
1439
1504
  """
1440
1505
  ...
1441
1506
 
@@ -1543,139 +1608,41 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
1543
1608
  ...
1544
1609
 
1545
1610
  @typing.overload
1546
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1611
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1547
1612
  """
1548
- Specifies the event(s) that this flow depends on.
1549
-
1550
- ```
1551
- @trigger(event='foo')
1552
- ```
1553
- or
1554
- ```
1555
- @trigger(events=['foo', 'bar'])
1556
- ```
1557
-
1558
- Additionally, you can specify the parameter mappings
1559
- to map event payload to Metaflow parameters for the flow.
1560
- ```
1561
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1562
- ```
1563
- or
1564
- ```
1565
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1566
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1567
- ```
1568
-
1569
- 'parameters' can also be a list of strings and tuples like so:
1570
- ```
1571
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1572
- ```
1573
- This is equivalent to:
1574
- ```
1575
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1576
- ```
1613
+ Specifies the PyPI packages for all steps of the flow.
1577
1614
 
1615
+ Use `@pypi_base` to set common packages required by all
1616
+ steps and use `@pypi` to specify step-specific overrides.
1578
1617
  Parameters
1579
1618
  ----------
1580
- event : Union[str, Dict[str, Any]], optional, default None
1581
- Event dependency for this flow.
1582
- events : List[Union[str, Dict[str, Any]]], default []
1583
- Events dependency for this flow.
1584
- options : Dict[str, Any], default {}
1585
- Backend-specific configuration for tuning eventing behavior.
1586
-
1587
-
1619
+ packages : Dict[str, str], default: {}
1620
+ Packages to use for this flow. The key is the name of the package
1621
+ and the value is the version to use.
1622
+ python : str, optional, default: None
1623
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1624
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1588
1625
  """
1589
1626
  ...
1590
1627
 
1591
1628
  @typing.overload
1592
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1593
- ...
1594
-
1595
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
1596
- """
1597
- Specifies the event(s) that this flow depends on.
1598
-
1599
- ```
1600
- @trigger(event='foo')
1601
- ```
1602
- or
1603
- ```
1604
- @trigger(events=['foo', 'bar'])
1605
- ```
1606
-
1607
- Additionally, you can specify the parameter mappings
1608
- to map event payload to Metaflow parameters for the flow.
1609
- ```
1610
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
1611
- ```
1612
- or
1613
- ```
1614
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
1615
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
1616
- ```
1617
-
1618
- 'parameters' can also be a list of strings and tuples like so:
1619
- ```
1620
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
1621
- ```
1622
- This is equivalent to:
1623
- ```
1624
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
1625
- ```
1626
-
1627
- Parameters
1628
- ----------
1629
- event : Union[str, Dict[str, Any]], optional, default None
1630
- Event dependency for this flow.
1631
- events : List[Union[str, Dict[str, Any]]], default []
1632
- Events dependency for this flow.
1633
- options : Dict[str, Any], default {}
1634
- Backend-specific configuration for tuning eventing behavior.
1635
-
1636
-
1637
- """
1629
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1638
1630
  ...
1639
1631
 
1640
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1632
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1641
1633
  """
1642
- The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1643
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1634
+ Specifies the PyPI packages for all steps of the flow.
1644
1635
 
1636
+ Use `@pypi_base` to set common packages required by all
1637
+ steps and use `@pypi` to specify step-specific overrides.
1645
1638
  Parameters
1646
1639
  ----------
1647
- timeout : int
1648
- Time, in seconds before the task times out and fails. (Default: 3600)
1649
- poke_interval : int
1650
- Time in seconds that the job should wait in between each try. (Default: 60)
1651
- mode : str
1652
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1653
- exponential_backoff : bool
1654
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1655
- pool : str
1656
- the slot pool this task should run in,
1657
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
1658
- soft_fail : bool
1659
- Set to true to mark the task as SKIPPED on failure. (Default: False)
1660
- name : str
1661
- Name of the sensor on Airflow
1662
- description : str
1663
- Description of sensor in the Airflow UI
1664
- external_dag_id : str
1665
- The dag_id that contains the task you want to wait for.
1666
- external_task_ids : List[str]
1667
- The list of task_ids that you want to wait for.
1668
- If None (default value) the sensor waits for the DAG. (Default: None)
1669
- allowed_states : List[str]
1670
- Iterable of allowed states, (Default: ['success'])
1671
- failed_states : List[str]
1672
- Iterable of failed or dis-allowed states. (Default: None)
1673
- execution_delta : datetime.timedelta
1674
- time difference with the previous execution to look at,
1675
- the default is the same logical date as the current task or DAG. (Default: None)
1676
- check_existence: bool
1677
- Set to True to check if the external task exists or check if
1678
- the DAG to wait for exists. (Default: True)
1640
+ packages : Dict[str, str], default: {}
1641
+ Packages to use for this flow. The key is the name of the package
1642
+ and the value is the version to use.
1643
+ python : str, optional, default: None
1644
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
1645
+ that the version used will correspond to the version of the Python interpreter used to start the run.
1679
1646
  """
1680
1647
  ...
1681
1648
 
@@ -1728,6 +1695,48 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
1728
1695
  """
1729
1696
  ...
1730
1697
 
1698
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1699
+ """
1700
+ The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
1701
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
1702
+
1703
+ Parameters
1704
+ ----------
1705
+ timeout : int
1706
+ Time, in seconds before the task times out and fails. (Default: 3600)
1707
+ poke_interval : int
1708
+ Time in seconds that the job should wait in between each try. (Default: 60)
1709
+ mode : str
1710
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1711
+ exponential_backoff : bool
1712
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1713
+ pool : str
1714
+ the slot pool this task should run in,
1715
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1716
+ soft_fail : bool
1717
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1718
+ name : str
1719
+ Name of the sensor on Airflow
1720
+ description : str
1721
+ Description of sensor in the Airflow UI
1722
+ external_dag_id : str
1723
+ The dag_id that contains the task you want to wait for.
1724
+ external_task_ids : List[str]
1725
+ The list of task_ids that you want to wait for.
1726
+ If None (default value) the sensor waits for the DAG. (Default: None)
1727
+ allowed_states : List[str]
1728
+ Iterable of allowed states, (Default: ['success'])
1729
+ failed_states : List[str]
1730
+ Iterable of failed or dis-allowed states. (Default: None)
1731
+ execution_delta : datetime.timedelta
1732
+ time difference with the previous execution to look at,
1733
+ the default is the same logical date as the current task or DAG. (Default: None)
1734
+ check_existence: bool
1735
+ Set to True to check if the external task exists or check if
1736
+ the DAG to wait for exists. (Default: True)
1737
+ """
1738
+ ...
1739
+
1731
1740
  def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1732
1741
  """
1733
1742
  Specifies what flows belong to the same project.
@@ -1747,41 +1756,51 @@ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typ
1747
1756
  ...
1748
1757
 
1749
1758
  @typing.overload
1750
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1759
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1751
1760
  """
1752
- Specifies the PyPI packages for all steps of the flow.
1761
+ Specifies the times when the flow should be run when running on a
1762
+ production scheduler.
1753
1763
 
1754
- Use `@pypi_base` to set common packages required by all
1755
- steps and use `@pypi` to specify step-specific overrides.
1756
1764
  Parameters
1757
1765
  ----------
1758
- packages : Dict[str, str], default: {}
1759
- Packages to use for this flow. The key is the name of the package
1760
- and the value is the version to use.
1761
- python : str, optional, default: None
1762
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1763
- that the version used will correspond to the version of the Python interpreter used to start the run.
1766
+ hourly : bool, default False
1767
+ Run the workflow hourly.
1768
+ daily : bool, default True
1769
+ Run the workflow daily.
1770
+ weekly : bool, default False
1771
+ Run the workflow weekly.
1772
+ cron : str, optional, default None
1773
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1774
+ specified by this expression.
1775
+ timezone : str, optional, default None
1776
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1777
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1764
1778
  """
1765
1779
  ...
1766
1780
 
1767
1781
  @typing.overload
1768
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1782
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
1769
1783
  ...
1770
1784
 
1771
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
1785
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
1772
1786
  """
1773
- Specifies the PyPI packages for all steps of the flow.
1787
+ Specifies the times when the flow should be run when running on a
1788
+ production scheduler.
1774
1789
 
1775
- Use `@pypi_base` to set common packages required by all
1776
- steps and use `@pypi` to specify step-specific overrides.
1777
1790
  Parameters
1778
1791
  ----------
1779
- packages : Dict[str, str], default: {}
1780
- Packages to use for this flow. The key is the name of the package
1781
- and the value is the version to use.
1782
- python : str, optional, default: None
1783
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
1784
- that the version used will correspond to the version of the Python interpreter used to start the run.
1792
+ hourly : bool, default False
1793
+ Run the workflow hourly.
1794
+ daily : bool, default True
1795
+ Run the workflow daily.
1796
+ weekly : bool, default False
1797
+ Run the workflow weekly.
1798
+ cron : str, optional, default None
1799
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
1800
+ specified by this expression.
1801
+ timezone : str, optional, default None
1802
+ Timezone on which the schedule runs (default: None). Currently supported only for Argo workflows,
1803
+ which accepts timezones in [IANA format](https://nodatime.org/TimeZones).
1785
1804
  """
1786
1805
  ...
1787
1806
 
@@ -2975,3 +2994,33 @@ class NBRunner(object, metaclass=type):
2975
2994
  ...
2976
2995
  ...
2977
2996
 
2997
+ class Deployer(object, metaclass=type):
2998
+ def __init__(self, flow_file: str, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, cwd: typing.Optional[str] = None, **kwargs):
2999
+ ...
3000
+ def _Deployer__make_function(self, deployer_class):
3001
+ """
3002
+ Create a function for the given deployer class.
3003
+
3004
+ Parameters
3005
+ ----------
3006
+ deployer_class : Type[DeployerImpl]
3007
+ Deployer implementation class.
3008
+
3009
+ Returns
3010
+ -------
3011
+ Callable
3012
+ Function that initializes and returns an instance of the deployer class.
3013
+ """
3014
+ ...
3015
+ ...
3016
+
3017
+ class NBDeployer(object, metaclass=type):
3018
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", **kwargs):
3019
+ ...
3020
+ def cleanup(self):
3021
+ """
3022
+ Delete any temporary files created during execution.
3023
+ """
3024
+ ...
3025
+ ...
3026
+