metaflow-stubs 2.12.25__py2.py3-none-any.whl → 2.12.26__py2.py3-none-any.whl

This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
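To confirm which of the two builds is installed locally, a quick check is possible from Python. This is a hedged sketch that assumes the wheel above was installed from PyPI under its published name.

```
# Hedged sketch: report the locally installed metaflow-stubs version.
# Assumes the wheel was installed from PyPI under the name "metaflow-stubs".
from importlib.metadata import version

print(version("metaflow-stubs"))  # expected to print "2.12.26" after upgrading
```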
Files changed (152)
  1. metaflow-stubs/__init__.pyi +544 -541
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +3 -3
  4. metaflow-stubs/client/__init__.pyi +4 -4
  5. metaflow-stubs/client/core.pyi +6 -6
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +3 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +6 -6
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +2 -2
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +10 -9
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +4 -4
  21. metaflow-stubs/plugins/__init__.pyi +3 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow.pyi +3 -3
  24. metaflow-stubs/plugins/airflow/airflow_cli.pyi +7 -4
  25. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +2 -2
  26. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  27. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  28. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  30. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  31. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  32. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  33. metaflow-stubs/plugins/argo/argo_client.pyi +11 -2
  34. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  35. metaflow-stubs/plugins/argo/argo_workflows.pyi +5 -5
  36. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +10 -7
  37. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +8 -8
  38. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +2 -2
  39. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  43. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  44. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +2 -2
  45. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  46. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +3 -3
  47. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  48. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +4 -4
  49. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  52. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  53. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  54. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  55. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +3 -3
  56. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  57. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +2 -2
  58. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +2 -2
  59. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  61. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  62. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +4 -4
  63. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  64. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  65. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  66. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  67. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  68. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  69. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_decorator.pyi +3 -3
  72. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  73. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  74. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  75. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +2 -2
  76. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +2 -2
  77. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +2 -2
  78. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +2 -2
  79. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +2 -2
  80. metaflow-stubs/plugins/cards/card_modules/components.pyi +3 -3
  81. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  82. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  83. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  84. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  85. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  86. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  87. metaflow-stubs/plugins/catch_decorator.pyi +4 -4
  88. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  89. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  90. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  91. metaflow-stubs/plugins/datatools/s3/s3.pyi +5 -5
  92. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  93. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  94. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  95. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  96. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  97. metaflow-stubs/plugins/events_decorator.pyi +3 -3
  98. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  99. metaflow-stubs/plugins/frameworks/pytorch.pyi +8 -7
  100. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  101. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +4 -4
  102. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  103. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  104. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  105. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  106. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  108. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +3 -3
  109. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +6 -3
  110. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  111. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +6 -3
  112. metaflow-stubs/plugins/kubernetes/kubernetes_job.pyi +2 -2
  113. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  114. metaflow-stubs/plugins/logs_cli.pyi +4 -4
  115. metaflow-stubs/plugins/package_cli.pyi +2 -2
  116. metaflow-stubs/plugins/parallel_decorator.pyi +8 -7
  117. metaflow-stubs/plugins/project_decorator.pyi +3 -3
  118. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  119. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  120. metaflow-stubs/plugins/pypi/conda_environment.pyi +4 -4
  121. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  122. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  123. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  124. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  125. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  126. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  127. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +3 -3
  128. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  129. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  130. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  131. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  132. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  133. metaflow-stubs/procpoll.pyi +2 -2
  134. metaflow-stubs/pylint_wrapper.pyi +2 -2
  135. metaflow-stubs/runner/__init__.pyi +2 -2
  136. metaflow-stubs/runner/deployer.pyi +4 -4
  137. metaflow-stubs/runner/metaflow_runner.pyi +5 -8
  138. metaflow-stubs/runner/nbdeploy.pyi +5 -7
  139. metaflow-stubs/runner/nbrun.pyi +5 -7
  140. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  141. metaflow-stubs/runner/utils.pyi +2 -6
  142. metaflow-stubs/system/__init__.pyi +4 -4
  143. metaflow-stubs/system/system_logger.pyi +3 -3
  144. metaflow-stubs/system/system_monitor.pyi +3 -3
  145. metaflow-stubs/tagging_util.pyi +2 -2
  146. metaflow-stubs/tuple_util.pyi +2 -2
  147. metaflow-stubs/version.pyi +2 -2
  148. {metaflow_stubs-2.12.25.dist-info → metaflow_stubs-2.12.26.dist-info}/METADATA +2 -2
  149. metaflow_stubs-2.12.26.dist-info/RECORD +152 -0
  150. {metaflow_stubs-2.12.25.dist-info → metaflow_stubs-2.12.26.dist-info}/WHEEL +1 -1
  151. metaflow_stubs-2.12.25.dist-info/RECORD +0 -152
  152. {metaflow_stubs-2.12.25.dist-info → metaflow_stubs-2.12.26.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.25 #
- # Generated on 2024-10-07T19:08:03.779487 #
+ # MF version: 2.12.26 #
+ # Generated on 2024-10-23T21:00:22.846814 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
- import typing
- import metaflow.metaflow_current
- import io
+ import metaflow.flowspec
  import metaflow.client.core
+ import metaflow.parameters
+ import metaflow.runner.metaflow_runner
+ import datetime
+ import io
  import metaflow.datastore.inputs
+ import typing
  import metaflow._vendor.click.types
- import metaflow.flowspec
- import metaflow.plugins.datatools.s3.s3
- import datetime
- import metaflow.parameters
  import metaflow.events
- import metaflow.runner.metaflow_runner
+ import metaflow.metaflow_current
+ import metaflow.plugins.datatools.s3.s3
  FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
  StepFlag = typing.NewType("StepFlag", bool)
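The `TYPE_CHECKING` imports, `FlowSpecDerived`, and `StepFlag` above exist only so the decorator overloads later in this stub can be typed. A minimal flow like the sketch below (illustrative, not part of the diff) is what editors and type checkers resolve against these stubs.

```
# Minimal illustrative flow that type checkers resolve against these stubs;
# the class and step bodies are not part of the package diff above.
from metaflow import FlowSpec, step

class HelloFlow(FlowSpec):
    @step
    def start(self):
        self.greeting = "hello"  # artifact available to downstream steps
        self.next(self.end)

    @step
    def end(self):
        print(self.greeting)

if __name__ == "__main__":
    HelloFlow()
```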

@@ -855,513 +855,547 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
  ...

  @typing.overload
- def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+ def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
  """
- Specifies that the step will success under all circumstances.
+ Creates a human-readable report, a Metaflow Card, after this step completes.

- The decorator will create an optional artifact, specified by `var`, which
- contains the exception raised. You can use it to detect the presence
- of errors, indicating that all happy-path artifacts produced by the step
- are missing.
+ Note that you may add multiple `@card` decorators in a step with different parameters.

  Parameters
  ----------
- var : str, optional, default None
- Name of the artifact in which to store the caught exception.
- If not specified, the exception is not stored.
- print_exception : bool, default True
- Determines whether or not the exception is printed to
- stdout when caught.
+ type : str, default 'default'
+ Card type.
+ id : str, optional, default None
+ If multiple cards are present, use this id to identify this card.
+ options : Dict[str, Any], default {}
+ Options passed to the card. The contents depend on the card type.
+ timeout : int, default 45
+ Interrupt reporting if it takes more than this many seconds.
+
+
  """
  ...
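For reference, a hedged usage sketch of the `@card` signature documented above; the step body and the card id are illustrative.

```
# Illustrative use of @card as documented above; "training_report" is a made-up id.
from metaflow import FlowSpec, card, step

class ReportFlow(FlowSpec):
    @card(type="default", id="training_report", timeout=45)
    @step
    def start(self):
        self.accuracy = 0.93  # artifacts like this are rendered into the default card
        self.next(self.end)

    @step
    def end(self):
        pass
```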

  @typing.overload
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
- """
- Specifies a timeout for your step.
-
- This decorator is useful if this step may hang indefinitely.
-
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
-
- Note that all the values specified in parameters are added together so if you specify
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
-
- Parameters
- ----------
- seconds : int, default 0
- Number of seconds to wait prior to timing out.
- minutes : int, default 0
- Number of minutes to wait prior to timing out.
- hours : int, default 0
- Number of hours to wait prior to timing out.
- """
+ def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
  """
- Specifies that this step should execute on Kubernetes.
+ Specifies environment variables to be set prior to the execution of a step.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step. If `@resources` is
- also present, the maximum value from all decorators is used.
- memory : int, default 4096
- Memory size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- disk : int, default 10240
- Disk size (in MB) required for this step. If
- `@resources` is also present, the maximum value from all decorators is
- used.
- image : str, optional, default None
- Docker image to use when launching on Kubernetes. If not specified, and
- METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
- not, a default Docker image mapping to the current version of Python is used.
- image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
- If given, the imagePullPolicy to be applied to the Docker image of the step.
- service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
- Kubernetes service account to use when launching pod in Kubernetes.
- secrets : List[str], optional, default None
- Kubernetes secrets to use when launching pod in Kubernetes. These
- secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
- in Metaflow configuration.
- node_selector: Union[Dict[str,str], str], optional, default None
- Kubernetes node selector(s) to apply to the pod running the task.
- Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
- or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
- namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
- Kubernetes namespace to use when launching pod in Kubernetes.
- gpu : int, optional, default None
- Number of GPUs required for this step. A value of zero implies that
- the scheduled node should not have GPUs.
- gpu_vendor : str, default KUBERNETES_GPU_VENDOR
- The vendor of the GPUs to be used for this step.
- tolerations : List[str], default []
- The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
- Kubernetes tolerations to use when launching pod in Kubernetes.
- use_tmpfs : bool, default False
- This enables an explicit tmpfs mount for this step.
- tmpfs_tempdir : bool, default True
- sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
- tmpfs_size : int, optional, default: None
- The value for the size (in MiB) of the tmpfs mount for this step.
- This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
- memory allocated for this step.
- tmpfs_path : str, optional, default /metaflow_temp
- Path to tmpfs mount for this step.
- persistent_volume_claims : Dict[str, str], optional, default None
- A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
- volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
- shared_memory: int, optional
- Shared memory size (in MiB) required for this step
- port: int, optional
- Port number to specify in the Kubernetes job object
- compute_pool : str, optional, default None
- Compute pool to be used for for this step.
- If not specified, any accessible compute pool within the perimeter is used.
+ vars : Dict[str, str], default {}
+ Dictionary of environment variables to set.
  """
  ...
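A hedged usage sketch of `@environment(vars=...)` as documented above; the variable name and value are placeholders.

```
# Illustrative use of @environment(vars=...); the variable is a placeholder.
import os
from metaflow import FlowSpec, environment, step

class EnvFlow(FlowSpec):
    @environment(vars={"MY_SETTING": "1"})
    @step
    def start(self):
        print(os.environ["MY_SETTING"])  # set before the step executes
        self.next(self.end)

    @step
    def end(self):
        pass
```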

  @typing.overload
- def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+ def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
  """
- Specifies the Conda environment for the step.
+ Specifies the PyPI packages for the step.

  Information in this decorator will augment any
- attributes set in the `@conda_base` flow-level decorator. Hence,
- you can use `@conda_base` to set packages required by all
- steps and use `@conda` to specify step-specific overrides.
+ attributes set in the `@pyi_base` flow-level decorator. Hence,
+ you can use `@pypi_base` to set packages required by all
+ steps and use `@pypi` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default {}
+ packages : Dict[str, str], default: {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- libraries : Dict[str, str], default {}
- Supported for backward compatibility. When used with packages, packages will take precedence.
- python : str, optional, default None
+ python : str, optional, default: None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
- disabled : bool, default False
- If set to True, disables @conda.
  """
  ...
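A hedged usage sketch of `@pypi` per the docstring above; the package pin and Python version are placeholders.

```
# Illustrative use of @pypi; the pins are placeholders.
from metaflow import FlowSpec, pypi, step

class PypiFlow(FlowSpec):
    @pypi(packages={"pandas": "2.2.2"}, python="3.11.5")
    @step
    def start(self):
        import pandas as pd  # resolved from the step-specific environment
        self.rows = len(pd.DataFrame({"x": [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        pass
```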

  @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
- """
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
+ """
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
  """
- Specifies the resources needed when executing this step.
+ Specifies the number of times the task corresponding
+ to a step needs to be retried.

- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
+ This decorator is useful for handling transient errors, such as networking issues.
+ If your task contains operations that can't be retried safely, e.g. database updates,
+ it is advisable to annotate it with `@retry(times=0)`.

- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ This can be used in conjunction with the `@catch` decorator. The `@catch`
+ decorator will execute a no-op task after all retries have been exhausted,
+ ensuring that the flow execution can continue.

  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ times : int, default 3
+ Number of times to retry this task.
+ minutes_between_retries : int, default 2
+ Number of minutes between retries.
  """
  ...
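A hedged usage sketch of `@retry` as documented above; the URL stands in for a transient, retry-safe operation.

```
# Illustrative use of @retry; the URL is a placeholder for a flaky network call.
import urllib.request
from metaflow import FlowSpec, retry, step

class RetryFlow(FlowSpec):
    @retry(times=3, minutes_between_retries=2)
    @step
    def start(self):
        with urllib.request.urlopen("https://example.com") as resp:
            self.payload_size = len(resp.read())
        self.next(self.end)

    @step
    def end(self):
        pass
```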

  @typing.overload
- def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
  """
- Specifies secrets to be retrieved and injected as environment variables prior to
- the execution of a step.
+ Specifies a timeout for your step.
+
+ This decorator is useful if this step may hang indefinitely.
+
+ This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+ A timeout is considered to be an exception thrown by the step. It will cause the step to be
+ retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+ Note that all the values specified in parameters are added together so if you specify
+ 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

  Parameters
  ----------
- sources : List[Union[str, Dict[str, Any]]], default: []
- List of secret specs, defining how the secrets are to be retrieved
+ seconds : int, default 0
+ Number of seconds to wait prior to timing out.
+ minutes : int, default 0
+ Number of minutes to wait prior to timing out.
+ hours : int, default 0
+ Number of hours to wait prior to timing out.
  """
  ...
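A hedged usage sketch of `@timeout`; per the docstring above the units are summed, so this step gets an effective limit of 1 hour 30 minutes.

```
# Illustrative use of @timeout; the step body is a stand-in for long-running work.
from metaflow import FlowSpec, step, timeout

class TimeoutFlow(FlowSpec):
    @timeout(hours=1, minutes=30)
    @step
    def start(self):
        self.total = sum(range(10_000_000))
        self.next(self.end)

    @step
    def end(self):
        pass
```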

  @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
  """
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
+ def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
+ Specifies that the step will success under all circumstances.

- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The decorator will create an optional artifact, specified by `var`, which
+ contains the exception raised. You can use it to detect the presence
+ of errors, indicating that all happy-path artifacts produced by the step
+ are missing.

  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
+ var : str, optional, default None
+ Name of the artifact in which to store the caught exception.
+ If not specified, the exception is not stored.
+ print_exception : bool, default True
+ Determines whether or not the exception is printed to
+ stdout when caught.
+ """
+ ...
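A hedged usage sketch of `@catch(var=...)`; the downstream step inspects the artifact that holds the caught exception, as described above.

```
# Illustrative use of @catch; the artifact name is a placeholder.
from metaflow import FlowSpec, catch, step

class CatchFlow(FlowSpec):
    @catch(var="compute_error", print_exception=True)
    @step
    def start(self):
        self.value = 1 / 0  # deliberately fails; the exception is stored in compute_error
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "compute_error", None):
            print("start failed:", self.compute_error)
```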
+
+ @typing.overload
+ def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
+ """
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ ...
+
+ @typing.overload
+ def secrets(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []):
+ """
+ Specifies secrets to be retrieved and injected as environment variables prior to
+ the execution of a step.

+ Parameters
+ ----------
+ sources : List[Union[str, Dict[str, Any]]], default: []
+ List of secret specs, defining how the secrets are to be retrieved
  """
  ...
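A hedged usage sketch of `@secrets(sources=[...])`; the secret name and the environment variable it is assumed to expose are placeholders.

```
# Illustrative use of @secrets; the source and variable names are placeholders.
import os
from metaflow import FlowSpec, secrets, step

class SecretsFlow(FlowSpec):
    @secrets(sources=["my-db-credentials"])
    @step
    def start(self):
        # Resolved secrets are injected as environment variables before the step runs.
        self.has_password = "DB_PASSWORD" in os.environ
        self.next(self.end)

    @step
    def end(self):
        pass
```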

  @typing.overload
- def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  """
- Specifies the PyPI packages for the step.
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ ...
+
+ def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+ """
+ Decorator prototype for all step decorators. This function gets specialized
+ and imported for all decorators types by _import_plugin_decorators().
+ """
+ ...
+
+ @typing.overload
+ def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ """
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

  @typing.overload
- def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
  """
- Specifies the PyPI packages for the step.
+ Specifies the Conda environment for the step.

  Information in this decorator will augment any
- attributes set in the `@pyi_base` flow-level decorator. Hence,
- you can use `@pypi_base` to set packages required by all
- steps and use `@pypi` to specify step-specific overrides.
+ attributes set in the `@conda_base` flow-level decorator. Hence,
+ you can use `@conda_base` to set packages required by all
+ steps and use `@conda` to specify step-specific overrides.

  Parameters
  ----------
- packages : Dict[str, str], default: {}
+ packages : Dict[str, str], default {}
  Packages to use for this step. The key is the name of the package
  and the value is the version to use.
- python : str, optional, default: None
+ libraries : Dict[str, str], default {}
+ Supported for backward compatibility. When used with packages, packages will take precedence.
+ python : str, optional, default None
  Version of Python to use, e.g. '3.7.4'. A default value of None implies
  that the version used will correspond to the version of the Python interpreter used to start the run.
+ disabled : bool, default False
+ If set to True, disables @conda.
  """
  ...
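A hedged usage sketch of `@conda` together with the flow-level `@conda_base` it augments, per the docstring above; the pins are placeholders.

```
# Illustrative use of @conda with @conda_base; the pins are placeholders.
from metaflow import FlowSpec, conda, conda_base, step

@conda_base(python="3.11.5")
class CondaFlow(FlowSpec):
    @conda(packages={"numpy": "1.26.4"})
    @step
    def start(self):
        import numpy as np  # resolved from the step's Conda environment
        self.mean = float(np.mean([1, 2, 3]))
        self.next(self.end)

    @step
    def end(self):
        pass
```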

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+ def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
+ Specifies the resources needed when executing this step.
+
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.
+
+ Parameters
+ ----------
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...

  @typing.overload
- def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
  ...

- def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
- """
- Decorator prototype for all step decorators. This function gets specialized
- and imported for all decorators types by _import_plugin_decorators().
- """
+ @typing.overload
+ def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
  ...

- @typing.overload
- def retry(*, times: int = 3, minutes_between_retries: int = 2) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
+ Specifies the resources needed when executing this step.

- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
+ Use `@resources` to specify the resource requirements
+ independently of the specific compute layer (`@batch`, `@kubernetes`).

- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ You can choose the compute layer on the command line by executing e.g.
+ ```
+ python myflow.py run --with batch
+ ```
+ or
+ ```
+ python myflow.py run --with kubernetes
+ ```
+ which executes the flow on the desired system using the
+ requirements specified in `@resources`.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step.
+ gpu : int, default 0
+ Number of GPUs required for this step.
+ disk : int, optional, default None
+ Disk size (in MB) required for this step. Only applies on Kubernetes.
+ memory : int, default 4096
+ Memory size (in MB) required for this step.
+ shared_memory : int, optional, default None
+ The value for the size (in MiB) of the /dev/shm volume for this step.
+ This parameter maps to the `--shm-size` option in Docker.
  """
  ...
1337
1332
 
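As an aside for readers of this stub: a minimal sketch of how the `@resources` decorator added above is typically applied, assuming it is importable from the top-level `metaflow` package as these stubs indicate; the flow name and numbers are illustrative only.

```python
from metaflow import FlowSpec, resources, step


class TrainFlow(FlowSpec):

    # Request 2 CPUs, 8 GB of memory and a 1 GiB /dev/shm volume for this step.
    # The request only takes effect on a compute layer, e.g.
    # `python trainflow.py run --with batch` or `--with kubernetes`.
    @resources(cpu=2, memory=8192, shared_memory=1024)
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainFlow()
```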
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def retry(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, times: int = 3, minutes_between_retries: int = 2):
+ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: typing.Optional[str] = None, image_pull_policy: str = "KUBERNETES_IMAGE_PULL_POLICY", service_account: str = "METAFLOW_KUBERNETES_SERVICE_ACCOUNT", secrets: typing.Optional[typing.List[str]] = None, node_selector: typing.Union[typing.Dict[str, str], str, None] = None, namespace: str = "METAFLOW_KUBERNETES_NAMESPACE", gpu: typing.Optional[int] = None, gpu_vendor: str = "KUBERNETES_GPU_VENDOR", tolerations: typing.List[str] = [], use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = "/metaflow_temp", persistent_volume_claims: typing.Optional[typing.Dict[str, str]] = None, shared_memory: typing.Optional[int] = None, port: typing.Optional[int] = None, compute_pool: typing.Optional[str] = None, hostname_resolution_timeout: int = 600) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
  """
- Specifies the number of times the task corresponding
- to a step needs to be retried.
-
- This decorator is useful for handling transient errors, such as networking issues.
- If your task contains operations that can't be retried safely, e.g. database updates,
- it is advisable to annotate it with `@retry(times=0)`.
-
- This can be used in conjunction with the `@catch` decorator. The `@catch`
- decorator will execute a no-op task after all retries have been exhausted,
- ensuring that the flow execution can continue.
+ Specifies that this step should execute on Kubernetes.

  Parameters
  ----------
- times : int, default 3
- Number of times to retry this task.
- minutes_between_retries : int, default 2
- Number of minutes between retries.
+ cpu : int, default 1
+ Number of CPUs required for this step. If `@resources` is
+ also present, the maximum value from all decorators is used.
+ memory : int, default 4096
+ Memory size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ disk : int, default 10240
+ Disk size (in MB) required for this step. If
+ `@resources` is also present, the maximum value from all decorators is
+ used.
+ image : str, optional, default None
+ Docker image to use when launching on Kubernetes. If not specified, and
+ METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+ not, a default Docker image mapping to the current version of Python is used.
+ image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+ If given, the imagePullPolicy to be applied to the Docker image of the step.
+ service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+ Kubernetes service account to use when launching pod in Kubernetes.
+ secrets : List[str], optional, default None
+ Kubernetes secrets to use when launching pod in Kubernetes. These
+ secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+ in Metaflow configuration.
+ node_selector: Union[Dict[str,str], str], optional, default None
+ Kubernetes node selector(s) to apply to the pod running the task.
+ Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+ or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+ namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+ Kubernetes namespace to use when launching pod in Kubernetes.
+ gpu : int, optional, default None
+ Number of GPUs required for this step. A value of zero implies that
+ the scheduled node should not have GPUs.
+ gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+ The vendor of the GPUs to be used for this step.
+ tolerations : List[str], default []
+ The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+ Kubernetes tolerations to use when launching pod in Kubernetes.
+ use_tmpfs : bool, default False
+ This enables an explicit tmpfs mount for this step.
+ tmpfs_tempdir : bool, default True
+ Sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+ tmpfs_size : int, optional, default: None
+ The value for the size (in MiB) of the tmpfs mount for this step.
+ This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+ memory allocated for this step.
+ tmpfs_path : str, optional, default /metaflow_temp
+ Path to tmpfs mount for this step.
+ persistent_volume_claims : Dict[str, str], optional, default None
+ A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+ volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+ shared_memory: int, optional
+ Shared memory size (in MiB) required for this step.
+ port: int, optional
+ Port number to specify in the Kubernetes job object.
+ compute_pool : str, optional, default None
+ Compute pool to be used for this step.
+ If not specified, any accessible compute pool within the perimeter is used.
+ hostname_resolution_timeout: int, default 10 * 60
+ Timeout in seconds for the worker tasks in the gang-scheduled cluster to resolve the hostname of the control task.
+ Only applicable when @parallel is used.
  """
  ...

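For illustration, a hedged sketch of the `@kubernetes` decorator documented above, using only parameters whose semantics are described in the docstring; the sizes and node-selector value are placeholders, not recommendations.

```python
from metaflow import FlowSpec, kubernetes, step


class K8sFlow(FlowSpec):

    # Run this step as a Kubernetes pod with 2 CPUs, 8 GB memory and 20 GB disk,
    # pinned to amd64 nodes via a node-selector string.
    @kubernetes(cpu=2, memory=8192, disk=20480,
                node_selector="kubernetes.io/arch=amd64")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    K8sFlow()
```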
@@ -1513,79 +1547,44 @@ def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
  ...

  @typing.overload
- def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
- """
- Specifies environment variables to be set prior to the execution of a step.
-
- Parameters
- ----------
- vars : Dict[str, str], default {}
- Dictionary of environment variables to set.
- """
- ...
-
- @typing.overload
- def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`

  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1594,94 +1593,51 @@ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] =
  ...

  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.

  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully

- Additionally, you can specify project aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```

- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```

- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
-
- Parameters
- ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
- options : Dict[str, Any], default {}
- Backend-specific configuration for tuning eventing behavior.
-
-
- """
- ...
-
- @typing.overload
- def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
- Parameters
- ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
- """
- ...
-
- @typing.overload
- def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
- """
- Specifies the PyPI packages for all steps of the flow.
-
- Use `@pypi_base` to set common packages required by all
- steps and use `@pypi` to specify step-specific overrides.
  Parameters
  ----------
- packages : Dict[str, str], default: {}
- Packages to use for this flow. The key is the name of the package
- and the value is the version to use.
- python : str, optional, default: None
- Version of Python to use, e.g. '3.7.4'. A default value of None implies
- that the version used will correspond to the version of the Python interpreter used to start the run.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
+ options : Dict[str, Any], default {}
+ Backend-specific configuration for tuning eventing behavior.
+
+
  """
  ...

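To make the parameter-mapping behavior concrete, here is a small sketch of an event-triggered flow; the event name `data_refreshed` and the payload field `table_name` are hypothetical, and the trigger only takes effect once the flow is deployed to a production scheduler (e.g. Argo Workflows).

```python
from metaflow import FlowSpec, Parameter, step, trigger


# Map the event payload field 'table_name' onto the flow parameter 'table'.
@trigger(event={'name': 'data_refreshed', 'parameters': {'table': 'table_name'}})
class EventDrivenFlow(FlowSpec):
    table = Parameter('table', default='unknown')

    @step
    def start(self):
        print('Triggered for table:', self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    EventDrivenFlow()
```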
@@ -1777,44 +1733,147 @@ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly:
  ...

  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
+ @typing.overload
+ def pypi_base(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ ...
+
+ def pypi_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+ """
+ Specifies the PyPI packages for all steps of the flow.
+
+ Use `@pypi_base` to set common packages required by all
+ steps and use `@pypi` to specify step-specific overrides.
+ Parameters
+ ----------
+ packages : Dict[str, str], default: {}
+ Packages to use for this flow. The key is the name of the package
+ and the value is the version to use.
+ python : str, optional, default: None
+ Version of Python to use, e.g. '3.7.4'. A default value of None implies
+ that the version used will correspond to the version of the Python interpreter used to start the run.
+ """
+ ...
+
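A brief sketch of `@pypi_base` as documented above; the package pin and Python version are arbitrary examples, not versions required by Metaflow.

```python
from metaflow import FlowSpec, pypi_base, step


@pypi_base(packages={'pandas': '2.1.1'}, python='3.10.11')
class PandasFlow(FlowSpec):

    @step
    def start(self):
        # pandas resolves from the isolated @pypi_base environment.
        import pandas as pd
        self.rows = len(pd.DataFrame({'x': [1, 2, 3]}))
        self.next(self.end)

    @step
    def end(self):
        print('rows:', self.rows)


if __name__ == '__main__':
    PandasFlow()
```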
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
+
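For context, a minimal sketch of `@project`; the project name is a placeholder, and the branch (e.g. `prod` or `user.<name>`) is typically chosen at run or deploy time rather than in code.

```python
from metaflow import FlowSpec, project, step


@project(name='fraud_detection')
class ScoringFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ScoringFlow()
```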
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+ added as flow decorators. Adding more than one decorator ensures that the `start` step
+ starts only after all sensors finish.
+
+ Parameters
+ ----------
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait in between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ bucket_key : Union[str, List[str]]
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+ When it's specified as a full s3:// url, please leave `bucket_name` as None.
+ bucket_name : str
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+ When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
+ wildcard_match : bool
+ Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+ aws_conn_id : str
+ A reference to the S3 connection on Airflow. (Default: None)
+ verify : bool
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
+ """
+ ...
+
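A hedged sketch of attaching the sensor above to a flow. The stub's signature lists every argument without a default, so they are all passed explicitly here using the defaults named in the docstring; the bucket key and sensor name are hypothetical, and the decorator only has an effect when the flow is compiled with `airflow create`.

```python
from metaflow import FlowSpec, airflow_s3_key_sensor, step


@airflow_s3_key_sensor(
    timeout=3600,
    poke_interval=60,
    mode='poke',
    exponential_backoff=True,
    pool=None,             # per the docstring, defaults to None
    soft_fail=False,
    name='wait_for_daily_dump',                           # hypothetical sensor name
    description='Block start until the daily dump lands',
    bucket_key='s3://example-bucket/daily/dump.parquet',  # hypothetical key
    bucket_name=None,      # not needed with a full s3:// URL
    wildcard_match=False,
    aws_conn_id=None,
    verify=None,
)
class SensorFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    SensorFlow()
```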
+ @typing.overload
+ def trigger_on_finish(*, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1823,47 +1882,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...

  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...

- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[typing.Dict[str, str], str, None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.

  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.

- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```

- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```

+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Events dependency for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.

@@ -1871,24 +1934,6 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...

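For completeness, a minimal sketch of chaining flows with `@trigger_on_finish`; `DataPrepFlow` is a hypothetical upstream flow, and the dependency is only honored once both flows are deployed to the same production scheduler.

```python
from metaflow import FlowSpec, step, trigger_on_finish


# Run this flow whenever a run of DataPrepFlow completes successfully
# in the same (project) namespace.
@trigger_on_finish(flow='DataPrepFlow')
class ReportingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == '__main__':
    ReportingFlow()
```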
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
  @typing.overload
  def conda_base(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1938,48 +1983,6 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...

- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as a flow decorators. Adding more than one decorator will ensure that `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait in between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
- pool : str
- the slot pool this task should run in,
- slot pools are a way to limit concurrency for certain tasks. (Default:None)
- soft_fail : bool
- Set to true to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow
- description : str
- Description of sensor in the Airflow UI
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports full s3:// style url or relative path from root level.
- When it's specified as a full s3:// url, please leave `bucket_name` as None
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
- When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
- wildcard_match : bool
- whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- a reference to the s3 connection on Airflow. (Default: None)
- verify : bool
- Whether or not to verify SSL certificates for S3 connection. (Default: None)
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.
@@ -3214,15 +3217,15 @@ class NBRunner(object, metaclass=type):
  Additional environment variables to set for the Run. This overrides the
  environment set for this process.
  base_dir : Optional[str], default None
- The directory to run the subprocess in; if not specified, a temporary
- directory is used.
+ The directory to run the subprocess in; if not specified, the current
+ working directory is used.
  file_read_timeout : int, default 3600
  The timeout until which we try to read the runner attribute file.
  **kwargs : Any
  Additional arguments that you would pass to `python myflow.py` before
  the `run` command.
  """
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
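Since the only behavioral change here is the `base_dir` default (now the current working directory instead of a fixed temporary path), a short notebook-style sketch of `NBRunner`; `HelloFlow` is a hypothetical flow defined in an earlier cell.

```python
# In a notebook cell, after HelloFlow has been defined in a previous cell:
from metaflow import NBRunner

# base_dir now defaults to None, i.e. the current working directory;
# pass base_dir="/some/dir" explicitly to pin a different location.
run = NBRunner(HelloFlow).nbrun()
print(run)  # the finished Run object, if the run completes successfully
```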
  def nbrun(self, **kwargs):
  """
@@ -3403,13 +3406,13 @@ class NBDeployer(object, metaclass=type):
  Additional environment variables to set. This overrides the
  environment set for this process.
  base_dir : Optional[str], default None
- The directory to run the subprocess in; if not specified, a temporary
- directory is used.
+ The directory to run the subprocess in; if not specified, the current
+ working directory is used.
  **kwargs : Any
  Additional arguments that you would pass to `python myflow.py` i.e. options
  listed in `python myflow.py --help`
  """
- def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: str = "/tmp", file_read_timeout: int = 3600, **kwargs):
+ def __init__(self, flow, show_output: bool = True, profile: typing.Optional[str] = None, env: typing.Optional[typing.Dict] = None, base_dir: typing.Optional[str] = None, file_read_timeout: int = 3600, **kwargs):
  ...
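Similarly for `NBDeployer`, whose `base_dir` default changes in the same way; a hedged sketch assuming the Argo Workflows deployer sub-API is exposed as `argo_workflows()`, as in the `Deployer` interface, and that `HelloFlow` exists in the notebook.

```python
# In a notebook cell, after HelloFlow has been defined:
from metaflow import NBDeployer

deployer = NBDeployer(HelloFlow)   # base_dir now defaults to the cwd
deployed_flow = deployer.argo_workflows().create()
deployer.cleanup()                 # remove temporary files created for the deployment
```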
  def cleanup(self):
  """