ob-metaflow-stubs 4.8-py2.py3-none-any.whl → 5.0-py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150)
  1. metaflow-stubs/__init__.pyi +471 -618
  2. metaflow-stubs/cards.pyi +5 -5
  3. metaflow-stubs/cli.pyi +2 -2
  4. metaflow-stubs/client/__init__.pyi +3 -3
  5. metaflow-stubs/client/core.pyi +7 -7
  6. metaflow-stubs/client/filecache.pyi +2 -2
  7. metaflow-stubs/clone_util.pyi +2 -2
  8. metaflow-stubs/events.pyi +2 -2
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +5 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +4 -4
  13. metaflow-stubs/metadata/metadata.pyi +2 -2
  14. metaflow-stubs/metadata/util.pyi +2 -2
  15. metaflow-stubs/metaflow_config.pyi +2 -2
  16. metaflow-stubs/metaflow_current.pyi +23 -23
  17. metaflow-stubs/mflog/mflog.pyi +2 -2
  18. metaflow-stubs/multicore_utils.pyi +2 -2
  19. metaflow-stubs/parameters.pyi +4 -4
  20. metaflow-stubs/plugins/__init__.pyi +2 -2
  21. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  22. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +3 -3
  26. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +3 -3
  27. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +3 -3
  28. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  29. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_events.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_workflows.pyi +3 -3
  32. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +6 -6
  33. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +7 -7
  34. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +4 -4
  35. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  36. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  38. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  41. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  42. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +2 -2
  43. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  45. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +3 -3
  50. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  51. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  52. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  55. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  56. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  57. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  58. metaflow-stubs/plugins/cards/card_cli.pyi +5 -5
  59. metaflow-stubs/plugins/cards/card_client.pyi +3 -3
  60. metaflow-stubs/plugins/cards/card_creator.pyi +2 -2
  61. metaflow-stubs/plugins/cards/card_datastore.pyi +2 -2
  62. metaflow-stubs/plugins/cards/card_decorator.pyi +2 -2
  63. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +2 -2
  64. metaflow-stubs/plugins/cards/card_modules/basic.pyi +3 -3
  65. metaflow-stubs/plugins/cards/card_modules/card.pyi +2 -2
  66. metaflow-stubs/plugins/cards/card_modules/components.pyi +4 -4
  67. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +2 -2
  68. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  69. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +2 -2
  70. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  71. metaflow-stubs/plugins/cards/component_serializer.pyi +4 -4
  72. metaflow-stubs/plugins/cards/exception.pyi +2 -2
  73. metaflow-stubs/plugins/catch_decorator.pyi +3 -3
  74. metaflow-stubs/plugins/datatools/__init__.pyi +3 -3
  75. metaflow-stubs/plugins/datatools/local.pyi +2 -2
  76. metaflow-stubs/plugins/datatools/s3/__init__.pyi +3 -3
  77. metaflow-stubs/plugins/datatools/s3/s3.pyi +6 -6
  78. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  79. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  80. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  81. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  82. metaflow-stubs/plugins/environment_decorator.pyi +2 -2
  83. metaflow-stubs/plugins/events_decorator.pyi +2 -2
  84. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  85. metaflow-stubs/plugins/frameworks/pytorch.pyi +2 -2
  86. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +2 -2
  88. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  90. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  92. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  93. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +3 -3
  95. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +3 -3
  97. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +2 -2
  98. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  99. metaflow-stubs/plugins/package_cli.pyi +2 -2
  100. metaflow-stubs/plugins/parallel_decorator.pyi +2 -2
  101. metaflow-stubs/plugins/perimeters.pyi +2 -2
  102. metaflow-stubs/plugins/project_decorator.pyi +2 -2
  103. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  104. metaflow-stubs/plugins/pypi/conda_decorator.pyi +2 -2
  105. metaflow-stubs/plugins/pypi/conda_environment.pyi +3 -3
  106. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/pypi_environment.pyi +3 -3
  108. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  109. metaflow-stubs/plugins/resources_decorator.pyi +2 -2
  110. metaflow-stubs/plugins/retry_decorator.pyi +2 -2
  111. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  112. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  113. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +2 -2
  114. metaflow-stubs/plugins/storage_executor.pyi +2 -2
  115. metaflow-stubs/plugins/tag_cli.pyi +4 -4
  116. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +3 -3
  117. metaflow-stubs/plugins/timeout_decorator.pyi +3 -3
  118. metaflow-stubs/procpoll.pyi +2 -2
  119. metaflow-stubs/profilers/__init__.pyi +2 -2
  120. metaflow-stubs/pylint_wrapper.pyi +2 -2
  121. metaflow-stubs/runner/__init__.pyi +2 -2
  122. metaflow-stubs/runner/deployer.pyi +2 -2
  123. metaflow-stubs/runner/metaflow_runner.pyi +4 -4
  124. metaflow-stubs/runner/nbdeploy.pyi +2 -2
  125. metaflow-stubs/runner/nbrun.pyi +2 -2
  126. metaflow-stubs/runner/subprocess_manager.pyi +2 -2
  127. metaflow-stubs/runner/utils.pyi +2 -2
  128. metaflow-stubs/system/__init__.pyi +3 -3
  129. metaflow-stubs/system/system_logger.pyi +2 -2
  130. metaflow-stubs/system/system_monitor.pyi +3 -3
  131. metaflow-stubs/tagging_util.pyi +2 -2
  132. metaflow-stubs/tuple_util.pyi +2 -2
  133. {ob_metaflow_stubs-4.8.dist-info → ob_metaflow_stubs-5.0.dist-info}/METADATA +1 -1
  134. ob_metaflow_stubs-5.0.dist-info/RECORD +137 -0
  135. metaflow-stubs/plugins/airflow/airflow.pyi +0 -187
  136. metaflow-stubs/plugins/airflow/airflow_cli.pyi +0 -90
  137. metaflow-stubs/plugins/airflow/airflow_decorator.pyi +0 -50
  138. metaflow-stubs/plugins/aws/batch/batch_cli.pyi +0 -46
  139. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +0 -145
  140. metaflow-stubs/plugins/aws/step_functions/dynamo_db_client.pyi +0 -22
  141. metaflow-stubs/plugins/aws/step_functions/step_functions_cli.pyi +0 -159
  142. metaflow-stubs/plugins/aws/step_functions/step_functions_decorator.pyi +0 -49
  143. metaflow-stubs/plugins/cards/card_modules/chevron/__init__.pyi +0 -78
  144. metaflow-stubs/plugins/cards/card_modules/chevron/main.pyi +0 -77
  145. metaflow-stubs/plugins/cards/card_modules/chevron/metadata.pyi +0 -11
  146. metaflow-stubs/plugins/cards/card_modules/chevron/renderer.pyi +0 -114
  147. metaflow-stubs/plugins/cards/card_modules/chevron/tokenizer.pyi +0 -75
  148. ob_metaflow_stubs-4.8.dist-info/RECORD +0 -150
  149. {ob_metaflow_stubs-4.8.dist-info → ob_metaflow_stubs-5.0.dist-info}/WHEEL +0 -0
  150. {ob_metaflow_stubs-4.8.dist-info → ob_metaflow_stubs-5.0.dist-info}/top_level.txt +0 -0
@@ -1,25 +1,25 @@
 ##################################################################################
 # Auto-generated Metaflow stub file #
-# MF version: 2.12.9.1+ob(v1) #
-# Generated on 2024-07-25T19:14:17.762690 #
+# MF version: 2.12.10.2+ob(v1) #
+# Generated on 2024-08-06T17:10:53.103007 #
 ##################################################################################

 from __future__ import annotations

 import typing
 if typing.TYPE_CHECKING:
-    import typing
-    import metaflow._vendor.click.types
-    import datetime
-    import metaflow.client.core
-    import metaflow.datastore.inputs
     import metaflow.events
-    import metaflow.plugins.datatools.s3.s3
+    import metaflow.flowspec
     import metaflow.parameters
+    import metaflow.metaflow_current
     import io
-    import metaflow.flowspec
+    import metaflow.client.core
+    import metaflow.datastore.inputs
     import metaflow.runner.metaflow_runner
-    import metaflow.metaflow_current
+    import typing
+    import metaflow._vendor.click.types
+    import metaflow.plugins.datatools.s3.s3
+    import datetime

 FlowSpecDerived = typing.TypeVar("FlowSpecDerived", bound="FlowSpec", contravariant=False, covariant=False)
 StepFlag = typing.NewType("StepFlag", bool)

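The `FlowSpecDerived` type variable and the `StepFlag` marker above are what let type checkers accept every decorator below both bare (`@conda`) and parameterized (`@conda(...)`). A minimal sketch of the same overload pattern, using a hypothetical `my_decorator` that is not part of these stubs:

import typing

F = typing.TypeVar("F", bound=typing.Callable[..., None])

@typing.overload
def my_decorator(f: F) -> F:
    # Bare form: @my_decorator
    ...

@typing.overload
def my_decorator(*, option: str = "x") -> typing.Callable[[F], F]:
    # Parameterized form: @my_decorator(option="y")
    ...

def my_decorator(f=None, *, option="x"):
    # Runtime shim: wrap directly when used bare, otherwise return a decorator.
    def wrap(g):
        return g
    return wrap(f) if f is not None else wrap

A checker resolves the bare form to the first overload and the parameterized form to the second, which is exactly the shape of the `conda`, `pypi`, and `card` stubs below.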
@@ -728,127 +731,190 @@ def step(f: typing.Union[typing.Callable[[FlowSpecDerived], None], typing.Callab
     ...

 @typing.overload
-def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the PyPI packages for the step.
+    Specifies the Conda environment for the step.

     Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default: {}
+    packages : Dict[str, str], default {}
        Packages to use for this step. The key is the name of the package
        and the value is the version to use.
-    python : str, optional, default: None
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
+def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
     """
-    Specifies the PyPI packages for the step.
+    Specifies the Conda environment for the step.

     Information in this decorator will augment any
-    attributes set in the `@pyi_base` flow-level decorator. Hence,
-    you can use `@pypi_base` to set packages required by all
-    steps and use `@pypi` to specify step-specific overrides.
+    attributes set in the `@conda_base` flow-level decorator. Hence,
+    you can use `@conda_base` to set packages required by all
+    steps and use `@conda` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default: {}
+    packages : Dict[str, str], default {}
        Packages to use for this step. The key is the name of the package
        and the value is the version to use.
-    python : str, optional, default: None
+    libraries : Dict[str, str], default {}
+        Supported for backward compatibility. When used with packages, packages will take precedence.
+    python : str, optional, default None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
+    disabled : bool, default False
+        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def conda(*, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies the Conda environment for the step.
+    Specifies environment variables to be set prior to the execution of a step.
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    ...
+
+@typing.overload
+def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+    """
+    Specifies environment variables to be set prior to the execution of a step.
+
+    Parameters
+    ----------
+    vars : Dict[str, str], default {}
+        Dictionary of environment variables to set.
+    """
+    ...
+
+@typing.overload
+def pypi(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+    """
+    Specifies the PyPI packages for the step.

     Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
+    packages : Dict[str, str], default: {}
        Packages to use for this step. The key is the name of the package
        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
+    python : str, optional, default: None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def conda(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def pypi(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def conda(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, libraries: typing.Dict[str, str] = {}, python: typing.Optional[str] = None, disabled: bool = False):
+def pypi(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None):
     """
-    Specifies the Conda environment for the step.
+    Specifies the PyPI packages for the step.

     Information in this decorator will augment any
-    attributes set in the `@conda_base` flow-level decorator. Hence,
-    you can use `@conda_base` to set packages required by all
-    steps and use `@conda` to specify step-specific overrides.
+    attributes set in the `@pyi_base` flow-level decorator. Hence,
+    you can use `@pypi_base` to set packages required by all
+    steps and use `@pypi` to specify step-specific overrides.

     Parameters
     ----------
-    packages : Dict[str, str], default {}
+    packages : Dict[str, str], default: {}
        Packages to use for this step. The key is the name of the package
        and the value is the version to use.
-    libraries : Dict[str, str], default {}
-        Supported for backward compatibility. When used with packages, packages will take precedence.
-    python : str, optional, default None
+    python : str, optional, default: None
        Version of Python to use, e.g. '3.7.4'. A default value of None implies
        that the version used will correspond to the version of the Python interpreter used to start the run.
-    disabled : bool, default False
-        If set to True, disables @conda.
     """
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

 @typing.overload
-def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

-def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+@typing.overload
+def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
     """
-    Decorator prototype for all step decorators. This function gets specialized
-    and imported for all decorators types by _import_plugin_decorators().
+    Creates a human-readable report, a Metaflow Card, after this step completes.
+
+    Note that you may add multiple `@card` decorators in a step with different parameters.
+
+    Parameters
+    ----------
+    type : str, default 'default'
+        Card type.
+    id : str, optional, default None
+        If multiple cards are present, use this id to identify this card.
+    options : Dict[str, Any], default {}
+        Options passed to the card. The contents depend on the card type.
+    timeout : int, default 45
+        Interrupt reporting if it takes more than this many seconds.
+
+
     """
     ...

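The hunk above reorders the `@conda`, `@environment`, `@pypi`, and `@card` stubs without changing how the decorators are applied. A minimal sketch of a step using them together (package and version choices are illustrative, and using `@conda` assumes a Conda-capable Metaflow installation):

from metaflow import FlowSpec, step, conda, environment, card

class ExampleFlow(FlowSpec):

    @conda(packages={"pandas": "2.1.1"}, python="3.10.4")
    @environment(vars={"MODE": "dev"})
    @card(type="default", timeout=45)
    @step
    def start(self):
        import pandas as pd  # resolved inside the step's Conda environment
        self.shape = pd.DataFrame({"a": [1, 2]}).shape
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ExampleFlow()

Invoked as `python example_flow.py --environment=conda run`; the card produced for `start` can then be inspected with Metaflow's `card view` command.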
@@ -906,229 +969,185 @@ def retry(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], ty
     ...

 @typing.overload
-def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that the step will success under all circumstances.
+    Specifies a timeout for your step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
+def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
     """
-    Specifies that the step will success under all circumstances.
+    Specifies a timeout for your step.

-    The decorator will create an optional artifact, specified by `var`, which
-    contains the exception raised. You can use it to detect the presence
-    of errors, indicating that all happy-path artifacts produced by the step
-    are missing.
+    This decorator is useful if this step may hang indefinitely.
+
+    This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
+    A timeout is considered to be an exception thrown by the step. It will cause the step to be
+    retried if needed and the exception will be caught by the `@catch` decorator, if present.
+
+    Note that all the values specified in parameters are added together so if you specify
+    60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.

     Parameters
     ----------
-    var : str, optional, default None
-        Name of the artifact in which to store the caught exception.
-        If not specified, the exception is not stored.
-    print_exception : bool, default True
-        Determines whether or not the exception is printed to
-        stdout when caught.
+    seconds : int, default 0
+        Number of seconds to wait prior to timing out.
+    minutes : int, default 0
+        Number of minutes to wait prior to timing out.
+    hours : int, default 0
+        Number of hours to wait prior to timing out.
     """
     ...

 @typing.overload
-def batch(*, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
+        Number of CPUs required for this step.
     gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
     memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+        Memory size (in MB) required for this step.
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def batch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def batch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, memory: int = 4096, image: typing.Optional[str] = None, queue: str = "METAFLOW_BATCH_JOB_QUEUE", iam_role: str = "METAFLOW_ECS_S3_ACCESS_IAM_ROLE", execution_role: str = "METAFLOW_ECS_FARGATE_EXECUTION_ROLE", shared_memory: typing.Optional[int] = None, max_swap: typing.Optional[int] = None, swappiness: typing.Optional[int] = None, use_tmpfs: bool = False, tmpfs_tempdir: bool = True, tmpfs_size: typing.Optional[int] = None, tmpfs_path: typing.Optional[str] = None, inferentia: int = 0, trainium: int = None, efa: int = 0, ephemeral_storage: int = None, log_driver: typing.Optional[str] = None, log_options: typing.Optional[typing.List[str]] = None):
+def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
     """
-    Specifies that this step should execute on [AWS Batch](https://aws.amazon.com/batch/).
+    Specifies the resources needed when executing this step.
+
+    Use `@resources` to specify the resource requirements
+    independently of the specific compute layer (`@batch`, `@kubernetes`).
+
+    You can choose the compute layer on the command line by executing e.g.
+    ```
+    python myflow.py run --with batch
+    ```
+    or
+    ```
+    python myflow.py run --with kubernetes
+    ```
+    which executes the flow on the desired system using the
+    requirements specified in `@resources`.

     Parameters
     ----------
     cpu : int, default 1
-        Number of CPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
+        Number of CPUs required for this step.
     gpu : int, default 0
-        Number of GPUs required for this step. If `@resources` is
-        also present, the maximum value from all decorators is used.
+        Number of GPUs required for this step.
+    disk : int, optional, default None
+        Disk size (in MB) required for this step. Only applies on Kubernetes.
     memory : int, default 4096
-        Memory size (in MB) required for this step. If
-        `@resources` is also present, the maximum value from all decorators is
-        used.
-    image : str, optional, default None
-        Docker image to use when launching on AWS Batch. If not specified, and
-        METAFLOW_BATCH_CONTAINER_IMAGE is specified, that image is used. If
-        not, a default Docker image mapping to the current version of Python is used.
-    queue : str, default METAFLOW_BATCH_JOB_QUEUE
-        AWS Batch Job Queue to submit the job to.
-    iam_role : str, default METAFLOW_ECS_S3_ACCESS_IAM_ROLE
-        AWS IAM role that AWS Batch container uses to access AWS cloud resources.
-    execution_role : str, default METAFLOW_ECS_FARGATE_EXECUTION_ROLE
-        AWS IAM role that AWS Batch can use [to trigger AWS Fargate tasks]
-        (https://docs.aws.amazon.com/batch/latest/userguide/execution-IAM-role.html).
+        Memory size (in MB) required for this step.
     shared_memory : int, optional, default None
         The value for the size (in MiB) of the /dev/shm volume for this step.
         This parameter maps to the `--shm-size` option in Docker.
-    max_swap : int, optional, default None
-        The total amount of swap memory (in MiB) a container can use for this
-        step. This parameter is translated to the `--memory-swap` option in
-        Docker where the value is the sum of the container memory plus the
-        `max_swap` value.
-    swappiness : int, optional, default None
-        This allows you to tune memory swappiness behavior for this step.
-        A swappiness value of 0 causes swapping not to happen unless absolutely
-        necessary. A swappiness value of 100 causes pages to be swapped very
-        aggressively. Accepted values are whole numbers between 0 and 100.
-    use_tmpfs : bool, default False
-        This enables an explicit tmpfs mount for this step. Note that tmpfs is
-        not available on Fargate compute environments
-    tmpfs_tempdir : bool, default True
-        sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
-    tmpfs_size : int, optional, default None
-        The value for the size (in MiB) of the tmpfs mount for this step.
-        This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
-        memory allocated for this step.
-    tmpfs_path : str, optional, default None
-        Path to tmpfs mount for this step. Defaults to /metaflow_temp.
-    inferentia : int, default 0
-        Number of Inferentia chips required for this step.
-    trainium : int, default None
-        Alias for inferentia. Use only one of the two.
-    efa : int, default 0
-        Number of elastic fabric adapter network devices to attach to container
-    ephemeral_storage : int, default None
-        The total amount, in GiB, of ephemeral storage to set for the task, 21-200GiB.
-        This is only relevant for Fargate compute environments
-    log_driver: str, optional, default None
-        The log driver to use for the Amazon ECS container.
-    log_options: List[str], optional, default None
-        List of strings containing options for the chosen log driver. The configurable values
-        depend on the `log driver` chosen. Validation of these options is not supported yet.
-        Example: [`awslogs-group:aws/batch/job`]
     """
     ...

 @typing.overload
-def environment(*, vars: typing.Dict[str, str] = {}) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+def catch(*, var: typing.Optional[str] = None, print_exception: bool = True) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.

     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
     ...

 @typing.overload
-def environment(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+def catch(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
     ...

-def environment(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, vars: typing.Dict[str, str] = {}):
+def catch(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, var: typing.Optional[str] = None, print_exception: bool = True):
     """
-    Specifies environment variables to be set prior to the execution of a step.
+    Specifies that the step will success under all circumstances.
+
+    The decorator will create an optional artifact, specified by `var`, which
+    contains the exception raised. You can use it to detect the presence
+    of errors, indicating that all happy-path artifacts produced by the step
+    are missing.

     Parameters
     ----------
-    vars : Dict[str, str], default {}
-        Dictionary of environment variables to set.
+    var : str, optional, default None
+        Name of the artifact in which to store the caught exception.
+        If not specified, the exception is not stored.
+    print_exception : bool, default True
+        Determines whether or not the exception is printed to
+        stdout when caught.
     """
     ...

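The `@timeout`, `@resources`, and `@catch` stubs above compose in the way their docstrings describe: a timeout surfaces as a step failure, `@retry` retries it, and `@catch` finally absorbs it. A sketch of a typical stack (resource numbers are illustrative):

from metaflow import FlowSpec, step, resources, timeout, retry, catch

class RobustFlow(FlowSpec):

    @resources(cpu=2, memory=8192)    # honored when run --with batch or --with kubernetes
    @timeout(minutes=30)              # fail the attempt if it hangs past 30 minutes
    @retry(times=2)                   # a timeout counts as an exception, so it is retried
    @catch(var="train_error")         # after the retries, store the exception and continue
    @step
    def start(self):
        self.model = "trained"
        self.next(self.end)

    @step
    def end(self):
        if getattr(self, "train_error", None):
            print("start failed:", self.train_error)

if __name__ == "__main__":
    RobustFlow()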
@@ -1191,6 +1210,25 @@ def kubernetes(*, cpu: int = 1, memory: int = 4096, disk: int = 10240, image: ty
     """
     ...

+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
+@typing.overload
+def parallel(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+    ...
+
+def parallel(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None):
+    """
+    Decorator prototype for all step decorators. This function gets specialized
+    and imported for all decorators types by _import_plugin_decorators().
+    """
+    ...
+
 @typing.overload
 def secrets(*, sources: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = []) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
     """
@@ -1224,230 +1262,213 @@ def secrets(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None],
1224
1262
  """
1225
1263
  ...
1226
1264
 
1227
- @typing.overload
1228
- def timeout(*, seconds: int = 0, minutes: int = 0, hours: int = 0) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
1265
+ def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1229
1266
  """
1230
- Specifies a timeout for your step.
1267
+ This decorator is used to run NIM containers in Metaflow tasks as sidecars.
1231
1268
 
1232
- This decorator is useful if this step may hang indefinitely.
1269
+ User code call
1270
+ -----------
1271
+ @nim(
1272
+ models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
1273
+ backend='managed'
1274
+ )
1233
1275
 
1234
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1235
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1236
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1276
+ Valid backend options
1277
+ ---------------------
1278
+ - 'managed': Outerbounds selects a compute provider based on the model.
1279
+ - 🚧 'dataplane': Run in your account.
1237
1280
 
1238
- Note that all the values specified in parameters are added together so if you specify
1239
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1281
+ Valid model options
1282
+ ----------------
1283
+ - 'meta/llama3-8b-instruct': 8B parameter model
1284
+ - 'meta/llama3-70b-instruct': 70B parameter model
1285
+ - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
1240
1286
 
1241
1287
  Parameters
1242
1288
  ----------
1243
- seconds : int, default 0
1244
- Number of seconds to wait prior to timing out.
1245
- minutes : int, default 0
1246
- Number of minutes to wait prior to timing out.
1247
- hours : int, default 0
1248
- Number of hours to wait prior to timing out.
1289
+ models: list[NIM]
1290
+ List of NIM containers running models in sidecars.
1291
+ backend: str
1292
+ Compute provider to run the NIM container.
1249
1293
  """
1250
1294
  ...
1251
1295
 
1252
- @typing.overload
1253
- def timeout(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
1254
- ...
1255
-
1256
- @typing.overload
1257
- def timeout(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
1258
- ...
1259
-
1260
- def timeout(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, seconds: int = 0, minutes: int = 0, hours: int = 0):
1296
+ def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
1261
1297
  """
1262
- Specifies a timeout for your step.
1263
-
1264
- This decorator is useful if this step may hang indefinitely.
1265
-
1266
- This can be used in conjunction with the `@retry` decorator as well as the `@catch` decorator.
1267
- A timeout is considered to be an exception thrown by the step. It will cause the step to be
1268
- retried if needed and the exception will be caught by the `@catch` decorator, if present.
1269
-
1270
- Note that all the values specified in parameters are added together so if you specify
1271
- 60 seconds and 1 hour, the decorator will have an effective timeout of 1 hour and 1 minute.
1298
+ The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
1299
+ before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
1300
+ and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
1301
+ added as a flow decorators. Adding more than one decorator will ensure that `start` step
1302
+ starts only after all sensors finish.
1272
1303
 
1273
1304
  Parameters
1274
1305
  ----------
1275
- seconds : int, default 0
1276
- Number of seconds to wait prior to timing out.
1277
- minutes : int, default 0
1278
- Number of minutes to wait prior to timing out.
1279
- hours : int, default 0
1280
- Number of hours to wait prior to timing out.
1306
+ timeout : int
1307
+ Time, in seconds before the task times out and fails. (Default: 3600)
1308
+ poke_interval : int
1309
+ Time in seconds that the job should wait in between each try. (Default: 60)
1310
+ mode : str
1311
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
1312
+ exponential_backoff : bool
1313
+ allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
1314
+ pool : str
1315
+ the slot pool this task should run in,
1316
+ slot pools are a way to limit concurrency for certain tasks. (Default:None)
1317
+ soft_fail : bool
1318
+ Set to true to mark the task as SKIPPED on failure. (Default: False)
1319
+ name : str
1320
+ Name of the sensor on Airflow
1321
+ description : str
1322
+ Description of sensor in the Airflow UI
1323
+ bucket_key : Union[str, List[str]]
1324
+ The key(s) being waited on. Supports full s3:// style url or relative path from root level.
1325
+ When it's specified as a full s3:// url, please leave `bucket_name` as None
1326
+ bucket_name : str
1327
+ Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
1328
+ When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
1329
+ wildcard_match : bool
1330
+ whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
1331
+ aws_conn_id : str
1332
+ a reference to the s3 connection on Airflow. (Default: None)
1333
+ verify : bool
1334
+ Whether or not to verify SSL certificates for S3 connection. (Default: None)
1281
1335
  """
1282
1336
  ...
 
- @typing.overload
- def card(*, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
+ The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+ This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
 
  Parameters
  ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
+ timeout : int
+ Time, in seconds, before the task times out and fails. (Default: 3600)
+ poke_interval : int
+ Time in seconds that the job should wait between each try. (Default: 60)
+ mode : str
+ How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+ exponential_backoff : bool
+ Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
+ pool : str
+ The slot pool this task should run in;
+ slot pools are a way to limit concurrency for certain tasks. (Default: None)
+ soft_fail : bool
+ Set to True to mark the task as SKIPPED on failure. (Default: False)
+ name : str
+ Name of the sensor on Airflow.
+ description : str
+ Description of the sensor in the Airflow UI.
+ external_dag_id : str
+ The dag_id that contains the task you want to wait for.
+ external_task_ids : List[str]
+ The list of task_ids that you want to wait for.
+ If None (the default), the sensor waits for the DAG. (Default: None)
+ allowed_states : List[str]
+ Iterable of allowed states. (Default: ['success'])
+ failed_states : List[str]
+ Iterable of failed or disallowed states. (Default: None)
+ execution_delta : datetime.timedelta
+ Time difference with the previous execution to look at;
+ the default is the same logical date as the current task or DAG. (Default: None)
+ check_existence : bool
+ Set to True to check whether the external task exists or whether
+ the DAG to wait for exists. (Default: True)
  """
  ...
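As a companion sketch, one plausible way to apply this decorator; the upstream DAG and task ids are hypothetical, and the unlisted parameters are assumed to use the defaults above:

```
from metaflow import FlowSpec, step, airflow_external_task_sensor

# Hypothetical flow: block `start` until a task in another Airflow DAG succeeds.
@airflow_external_task_sensor(
    name="wait_for_etl",
    description="Wait for the upstream ETL DAG",
    external_dag_id="nightly_etl",        # hypothetical upstream DAG id
    external_task_ids=["publish_table"],  # wait for this specific task
)
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    DownstreamFlow()
```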
 
  @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def card(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
- ...
-
- def card(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, type: str = "default", id: typing.Optional[str] = None, options: typing.Dict[str, typing.Any] = {}, timeout: int = 45):
- """
- Creates a human-readable report, a Metaflow Card, after this step completes.
-
- Note that you may add multiple `@card` decorators in a step with different parameters.
-
- Parameters
- ----------
- type : str, default 'default'
- Card type.
- id : str, optional, default None
- If multiple cards are present, use this id to identify this card.
- options : Dict[str, Any], default {}
- Options passed to the card. The contents depend on the card type.
- timeout : int, default 45
- Interrupt reporting if it takes more than this many seconds.
-
-
- """
- ...
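The `@card` stub is removed from this position (it still exists in the package); for reference, a minimal sketch of the step-level usage its docstring describes, with a hypothetical flow name:

```
from metaflow import FlowSpec, card, step

class CardFlow(FlowSpec):

    # Attach a default card to this step; after a run, view it with
    # `python cardflow.py card view start`.
    @card(type="default", id="summary", timeout=45)
    @step
    def start(self):
        self.message = "rendered in the card"
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    CardFlow()
```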
-
- @typing.overload
- def resources(*, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None) -> typing.Callable[[typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]], typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]]]:
+ def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
 
  @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, StepFlag], None]:
- ...
-
- @typing.overload
- def resources(f: typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]) -> typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None]:
+ def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def resources(f: typing.Union[typing.Callable[[FlowSpecDerived, StepFlag], None], typing.Callable[[FlowSpecDerived, typing.Any, StepFlag], None], None] = None, *, cpu: int = 1, gpu: int = 0, disk: typing.Optional[int] = None, memory: int = 4096, shared_memory: typing.Optional[int] = None):
+ def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
  """
- Specifies the resources needed when executing this step.
-
- Use `@resources` to specify the resource requirements
- independently of the specific compute layer (`@batch`, `@kubernetes`).
-
- You can choose the compute layer on the command line by executing e.g.
- ```
- python myflow.py run --with batch
- ```
- or
- ```
- python myflow.py run --with kubernetes
- ```
- which executes the flow on the desired system using the
- requirements specified in `@resources`.
+ Specifies the times when the flow should be run when running on a
+ production scheduler.
 
  Parameters
  ----------
- cpu : int, default 1
- Number of CPUs required for this step.
- gpu : int, default 0
- Number of GPUs required for this step.
- disk : int, optional, default None
- Disk size (in MB) required for this step. Only applies on Kubernetes.
- memory : int, default 4096
- Memory size (in MB) required for this step.
- shared_memory : int, optional, default None
- The value for the size (in MiB) of the /dev/shm volume for this step.
- This parameter maps to the `--shm-size` option in Docker.
+ hourly : bool, default False
+ Run the workflow hourly.
+ daily : bool, default True
+ Run the workflow daily.
+ weekly : bool, default False
+ Run the workflow weekly.
+ cron : str, optional, default None
+ Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
+ specified by this expression.
+ timezone : str, optional, default None
+ Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
+ which accept timezones in [IANA format](https://nodatime.org/TimeZones).
  """
  ...
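A short sketch of these options in use; the flow name is hypothetical, and the cron field here follows the six-field EventBridge-style syntax linked above (the exact syntax depends on the scheduler backend):

```
from metaflow import FlowSpec, schedule, step

# Hypothetical flow scheduled at 06:00 daily; a cron expression
# overrides the hourly/daily/weekly shortcuts.
@schedule(cron="0 6 * * ? *")
class NightlyFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    NightlyFlow()
```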
 
  @typing.overload
- def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1456,47 +1477,51 @@ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = No
  ...
 
  @typing.overload
- def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the event(s) that this flow depends on.
+ Specifies the flow(s) that this flow depends on.
 
  ```
- @trigger(event='foo')
+ @trigger_on_finish(flow='FooFlow')
  ```
  or
  ```
- @trigger(events=['foo', 'bar'])
+ @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
  ```
+ This decorator respects the @project decorator and triggers the flow
+ when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify the parameter mappings
- to map event payload to Metaflow parameters for the flow.
+ Additionally, you can specify project-aware upstream flow dependencies
+ by specifying the fully qualified project_flow_name.
  ```
- @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
+ @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
  ```
  or
  ```
- @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
- {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
+ @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
  ```
 
- 'parameters' can also be a list of strings and tuples like so:
- ```
- @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
- ```
- This is equivalent to:
+ You can also specify just the project or project branch (other values will be
+ inferred from the current project or project branch):
  ```
- @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
+ @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
  ```
 
+ Note that `branch` is typically one of:
+ - `prod`
+ - `user.bob`
+ - `test.my_experiment`
+ - `prod.staging`
+
  Parameters
  ----------
- event : Union[str, Dict[str, Any]], optional, default None
- Event dependency for this flow.
- events : List[Union[str, Dict[str, Any]]], default []
- Event dependencies for this flow.
+ flow : Union[str, Dict[str, str]], optional, default None
+ Upstream flow dependency for this flow.
+ flows : List[Union[str, Dict[str, str]]], default []
+ Upstream flow dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1504,6 +1529,24 @@ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: t
  """
  ...
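A minimal sketch of the common case, assuming a hypothetical upstream flow named FooFlow deployed in the same namespace; the trigger fires for deployed flows on a production scheduler, not for local runs:

```
from metaflow import FlowSpec, step, trigger_on_finish

# Hypothetical downstream flow: starts automatically whenever FooFlow
# finishes successfully within the same (project) namespace.
@trigger_on_finish(flow="FooFlow")
class ReportingFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    ReportingFlow()
```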
 
+ def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ """
+ Specifies what flows belong to the same project.
+
+ A project-specific namespace is created for all flows that
+ use the same `@project(name)`.
+
+ Parameters
+ ----------
+ name : str
+ Project name. Make sure that the name is unique amongst all
+ projects that use the same production scheduler. The name may
+ contain only lowercase alphanumeric characters and underscores.
+
+
+ """
+ ...
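A minimal sketch of the decorator in use, with hypothetical project and flow names:

```
from metaflow import FlowSpec, project, step

# All flows declaring @project(name="demo_project") share one project
# namespace, so prod/user/test branches can be deployed side by side.
@project(name="demo_project")
class MemberFlow(FlowSpec):

    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    MemberFlow()
```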
+
  @typing.overload
  def pypi_base(*, packages: typing.Dict[str, str] = {}, python: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
@@ -1592,98 +1635,45 @@ def conda_base(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, packa
  """
  ...
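The docstring bodies are elided by this hunk, but the `pypi_base` signature above suggests the usual pattern; a hedged sketch with hypothetical package pins (conda_base follows the same shape):

```
from metaflow import FlowSpec, pypi_base, step

# Hypothetical pins: every step of the flow runs in an isolated
# environment with these PyPI packages and Python version.
@pypi_base(packages={"pandas": "2.1.4"}, python="3.11")
class PandasFlow(FlowSpec):

    @step
    def start(self):
        import pandas as pd  # resolved from the pypi_base environment
        self.pandas_version = pd.__version__
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    PandasFlow()
```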
 
- def nim(*, models: "list[NIM]", backend: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- This decorator is used to run NIM containers in Metaflow tasks as sidecars.
-
- User code call
- -----------
- @nim(
- models=['meta/llama3-8b-instruct', 'meta/llama3-70b-instruct'],
- backend='managed'
- )
-
- Valid backend options
- ---------------------
- - 'managed': Outerbounds selects a compute provider based on the model.
- - 🚧 'dataplane': Run in your account.
-
- Valid model options
- ----------------
- - 'meta/llama3-8b-instruct': 8B parameter model
- - 'meta/llama3-70b-instruct': 70B parameter model
- - Upon request, any model here: https://nvcf.ngc.nvidia.com/functions?filter=nvidia-functions
-
- Parameters
- ----------
- models: list[NIM]
- List of NIM containers running models in sidecars.
- backend: str
- Compute provider to run the NIM container.
- """
- ...
-
- def project(*, name: str) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies what flows belong to the same project.
-
- A project-specific namespace is created for all flows that
- use the same `@project(name)`.
-
- Parameters
- ----------
- name : str
- Project name. Make sure that the name is unique amongst all
- projects that use the same production scheduler. The name may
- contain only lowercase alphanumeric characters and underscores.
-
-
- """
- ...
-
  @typing.overload
- def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
+ def trigger(*, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1692,51 +1682,47 @@ def trigger_on_finish(*, flow: typing.Union[str, typing.Dict[str, str], None] =
  ...
 
  @typing.overload
- def trigger_on_finish(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
+ def trigger(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
  ...
 
- def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, flow: typing.Union[str, typing.Dict[str, str], None] = None, flows: typing.List[typing.Union[str, typing.Dict[str, str]]] = [], options: typing.Dict[str, typing.Any] = {}):
+ def trigger(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, event: typing.Union[str, typing.Dict[str, typing.Any], None] = None, events: typing.List[typing.Union[str, typing.Dict[str, typing.Any]]] = [], options: typing.Dict[str, typing.Any] = {}):
  """
- Specifies the flow(s) that this flow depends on.
+ Specifies the event(s) that this flow depends on.
 
  ```
- @trigger_on_finish(flow='FooFlow')
+ @trigger(event='foo')
  ```
  or
  ```
- @trigger_on_finish(flows=['FooFlow', 'BarFlow'])
+ @trigger(events=['foo', 'bar'])
  ```
- This decorator respects the @project decorator and triggers the flow
- when upstream runs within the same namespace complete successfully.
 
- Additionally, you can specify project-aware upstream flow dependencies
- by specifying the fully qualified project_flow_name.
+ Additionally, you can specify the parameter mappings
+ to map event payload to Metaflow parameters for the flow.
  ```
- @trigger_on_finish(flow='my_project.branch.my_branch.FooFlow')
+ @trigger(event={'name':'foo', 'parameters':{'flow_param': 'event_field'}})
  ```
  or
  ```
- @trigger_on_finish(flows=['my_project.branch.my_branch.FooFlow', 'BarFlow'])
+ @trigger(events=[{'name':'foo', 'parameters':{'flow_param_1': 'event_field_1'}},
+ {'name':'bar', 'parameters':{'flow_param_2': 'event_field_2'}}])
  ```
 
- You can also specify just the project or project branch (other values will be
- inferred from the current project or project branch):
+ 'parameters' can also be a list of strings and tuples like so:
  ```
- @trigger_on_finish(flow={"name": "FooFlow", "project": "my_project", "project_branch": "branch"})
+ @trigger(event={'name':'foo', 'parameters':['common_name', ('flow_param', 'event_field')]})
+ ```
+ This is equivalent to:
+ ```
+ @trigger(event={'name':'foo', 'parameters':{'common_name': 'common_name', 'flow_param': 'event_field'}})
  ```
-
- Note that `branch` is typically one of:
- - `prod`
- - `user.bob`
- - `test.my_experiment`
- - `prod.staging`
 
  Parameters
  ----------
- flow : Union[str, Dict[str, str]], optional, default None
- Upstream flow dependency for this flow.
- flows : List[Union[str, Dict[str, str]]], default []
- Upstream flow dependencies for this flow.
+ event : Union[str, Dict[str, Any]], optional, default None
+ Event dependency for this flow.
+ events : List[Union[str, Dict[str, Any]]], default []
+ Event dependencies for this flow.
  options : Dict[str, Any], default {}
  Backend-specific configuration for tuning eventing behavior.
 
@@ -1744,139 +1730,6 @@ def trigger_on_finish(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *
  """
  ...
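To tie the parameter-mapping notation together, a small sketch; the event name, payload field, and parameter name are all hypothetical, and the trigger fires only once the flow is deployed to a production scheduler:

```
from metaflow import FlowSpec, Parameter, step, trigger

# Hypothetical event-triggered flow: a 'data_updated' event starts a run,
# and the event's 'path' field is mapped onto the `input_path` parameter.
@trigger(event={"name": "data_updated", "parameters": {"input_path": "path"}})
class EventDrivenFlow(FlowSpec):

    input_path = Parameter("input_path", default="s3://my-bucket/latest")

    @step
    def start(self):
        print("triggered with", self.input_path)
        self.next(self.end)

    @step
    def end(self):
        pass

if __name__ == "__main__":
    EventDrivenFlow()
```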
 
- def airflow_external_task_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, external_dag_id: str, external_task_ids: typing.List[str], allowed_states: typing.List[str], failed_states: typing.List[str], execution_delta: "datetime.timedelta", check_existence: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_external_task_sensor` decorator attaches an Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
- This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as flow decorators. Adding more than one decorator will ensure that the `start` step starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- external_dag_id : str
- The dag_id that contains the task you want to wait for.
- external_task_ids : List[str]
- The list of task_ids that you want to wait for.
- If None (the default), the sensor waits for the DAG. (Default: None)
- allowed_states : List[str]
- Iterable of allowed states. (Default: ['success'])
- failed_states : List[str]
- Iterable of failed or disallowed states. (Default: None)
- execution_delta : datetime.timedelta
- Time difference with the previous execution to look at;
- the default is the same logical date as the current task or DAG. (Default: None)
- check_existence : bool
- Set to True to check whether the external task exists or whether
- the DAG to wait for exists. (Default: True)
- """
- ...
-
- @typing.overload
- def schedule(*, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accept timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- @typing.overload
- def schedule(f: typing.Type[FlowSpecDerived]) -> typing.Type[FlowSpecDerived]:
- ...
-
- def schedule(f: typing.Optional[typing.Type[FlowSpecDerived]] = None, *, hourly: bool = False, daily: bool = True, weekly: bool = False, cron: typing.Optional[str] = None, timezone: typing.Optional[str] = None):
- """
- Specifies the times when the flow should be run when running on a
- production scheduler.
-
- Parameters
- ----------
- hourly : bool, default False
- Run the workflow hourly.
- daily : bool, default True
- Run the workflow daily.
- weekly : bool, default False
- Run the workflow weekly.
- cron : str, optional, default None
- Run the workflow at [a custom Cron schedule](https://docs.aws.amazon.com/eventbridge/latest/userguide/scheduled-events.html#cron-expressions)
- specified by this expression.
- timezone : str, optional, default None
- Timezone in which the schedule runs (default: None). Currently supported only for Argo workflows,
- which accept timezones in [IANA format](https://nodatime.org/TimeZones).
- """
- ...
-
- def airflow_s3_key_sensor(*, timeout: int, poke_interval: int, mode: str, exponential_backoff: bool, pool: str, soft_fail: bool, name: str, description: str, bucket_key: typing.Union[str, typing.List[str]], bucket_name: str, wildcard_match: bool, aws_conn_id: str, verify: bool) -> typing.Callable[[typing.Type[FlowSpecDerived]], typing.Type[FlowSpecDerived]]:
- """
- The `@airflow_s3_key_sensor` decorator attaches an Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
- before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
- and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
- added as flow decorators. Adding more than one decorator will ensure that the `start` step
- starts only after all sensors finish.
-
- Parameters
- ----------
- timeout : int
- Time, in seconds, before the task times out and fails. (Default: 3600)
- poke_interval : int
- Time in seconds that the job should wait between each try. (Default: 60)
- mode : str
- How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
- exponential_backoff : bool
- Allow progressively longer waits between pokes by using an exponential backoff algorithm. (Default: True)
- pool : str
- The slot pool this task should run in;
- slot pools are a way to limit concurrency for certain tasks. (Default: None)
- soft_fail : bool
- Set to True to mark the task as SKIPPED on failure. (Default: False)
- name : str
- Name of the sensor on Airflow.
- description : str
- Description of the sensor in the Airflow UI.
- bucket_key : Union[str, List[str]]
- The key(s) being waited on. Supports a full s3:// style URL or a relative path from the root level.
- When it is specified as a full s3:// URL, please leave `bucket_name` as None.
- bucket_name : str
- Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// URL.
- When specified, all the keys passed to bucket_key refer to this bucket. (Default: None)
- wildcard_match : bool
- Whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
- aws_conn_id : str
- A reference to the S3 connection on Airflow. (Default: None)
- verify : bool
- Whether to verify SSL certificates for the S3 connection. (Default: None)
- """
- ...
-
  def namespace(ns: typing.Optional[str]) -> typing.Optional[str]:
  """
  Switch namespace to the one provided.