ob-metaflow-stubs 6.0.3.104__py2.py3-none-any.whl → 6.0.3.105__py2.py3-none-any.whl

Files changed (140)
  1. metaflow-stubs/__init__.pyi +888 -550
  2. metaflow-stubs/cards.pyi +210 -4
  3. metaflow-stubs/cli.pyi +22 -2
  4. metaflow-stubs/client/__init__.pyi +128 -3
  5. metaflow-stubs/client/core.pyi +226 -6
  6. metaflow-stubs/client/filecache.pyi +3 -3
  7. metaflow-stubs/clone_util.pyi +5 -2
  8. metaflow-stubs/events.pyi +21 -3
  9. metaflow-stubs/exception.pyi +2 -2
  10. metaflow-stubs/flowspec.pyi +71 -5
  11. metaflow-stubs/generated_for.txt +1 -1
  12. metaflow-stubs/includefile.pyi +158 -5
  13. metaflow-stubs/info_file.pyi +2 -2
  14. metaflow-stubs/metadata/metadata.pyi +9 -3
  15. metaflow-stubs/metadata/util.pyi +2 -2
  16. metaflow-stubs/metaflow_config.pyi +2 -2
  17. metaflow-stubs/metaflow_current.pyi +69 -66
  18. metaflow-stubs/mflog/mflog.pyi +2 -2
  19. metaflow-stubs/multicore_utils.pyi +2 -2
  20. metaflow-stubs/parameters.pyi +69 -3
  21. metaflow-stubs/plugins/__init__.pyi +14 -3
  22. metaflow-stubs/plugins/airflow/__init__.pyi +2 -2
  23. metaflow-stubs/plugins/airflow/airflow_utils.pyi +2 -2
  24. metaflow-stubs/plugins/airflow/exception.pyi +2 -2
  25. metaflow-stubs/plugins/airflow/sensors/__init__.pyi +80 -2
  26. metaflow-stubs/plugins/airflow/sensors/base_sensor.pyi +6 -3
  27. metaflow-stubs/plugins/airflow/sensors/external_task_sensor.pyi +46 -4
  28. metaflow-stubs/plugins/airflow/sensors/s3_sensor.pyi +46 -4
  29. metaflow-stubs/plugins/argo/__init__.pyi +2 -2
  30. metaflow-stubs/plugins/argo/argo_client.pyi +2 -2
  31. metaflow-stubs/plugins/argo/argo_events.pyi +16 -2
  32. metaflow-stubs/plugins/argo/argo_workflows.pyi +17 -4
  33. metaflow-stubs/plugins/argo/argo_workflows_cli.pyi +97 -6
  34. metaflow-stubs/plugins/argo/argo_workflows_decorator.pyi +35 -7
  35. metaflow-stubs/plugins/argo/argo_workflows_deployer.pyi +50 -5
  36. metaflow-stubs/plugins/aws/__init__.pyi +2 -2
  37. metaflow-stubs/plugins/aws/aws_client.pyi +2 -2
  38. metaflow-stubs/plugins/aws/aws_utils.pyi +2 -2
  39. metaflow-stubs/plugins/aws/batch/__init__.pyi +2 -2
  40. metaflow-stubs/plugins/aws/batch/batch.pyi +2 -2
  41. metaflow-stubs/plugins/aws/batch/batch_client.pyi +2 -2
  42. metaflow-stubs/plugins/aws/batch/batch_decorator.pyi +103 -3
  43. metaflow-stubs/plugins/aws/secrets_manager/__init__.pyi +2 -2
  44. metaflow-stubs/plugins/aws/secrets_manager/aws_secrets_manager_secrets_provider.pyi +15 -3
  45. metaflow-stubs/plugins/aws/step_functions/__init__.pyi +2 -2
  46. metaflow-stubs/plugins/aws/step_functions/event_bridge_client.pyi +2 -2
  47. metaflow-stubs/plugins/aws/step_functions/production_token.pyi +2 -2
  48. metaflow-stubs/plugins/aws/step_functions/schedule_decorator.pyi +21 -2
  49. metaflow-stubs/plugins/aws/step_functions/step_functions.pyi +2 -2
  50. metaflow-stubs/plugins/aws/step_functions/step_functions_client.pyi +2 -2
  51. metaflow-stubs/plugins/aws/step_functions/step_functions_deployer.pyi +49 -4
  52. metaflow-stubs/plugins/azure/__init__.pyi +2 -2
  53. metaflow-stubs/plugins/azure/azure_credential.pyi +2 -2
  54. metaflow-stubs/plugins/azure/azure_exceptions.pyi +2 -2
  55. metaflow-stubs/plugins/azure/azure_secret_manager_secrets_provider.pyi +21 -3
  56. metaflow-stubs/plugins/azure/azure_utils.pyi +2 -2
  57. metaflow-stubs/plugins/azure/blob_service_client_factory.pyi +2 -2
  58. metaflow-stubs/plugins/azure/includefile_support.pyi +2 -2
  59. metaflow-stubs/plugins/cards/__init__.pyi +2 -2
  60. metaflow-stubs/plugins/cards/card_cli.pyi +62 -4
  61. metaflow-stubs/plugins/cards/card_client.pyi +34 -3
  62. metaflow-stubs/plugins/cards/card_creator.pyi +5 -2
  63. metaflow-stubs/plugins/cards/card_datastore.pyi +8 -2
  64. metaflow-stubs/plugins/cards/card_decorator.pyi +53 -3
  65. metaflow-stubs/plugins/cards/card_modules/__init__.pyi +28 -2
  66. metaflow-stubs/plugins/cards/card_modules/basic.pyi +42 -3
  67. metaflow-stubs/plugins/cards/card_modules/card.pyi +28 -2
  68. metaflow-stubs/plugins/cards/card_modules/components.pyi +183 -3
  69. metaflow-stubs/plugins/cards/card_modules/convert_to_native_type.pyi +5 -2
  70. metaflow-stubs/plugins/cards/card_modules/renderer_tools.pyi +2 -2
  71. metaflow-stubs/plugins/cards/card_modules/test_cards.pyi +36 -3
  72. metaflow-stubs/plugins/cards/card_resolver.pyi +2 -2
  73. metaflow-stubs/plugins/cards/component_serializer.pyi +56 -3
  74. metaflow-stubs/plugins/cards/exception.pyi +8 -2
  75. metaflow-stubs/plugins/catch_decorator.pyi +20 -3
  76. metaflow-stubs/plugins/datatools/__init__.pyi +64 -4
  77. metaflow-stubs/plugins/datatools/local.pyi +16 -2
  78. metaflow-stubs/plugins/datatools/s3/__init__.pyi +73 -4
  79. metaflow-stubs/plugins/datatools/s3/s3.pyi +82 -5
  80. metaflow-stubs/plugins/datatools/s3/s3tail.pyi +2 -2
  81. metaflow-stubs/plugins/datatools/s3/s3util.pyi +2 -2
  82. metaflow-stubs/plugins/debug_logger.pyi +2 -2
  83. metaflow-stubs/plugins/debug_monitor.pyi +2 -2
  84. metaflow-stubs/plugins/environment_decorator.pyi +10 -2
  85. metaflow-stubs/plugins/events_decorator.pyi +107 -3
  86. metaflow-stubs/plugins/frameworks/__init__.pyi +2 -2
  87. metaflow-stubs/plugins/frameworks/pytorch.pyi +25 -4
  88. metaflow-stubs/plugins/gcp/__init__.pyi +2 -2
  89. metaflow-stubs/plugins/gcp/gcp_secret_manager_secrets_provider.pyi +15 -3
  90. metaflow-stubs/plugins/gcp/gs_exceptions.pyi +2 -2
  91. metaflow-stubs/plugins/gcp/gs_storage_client_factory.pyi +2 -2
  92. metaflow-stubs/plugins/gcp/gs_utils.pyi +2 -2
  93. metaflow-stubs/plugins/gcp/includefile_support.pyi +2 -2
  94. metaflow-stubs/plugins/kubernetes/__init__.pyi +2 -2
  95. metaflow-stubs/plugins/kubernetes/kube_utils.pyi +2 -2
  96. metaflow-stubs/plugins/kubernetes/kubernetes.pyi +11 -2
  97. metaflow-stubs/plugins/kubernetes/kubernetes_cli.pyi +67 -4
  98. metaflow-stubs/plugins/kubernetes/kubernetes_client.pyi +2 -2
  99. metaflow-stubs/plugins/kubernetes/kubernetes_decorator.pyi +100 -3
  100. metaflow-stubs/plugins/kubernetes/kubernetes_jobsets.pyi +5 -2
  101. metaflow-stubs/plugins/logs_cli.pyi +3 -3
  102. metaflow-stubs/plugins/package_cli.pyi +2 -2
  103. metaflow-stubs/plugins/parallel_decorator.pyi +30 -3
  104. metaflow-stubs/plugins/perimeters.pyi +2 -2
  105. metaflow-stubs/plugins/project_decorator.pyi +60 -3
  106. metaflow-stubs/plugins/pypi/__init__.pyi +2 -2
  107. metaflow-stubs/plugins/pypi/conda_decorator.pyi +45 -2
  108. metaflow-stubs/plugins/pypi/conda_environment.pyi +5 -5
  109. metaflow-stubs/plugins/pypi/pypi_decorator.pyi +33 -2
  110. metaflow-stubs/plugins/pypi/pypi_environment.pyi +2 -2
  111. metaflow-stubs/plugins/pypi/utils.pyi +2 -2
  112. metaflow-stubs/plugins/resources_decorator.pyi +33 -2
  113. metaflow-stubs/plugins/retry_decorator.pyi +21 -2
  114. metaflow-stubs/plugins/secrets/__init__.pyi +2 -2
  115. metaflow-stubs/plugins/secrets/inline_secrets_provider.pyi +2 -2
  116. metaflow-stubs/plugins/secrets/secrets_decorator.pyi +11 -2
  117. metaflow-stubs/plugins/storage_executor.pyi +6 -2
  118. metaflow-stubs/plugins/tag_cli.pyi +35 -4
  119. metaflow-stubs/plugins/test_unbounded_foreach_decorator.pyi +10 -3
  120. metaflow-stubs/plugins/timeout_decorator.pyi +24 -3
  121. metaflow-stubs/procpoll.pyi +2 -2
  122. metaflow-stubs/profilers/__init__.pyi +2 -2
  123. metaflow-stubs/pylint_wrapper.pyi +2 -2
  124. metaflow-stubs/runner/__init__.pyi +2 -2
  125. metaflow-stubs/runner/deployer.pyi +70 -131
  126. metaflow-stubs/runner/metaflow_runner.pyi +117 -9
  127. metaflow-stubs/runner/nbdeploy.pyi +66 -2
  128. metaflow-stubs/runner/nbrun.pyi +79 -2
  129. metaflow-stubs/runner/subprocess_manager.pyi +16 -4
  130. metaflow-stubs/runner/utils.pyi +32 -2
  131. metaflow-stubs/system/__init__.pyi +3 -3
  132. metaflow-stubs/system/system_logger.pyi +2 -2
  133. metaflow-stubs/system/system_monitor.pyi +2 -2
  134. metaflow-stubs/tagging_util.pyi +2 -2
  135. metaflow-stubs/tuple_util.pyi +2 -2
  136. {ob_metaflow_stubs-6.0.3.104.dist-info → ob_metaflow_stubs-6.0.3.105.dist-info}/METADATA +1 -1
  137. ob_metaflow_stubs-6.0.3.105.dist-info/RECORD +140 -0
  138. ob_metaflow_stubs-6.0.3.104.dist-info/RECORD +0 -140
  139. {ob_metaflow_stubs-6.0.3.104.dist-info → ob_metaflow_stubs-6.0.3.105.dist-info}/WHEEL +0 -0
  140. {ob_metaflow_stubs-6.0.3.104.dist-info → ob_metaflow_stubs-6.0.3.105.dist-info}/top_level.txt +0 -0
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.411119 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.710461 #
  ##################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.412352 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.711726 #
  ##################################################################################

  from __future__ import annotations
@@ -11,6 +11,45 @@ if typing.TYPE_CHECKING:
      import metaflow.plugins.airflow.sensors.base_sensor

  class ExternalTaskSensorDecorator(metaflow.plugins.airflow.sensors.base_sensor.AirflowSensorDecorator, metaclass=type):
+     """
+     The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+     pool : str
+         the slot pool this task should run in,
+         slot pools are a way to limit concurrency for certain tasks. (Default:None)
+     soft_fail : bool
+         Set to true to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow
+     description : str
+         Description of sensor in the Airflow UI
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (default value) the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states, (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or dis-allowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         time difference with the previous execution to look at,
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence: bool
+         Set to True to check if the external task exists or check if
+         the DAG to wait for exists. (Default: True)
+     """
      def serialize_operator_args(self):
          ...
      def validate(self, flow):
@@ -18,6 +57,45 @@ class ExternalTaskSensorDecorator(metaflow.plugins.airflow.sensors.base_sensor.A
      ...

  class S3KeySensorDecorator(metaflow.plugins.airflow.sensors.base_sensor.AirflowSensorDecorator, metaclass=type):
+     """
+     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+     added as a flow decorators. Adding more than one decorator will ensure that `start` step
+     starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+     pool : str
+         the slot pool this task should run in,
+         slot pools are a way to limit concurrency for certain tasks. (Default:None)
+     soft_fail : bool
+         Set to true to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow
+     description : str
+         Description of sensor in the Airflow UI
+     bucket_key : Union[str, List[str]]
+         The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+         When it's specified as a full s3:// url, please leave `bucket_name` as None
+     bucket_name : str
+         Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+         When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+     wildcard_match : bool
+         whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+     aws_conn_id : str
+         a reference to the s3 connection on Airflow. (Default: None)
+     verify : bool
+         Whether or not to verify SSL certificates for S3 connection. (Default: None)
+     """
      def validate(self, flow):
          ...
      ...
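Both sensors documented above are flow-level decorators, so they sit on the flow class rather than on a step. A minimal sketch of `@airflow_external_task_sensor` in use, assuming the decorator is importable from the top-level `metaflow` package like other flow decorators; the flow name and DAG id are hypothetical, and the sensor only takes effect once the flow is compiled with `airflow create`:

from metaflow import FlowSpec, step, airflow_external_task_sensor


# Hypothetical flow: the sensor blocks the `start` step until the upstream
# Airflow DAG "upstream_etl" reaches a successful state.
@airflow_external_task_sensor(external_dag_id="upstream_etl", allowed_states=["success"])
class DownstreamFlow(FlowSpec):

    @step
    def start(self):
        print("Upstream DAG finished; starting.")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    DownstreamFlow()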
@@ -1,15 +1,15 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.422492 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.722939 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.decorators
      import metaflow.exception
+     import metaflow.decorators

  class AirflowException(metaflow.exception.MetaflowException, metaclass=type):
      def __init__(self, msg):
@@ -39,6 +39,9 @@ def id_creator(val, hash_len):
  TASK_ID_HASH_LEN: int

  class AirflowSensorDecorator(metaflow.decorators.FlowDecorator, metaclass=type):
+     """
+     Base class for all Airflow sensor decorators.
+     """
      def __init__(self, *args, **kwargs):
          ...
      def serialize_operator_args(self):
@@ -1,18 +1,21 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.422869 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.723364 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.decorators
-     import metaflow.exception
      import metaflow.plugins.airflow.sensors.base_sensor
+     import metaflow.exception
+     import metaflow.decorators

  class AirflowSensorDecorator(metaflow.decorators.FlowDecorator, metaclass=type):
+     """
+     Base class for all Airflow sensor decorators.
+     """
      def __init__(self, *args, **kwargs):
          ...
      def serialize_operator_args(self):
@@ -46,6 +49,45 @@ class AirflowException(metaflow.exception.MetaflowException, metaclass=type):
  AIRFLOW_STATES: dict

  class ExternalTaskSensorDecorator(metaflow.plugins.airflow.sensors.base_sensor.AirflowSensorDecorator, metaclass=type):
+     """
+     The `@airflow_external_task_sensor` decorator attaches a Airflow [ExternalTaskSensor](https://airflow.apache.org/docs/apache-airflow/stable/_api/airflow/sensors/external_task/index.html#airflow.sensors.external_task.ExternalTaskSensor) before the start step of the flow.
+     This decorator only works when a flow is scheduled on Airflow and is compiled using `airflow create`. More than one `@airflow_external_task_sensor` can be added as a flow decorators. Adding more than one decorator will ensure that `start` step starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+     pool : str
+         the slot pool this task should run in,
+         slot pools are a way to limit concurrency for certain tasks. (Default:None)
+     soft_fail : bool
+         Set to true to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow
+     description : str
+         Description of sensor in the Airflow UI
+     external_dag_id : str
+         The dag_id that contains the task you want to wait for.
+     external_task_ids : List[str]
+         The list of task_ids that you want to wait for.
+         If None (default value) the sensor waits for the DAG. (Default: None)
+     allowed_states : List[str]
+         Iterable of allowed states, (Default: ['success'])
+     failed_states : List[str]
+         Iterable of failed or dis-allowed states. (Default: None)
+     execution_delta : datetime.timedelta
+         time difference with the previous execution to look at,
+         the default is the same logical date as the current task or DAG. (Default: None)
+     check_existence: bool
+         Set to True to check if the external task exists or check if
+         the DAG to wait for exists. (Default: True)
+     """
      def serialize_operator_args(self):
          ...
      def validate(self, flow):
@@ -1,18 +1,21 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.423219 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.723735 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.decorators
-     import metaflow.exception
      import metaflow.plugins.airflow.sensors.base_sensor
+     import metaflow.exception
+     import metaflow.decorators

  class AirflowSensorDecorator(metaflow.decorators.FlowDecorator, metaclass=type):
+     """
+     Base class for all Airflow sensor decorators.
+     """
      def __init__(self, *args, **kwargs):
          ...
      def serialize_operator_args(self):
@@ -44,6 +47,45 @@ class AirflowException(metaflow.exception.MetaflowException, metaclass=type):
      ...

  class S3KeySensorDecorator(metaflow.plugins.airflow.sensors.base_sensor.AirflowSensorDecorator, metaclass=type):
+     """
+     The `@airflow_s3_key_sensor` decorator attaches a Airflow [S3KeySensor](https://airflow.apache.org/docs/apache-airflow-providers-amazon/stable/_api/airflow/providers/amazon/aws/sensors/s3/index.html#airflow.providers.amazon.aws.sensors.s3.S3KeySensor)
+     before the start step of the flow. This decorator only works when a flow is scheduled on Airflow
+     and is compiled using `airflow create`. More than one `@airflow_s3_key_sensor` can be
+     added as a flow decorators. Adding more than one decorator will ensure that `start` step
+     starts only after all sensors finish.
+
+     Parameters
+     ----------
+     timeout : int
+         Time, in seconds before the task times out and fails. (Default: 3600)
+     poke_interval : int
+         Time in seconds that the job should wait in between each try. (Default: 60)
+     mode : str
+         How the sensor operates. Options are: { poke | reschedule }. (Default: "poke")
+     exponential_backoff : bool
+         allow progressive longer waits between pokes by using exponential backoff algorithm. (Default: True)
+     pool : str
+         the slot pool this task should run in,
+         slot pools are a way to limit concurrency for certain tasks. (Default:None)
+     soft_fail : bool
+         Set to true to mark the task as SKIPPED on failure. (Default: False)
+     name : str
+         Name of the sensor on Airflow
+     description : str
+         Description of sensor in the Airflow UI
+     bucket_key : Union[str, List[str]]
+         The key(s) being waited on. Supports full s3:// style url or relative path from root level.
+         When it's specified as a full s3:// url, please leave `bucket_name` as None
+     bucket_name : str
+         Name of the S3 bucket. Only needed when bucket_key is not provided as a full s3:// url.
+         When specified, all the keys passed to bucket_key refers to this bucket. (Default:None)
+     wildcard_match : bool
+         whether the bucket_key should be interpreted as a Unix wildcard pattern. (Default: False)
+     aws_conn_id : str
+         a reference to the s3 connection on Airflow. (Default: None)
+     verify : bool
+         Whether or not to verify SSL certificates for S3 connection. (Default: None)
+     """
      def validate(self, flow):
          ...
      ...
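The S3 key sensor is used the same way. A minimal sketch under the same assumptions (top-level import, hypothetical bucket and key), again effective only when the flow is compiled with `airflow create`:

from metaflow import FlowSpec, step, airflow_s3_key_sensor


# Hypothetical flow: wait for a marker object in S3 before `start` runs.
# A full s3:// URL is given, so `bucket_name` is left unset as the docstring advises.
@airflow_s3_key_sensor(bucket_key="s3://example-bucket/daily/_SUCCESS", timeout=3600)
class S3GatedFlow(FlowSpec):

    @step
    def start(self):
        print("Marker key found; starting.")
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    S3GatedFlow()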
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.371939 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.668194 #
  ##################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.396477 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.691500 #
  ##################################################################################

  from __future__ import annotations
@@ -1,7 +1,7 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.394642 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.689629 #
  ##################################################################################

  from __future__ import annotations
@@ -27,6 +27,20 @@ class ArgoEventException(metaflow.exception.MetaflowException, metaclass=type):
      ...

  class ArgoEvent(object, metaclass=type):
+     """
+     ArgoEvent is a small event, a message, that can be published to Argo Workflows. The
+     event will eventually start all flows which have been previously deployed with `@trigger`
+     to wait for this particular named event.
+
+     Parameters
+     ----------
+     name : str,
+         Name of the event
+     url : str, optional
+         Override the event endpoint from `ARGO_EVENTS_WEBHOOK_URL`.
+     payload : Dict, optional
+         A set of key-value pairs delivered in this event. Used to set parameters of triggered flows.
+     """
      def __init__(self, name, url = None, payload = None, access_token = None):
          ...
      def add_to_payload(self, key, value):
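The `ArgoEvent` docstring above covers the publishing side of event triggering. A minimal sketch of publishing an event, assuming the commonly documented `metaflow.integrations` import path and a `publish()` method; the event name and payload are hypothetical:

from metaflow.integrations import ArgoEvent

# Flows previously deployed with @trigger(event="data_updated") will start once
# this event is published; payload keys can map onto their parameters.
event = ArgoEvent(name="data_updated", payload={"table": "transactions"})
event.add_to_payload("partition", "2024-10-04")
event.publish()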
@@ -1,17 +1,17 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.401235 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.696117 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.graph
-     import metaflow._vendor.click.types
      import metaflow.metaflow_current
+     import metaflow.graph
      import metaflow.parameters
+     import metaflow._vendor.click.types
      import metaflow.exception

  inf: float
@@ -108,6 +108,15 @@ UI_URL: None

  PAGERDUTY_TEMPLATE_URL: None

+ def init_config() -> typing.Dict[str, str]:
+     """
+     OSS Metaflow reads the config file on every step initialization. This is because OSS assumes config files change
+     relatively infrequently. We want to avoid config values changing between flow steps. Our solution to prevent this
+     is to read a config once and cache it on an environment variable. Environment variables carry over between steps
+     because steps are executed in subprocesses (local) or environments which expect environment variables to be set.
+     """
+     ...
+
  BASH_SAVE_LOGS: str

  def deploy_time_eval(value):
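The `init_config` docstring describes a read-once, cache-in-an-environment-variable approach. The snippet below is only an illustrative sketch of that general pattern, not the actual implementation; the variable name and helper are hypothetical:

import json
import os

_CACHE_ENV_VAR = "METAFLOW_CACHED_CONFIG"  # hypothetical variable name


def cached_config(config_path: str) -> dict:
    # Reuse the cached copy if a parent process already read the file; environment
    # variables carry over into the subprocesses that execute later steps.
    if _CACHE_ENV_VAR in os.environ:
        return json.loads(os.environ[_CACHE_ENV_VAR])
    with open(config_path) as f:
        config = json.load(f)
    os.environ[_CACHE_ENV_VAR] = json.dumps(config)
    return config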
@@ -364,6 +373,10 @@ class DaemonTemplate(object, metaclass=type):
          ...
      def container(self, container):
          ...
+     def service_account_name(self, service_account_name):
+         ...
+     def retry_strategy(self, times, minutes_between_retries):
+         ...
      def to_json(self):
          ...
      def __str__(self):
@@ -1,25 +1,45 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.404814 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.699934 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.graph
-     import metaflow.client.core
-     import metaflow.decorators
-     import metaflow.events
      import metaflow.metaflow_current
+     import metaflow.events
      import datetime
+     import metaflow.graph
      import metaflow.parameters
+     import metaflow.client.core
      import metaflow.exception
+     import metaflow.decorators

  JSONType: metaflow.parameters.JSONTypeClass

  class Run(metaflow.client.core.MetaflowObject, metaclass=type):
+     """
+     A `Run` represents an execution of a `Flow`. It is a container of `Step`s.
+
+     Attributes
+     ----------
+     data : MetaflowData
+         a shortcut to run['end'].task.data, i.e. data produced by this run.
+     successful : bool
+         True if the run completed successfully.
+     finished : bool
+         True if the run completed.
+     finished_at : datetime
+         Time this run finished.
+     code : MetaflowCode
+         Code package for this run (if present). See `MetaflowCode`.
+     trigger : MetaflowTrigger
+         Information about event(s) that triggered this run (if present). See `MetaflowTrigger`.
+     end_task : Task
+         `Task` for the end step (if it is present already).
+     """
      def steps(self, *tags: str) -> typing.Iterator[metaflow.client.core.Step]:
          """
          [Legacy function - do not use]
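The attributes listed in the new `Run` docstring are read through the Metaflow client API. A small sketch, using a hypothetical pathspec "MyFlow/123":

from metaflow import Run

# Hypothetical pathspec in the form "FlowName/run_id".
run = Run("MyFlow/123")

if run.finished and run.successful:
    print("finished at:", run.finished_at)
    # run.data is a shortcut to run['end'].task.data
    print("artifacts:", run.data)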
@@ -304,11 +324,82 @@ def store_token(token_prefix, token):
      ...

  class EnvironmentDecorator(metaflow.decorators.StepDecorator, metaclass=type):
+     """
+     Specifies environment variables to be set prior to the execution of a step.
+
+     Parameters
+     ----------
+     vars : Dict[str, str], default {}
+         Dictionary of environment variables to set.
+     """
      def runtime_step_cli(self, cli_args, retry_count, max_user_code_retries, ubf_context):
          ...
      ...

  class KubernetesDecorator(metaflow.decorators.StepDecorator, metaclass=type):
+     """
+     Specifies that this step should execute on Kubernetes.
+
+     Parameters
+     ----------
+     cpu : int, default 1
+         Number of CPUs required for this step. If `@resources` is
+         also present, the maximum value from all decorators is used.
+     memory : int, default 4096
+         Memory size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     disk : int, default 10240
+         Disk size (in MB) required for this step. If
+         `@resources` is also present, the maximum value from all decorators is
+         used.
+     image : str, optional, default None
+         Docker image to use when launching on Kubernetes. If not specified, and
+         METAFLOW_KUBERNETES_CONTAINER_IMAGE is specified, that image is used. If
+         not, a default Docker image mapping to the current version of Python is used.
+     image_pull_policy: str, default KUBERNETES_IMAGE_PULL_POLICY
+         If given, the imagePullPolicy to be applied to the Docker image of the step.
+     service_account : str, default METAFLOW_KUBERNETES_SERVICE_ACCOUNT
+         Kubernetes service account to use when launching pod in Kubernetes.
+     secrets : List[str], optional, default None
+         Kubernetes secrets to use when launching pod in Kubernetes. These
+         secrets are in addition to the ones defined in `METAFLOW_KUBERNETES_SECRETS`
+         in Metaflow configuration.
+     node_selector: Union[Dict[str,str], str], optional, default None
+         Kubernetes node selector(s) to apply to the pod running the task.
+         Can be passed in as a comma separated string of values e.g. "kubernetes.io/os=linux,kubernetes.io/arch=amd64"
+         or as a dictionary {"kubernetes.io/os": "linux", "kubernetes.io/arch": "amd64"}
+     namespace : str, default METAFLOW_KUBERNETES_NAMESPACE
+         Kubernetes namespace to use when launching pod in Kubernetes.
+     gpu : int, optional, default None
+         Number of GPUs required for this step. A value of zero implies that
+         the scheduled node should not have GPUs.
+     gpu_vendor : str, default KUBERNETES_GPU_VENDOR
+         The vendor of the GPUs to be used for this step.
+     tolerations : List[str], default []
+         The default is extracted from METAFLOW_KUBERNETES_TOLERATIONS.
+         Kubernetes tolerations to use when launching pod in Kubernetes.
+     use_tmpfs : bool, default False
+         This enables an explicit tmpfs mount for this step.
+     tmpfs_tempdir : bool, default True
+         sets METAFLOW_TEMPDIR to tmpfs_path if set for this step.
+     tmpfs_size : int, optional, default: None
+         The value for the size (in MiB) of the tmpfs mount for this step.
+         This parameter maps to the `--tmpfs` option in Docker. Defaults to 50% of the
+         memory allocated for this step.
+     tmpfs_path : str, optional, default /metaflow_temp
+         Path to tmpfs mount for this step.
+     persistent_volume_claims : Dict[str, str], optional, default None
+         A map (dictionary) of persistent volumes to be mounted to the pod for this step. The map is from persistent
+         volumes to the path to which the volume is to be mounted, e.g., `{'pvc-name': '/path/to/mount/on'}`.
+     shared_memory: int, optional
+         Shared memory size (in MiB) required for this step
+     port: int, optional
+         Port number to specify in the Kubernetes job object
+     compute_pool : str, optional, default None
+         Compute pool to be used for for this step.
+         If not specified, any accessible compute pool within the perimeter is used.
+     """
      def __init__(self, attributes = None, statically_defined = False):
          ...
      def step_init(self, flow, graph, step, decos, environment, flow_datastore, logger):
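A minimal sketch of the two newly documented step decorators used together; the image, resource sizes, and variable values are illustrative only:

from metaflow import FlowSpec, step, environment, kubernetes


class TrainFlow(FlowSpec):

    # Run this step on Kubernetes with 2 CPUs and 8 GB of memory, and set an
    # environment variable before the step executes.
    @environment(vars={"TOKENIZERS_PARALLELISM": "false"})
    @kubernetes(cpu=2, memory=8192, image="python:3.11")
    @step
    def start(self):
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    TrainFlow()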
@@ -1,25 +1,28 @@
  ##################################################################################
  # Auto-generated Metaflow stub file #
- # MF version: 2.12.22.1+obcheckpoint(0.0.11);ob(v1) #
- # Generated on 2024-09-20T21:46:07.395856 #
+ # MF version: 2.12.24.1+obcheckpoint(0.0.11);ob(v1) #
+ # Generated on 2024-10-04T10:13:10.690866 #
  ##################################################################################

  from __future__ import annotations

  import typing
  if typing.TYPE_CHECKING:
-     import metaflow.graph
-     import metaflow.decorators
-     import metaflow.events
      import metaflow.metaflow_current
-     import metaflow
-     import metaflow.datastore.inputs
      import metaflow.flowspec
      import typing
+     import metaflow
+     import metaflow.events
+     import metaflow.graph
+     import metaflow.datastore.inputs
+     import metaflow.decorators

  current: metaflow.metaflow_current.Current

  class Trigger(object, metaclass=type):
+     """
+     Defines a container of event triggers' metadata.
+     """
      def __init__(self, _meta = None):
          ...
      @classmethod
@@ -93,6 +96,9 @@ class Trigger(object, metaclass=type):
          ...

  class MetaDatum(tuple, metaclass=type):
+     """
+     MetaDatum(field, value, type, tags)
+     """
      @staticmethod
      def __new__(_cls, field, value, type, tags):
          """
@@ -114,6 +120,14 @@ class MetaDatum(tuple, metaclass=type):
  ARGO_EVENTS_WEBHOOK_URL: None

  class FlowSpec(object, metaclass=metaflow.flowspec._FlowSpecMeta):
+     """
+     Main class from which all Flows should inherit.
+
+     Attributes
+     ----------
+     index
+     input
+     """
      def __init__(self, use_cli = True):
          """
          Construct a FlowSpec
@@ -335,6 +349,20 @@ class FlowSpec(object, metaclass=metaflow.flowspec._FlowSpecMeta):
          ...

  class ArgoEvent(object, metaclass=type):
+     """
+     ArgoEvent is a small event, a message, that can be published to Argo Workflows. The
+     event will eventually start all flows which have been previously deployed with `@trigger`
+     to wait for this particular named event.
+
+     Parameters
+     ----------
+     name : str,
+         Name of the event
+     url : str, optional
+         Override the event endpoint from `ARGO_EVENTS_WEBHOOK_URL`.
+     payload : Dict, optional
+         A set of key-value pairs delivered in this event. Used to set parameters of triggered flows.
+     """
      def __init__(self, name, url = None, payload = None, access_token = None):
          ...
      def add_to_payload(self, key, value):
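On the consuming side, the `ArgoEvent` docstring notes that payload key-value pairs set parameters of triggered flows. A sketch of a flow that waits for the hypothetical `data_updated` event; deployment to Argo Workflows (for example via `argo-workflows create`) is assumed:

from metaflow import FlowSpec, Parameter, step, trigger


# Once deployed to Argo Workflows, this flow starts whenever a "data_updated"
# ArgoEvent is published; the payload key "table" fills the matching parameter.
@trigger(event="data_updated")
class ConsumerFlow(FlowSpec):

    table = Parameter("table", default="transactions")

    @step
    def start(self):
        print("Triggered for table:", self.table)
        self.next(self.end)

    @step
    def end(self):
        pass


if __name__ == "__main__":
    ConsumerFlow()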