databricks-bundles 0.265.0__py3-none-any.whl → 0.267.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (37)
  1. databricks/bundles/core/__init__.py +3 -1
  2. databricks/bundles/core/_load.py +2 -2
  3. databricks/bundles/core/_resource_mutator.py +33 -0
  4. databricks/bundles/core/_resource_type.py +6 -0
  5. databricks/bundles/core/_resources.py +44 -0
  6. databricks/bundles/core/_variable.py +1 -1
  7. databricks/bundles/jobs/__init__.py +6 -0
  8. databricks/bundles/jobs/_models/continuous.py +14 -0
  9. databricks/bundles/jobs/_models/environment.py +10 -0
  10. databricks/bundles/jobs/_models/gcp_attributes.py +14 -0
  11. databricks/bundles/jobs/_models/init_script_info.py +16 -0
  12. databricks/bundles/jobs/_models/job_email_notifications.py +13 -1
  13. databricks/bundles/jobs/_models/library.py +10 -0
  14. databricks/bundles/jobs/_models/spark_jar_task.py +25 -1
  15. databricks/bundles/jobs/_models/task_email_notifications.py +13 -1
  16. databricks/bundles/jobs/_models/task_retry_mode.py +17 -0
  17. databricks/bundles/pipelines/__init__.py +32 -0
  18. databricks/bundles/pipelines/_models/gcp_attributes.py +14 -0
  19. databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +10 -0
  20. databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
  21. databricks/bundles/pipelines/_models/init_script_info.py +16 -0
  22. databricks/bundles/pipelines/_models/pipeline.py +2 -2
  23. databricks/bundles/pipelines/_models/postgres_catalog_config.py +50 -0
  24. databricks/bundles/pipelines/_models/postgres_slot_config.py +60 -0
  25. databricks/bundles/pipelines/_models/source_catalog_config.py +64 -0
  26. databricks/bundles/pipelines/_models/source_config.py +48 -0
  27. databricks/bundles/version.py +1 -1
  28. databricks/bundles/volumes/__init__.py +25 -0
  29. databricks/bundles/volumes/_models/volume.py +96 -0
  30. databricks/bundles/volumes/_models/volume_grant.py +40 -0
  31. databricks/bundles/volumes/_models/volume_grant_privilege.py +16 -0
  32. databricks/bundles/volumes/_models/volume_type.py +14 -0
  33. {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/METADATA +2 -2
  34. {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/RECORD +36 -27
  35. databricks/bundles/compute/__init__.py +0 -0
  36. {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/WHEEL +0 -0
  37. {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/licenses/LICENSE +0 -0
databricks/bundles/core/__init__.py
@@ -13,12 +13,13 @@ __all__ = [
     "VariableOrList",
     "VariableOrOptional",
     "job_mutator",
-    "pipeline_mutator",
     "load_resources_from_current_package_module",
     "load_resources_from_module",
     "load_resources_from_modules",
     "load_resources_from_package_module",
+    "pipeline_mutator",
     "variables",
+    "volume_mutator",
 ]
 
 from databricks.bundles.core._bundle import Bundle
@@ -39,6 +40,7 @@ from databricks.bundles.core._resource_mutator import (
     ResourceMutator,
     job_mutator,
     pipeline_mutator,
+    volume_mutator,
 )
 from databricks.bundles.core._resources import Resources
 from databricks.bundles.core._variable import (
databricks/bundles/core/_load.py
@@ -14,9 +14,9 @@ from databricks.bundles.core._resources import Resources
 
 __all__ = [
     "load_resources_from_current_package_module",
-    "load_resources_from_package_module",
-    "load_resources_from_modules",
     "load_resources_from_module",
+    "load_resources_from_modules",
+    "load_resources_from_package_module",
 ]
 
 """
databricks/bundles/core/_resource_mutator.py
@@ -8,6 +8,7 @@ from databricks.bundles.core._resource import Resource
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job
     from databricks.bundles.pipelines._models.pipeline import Pipeline
+    from databricks.bundles.volumes._models.volume import Volume
 
 _T = TypeVar("_T", bound=Resource)
 
@@ -127,3 +128,35 @@ def pipeline_mutator(function: Callable) -> ResourceMutator["Pipeline"]:
     from databricks.bundles.pipelines._models.pipeline import Pipeline
 
     return ResourceMutator(resource_type=Pipeline, function=function)
+
+
+@overload
+def volume_mutator(
+    function: Callable[[Bundle, "Volume"], "Volume"],
+) -> ResourceMutator["Volume"]: ...
+
+
+@overload
+def volume_mutator(
+    function: Callable[["Volume"], "Volume"],
+) -> ResourceMutator["Volume"]: ...
+
+
+def volume_mutator(function: Callable) -> ResourceMutator["Volume"]:
+    """
+    Decorator for defining a volume mutator. Function should return a new instance of the volume with the desired changes,
+    instead of mutating the input volume.
+
+    Example:
+
+    .. code-block:: python
+
+        @volume_mutator
+        def my_volume_mutator(bundle: Bundle, volume: Volume) -> Volume:
+            return replace(volume, name="my_volume")
+
+    :param function: Function that mutates a volume.
+    """
+    from databricks.bundles.volumes._models.volume import Volume
+
+    return ResourceMutator(resource_type=Volume, function=function)
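A minimal usage sketch of the new decorator, mirroring the docstring example above (an illustration, not part of the diff; it assumes `bundle.target` is available and that `volume.name` is a plain string at mutation time):

from dataclasses import replace

from databricks.bundles.core import Bundle, volume_mutator
from databricks.bundles.volumes import Volume


@volume_mutator
def suffix_volume_name(bundle: Bundle, volume: Volume) -> Volume:
    # Volume is a frozen dataclass: return a modified copy via
    # dataclasses.replace rather than mutating the input.
    return replace(volume, name=f"{volume.name}_{bundle.target}")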
databricks/bundles/core/_resource_type.py
@@ -2,6 +2,7 @@ from dataclasses import dataclass
 from typing import Type
 
 from databricks.bundles.core._resource import Resource
+from databricks.bundles.volumes._models.volume import Volume
 
 
 @dataclass(kw_only=True, frozen=True)
@@ -45,4 +46,9 @@ class _ResourceType:
         plural_name="pipelines",
         singular_name="pipeline",
     ),
+    _ResourceType(
+        resource_type=Volume,
+        plural_name="volumes",
+        singular_name="volume",
+    ),
 )
databricks/bundles/core/_resources.py
@@ -8,6 +8,7 @@ from databricks.bundles.core._transform import _transform
 if TYPE_CHECKING:
     from databricks.bundles.jobs._models.job import Job, JobParam
     from databricks.bundles.pipelines._models.pipeline import Pipeline, PipelineParam
+    from databricks.bundles.volumes._models.volume import Volume, VolumeParam
 
 __all__ = ["Resources"]
 
@@ -57,6 +58,7 @@ class Resources:
     def __init__(self):
         self._jobs = dict[str, "Job"]()
         self._pipelines = dict[str, "Pipeline"]()
+        self._volumes = dict[str, "Volume"]()
         self._locations = dict[tuple[str, ...], Location]()
         self._diagnostics = Diagnostics()
 
@@ -68,6 +70,10 @@ class Resources:
     def pipelines(self) -> dict[str, "Pipeline"]:
         return self._pipelines
 
+    @property
+    def volumes(self) -> dict[str, "Volume"]:
+        return self._volumes
+
     @property
     def diagnostics(self) -> Diagnostics:
         """
@@ -93,6 +99,7 @@ class Resources:
 
         from databricks.bundles.jobs import Job
         from databricks.bundles.pipelines import Pipeline
+        from databricks.bundles.volumes import Volume
 
         location = location or Location.from_stack_frame(depth=1)
 
@@ -101,6 +108,8 @@ class Resources:
                 self.add_job(resource_name, resource, location=location)
             case Pipeline():
                 self.add_pipeline(resource_name, resource, location=location)
+            case Volume():
+                self.add_volume(resource_name, resource, location=location)
             case _:
                 raise ValueError(f"Unsupported resource type: {type(resource)}")
 
@@ -168,6 +177,38 @@ class Resources:
 
         self._pipelines[resource_name] = pipeline
 
+    def add_volume(
+        self,
+        resource_name: str,
+        volume: "VolumeParam",
+        *,
+        location: Optional[Location] = None,
+    ) -> None:
+        """
+        Adds a volume to the collection of resources. Resource name must be unique across all volumes.
+
+        :param resource_name: unique identifier for the volume
+        :param volume: the volume to add, can be Volume or dict
+        :param location: optional location of the volume in the source code
+        """
+        from databricks.bundles.volumes import Volume
+
+        volume = _transform(Volume, volume)
+        path = ("resources", "volumes", resource_name)
+        location = location or Location.from_stack_frame(depth=1)
+
+        if self._volumes.get(resource_name):
+            self.add_diagnostic_error(
+                msg=f"Duplicate resource name '{resource_name}' for a volume. Resource names must be unique.",
+                location=location,
+                path=path,
+            )
+        else:
+            if location:
+                self.add_location(path, location)
+
+            self._volumes[resource_name] = volume
+
     def add_location(self, path: tuple[str, ...], location: Location) -> None:
         """
         Associate source code location with a path in the bundle configuration.
@@ -244,6 +285,9 @@ class Resources:
         for name, pipeline in other.pipelines.items():
             self.add_pipeline(name, pipeline)
 
+        for name, volume in other.volumes.items():
+            self.add_volume(name, volume)
+
         for path, location in other._locations.items():
             self.add_location(path, location)
 
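A sketch of the new `add_volume` API in use; per the docstring above, the volume may be passed as a `Volume` or a dict. The dict keys below (`catalog_name`, `schema_name`, `name`) are assumptions about the Volume schema, which is not shown in this hunk:

from databricks.bundles.core import Bundle, Resources


def load_resources(bundle: Bundle) -> Resources:
    resources = Resources()

    # Duplicate resource names are reported through diagnostics
    # rather than raising, as add_volume above shows.
    resources.add_volume(
        "raw_files",
        {
            "catalog_name": "main",    # assumed field
            "schema_name": "landing",  # assumed field
            "name": "raw_files",       # assumed field
        },
    )

    return resources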
databricks/bundles/core/_variable.py
@@ -11,9 +11,9 @@ from typing import (
 __all__ = [
     "Variable",
     "VariableOr",
-    "VariableOrOptional",
     "VariableOrDict",
     "VariableOrList",
+    "VariableOrOptional",
     "variables",
 ]
 
databricks/bundles/jobs/__init__.py
@@ -244,6 +244,8 @@ __all__ = [
     "TaskNotificationSettingsDict",
     "TaskNotificationSettingsParam",
     "TaskParam",
+    "TaskRetryMode",
+    "TaskRetryModeParam",
     "TriggerSettings",
     "TriggerSettingsDict",
     "TriggerSettingsParam",
@@ -648,6 +650,10 @@ from databricks.bundles.jobs._models.task_notification_settings import (
     TaskNotificationSettingsDict,
     TaskNotificationSettingsParam,
 )
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 from databricks.bundles.jobs._models.trigger_settings import (
     TriggerSettings,
     TriggerSettingsDict,
databricks/bundles/jobs/_models/continuous.py
@@ -5,6 +5,10 @@ from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
 from databricks.bundles.core._variable import VariableOrOptional
 from databricks.bundles.jobs._models.pause_status import PauseStatus, PauseStatusParam
+from databricks.bundles.jobs._models.task_retry_mode import (
+    TaskRetryMode,
+    TaskRetryModeParam,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -19,6 +23,11 @@ class Continuous:
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryMode] = None
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
     @classmethod
     def from_dict(cls, value: "ContinuousDict") -> "Self":
         return _transform(cls, value)
@@ -35,5 +44,10 @@ class ContinuousDict(TypedDict, total=False):
     Indicate whether the continuous execution of the job is paused or not. Defaults to UNPAUSED.
     """
 
+    task_retry_mode: VariableOrOptional[TaskRetryModeParam]
+    """
+    Indicate whether the continuous job is applying task level retries or not. Defaults to NEVER.
+    """
+
 
 ContinuousParam = ContinuousDict | Continuous
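A sketch of the new field in use; `TaskRetryModeParam` (defined in the new task_retry_mode.py further down) lets either the enum or its literal string be supplied:

from databricks.bundles.jobs import Continuous, TaskRetryMode

# Enum form and dict/literal form are equivalent after transformation.
continuous = Continuous(task_retry_mode=TaskRetryMode.ON_FAILURE)
continuous_from_dict = Continuous.from_dict({"task_retry_mode": "ON_FAILURE"})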
databricks/bundles/jobs/_models/environment.py
@@ -16,6 +16,11 @@ class Environment:
     In this minimal environment spec, only pip dependencies are supported.
     """
 
+    client: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str] = field(default_factory=list)
     """
     List of pip dependencies, as supported by the version of pip in this environment.
@@ -46,6 +51,11 @@ class Environment:
 class EnvironmentDict(TypedDict, total=False):
     """"""
 
+    client: VariableOrOptional[str]
+    """
+    [DEPRECATED] Use `environment_version` instead.
+    """
+
     dependencies: VariableOrList[str]
     """
     List of pip dependencies, as supported by the version of pip in this environment.
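The deprecation note points at `environment_version`; a migration sketch (assuming that field exists on Environment, which this hunk does not show):

from databricks.bundles.jobs import Environment

# Before (deprecated): Environment(client="1", dependencies=[...])
env = Environment(
    environment_version="2",  # assumed replacement field per the note above
    dependencies=["pyspark==3.5.0"],
)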
databricks/bundles/jobs/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
databricks/bundles/jobs/_models/init_script_info.py
@@ -8,6 +8,10 @@ from databricks.bundles.jobs._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.jobs._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.jobs._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
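A migration sketch for the deprecated `dbfs` destination; the `volumes` variant used here is an assumption (it is not shown in this hunk), but Unity Catalog volume paths are the usual replacement for DBFS init scripts:

from databricks.bundles.jobs import InitScriptInfo

# Deprecated form:
#   InitScriptInfo.from_dict({"dbfs": {"destination": "dbfs:/home/cluster_log/init.sh"}})
init_script = InitScriptInfo.from_dict(
    {"volumes": {"destination": "/Volumes/main/default/scripts/init.sh"}}  # assumed variant
)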
databricks/bundles/jobs/_models/job_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class JobEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class JobEmailNotifications:
 class JobEmailNotificationsDict(TypedDict, total=False):
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
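The note above names `notification_settings.no_alert_for_skipped_runs` as the replacement; a sketch at the job level (the `notification_settings` field lives on the Job model, which is not part of this hunk):

from databricks.bundles.jobs import Job

job = Job.from_dict(
    {
        "name": "nightly",
        "email_notifications": {"on_failure": ["oncall@example.com"]},
        # replaces JobEmailNotifications.no_alert_for_skipped_runs
        "notification_settings": {"no_alert_for_skipped_runs": True},
    }
)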
databricks/bundles/jobs/_models/library.py
@@ -30,6 +30,11 @@ class Library:
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str] = None
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
@@ -82,6 +87,11 @@ class LibraryDict(TypedDict, total=False):
     Specification of a CRAN library to be installed as part of the library
     """
 
+    egg: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated. URI of the egg library to install. Installing Python egg files is deprecated and is not supported in Databricks Runtime 14.0 and above.
+    """
+
     jar: VariableOrOptional[str]
     """
     URI of the JAR library to install. Supported URIs include Workspace paths, Unity Catalog Volumes paths, and S3 URIs.
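Since egg installs are unsupported on Databricks Runtime 14.0 and above, a wheel is the practical replacement (sketch; the `whl` field is assumed here, as this hunk only shows `cran`, `egg`, and `jar`):

from databricks.bundles.jobs import Library

# Before (deprecated): Library(egg="dbfs:/libs/my_package.egg")
library = Library(whl="/Volumes/main/default/libs/my_package-1.0-py3-none-any.whl")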
databricks/bundles/jobs/_models/spark_jar_task.py
@@ -3,7 +3,11 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOr, VariableOrList
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -20,6 +24,11 @@ class SparkJarTask:
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str] = field(default_factory=list)
     """
     Parameters passed to the main method.
@@ -27,6 +36,11 @@ class SparkJarTask:
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
 
+    run_as_repl: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
     @classmethod
     def from_dict(cls, value: "SparkJarTaskDict") -> "Self":
         return _transform(cls, value)
@@ -45,6 +59,11 @@ class SparkJarTaskDict(TypedDict, total=False):
     The code must use `SparkContext.getOrCreate` to obtain a Spark context; otherwise, runs of the job fail.
     """
 
+    jar_uri: VariableOrOptional[str]
+    """
+    [DEPRECATED] Deprecated since 04/2016. Provide a `jar` through the `libraries` field instead. For an example, see :method:jobs/create.
+    """
+
     parameters: VariableOrList[str]
     """
     Parameters passed to the main method.
@@ -52,5 +71,10 @@ class SparkJarTaskDict(TypedDict, total=False):
     Use [Task parameter variables](https://docs.databricks.com/jobs.html#parameter-variables) to set parameters containing information about job runs.
     """
 
+    run_as_repl: VariableOrOptional[bool]
+    """
+    [DEPRECATED] Deprecated. A value of `false` is no longer supported.
+    """
+
 
 SparkJarTaskParam = SparkJarTaskDict | SparkJarTask
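A sketch of the supported pattern named in the `jar_uri` deprecation note: attach the JAR through the task's `libraries` field (the Task model and its `libraries`/`spark_jar_task` fields are assumptions here; they are not part of this hunk):

from databricks.bundles.jobs import Task

task = Task.from_dict(
    {
        "task_key": "main_jar",
        "spark_jar_task": {"main_class_name": "com.example.Main"},
        # replaces the deprecated SparkJarTask.jar_uri
        "libraries": [{"jar": "/Volumes/main/default/libs/app.jar"}],
    }
)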
databricks/bundles/jobs/_models/task_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class TaskEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class TaskEmailNotifications:
 class TaskEmailNotificationsDict(TypedDict, total=False):
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
databricks/bundles/jobs/_models/task_retry_mode.py
@@ -0,0 +1,17 @@
+from enum import Enum
+from typing import Literal
+
+
+class TaskRetryMode(Enum):
+    """
+    task retry mode of the continuous job
+    * NEVER: The failed task will not be retried.
+    * ON_FAILURE: Retry a failed task if at least one other task in the job is still running its first attempt.
+      When this condition is no longer met or the retry limit is reached, the job run is cancelled and a new run is started.
+    """
+
+    NEVER = "NEVER"
+    ON_FAILURE = "ON_FAILURE"
+
+
+TaskRetryModeParam = Literal["NEVER", "ON_FAILURE"] | TaskRetryMode
databricks/bundles/pipelines/__init__.py
@@ -96,6 +96,12 @@ __all__ = [
     "PipelinesEnvironment",
     "PipelinesEnvironmentDict",
     "PipelinesEnvironmentParam",
+    "PostgresCatalogConfig",
+    "PostgresCatalogConfigDict",
+    "PostgresCatalogConfigParam",
+    "PostgresSlotConfig",
+    "PostgresSlotConfigDict",
+    "PostgresSlotConfigParam",
     "ReportSpec",
     "ReportSpecDict",
     "ReportSpecParam",
@@ -111,6 +117,12 @@ __all__ = [
     "SchemaSpec",
     "SchemaSpecDict",
     "SchemaSpecParam",
+    "SourceCatalogConfig",
+    "SourceCatalogConfigDict",
+    "SourceCatalogConfigParam",
+    "SourceConfig",
+    "SourceConfigDict",
+    "SourceConfigParam",
     "TableSpec",
     "TableSpecDict",
     "TableSpecParam",
@@ -292,6 +304,16 @@ from databricks.bundles.pipelines._models.pipelines_environment import (
     PipelinesEnvironmentDict,
     PipelinesEnvironmentParam,
 )
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigDict,
+    PostgresCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigDict,
+    PostgresSlotConfigParam,
+)
 from databricks.bundles.pipelines._models.report_spec import (
     ReportSpec,
     ReportSpecDict,
@@ -313,6 +335,16 @@ from databricks.bundles.pipelines._models.schema_spec import (
     SchemaSpecDict,
     SchemaSpecParam,
 )
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigDict,
+    SourceCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigDict,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_spec import (
     TableSpec,
     TableSpecDict,
databricks/bundles/pipelines/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
@@ -30,6 +30,11 @@ class IngestionGatewayPipelineDefinition:
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str] = None
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
@@ -63,6 +68,11 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str]
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str]
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
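A sketch of the replacement named in the deprecation note, using `connection_name` instead of `connection_id` (`connection_name` is assumed from the note; only `gateway_storage_name` is visible in this hunk):

from databricks.bundles.pipelines import IngestionGatewayPipelineDefinition

gateway = IngestionGatewayPipelineDefinition.from_dict(
    {
        "connection_name": "sql_server_source",  # replaces deprecated connection_id
        "gateway_storage_name": "ingest_gateway",
    }
)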
databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -12,6 +12,10 @@ from databricks.bundles.pipelines._models.ingestion_source_type import (
     IngestionSourceType,
     IngestionSourceTypeParam,
 )
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_specific_config import (
     TableSpecificConfig,
     TableSpecificConfigParam,
@@ -40,6 +44,13 @@ class IngestionPipelineDefinition:
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfig] = field(default_factory=list)
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceType] = None
     """
     The type of the foreign source.
@@ -78,6 +89,13 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceTypeParam]
    """
     The type of the foreign source.