databricks-bundles 0.266.0-py3-none-any.whl → 0.268.0-py3-none-any.whl

This diff compares the contents of two package versions publicly released to a supported registry. It is provided for informational purposes only and reflects the packages exactly as they appear in the registry.
Files changed (44)
  1. databricks/bundles/build.py +48 -7
  2. databricks/bundles/core/__init__.py +2 -0
  3. databricks/bundles/core/_diagnostics.py +11 -0
  4. databricks/bundles/core/_load.py +2 -2
  5. databricks/bundles/core/_resource_mutator.py +33 -0
  6. databricks/bundles/core/_resource_type.py +7 -1
  7. databricks/bundles/core/_resources.py +44 -0
  8. databricks/bundles/core/_variable.py +1 -1
  9. databricks/bundles/jobs/__init__.py +14 -0
  10. databricks/bundles/jobs/_models/continuous.py +14 -0
  11. databricks/bundles/jobs/_models/environment.py +10 -0
  12. databricks/bundles/jobs/_models/gcp_attributes.py +14 -0
  13. databricks/bundles/jobs/_models/init_script_info.py +16 -0
  14. databricks/bundles/jobs/_models/job.py +11 -0
  15. databricks/bundles/jobs/_models/job_email_notifications.py +13 -1
  16. databricks/bundles/jobs/_models/library.py +10 -0
  17. databricks/bundles/jobs/_models/lifecycle.py +38 -0
  18. databricks/bundles/jobs/_models/spark_jar_task.py +25 -1
  19. databricks/bundles/jobs/_models/task_email_notifications.py +13 -1
  20. databricks/bundles/jobs/_models/task_retry_mode.py +17 -0
  21. databricks/bundles/pipelines/__init__.py +40 -0
  22. databricks/bundles/pipelines/_models/gcp_attributes.py +14 -0
  23. databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +10 -0
  24. databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
  25. databricks/bundles/pipelines/_models/init_script_info.py +16 -0
  26. databricks/bundles/pipelines/_models/lifecycle.py +38 -0
  27. databricks/bundles/pipelines/_models/pipeline.py +16 -2
  28. databricks/bundles/pipelines/_models/postgres_catalog_config.py +50 -0
  29. databricks/bundles/pipelines/_models/postgres_slot_config.py +60 -0
  30. databricks/bundles/pipelines/_models/source_catalog_config.py +64 -0
  31. databricks/bundles/pipelines/_models/source_config.py +48 -0
  32. databricks/bundles/schemas/__init__.py +30 -0
  33. databricks/bundles/schemas/_models/lifecycle.py +38 -0
  34. databricks/bundles/schemas/_models/schema.py +97 -0
  35. databricks/bundles/schemas/_models/schema_grant.py +40 -0
  36. databricks/bundles/schemas/_models/schema_grant_privilege.py +38 -0
  37. databricks/bundles/version.py +1 -1
  38. databricks/bundles/volumes/__init__.py +8 -0
  39. databricks/bundles/volumes/_models/lifecycle.py +38 -0
  40. databricks/bundles/volumes/_models/volume.py +11 -0
  41. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.268.0.dist-info}/METADATA +2 -2
  42. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.268.0.dist-info}/RECORD +44 -31
  43. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.268.0.dist-info}/WHEEL +0 -0
  44. {databricks_bundles-0.266.0.dist-info → databricks_bundles-0.268.0.dist-info}/licenses/LICENSE +0 -0

databricks/bundles/jobs/_models/task_email_notifications.py
@@ -3,7 +3,7 @@ from typing import TYPE_CHECKING, TypedDict
 
 from databricks.bundles.core._transform import _transform
 from databricks.bundles.core._transform_to_json import _transform_to_json_value
-from databricks.bundles.core._variable import VariableOrList
+from databricks.bundles.core._variable import VariableOrList, VariableOrOptional
 
 if TYPE_CHECKING:
     from typing_extensions import Self
@@ -13,6 +13,12 @@ if TYPE_CHECKING:
 class TaskEmailNotifications:
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str] = field(
         default_factory=list
     )
@@ -53,6 +59,12 @@ class TaskEmailNotifications:
 class TaskEmailNotificationsDict(TypedDict, total=False):
     """"""
 
+    no_alert_for_skipped_runs: VariableOrOptional[bool]
+    """
+    [DEPRECATED] If true, do not send email to recipients specified in `on_failure` if the run is skipped.
+    This field is `deprecated`. Please use the `notification_settings.no_alert_for_skipped_runs` field.
+    """
+
     on_duration_warning_threshold_exceeded: VariableOrList[str]
     """
     A list of email addresses to be notified when the duration of a run exceeds the threshold specified for the `RUN_DURATION_SECONDS` metric in the `health` field. If no rule for the `RUN_DURATION_SECONDS` metric is specified in the `health` field for the job, notifications are not sent.
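
The two hunks above add the deprecated `no_alert_for_skipped_runs` field to both the dataclass and its TypedDict twin. A minimal sketch of exercising it through the usual `from_dict` path (values illustrative; `on_failure` is the recipient list the docstring refers to, and the top-level re-export from `databricks.bundles.jobs` is assumed):

from databricks.bundles.jobs import TaskEmailNotifications

notifications = TaskEmailNotifications.from_dict(
    {
        "on_failure": ["oncall@example.com"],
        # Deprecated: prefer notification_settings.no_alert_for_skipped_runs.
        "no_alert_for_skipped_runs": True,
    }
)
assert notifications.no_alert_for_skipped_runs is True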

databricks/bundles/jobs/_models/task_retry_mode.py (new)
@@ -0,0 +1,17 @@
+from enum import Enum
+from typing import Literal
+
+
+class TaskRetryMode(Enum):
+    """
+    task retry mode of the continuous job
+    * NEVER: The failed task will not be retried.
+    * ON_FAILURE: Retry a failed task if at least one other task in the job is still running its first attempt.
+      When this condition is no longer met or the retry limit is reached, the job run is cancelled and a new run is started.
+    """
+
+    NEVER = "NEVER"
+    ON_FAILURE = "ON_FAILURE"
+
+
+TaskRetryModeParam = Literal["NEVER", "ON_FAILURE"] | TaskRetryMode
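
`TaskRetryModeParam` follows the package's usual pattern of accepting either the enum member or its string literal. A hypothetical helper showing how a call site might normalize the union (the `databricks.bundles.jobs` re-export is assumed):

from databricks.bundles.jobs import TaskRetryMode, TaskRetryModeParam

def normalize_retry_mode(value: TaskRetryModeParam) -> TaskRetryMode:
    # Enum lookup by value maps the "NEVER" / "ON_FAILURE" strings onto members.
    return value if isinstance(value, TaskRetryMode) else TaskRetryMode(value)

assert normalize_retry_mode("ON_FAILURE") is TaskRetryMode.ON_FAILURE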

databricks/bundles/pipelines/__init__.py
@@ -56,6 +56,9 @@ __all__ = [
     "InitScriptInfo",
     "InitScriptInfoDict",
     "InitScriptInfoParam",
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
     "LocalFileInfo",
     "LocalFileInfoDict",
     "LocalFileInfoParam",
@@ -96,6 +99,12 @@ __all__ = [
     "PipelinesEnvironment",
     "PipelinesEnvironmentDict",
     "PipelinesEnvironmentParam",
+    "PostgresCatalogConfig",
+    "PostgresCatalogConfigDict",
+    "PostgresCatalogConfigParam",
+    "PostgresSlotConfig",
+    "PostgresSlotConfigDict",
+    "PostgresSlotConfigParam",
     "ReportSpec",
     "ReportSpecDict",
     "ReportSpecParam",
@@ -111,6 +120,12 @@ __all__ = [
     "SchemaSpec",
     "SchemaSpecDict",
     "SchemaSpecParam",
+    "SourceCatalogConfig",
+    "SourceCatalogConfigDict",
+    "SourceCatalogConfigParam",
+    "SourceConfig",
+    "SourceConfigDict",
+    "SourceConfigParam",
     "TableSpec",
     "TableSpecDict",
     "TableSpecParam",
@@ -224,6 +239,11 @@ from databricks.bundles.pipelines._models.init_script_info import (
     InitScriptInfoDict,
     InitScriptInfoParam,
 )
+from databricks.bundles.pipelines._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
 from databricks.bundles.pipelines._models.local_file_info import (
     LocalFileInfo,
     LocalFileInfoDict,
@@ -292,6 +312,16 @@ from databricks.bundles.pipelines._models.pipelines_environment import (
     PipelinesEnvironmentDict,
     PipelinesEnvironmentParam,
 )
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigDict,
+    PostgresCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigDict,
+    PostgresSlotConfigParam,
+)
 from databricks.bundles.pipelines._models.report_spec import (
     ReportSpec,
     ReportSpecDict,
@@ -313,6 +343,16 @@ from databricks.bundles.pipelines._models.schema_spec import (
     SchemaSpecDict,
     SchemaSpecParam,
 )
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigDict,
+    SourceCatalogConfigParam,
+)
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigDict,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_spec import (
     TableSpec,
     TableSpecDict,
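
Since the new names land in `__all__`, downstream code can import them straight from the package root:

from databricks.bundles.pipelines import (
    Lifecycle,
    PostgresCatalogConfig,
    PostgresSlotConfig,
    SourceCatalogConfig,
    SourceConfig,
)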

databricks/bundles/pipelines/_models/gcp_attributes.py
@@ -53,6 +53,13 @@ class GcpAttributes:
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool] = None
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str] = None
     """
     Identifier for the availability zone in which the cluster resides.
@@ -108,6 +115,13 @@ class GcpAttributesDict(TypedDict, total=False):
     for the supported number of local SSDs for each instance type.
     """
 
+    use_preemptible_executors: VariableOrOptional[bool]
+    """
+    [DEPRECATED] This field determines whether the spark executors will be scheduled to run on preemptible
+    VMs (when set to true) versus standard compute engine VMs (when set to false; default).
+    Note: Soon to be deprecated, use the 'availability' field instead.
+    """
+
     zone_id: VariableOrOptional[str]
     """
     Identifier for the availability zone in which the cluster resides.
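
A dict-form sketch of the migration the deprecation note asks for. The `availability` key and the "PREEMPTIBLE_GCP" value come from the Databricks compute API, not from this diff, so treat both as assumptions:

legacy = {"use_preemptible_executors": True, "zone_id": "us-central1-a"}     # deprecated flag
preferred = {"availability": "PREEMPTIBLE_GCP", "zone_id": "us-central1-a"}  # per the docstring's advice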

databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py
@@ -30,6 +30,11 @@ class IngestionGatewayPipelineDefinition:
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str] = None
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str] = None
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
@@ -63,6 +68,11 @@ class IngestionGatewayPipelineDefinitionDict(TypedDict, total=False):
     Required, Immutable. The name of the schema for the gateway pipelines's storage location.
     """
 
+    connection_id: VariableOrOptional[str]
+    """
+    [DEPRECATED] [Deprecated, use connection_name instead] Immutable. The Unity Catalog connection that this gateway pipeline uses to communicate with the source.
+    """
+
     gateway_storage_name: VariableOrOptional[str]
     """
     Optional. The Unity Catalog-compatible name for the gateway storage location.
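
The replacement field is named right in the docstring; a dict sketch of moving off the deprecated identifier (values illustrative, re-export from `databricks.bundles.pipelines` assumed):

from databricks.bundles.pipelines import IngestionGatewayPipelineDefinitionDict

old: IngestionGatewayPipelineDefinitionDict = {"connection_id": "0123-abcd"}           # deprecated
new: IngestionGatewayPipelineDefinitionDict = {"connection_name": "my_pg_connection"}  # preferred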

databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py
@@ -12,6 +12,10 @@ from databricks.bundles.pipelines._models.ingestion_source_type import (
     IngestionSourceType,
     IngestionSourceTypeParam,
 )
+from databricks.bundles.pipelines._models.source_config import (
+    SourceConfig,
+    SourceConfigParam,
+)
 from databricks.bundles.pipelines._models.table_specific_config import (
     TableSpecificConfig,
     TableSpecificConfigParam,
@@ -40,6 +44,13 @@ class IngestionPipelineDefinition:
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfig] = field(default_factory=list)
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceType] = None
     """
     The type of the foreign source.
@@ -78,6 +89,13 @@ class IngestionPipelineDefinitionDict(TypedDict, total=False):
     Required. Settings specifying tables to replicate and the destination for the replicated tables.
     """
 
+    source_configurations: VariableOrList[SourceConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Top-level source configurations
+    """
+
     source_type: VariableOrOptional[IngestionSourceTypeParam]
     """
     The type of the foreign source.

databricks/bundles/pipelines/_models/init_script_info.py
@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.pipelines._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.pipelines._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,
@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.
@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
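
The docstrings quote the expected shapes directly, so moving an init script off the deprecated DBFS location is mechanical. A sketch (paths illustrative; `from_dict` assumed, matching the pattern the other models in this diff expose):

from databricks.bundles.pipelines import InitScriptInfo

legacy = InitScriptInfo.from_dict({"dbfs": {"destination": "dbfs:/init/setup.sh"}})    # deprecated
current = InitScriptInfo.from_dict({"file": {"destination": "file:/my/local/init.sh"}})  # supported `file` field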

databricks/bundles/pipelines/_models/lifecycle.py (new)
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle

databricks/bundles/pipelines/_models/pipeline.py
@@ -25,6 +25,10 @@ from databricks.bundles.pipelines._models.ingestion_pipeline_definition import (
     IngestionPipelineDefinition,
     IngestionPipelineDefinitionParam,
 )
+from databricks.bundles.pipelines._models.lifecycle import (
+    Lifecycle,
+    LifecycleParam,
+)
 from databricks.bundles.pipelines._models.notifications import (
     Notifications,
     NotificationsParam,
@@ -143,6 +147,11 @@ class Pipeline(Resource):
     Libraries or code needed by this deployment.
     """
 
+    lifecycle: VariableOrOptional[Lifecycle] = None
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     name: VariableOrOptional[str] = None
     """
     Friendly identifier for this pipeline.
@@ -203,7 +212,7 @@ class Pipeline(Resource):
 
     target: VariableOrOptional[str] = None
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
     @classmethod
@@ -301,6 +310,11 @@ class PipelineDict(TypedDict, total=False):
     Libraries or code needed by this deployment.
     """
 
+    lifecycle: VariableOrOptional[LifecycleParam]
+    """
+    Lifecycle is a struct that contains the lifecycle settings for a resource. It controls the behavior of the resource when it is deployed or destroyed.
+    """
+
     name: VariableOrOptional[str]
     """
     Friendly identifier for this pipeline.
@@ -361,7 +375,7 @@ class PipelineDict(TypedDict, total=False):
 
     target: VariableOrOptional[str]
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
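
With `lifecycle` wired into `Pipeline`, a bundle author can guard a resource against accidental teardown. A short sketch (names and values illustrative; `catalog` and `schema` are the fields the `target` docstring points to):

from databricks.bundles.pipelines import Lifecycle, Pipeline

pipeline = Pipeline(
    name="prod_ingest",
    catalog="main",
    schema="ingest",
    # Deployments should now refuse to destroy this resource.
    lifecycle=Lifecycle(prevent_destroy=True),
)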
 

databricks/bundles/pipelines/_models/postgres_catalog_config.py (new)
@@ -0,0 +1,50 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PG-specific catalog-level configuration parameters
+    """
+
+    slot_config: VariableOrOptional[PostgresSlotConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresCatalogConfigDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class PostgresCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    slot_config: VariableOrOptional[PostgresSlotConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+
+PostgresCatalogConfigParam = PostgresCatalogConfigDict | PostgresCatalogConfig

databricks/bundles/pipelines/_models/postgres_slot_config.py (new)
@@ -0,0 +1,60 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresSlotConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PostgresSlotConfig contains the configuration for a Postgres logical replication slot
+    """
+
+    publication_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresSlotConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresSlotConfigDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class PostgresSlotConfigDict(TypedDict, total=False):
+    """"""
+
+    publication_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+
+PostgresSlotConfigParam = PostgresSlotConfigDict | PostgresSlotConfig

databricks/bundles/pipelines/_models/source_catalog_config.py (new)
@@ -0,0 +1,64 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    SourceCatalogConfig contains catalog-level custom configuration parameters for each source
+    """
+
+    postgres: VariableOrOptional[PostgresCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceCatalogConfigDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class SourceCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    postgres: VariableOrOptional[PostgresCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+
+SourceCatalogConfigParam = SourceCatalogConfigDict | SourceCatalogConfig

databricks/bundles/pipelines/_models/source_config.py (new)
@@ -0,0 +1,48 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    catalog: VariableOrOptional[SourceCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceConfigDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class SourceConfigDict(TypedDict, total=False):
+    """"""
+
+    catalog: VariableOrOptional[SourceCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+
+SourceConfigParam = SourceConfigDict | SourceConfig
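
All four experimental pieces compose into a single chain. A sketch of a Postgres source configuration built from the classes above (every value illustrative, and the whole surface is flagged [EXPERIMENTAL]):

from databricks.bundles.pipelines import (
    PostgresCatalogConfig,
    PostgresSlotConfig,
    SourceCatalogConfig,
    SourceConfig,
)

# Catalog-level Postgres settings pointing replication at an existing
# publication and logical replication slot.
source_config = SourceConfig(
    catalog=SourceCatalogConfig(
        source_catalog="pg_main",
        postgres=PostgresCatalogConfig(
            slot_config=PostgresSlotConfig(
                publication_name="dlt_publication",
                slot_name="dlt_slot",
            )
        ),
    )
)

A list of such objects is what `IngestionPipelineDefinition.source_configurations` now accepts.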

databricks/bundles/schemas/__init__.py (new)
@@ -0,0 +1,30 @@
+__all__ = [
+    "Lifecycle",
+    "LifecycleDict",
+    "LifecycleParam",
+    "Schema",
+    "SchemaDict",
+    "SchemaGrant",
+    "SchemaGrantDict",
+    "SchemaGrantParam",
+    "SchemaGrantPrivilege",
+    "SchemaGrantPrivilegeParam",
+    "SchemaParam",
+]
+
+
+from databricks.bundles.schemas._models.lifecycle import (
+    Lifecycle,
+    LifecycleDict,
+    LifecycleParam,
+)
+from databricks.bundles.schemas._models.schema import Schema, SchemaDict, SchemaParam
+from databricks.bundles.schemas._models.schema_grant import (
+    SchemaGrant,
+    SchemaGrantDict,
+    SchemaGrantParam,
+)
+from databricks.bundles.schemas._models.schema_grant_privilege import (
+    SchemaGrantPrivilege,
+    SchemaGrantPrivilegeParam,
+)

databricks/bundles/schemas/_models/lifecycle.py (new)
@@ -0,0 +1,38 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Lifecycle:
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool] = None
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+    @classmethod
+    def from_dict(cls, value: "LifecycleDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "LifecycleDict":
+        return _transform_to_json_value(self) # type:ignore
+
+
+class LifecycleDict(TypedDict, total=False):
+    """"""
+
+    prevent_destroy: VariableOrOptional[bool]
+    """
+    Lifecycle setting to prevent the resource from being destroyed.
+    """
+
+
+LifecycleParam = LifecycleDict | Lifecycle
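
This is a line-for-line copy of the pipelines `Lifecycle`; the dict round-trip it defines should behave like:

from databricks.bundles.schemas import Lifecycle

lifecycle = Lifecycle.from_dict({"prevent_destroy": True})
assert lifecycle.prevent_destroy is True
assert lifecycle.as_dict() == {"prevent_destroy": True}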