databricks-bundles 0.265.0__py3-none-any.whl → 0.267.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/bundles/core/__init__.py +3 -1
- databricks/bundles/core/_load.py +2 -2
- databricks/bundles/core/_resource_mutator.py +33 -0
- databricks/bundles/core/_resource_type.py +6 -0
- databricks/bundles/core/_resources.py +44 -0
- databricks/bundles/core/_variable.py +1 -1
- databricks/bundles/jobs/__init__.py +6 -0
- databricks/bundles/jobs/_models/continuous.py +14 -0
- databricks/bundles/jobs/_models/environment.py +10 -0
- databricks/bundles/jobs/_models/gcp_attributes.py +14 -0
- databricks/bundles/jobs/_models/init_script_info.py +16 -0
- databricks/bundles/jobs/_models/job_email_notifications.py +13 -1
- databricks/bundles/jobs/_models/library.py +10 -0
- databricks/bundles/jobs/_models/spark_jar_task.py +25 -1
- databricks/bundles/jobs/_models/task_email_notifications.py +13 -1
- databricks/bundles/jobs/_models/task_retry_mode.py +17 -0
- databricks/bundles/pipelines/__init__.py +32 -0
- databricks/bundles/pipelines/_models/gcp_attributes.py +14 -0
- databricks/bundles/pipelines/_models/ingestion_gateway_pipeline_definition.py +10 -0
- databricks/bundles/pipelines/_models/ingestion_pipeline_definition.py +18 -0
- databricks/bundles/pipelines/_models/init_script_info.py +16 -0
- databricks/bundles/pipelines/_models/pipeline.py +2 -2
- databricks/bundles/pipelines/_models/postgres_catalog_config.py +50 -0
- databricks/bundles/pipelines/_models/postgres_slot_config.py +60 -0
- databricks/bundles/pipelines/_models/source_catalog_config.py +64 -0
- databricks/bundles/pipelines/_models/source_config.py +48 -0
- databricks/bundles/version.py +1 -1
- databricks/bundles/volumes/__init__.py +25 -0
- databricks/bundles/volumes/_models/volume.py +96 -0
- databricks/bundles/volumes/_models/volume_grant.py +40 -0
- databricks/bundles/volumes/_models/volume_grant_privilege.py +16 -0
- databricks/bundles/volumes/_models/volume_type.py +14 -0
- {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/METADATA +2 -2
- {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/RECORD +36 -27
- databricks/bundles/compute/__init__.py +0 -0
- {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/WHEEL +0 -0
- {databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/licenses/LICENSE +0 -0

databricks/bundles/pipelines/_models/init_script_info.py
CHANGED

@@ -8,6 +8,10 @@ from databricks.bundles.pipelines._models.adlsgen2_info import (
     Adlsgen2Info,
     Adlsgen2InfoParam,
 )
+from databricks.bundles.pipelines._models.dbfs_storage_info import (
+    DbfsStorageInfo,
+    DbfsStorageInfoParam,
+)
 from databricks.bundles.pipelines._models.gcs_storage_info import (
     GcsStorageInfo,
     GcsStorageInfoParam,

@@ -45,6 +49,12 @@ class InitScriptInfo:
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfo] = None
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfo] = None
     """
     destination needs to be provided, e.g.

@@ -93,6 +103,12 @@ class InitScriptInfoDict(TypedDict, total=False):
     Contains the Azure Data Lake Storage destination path
     """
 
+    dbfs: VariableOrOptional[DbfsStorageInfoParam]
+    """
+    [DEPRECATED] destination needs to be provided. e.g.
+    `{ "dbfs": { "destination" : "dbfs:/home/cluster_log" } }`
+    """
+
     file: VariableOrOptional[LocalFileInfoParam]
     """
     destination needs to be provided, e.g.
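
A minimal sketch of the resulting API, assuming `InitScriptInfo` is re-exported from `databricks.bundles.pipelines` (the diff itself only shows the private `_models` module); the destination paths are hypothetical except where they mirror the docstring:

```python
# Sketch under assumptions: the import path is the presumed public re-export
# of the private module changed above; the file path is hypothetical.
from databricks.bundles.pipelines import InitScriptInfo

# Preferred: a non-deprecated destination such as the existing `file` field.
script = InitScriptInfo.from_dict(
    {"file": {"destination": "file:/opt/scripts/setup.sh"}}
)

# The new `dbfs` field is deprecated on arrival but keeps legacy
# configurations round-tripping; this path is the docstring's own example.
legacy = InitScriptInfo.from_dict(
    {"dbfs": {"destination": "dbfs:/home/cluster_log"}}
)
```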

databricks/bundles/pipelines/_models/pipeline.py
CHANGED

@@ -203,7 +203,7 @@ class Pipeline(Resource):
 
     target: VariableOrOptional[str] = None
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
     @classmethod

@@ -361,7 +361,7 @@ class PipelineDict(TypedDict, total=False):
 
     target: VariableOrOptional[str]
     """
-    Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
+    [DEPRECATED] Target schema (database) to add tables in this pipeline to. Exactly one of `schema` or `target` must be specified. To publish to Unity Catalog, also specify `catalog`. This legacy field is deprecated for pipeline creation in favor of the `schema` field.
     """
 
 
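
With `target` now flagged [DEPRECATED] in both the dataclass and the TypedDict, new definitions should prefer `schema` (plus `catalog` for Unity Catalog), per the docstring. A hedged sketch; the pipeline name and catalog/schema values are illustrative:

```python
from databricks.bundles.pipelines import Pipeline

# `schema` replaces the legacy `target`; `catalog` publishes to Unity Catalog.
pipeline = Pipeline.from_dict(
    {
        "name": "my_pipeline",  # hypothetical
        "catalog": "main",      # hypothetical
        "schema": "analytics",  # hypothetical
    }
)
```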

databricks/bundles/pipelines/_models/postgres_catalog_config.py
ADDED

@@ -0,0 +1,50 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_slot_config import (
+    PostgresSlotConfig,
+    PostgresSlotConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PG-specific catalog-level configuration parameters
+    """
+
+    slot_config: VariableOrOptional[PostgresSlotConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresCatalogConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class PostgresCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    slot_config: VariableOrOptional[PostgresSlotConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Optional. The Postgres slot configuration to use for logical replication
+    """
+
+
+PostgresCatalogConfigParam = PostgresCatalogConfigDict | PostgresCatalogConfig
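
Like the other generated models, this class accepts either the dataclass or the dict form wherever a `PostgresCatalogConfigParam` is expected. A sketch using the private import paths verbatim from this diff; the final equality assumes `_transform` builds a field-equal dataclass:

```python
from databricks.bundles.pipelines._models.postgres_catalog_config import (
    PostgresCatalogConfig,
)
from databricks.bundles.pipelines._models.postgres_slot_config import (
    PostgresSlotConfig,
)

explicit = PostgresCatalogConfig(
    slot_config=PostgresSlotConfig(
        publication_name="my_pub",  # hypothetical
        slot_name="my_slot",        # hypothetical
    )
)
from_plain_dict = PostgresCatalogConfig.from_dict(
    {"slot_config": {"publication_name": "my_pub", "slot_name": "my_slot"}}
)
# Dataclasses compare field by field, so the two forms should coincide.
assert explicit == from_plain_dict
```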

databricks/bundles/pipelines/_models/postgres_slot_config.py
ADDED

@@ -0,0 +1,60 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class PostgresSlotConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    PostgresSlotConfig contains the configuration for a Postgres logical replication slot
+    """
+
+    publication_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+    @classmethod
+    def from_dict(cls, value: "PostgresSlotConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "PostgresSlotConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class PostgresSlotConfigDict(TypedDict, total=False):
+    """"""
+
+    publication_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the publication to use for the Postgres source
+    """
+
+    slot_name: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    The name of the logical replication slot to use for the Postgres source
+    """
+
+
+PostgresSlotConfigParam = PostgresSlotConfigDict | PostgresSlotConfig

databricks/bundles/pipelines/_models/source_catalog_config.py
ADDED

@@ -0,0 +1,64 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.postgres_catalog_config import (
+    PostgresCatalogConfig,
+    PostgresCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceCatalogConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+
+    SourceCatalogConfig contains catalog-level custom configuration parameters for each source
+    """
+
+    postgres: VariableOrOptional[PostgresCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceCatalogConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceCatalogConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class SourceCatalogConfigDict(TypedDict, total=False):
+    """"""
+
+    postgres: VariableOrOptional[PostgresCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Postgres-specific catalog-level configuration parameters
+    """
+
+    source_catalog: VariableOrOptional[str]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Source catalog name
+    """
+
+
+SourceCatalogConfigParam = SourceCatalogConfigDict | SourceCatalogConfig

databricks/bundles/pipelines/_models/source_config.py
ADDED

@@ -0,0 +1,48 @@
+from dataclasses import dataclass
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOrOptional
+from databricks.bundles.pipelines._models.source_catalog_config import (
+    SourceCatalogConfig,
+    SourceCatalogConfigParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class SourceConfig:
+    """
+    :meta private: [EXPERIMENTAL]
+    """
+
+    catalog: VariableOrOptional[SourceCatalogConfig] = None
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+    @classmethod
+    def from_dict(cls, value: "SourceConfigDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "SourceConfigDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class SourceConfigDict(TypedDict, total=False):
+    """"""
+
+    catalog: VariableOrOptional[SourceCatalogConfigParam]
+    """
+    :meta private: [EXPERIMENTAL]
+
+    Catalog-level source configuration parameters
+    """
+
+
+SourceConfigParam = SourceConfigDict | SourceConfig
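
Taken together, the four new experimental models nest as `SourceConfig` → `SourceCatalogConfig` → `PostgresCatalogConfig` → `PostgresSlotConfig`, so a single plain dict can describe a whole source. A sketch with hypothetical names:

```python
from databricks.bundles.pipelines._models.source_config import SourceConfig

# One nested dict mirrors the class hierarchy introduced above.
source = SourceConfig.from_dict(
    {
        "catalog": {
            "source_catalog": "postgres_prod",  # hypothetical
            "postgres": {
                "slot_config": {
                    "publication_name": "dbx_publication",  # hypothetical
                    "slot_name": "dbx_slot",                # hypothetical
                }
            },
        }
    }
)
```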

databricks/bundles/version.py
CHANGED

@@ -1 +1 @@
-__version__ = "0.265.0"
+__version__ = "0.267.0"

databricks/bundles/volumes/__init__.py
ADDED

@@ -0,0 +1,25 @@
+__all__ = [
+    "Volume",
+    "VolumeDict",
+    "VolumeGrant",
+    "VolumeGrantDict",
+    "VolumeGrantParam",
+    "VolumeGrantPrivilege",
+    "VolumeGrantPrivilegeParam",
+    "VolumeParam",
+    "VolumeType",
+    "VolumeTypeParam",
+]
+
+
+from databricks.bundles.volumes._models.volume import Volume, VolumeDict, VolumeParam
+from databricks.bundles.volumes._models.volume_grant import (
+    VolumeGrant,
+    VolumeGrantDict,
+    VolumeGrantParam,
+)
+from databricks.bundles.volumes._models.volume_grant_privilege import (
+    VolumeGrantPrivilege,
+    VolumeGrantPrivilegeParam,
+)
+from databricks.bundles.volumes._models.volume_type import VolumeType, VolumeTypeParam

databricks/bundles/volumes/_models/volume.py
ADDED

@@ -0,0 +1,96 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._resource import Resource
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import (
+    VariableOr,
+    VariableOrList,
+    VariableOrOptional,
+)
+from databricks.bundles.volumes._models.volume_grant import (
+    VolumeGrant,
+    VolumeGrantParam,
+)
+from databricks.bundles.volumes._models.volume_type import VolumeType, VolumeTypeParam
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class Volume(Resource):
+    """"""
+
+    catalog_name: VariableOr[str]
+    """
+    The name of the catalog where the schema and the volume are
+    """
+
+    name: VariableOr[str]
+    """
+    The name of the volume
+    """
+
+    schema_name: VariableOr[str]
+    """
+    The name of the schema where the volume is
+    """
+
+    comment: VariableOrOptional[str] = None
+    """
+    The comment attached to the volume
+    """
+
+    grants: VariableOrList[VolumeGrant] = field(default_factory=list)
+
+    storage_location: VariableOrOptional[str] = None
+    """
+    The storage location on the cloud
+    """
+
+    volume_type: VariableOrOptional[VolumeType] = None
+
+    @classmethod
+    def from_dict(cls, value: "VolumeDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "VolumeDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class VolumeDict(TypedDict, total=False):
+    """"""
+
+    catalog_name: VariableOr[str]
+    """
+    The name of the catalog where the schema and the volume are
+    """
+
+    name: VariableOr[str]
+    """
+    The name of the volume
+    """
+
+    schema_name: VariableOr[str]
+    """
+    The name of the schema where the volume is
+    """
+
+    comment: VariableOrOptional[str]
+    """
+    The comment attached to the volume
+    """
+
+    grants: VariableOrList[VolumeGrantParam]
+
+    storage_location: VariableOrOptional[str]
+    """
+    The storage location on the cloud
+    """
+
+    volume_type: VariableOrOptional[VolumeTypeParam]
+
+
+VolumeParam = VolumeDict | Volume
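
A sketch of declaring the new resource; all names are hypothetical, and how a `Volume` is registered with `Resources` comes from the `core` changes in the file list rather than anything shown here:

```python
from databricks.bundles.volumes import Volume, VolumeGrant, VolumeGrantPrivilege

# All field names below are taken from the generated model; values are made up.
volume = Volume(
    catalog_name="main",     # hypothetical
    schema_name="default",   # hypothetical
    name="raw_files",        # hypothetical
    comment="Landing zone for raw files",
    grants=[
        VolumeGrant(
            principal="data-engineers",  # hypothetical group name
            privileges=[
                VolumeGrantPrivilege.READ_VOLUME,
                VolumeGrantPrivilege.WRITE_VOLUME,
            ],
        )
    ],
)
```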

databricks/bundles/volumes/_models/volume_grant.py
ADDED

@@ -0,0 +1,40 @@
+from dataclasses import dataclass, field
+from typing import TYPE_CHECKING, TypedDict
+
+from databricks.bundles.core._transform import _transform
+from databricks.bundles.core._transform_to_json import _transform_to_json_value
+from databricks.bundles.core._variable import VariableOr, VariableOrList
+from databricks.bundles.volumes._models.volume_grant_privilege import (
+    VolumeGrantPrivilege,
+    VolumeGrantPrivilegeParam,
+)
+
+if TYPE_CHECKING:
+    from typing_extensions import Self
+
+
+@dataclass(kw_only=True)
+class VolumeGrant:
+    """"""
+
+    principal: VariableOr[str]
+
+    privileges: VariableOrList[VolumeGrantPrivilege] = field(default_factory=list)
+
+    @classmethod
+    def from_dict(cls, value: "VolumeGrantDict") -> "Self":
+        return _transform(cls, value)
+
+    def as_dict(self) -> "VolumeGrantDict":
+        return _transform_to_json_value(self)  # type:ignore
+
+
+class VolumeGrantDict(TypedDict, total=False):
+    """"""
+
+    principal: VariableOr[str]
+
+    privileges: VariableOrList[VolumeGrantPrivilegeParam]
+
+
+VolumeGrantParam = VolumeGrantDict | VolumeGrant
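
Because `VolumeGrantPrivilegeParam` (next file) also admits plain string literals, the dict form of a grant stays terse. A sketch with a hypothetical principal:

```python
from databricks.bundles.volumes import VolumeGrant

# String literals are accepted wherever VolumeGrantPrivilegeParam is expected.
grant = VolumeGrant.from_dict(
    {"principal": "account users", "privileges": ["READ_VOLUME", "WRITE_VOLUME"]}
)
```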

databricks/bundles/volumes/_models/volume_grant_privilege.py
ADDED

@@ -0,0 +1,16 @@
+from enum import Enum
+from typing import Literal
+
+
+class VolumeGrantPrivilege(Enum):
+    ALL_PRIVILEGES = "ALL_PRIVILEGES"
+    APPLY_TAG = "APPLY_TAG"
+    MANAGE = "MANAGE"
+    READ_VOLUME = "READ_VOLUME"
+    WRITE_VOLUME = "WRITE_VOLUME"
+
+
+VolumeGrantPrivilegeParam = (
+    Literal["ALL_PRIVILEGES", "APPLY_TAG", "MANAGE", "READ_VOLUME", "WRITE_VOLUME"]
+    | VolumeGrantPrivilege
+)

databricks/bundles/volumes/_models/volume_type.py
ADDED

@@ -0,0 +1,14 @@
+from enum import Enum
+from typing import Literal
+
+
+class VolumeType(Enum):
+    """
+    The type of the volume. An external volume is located in the specified external location. A managed volume is located in the default location which is specified by the parent schema, or the parent catalog, or the Metastore. [Learn more](https://docs.databricks.com/aws/en/volumes/managed-vs-external)
+    """
+
+    EXTERNAL = "EXTERNAL"
+    MANAGED = "MANAGED"
+
+
+VolumeTypeParam = Literal["EXTERNAL", "MANAGED"] | VolumeType
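
Per the `VolumeType` docstring, an EXTERNAL volume lives at an explicit storage location while a MANAGED volume inherits its location from the schema, catalog, or metastore. A sketch of the external case; the bucket path and names are hypothetical:

```python
from databricks.bundles.volumes import Volume, VolumeType

external = Volume(
    catalog_name="main",    # hypothetical
    schema_name="default",  # hypothetical
    name="exports",         # hypothetical
    volume_type=VolumeType.EXTERNAL,
    storage_location="s3://my-bucket/exports",  # hypothetical bucket
)
```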

{databricks_bundles-0.265.0.dist-info → databricks_bundles-0.267.0.dist-info}/METADATA
CHANGED

@@ -1,6 +1,6 @@
 Metadata-Version: 2.4
 Name: databricks-bundles
-Version: 0.265.0
+Version: 0.267.0
 Summary: Python support for Databricks Asset Bundles
 Author-email: Gleb Kanterov <gleb.kanterov@databricks.com>
 Requires-Python: >=3.10
@@ -22,7 +22,7 @@ Reference documentation is available at https://databricks.github.io/cli/experim
 
 To use `databricks-bundles`, you must first:
 
-1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.265.0 or above
+1. Install the [Databricks CLI](https://github.com/databricks/cli), version 0.267.0 or above
 2. Authenticate to your Databricks workspace if you have not done so already:
 
    ```bash