databricks-sdk 0.53.0__py3-none-any.whl → 0.55.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release.
This version of databricks-sdk might be problematic.
- databricks/sdk/__init__.py +298 -255
- databricks/sdk/config.py +15 -4
- databricks/sdk/credentials_provider.py +101 -55
- databricks/sdk/errors/base.py +1 -30
- databricks/sdk/oauth.py +0 -5
- databricks/sdk/oidc.py +206 -0
- databricks/sdk/service/apps.py +58 -0
- databricks/sdk/service/catalog.py +1198 -181
- databricks/sdk/service/cleanrooms.py +116 -1
- databricks/sdk/service/compute.py +33 -68
- databricks/sdk/service/dashboards.py +7 -0
- databricks/sdk/service/iam.py +167 -103
- databricks/sdk/service/jobs.py +7 -6
- databricks/sdk/service/ml.py +1230 -55
- databricks/sdk/service/oauth2.py +17 -0
- databricks/sdk/service/pipelines.py +105 -0
- databricks/sdk/service/serving.py +314 -0
- databricks/sdk/service/settings.py +1284 -59
- databricks/sdk/service/sharing.py +388 -2
- databricks/sdk/service/sql.py +53 -84
- databricks/sdk/service/vectorsearch.py +0 -28
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/METADATA +1 -1
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/RECORD +28 -27
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/licenses/LICENSE +0 -0
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/licenses/NOTICE +0 -0
- {databricks_sdk-0.53.0.dist-info → databricks_sdk-0.55.0.dist-info}/top_level.txt +0 -0
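Most of the catalog.py additions below are generated dataclasses that follow the SDK's usual as_dict/as_shallow_dict/from_dict pattern. As a quick orientation, here is a minimal sketch (based only on the classes shown in the diff; the queue URL is a placeholder value) of round-tripping the new file-event queue types:

from databricks.sdk.service.catalog import AwsSqsQueue, FileEventQueue

# Customer-provided SQS queue for file events; the queue_url format is documented on AwsSqsQueue below.
queue = AwsSqsQueue(queue_url="https://sqs.us-east-1.amazonaws.com/123456789012/my-queue")  # placeholder URL
file_events = FileEventQueue(provided_sqs=queue)

# as_dict() builds the JSON request body; from_dict() reverses it.
body = file_events.as_dict()
assert body == {"provided_sqs": {"queue_url": queue.queue_url}}
assert FileEventQueue.from_dict(body).provided_sqs.queue_url == queue.queue_url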
--- databricks/sdk/service/catalog.py (0.53.0)
+++ databricks/sdk/service/catalog.py (0.55.0)
@@ -587,6 +587,39 @@ class AwsIamRoleResponse:
         )
 
 
+@dataclass
+class AwsSqsQueue:
+    managed_resource_id: Optional[str] = None
+    """Unique identifier included in the name of file events managed cloud resources."""
+
+    queue_url: Optional[str] = None
+    """The AQS queue url in the format https://sqs.{region}.amazonaws.com/{account id}/{queue name}
+    REQUIRED for provided_sqs."""
+
+    def as_dict(self) -> dict:
+        """Serializes the AwsSqsQueue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.queue_url is not None:
+            body["queue_url"] = self.queue_url
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AwsSqsQueue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.queue_url is not None:
+            body["queue_url"] = self.queue_url
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AwsSqsQueue:
+        """Deserializes the AwsSqsQueue from a dictionary."""
+        return cls(managed_resource_id=d.get("managed_resource_id", None), queue_url=d.get("queue_url", None))
+
+
 @dataclass
 class AzureActiveDirectoryToken:
     """Azure Active Directory token, essentially the Oauth token for Azure Service Principal or Managed
@@ -757,6 +790,60 @@ class AzureManagedIdentityResponse:
         )
 
 
+@dataclass
+class AzureQueueStorage:
+    managed_resource_id: Optional[str] = None
+    """Unique identifier included in the name of file events managed cloud resources."""
+
+    queue_url: Optional[str] = None
+    """The AQS queue url in the format https://{storage account}.queue.core.windows.net/{queue name}
+    REQUIRED for provided_aqs."""
+
+    resource_group: Optional[str] = None
+    """The resource group for the queue, event grid subscription, and external location storage
+    account. ONLY REQUIRED for locations with a service principal storage credential"""
+
+    subscription_id: Optional[str] = None
+    """OPTIONAL: The subscription id for the queue, event grid subscription, and external location
+    storage account. REQUIRED for locations with a service principal storage credential"""
+
+    def as_dict(self) -> dict:
+        """Serializes the AzureQueueStorage into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.queue_url is not None:
+            body["queue_url"] = self.queue_url
+        if self.resource_group is not None:
+            body["resource_group"] = self.resource_group
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the AzureQueueStorage into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.queue_url is not None:
+            body["queue_url"] = self.queue_url
+        if self.resource_group is not None:
+            body["resource_group"] = self.resource_group
+        if self.subscription_id is not None:
+            body["subscription_id"] = self.subscription_id
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> AzureQueueStorage:
+        """Deserializes the AzureQueueStorage from a dictionary."""
+        return cls(
+            managed_resource_id=d.get("managed_resource_id", None),
+            queue_url=d.get("queue_url", None),
+            resource_group=d.get("resource_group", None),
+            subscription_id=d.get("subscription_id", None),
+        )
+
+
 @dataclass
 class AzureServicePrincipal:
     """The Azure service principal configuration. Only applicable when purpose is **STORAGE**."""
@@ -903,7 +990,8 @@ class CatalogInfo:
     provisioning_info: Optional[ProvisioningInfo] = None
     """Status of an asynchronously provisioned resource."""
 
-    securable_type: Optional[
+    securable_type: Optional[SecurableType] = None
+    """The type of Unity Catalog securable."""
 
     share_name: Optional[str] = None
     """The name of the share under the share provider."""
@@ -958,7 +1046,7 @@ class CatalogInfo:
         if self.provisioning_info:
             body["provisioning_info"] = self.provisioning_info.as_dict()
         if self.securable_type is not None:
-            body["securable_type"] = self.securable_type
+            body["securable_type"] = self.securable_type.value
         if self.share_name is not None:
             body["share_name"] = self.share_name
         if self.storage_location is not None:
@@ -1045,7 +1133,7 @@ class CatalogInfo:
             properties=d.get("properties", None),
             provider_name=d.get("provider_name", None),
             provisioning_info=_from_dict(d, "provisioning_info", ProvisioningInfo),
-            securable_type=d
+            securable_type=_enum(d, "securable_type", SecurableType),
             share_name=d.get("share_name", None),
             storage_location=d.get("storage_location", None),
             storage_root=d.get("storage_root", None),
@@ -1055,7 +1143,6 @@ class CatalogInfo:
 
 
 class CatalogIsolationMode(Enum):
-    """Whether the current securable is accessible from all workspaces or a specific set of workspaces."""
 
     ISOLATED = "ISOLATED"
     OPEN = "OPEN"
@@ -1066,8 +1153,11 @@ class CatalogType(Enum):
 
     DELTASHARING_CATALOG = "DELTASHARING_CATALOG"
     FOREIGN_CATALOG = "FOREIGN_CATALOG"
+    INTERNAL_CATALOG = "INTERNAL_CATALOG"
     MANAGED_CATALOG = "MANAGED_CATALOG"
+    MANAGED_ONLINE_CATALOG = "MANAGED_ONLINE_CATALOG"
     SYSTEM_CATALOG = "SYSTEM_CATALOG"
+    UNKNOWN_CATALOG_TYPE = "UNKNOWN_CATALOG_TYPE"
 
 
 @dataclass
@@ -1772,12 +1862,12 @@ class CreateExternalLocation:
     credential_name: str
     """Name of the storage credential used with this location."""
 
-    access_point: Optional[str] = None
-    """The AWS access point to use when accesing s3 for this external location."""
-
     comment: Optional[str] = None
     """User-provided free-form text description."""
 
+    enable_file_events: Optional[bool] = None
+    """[Create:OPT Update:OPT] Whether to enable file events on this external location."""
+
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
 
@@ -1786,6 +1876,9 @@ class CreateExternalLocation:
     enabled, the access to the location falls back to cluster credentials if UC credentials are not
     sufficient."""
 
+    file_event_queue: Optional[FileEventQueue] = None
+    """[Create:OPT Update:OPT] File event queue settings."""
+
     read_only: Optional[bool] = None
     """Indicates whether the external location is read-only."""
 
@@ -1795,16 +1888,18 @@ class CreateExternalLocation:
     def as_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.comment is not None:
             body["comment"] = self.comment
         if self.credential_name is not None:
            body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details.as_dict()
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue.as_dict()
         if self.name is not None:
             body["name"] = self.name
         if self.read_only is not None:
@@ -1818,16 +1913,18 @@ class CreateExternalLocation:
     def as_shallow_dict(self) -> dict:
         """Serializes the CreateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.comment is not None:
             body["comment"] = self.comment
         if self.credential_name is not None:
             body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue
         if self.name is not None:
             body["name"] = self.name
         if self.read_only is not None:
@@ -1842,11 +1939,12 @@ class CreateExternalLocation:
     def from_dict(cls, d: Dict[str, Any]) -> CreateExternalLocation:
         """Deserializes the CreateExternalLocation from a dictionary."""
         return cls(
-            access_point=d.get("access_point", None),
             comment=d.get("comment", None),
             credential_name=d.get("credential_name", None),
+            enable_file_events=d.get("enable_file_events", None),
             encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
             fallback=d.get("fallback", None),
+            file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue),
             name=d.get("name", None),
             read_only=d.get("read_only", None),
             skip_validation=d.get("skip_validation", None),
@@ -2864,33 +2962,6 @@ class CredentialValidationResult:
         return cls(message=d.get("message", None), result=_enum(d, "result", ValidateCredentialResult))
 
 
-@dataclass
-class CurrentWorkspaceBindings:
-    """Currently assigned workspaces"""
-
-    workspaces: Optional[List[int]] = None
-    """A list of workspace IDs."""
-
-    def as_dict(self) -> dict:
-        """Serializes the CurrentWorkspaceBindings into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.workspaces:
-            body["workspaces"] = [v for v in self.workspaces]
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the CurrentWorkspaceBindings into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.workspaces:
-            body["workspaces"] = self.workspaces
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> CurrentWorkspaceBindings:
-        """Deserializes the CurrentWorkspaceBindings from a dictionary."""
-        return cls(workspaces=d.get("workspaces", None))
-
-
 class DataSourceFormat(Enum):
     """Data source format"""
 
@@ -2919,6 +2990,183 @@ class DataSourceFormat(Enum):
     WORKDAY_RAAS_FORMAT = "WORKDAY_RAAS_FORMAT"
 
 
+@dataclass
+class DatabaseCatalog:
+    name: str
+    """The name of the catalog in UC."""
+
+    database_instance_name: str
+    """The name of the DatabaseInstance housing the database."""
+
+    database_name: str
+    """The name of the database (in a instance) associated with the catalog."""
+
+    create_database_if_not_exists: Optional[bool] = None
+
+    uid: Optional[str] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseCatalog into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_database_if_not_exists is not None:
+            body["create_database_if_not_exists"] = self.create_database_if_not_exists
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.database_name is not None:
+            body["database_name"] = self.database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseCatalog into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_database_if_not_exists is not None:
+            body["create_database_if_not_exists"] = self.create_database_if_not_exists
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.database_name is not None:
+            body["database_name"] = self.database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseCatalog:
+        """Deserializes the DatabaseCatalog from a dictionary."""
+        return cls(
+            create_database_if_not_exists=d.get("create_database_if_not_exists", None),
+            database_instance_name=d.get("database_instance_name", None),
+            database_name=d.get("database_name", None),
+            name=d.get("name", None),
+            uid=d.get("uid", None),
+        )
+
+
+@dataclass
+class DatabaseInstance:
+    """A DatabaseInstance represents a logical Postgres instance, comprised of both compute and
+    storage."""
+
+    name: str
+    """The name of the instance. This is the unique identifier for the instance."""
+
+    admin_password: Optional[str] = None
+    """Password for admin user to create. If not provided, no user will be created."""
+
+    admin_rolename: Optional[str] = None
+    """Name of the admin role for the instance. If not provided, defaults to 'databricks_admin'."""
+
+    capacity: Optional[str] = None
+    """The sku of the instance. Valid values are "CU_1", "CU_2", "CU_4"."""
+
+    creation_time: Optional[str] = None
+    """The timestamp when the instance was created."""
+
+    creator: Optional[str] = None
+    """The email of the creator of the instance."""
+
+    pg_version: Optional[str] = None
+    """The version of Postgres running on the instance."""
+
+    read_write_dns: Optional[str] = None
+    """The DNS endpoint to connect to the instance for read+write access."""
+
+    state: Optional[DatabaseInstanceState] = None
+    """The current state of the instance."""
+
+    stopped: Optional[bool] = None
+    """Whether the instance is stopped."""
+
+    uid: Optional[str] = None
+    """An immutable UUID identifier for the instance."""
+
+    def as_dict(self) -> dict:
+        """Serializes the DatabaseInstance into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.admin_password is not None:
+            body["admin_password"] = self.admin_password
+        if self.admin_rolename is not None:
+            body["admin_rolename"] = self.admin_rolename
+        if self.capacity is not None:
+            body["capacity"] = self.capacity
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pg_version is not None:
+            body["pg_version"] = self.pg_version
+        if self.read_write_dns is not None:
+            body["read_write_dns"] = self.read_write_dns
+        if self.state is not None:
+            body["state"] = self.state.value
+        if self.stopped is not None:
+            body["stopped"] = self.stopped
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DatabaseInstance into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.admin_password is not None:
+            body["admin_password"] = self.admin_password
+        if self.admin_rolename is not None:
+            body["admin_rolename"] = self.admin_rolename
+        if self.capacity is not None:
+            body["capacity"] = self.capacity
+        if self.creation_time is not None:
+            body["creation_time"] = self.creation_time
+        if self.creator is not None:
+            body["creator"] = self.creator
+        if self.name is not None:
+            body["name"] = self.name
+        if self.pg_version is not None:
+            body["pg_version"] = self.pg_version
+        if self.read_write_dns is not None:
+            body["read_write_dns"] = self.read_write_dns
+        if self.state is not None:
+            body["state"] = self.state
+        if self.stopped is not None:
+            body["stopped"] = self.stopped
+        if self.uid is not None:
+            body["uid"] = self.uid
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DatabaseInstance:
+        """Deserializes the DatabaseInstance from a dictionary."""
+        return cls(
+            admin_password=d.get("admin_password", None),
+            admin_rolename=d.get("admin_rolename", None),
+            capacity=d.get("capacity", None),
+            creation_time=d.get("creation_time", None),
+            creator=d.get("creator", None),
+            name=d.get("name", None),
+            pg_version=d.get("pg_version", None),
+            read_write_dns=d.get("read_write_dns", None),
+            state=_enum(d, "state", DatabaseInstanceState),
+            stopped=d.get("stopped", None),
+            uid=d.get("uid", None),
+        )
+
+
+class DatabaseInstanceState(Enum):
+
+    AVAILABLE = "AVAILABLE"
+    DELETING = "DELETING"
+    FAILING_OVER = "FAILING_OVER"
+    STARTING = "STARTING"
+    STOPPED = "STOPPED"
+    UPDATING = "UPDATING"
+
+
 @dataclass
 class DatabricksGcpServiceAccount:
     """GCP long-lived credential. Databricks-created Google Cloud Storage service account."""
@@ -3052,6 +3300,42 @@ class DeleteCredentialResponse:
         return cls()
 
 
+@dataclass
+class DeleteDatabaseCatalogResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDatabaseCatalogResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDatabaseCatalogResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseCatalogResponse:
+        """Deserializes the DeleteDatabaseCatalogResponse from a dictionary."""
+        return cls()
+
+
+@dataclass
+class DeleteDatabaseInstanceResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteDatabaseInstanceResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteDatabaseInstanceResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeleteDatabaseInstanceResponse:
+        """Deserializes the DeleteDatabaseInstanceResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeleteResponse:
     def as_dict(self) -> dict:
@@ -3070,6 +3354,24 @@ class DeleteResponse:
         return cls()
 
 
+@dataclass
+class DeleteSyncedDatabaseTableResponse:
+    def as_dict(self) -> dict:
+        """Serializes the DeleteSyncedDatabaseTableResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the DeleteSyncedDatabaseTableResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> DeleteSyncedDatabaseTableResponse:
+        """Deserializes the DeleteSyncedDatabaseTableResponse from a dictionary."""
+        return cls()
+
+
 @dataclass
 class DeltaRuntimePropertiesKvPairs:
     """Properties pertaining to the current state of the delta table as given by the commit server.
@@ -3336,13 +3638,55 @@ class EffectivePrivilegeAssignment:
 
 
 class EnablePredictiveOptimization(Enum):
-    """Whether predictive optimization should be enabled for this object and objects under it."""
 
     DISABLE = "DISABLE"
     ENABLE = "ENABLE"
     INHERIT = "INHERIT"
 
 
+@dataclass
+class EnableRequest:
+    catalog_name: Optional[str] = None
+    """the catalog for which the system schema is to enabled in"""
+
+    metastore_id: Optional[str] = None
+    """The metastore ID under which the system schema lives."""
+
+    schema_name: Optional[str] = None
+    """Full name of the system schema."""
+
+    def as_dict(self) -> dict:
+        """Serializes the EnableRequest into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the EnableRequest into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.catalog_name is not None:
+            body["catalog_name"] = self.catalog_name
+        if self.metastore_id is not None:
+            body["metastore_id"] = self.metastore_id
+        if self.schema_name is not None:
+            body["schema_name"] = self.schema_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> EnableRequest:
+        """Deserializes the EnableRequest from a dictionary."""
+        return cls(
+            catalog_name=d.get("catalog_name", None),
+            metastore_id=d.get("metastore_id", None),
+            schema_name=d.get("schema_name", None),
+        )
+
+
 @dataclass
 class EnableResponse:
     def as_dict(self) -> dict:
@@ -3390,9 +3734,6 @@ class EncryptionDetails:
 
 @dataclass
 class ExternalLocationInfo:
-    access_point: Optional[str] = None
-    """The AWS access point to use when accesing s3 for this external location."""
-
     browse_only: Optional[bool] = None
     """Indicates whether the principal is limited to retrieving metadata for the associated object
     through the BROWSE privilege when include_browse is enabled in the request."""
@@ -3412,6 +3753,9 @@ class ExternalLocationInfo:
     credential_name: Optional[str] = None
     """Name of the storage credential used with this location."""
 
+    enable_file_events: Optional[bool] = None
+    """[Create:OPT Update:OPT] Whether to enable file events on this external location."""
+
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
 
@@ -3420,6 +3764,9 @@ class ExternalLocationInfo:
     enabled, the access to the location falls back to cluster credentials if UC credentials are not
     sufficient."""
 
+    file_event_queue: Optional[FileEventQueue] = None
+    """[Create:OPT Update:OPT] File event queue settings."""
+
     isolation_mode: Optional[IsolationMode] = None
 
     metastore_id: Optional[str] = None
@@ -3446,8 +3793,6 @@ class ExternalLocationInfo:
     def as_dict(self) -> dict:
         """Serializes the ExternalLocationInfo into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.browse_only is not None:
             body["browse_only"] = self.browse_only
         if self.comment is not None:
@@ -3460,10 +3805,14 @@ class ExternalLocationInfo:
             body["credential_id"] = self.credential_id
         if self.credential_name is not None:
             body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details.as_dict()
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue.as_dict()
         if self.isolation_mode is not None:
             body["isolation_mode"] = self.isolation_mode.value
         if self.metastore_id is not None:
@@ -3485,8 +3834,6 @@ class ExternalLocationInfo:
     def as_shallow_dict(self) -> dict:
         """Serializes the ExternalLocationInfo into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.browse_only is not None:
             body["browse_only"] = self.browse_only
         if self.comment is not None:
@@ -3499,10 +3846,14 @@ class ExternalLocationInfo:
             body["credential_id"] = self.credential_id
         if self.credential_name is not None:
             body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue
         if self.isolation_mode is not None:
             body["isolation_mode"] = self.isolation_mode
         if self.metastore_id is not None:
@@ -3525,15 +3876,16 @@ class ExternalLocationInfo:
     def from_dict(cls, d: Dict[str, Any]) -> ExternalLocationInfo:
         """Deserializes the ExternalLocationInfo from a dictionary."""
         return cls(
-            access_point=d.get("access_point", None),
             browse_only=d.get("browse_only", None),
             comment=d.get("comment", None),
             created_at=d.get("created_at", None),
             created_by=d.get("created_by", None),
             credential_id=d.get("credential_id", None),
             credential_name=d.get("credential_name", None),
+            enable_file_events=d.get("enable_file_events", None),
             encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
             fallback=d.get("fallback", None),
+            file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue),
             isolation_mode=_enum(d, "isolation_mode", IsolationMode),
             metastore_id=d.get("metastore_id", None),
             name=d.get("name", None),
@@ -3586,6 +3938,67 @@ class FailedStatus:
         )
 
 
+@dataclass
+class FileEventQueue:
+    managed_aqs: Optional[AzureQueueStorage] = None
+
+    managed_pubsub: Optional[GcpPubsub] = None
+
+    managed_sqs: Optional[AwsSqsQueue] = None
+
+    provided_aqs: Optional[AzureQueueStorage] = None
+
+    provided_pubsub: Optional[GcpPubsub] = None
+
+    provided_sqs: Optional[AwsSqsQueue] = None
+
+    def as_dict(self) -> dict:
+        """Serializes the FileEventQueue into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.managed_aqs:
+            body["managed_aqs"] = self.managed_aqs.as_dict()
+        if self.managed_pubsub:
+            body["managed_pubsub"] = self.managed_pubsub.as_dict()
+        if self.managed_sqs:
+            body["managed_sqs"] = self.managed_sqs.as_dict()
+        if self.provided_aqs:
+            body["provided_aqs"] = self.provided_aqs.as_dict()
+        if self.provided_pubsub:
+            body["provided_pubsub"] = self.provided_pubsub.as_dict()
+        if self.provided_sqs:
+            body["provided_sqs"] = self.provided_sqs.as_dict()
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the FileEventQueue into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.managed_aqs:
+            body["managed_aqs"] = self.managed_aqs
+        if self.managed_pubsub:
+            body["managed_pubsub"] = self.managed_pubsub
+        if self.managed_sqs:
+            body["managed_sqs"] = self.managed_sqs
+        if self.provided_aqs:
+            body["provided_aqs"] = self.provided_aqs
+        if self.provided_pubsub:
+            body["provided_pubsub"] = self.provided_pubsub
+        if self.provided_sqs:
+            body["provided_sqs"] = self.provided_sqs
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> FileEventQueue:
+        """Deserializes the FileEventQueue from a dictionary."""
+        return cls(
+            managed_aqs=_from_dict(d, "managed_aqs", AzureQueueStorage),
+            managed_pubsub=_from_dict(d, "managed_pubsub", GcpPubsub),
+            managed_sqs=_from_dict(d, "managed_sqs", AwsSqsQueue),
+            provided_aqs=_from_dict(d, "provided_aqs", AzureQueueStorage),
+            provided_pubsub=_from_dict(d, "provided_pubsub", GcpPubsub),
+            provided_sqs=_from_dict(d, "provided_sqs", AwsSqsQueue),
+        )
+
+
 @dataclass
 class ForeignKeyConstraint:
     name: str
@@ -4136,6 +4549,41 @@ class GcpOauthToken:
         return cls(oauth_token=d.get("oauth_token", None))
 
 
+@dataclass
+class GcpPubsub:
+    managed_resource_id: Optional[str] = None
+    """Unique identifier included in the name of file events managed cloud resources."""
+
+    subscription_name: Optional[str] = None
+    """The Pub/Sub subscription name in the format projects/{project}/subscriptions/{subscription name}
+    REQUIRED for provided_pubsub."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GcpPubsub into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.subscription_name is not None:
+            body["subscription_name"] = self.subscription_name
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GcpPubsub into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.managed_resource_id is not None:
+            body["managed_resource_id"] = self.managed_resource_id
+        if self.subscription_name is not None:
+            body["subscription_name"] = self.subscription_name
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> GcpPubsub:
+        """Deserializes the GcpPubsub from a dictionary."""
+        return cls(
+            managed_resource_id=d.get("managed_resource_id", None), subscription_name=d.get("subscription_name", None)
+        )
+
+
 @dataclass
 class GenerateTemporaryServiceCredentialAzureOptions:
     """The Azure cloud options to customize the requested temporary credential"""
@@ -4353,12 +4801,29 @@ class GenerateTemporaryTableCredentialResponse:
         )
 
 
-
+@dataclass
+class GetCatalogWorkspaceBindingsResponse:
+    workspaces: Optional[List[int]] = None
+    """A list of workspace IDs"""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.workspaces:
+            body["workspaces"] = [v for v in self.workspaces]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspaces:
+            body["workspaces"] = self.workspaces
+        return body
 
-
-
-
-
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> GetCatalogWorkspaceBindingsResponse:
+        """Deserializes the GetCatalogWorkspaceBindingsResponse from a dictionary."""
+        return cls(workspaces=d.get("workspaces", None))
 
 
 @dataclass
@@ -4571,6 +5036,41 @@ class GetQuotaResponse:
         return cls(quota_info=_from_dict(d, "quota_info", QuotaInfo))
 
 
+@dataclass
+class GetWorkspaceBindingsResponse:
+    bindings: Optional[List[WorkspaceBinding]] = None
+    """List of workspace bindings"""
+
+    next_page_token: Optional[str] = None
+    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
+    __page_token__ should be set to this value for the next request (for the next page of results)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the GetWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.bindings:
+            body["bindings"] = [v.as_dict() for v in self.bindings]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the GetWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bindings:
+            body["bindings"] = self.bindings
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> GetWorkspaceBindingsResponse:
+        """Deserializes the GetWorkspaceBindingsResponse from a dictionary."""
+        return cls(
+            bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None)
+        )
+
+
 class IsolationMode(Enum):
 
     ISOLATION_MODE_ISOLATED = "ISOLATION_MODE_ISOLATED"
@@ -4730,6 +5230,41 @@ class ListCredentialsResponse:
         )
 
 
+@dataclass
+class ListDatabaseInstancesResponse:
+    database_instances: Optional[List[DatabaseInstance]] = None
+    """List of instances."""
+
+    next_page_token: Optional[str] = None
+    """Pagination token to request the next page of instances."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ListDatabaseInstancesResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.database_instances:
+            body["database_instances"] = [v.as_dict() for v in self.database_instances]
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the ListDatabaseInstancesResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.database_instances:
+            body["database_instances"] = self.database_instances
+        if self.next_page_token is not None:
+            body["next_page_token"] = self.next_page_token
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> ListDatabaseInstancesResponse:
+        """Deserializes the ListDatabaseInstancesResponse from a dictionary."""
+        return cls(
+            database_instances=_repeated_dict(d, "database_instances", DatabaseInstance),
+            next_page_token=d.get("next_page_token", None),
+        )
+
+
 @dataclass
 class ListExternalLocationsResponse:
     external_locations: Optional[List[ExternalLocationInfo]] = None
@@ -6234,6 +6769,43 @@ class NamedTableConstraint:
         return cls(name=d.get("name", None))
 
 
+@dataclass
+class NewPipelineSpec:
+    """Custom fields that user can set for pipeline while creating SyncedDatabaseTable. Note that other
+    fields of pipeline are still inferred by table def internally"""
+
+    storage_catalog: Optional[str] = None
+    """UC catalog for the pipeline to store intermediate files (checkpoints, event logs etc). This
+    needs to be a standard catalog where the user has permissions to create Delta tables."""
+
+    storage_schema: Optional[str] = None
+    """UC schema for the pipeline to store intermediate files (checkpoints, event logs etc). This needs
+    to be in the standard catalog where the user has permissions to create Delta tables."""
+
+    def as_dict(self) -> dict:
+        """Serializes the NewPipelineSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.storage_catalog is not None:
+            body["storage_catalog"] = self.storage_catalog
+        if self.storage_schema is not None:
+            body["storage_schema"] = self.storage_schema
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the NewPipelineSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.storage_catalog is not None:
+            body["storage_catalog"] = self.storage_catalog
+        if self.storage_schema is not None:
+            body["storage_schema"] = self.storage_schema
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> NewPipelineSpec:
+        """Deserializes the NewPipelineSpec from a dictionary."""
+        return cls(storage_catalog=d.get("storage_catalog", None), storage_schema=d.get("storage_schema", None))
+
+
 @dataclass
 class OnlineTable:
     """Online Table information."""
@@ -6643,6 +7215,9 @@ class PrimaryKeyConstraint:
     child_columns: List[str]
     """Column names for this constraint."""
 
+    timeseries_columns: Optional[List[str]] = None
+    """Column names that represent a timeseries."""
+
     def as_dict(self) -> dict:
         """Serializes the PrimaryKeyConstraint into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -6650,6 +7225,8 @@ class PrimaryKeyConstraint:
             body["child_columns"] = [v for v in self.child_columns]
         if self.name is not None:
             body["name"] = self.name
+        if self.timeseries_columns:
+            body["timeseries_columns"] = [v for v in self.timeseries_columns]
         return body
 
     def as_shallow_dict(self) -> dict:
@@ -6659,12 +7236,18 @@ class PrimaryKeyConstraint:
         body["child_columns"] = self.child_columns
         if self.name is not None:
             body["name"] = self.name
+        if self.timeseries_columns:
+            body["timeseries_columns"] = self.timeseries_columns
         return body
 
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> PrimaryKeyConstraint:
         """Deserializes the PrimaryKeyConstraint from a dictionary."""
-        return cls(
+        return cls(
+            child_columns=d.get("child_columns", None),
+            name=d.get("name", None),
+            timeseries_columns=d.get("timeseries_columns", None),
+        )
 
 
 class Privilege(Enum):
@@ -6760,6 +7343,7 @@ class ProvisioningInfo:
     """Status of an asynchronously provisioned resource."""
 
     state: Optional[ProvisioningInfoState] = None
+    """The provisioning state of the resource."""
 
     def as_dict(self) -> dict:
         """Serializes the ProvisioningInfo into a dictionary suitable for use as a JSON request body."""
@@ -7188,7 +7772,6 @@ class SchemaInfo:
     effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None
 
     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
-    """Whether predictive optimization should be enabled for this object and objects under it."""
 
     full_name: Optional[str] = None
     """Full name of schema, in form of __catalog_name__.__schema_name__."""
@@ -7336,13 +7919,14 @@ SecurablePropertiesMap = Dict[str, str]
 
 
 class SecurableType(Enum):
-    """The type of Unity Catalog securable"""
+    """The type of Unity Catalog securable."""
 
     CATALOG = "CATALOG"
     CLEAN_ROOM = "CLEAN_ROOM"
     CONNECTION = "CONNECTION"
     CREDENTIAL = "CREDENTIAL"
     EXTERNAL_LOCATION = "EXTERNAL_LOCATION"
+    EXTERNAL_METADATA = "EXTERNAL_METADATA"
     FUNCTION = "FUNCTION"
     METASTORE = "METASTORE"
     PIPELINE = "PIPELINE"
@@ -7350,8 +7934,10 @@ class SecurableType(Enum):
     RECIPIENT = "RECIPIENT"
     SCHEMA = "SCHEMA"
     SHARE = "SHARE"
+    STAGING_TABLE = "STAGING_TABLE"
     STORAGE_CREDENTIAL = "STORAGE_CREDENTIAL"
     TABLE = "TABLE"
+    UNKNOWN_SECURABLE_TYPE = "UNKNOWN_SECURABLE_TYPE"
     VOLUME = "VOLUME"
 
 
@@ -7460,10 +8046,11 @@ class SseEncryptionDetails:
     """Server-Side Encryption properties for clients communicating with AWS s3."""
 
     algorithm: Optional[SseEncryptionDetailsAlgorithm] = None
-    """
+    """Sets the value of the 'x-amz-server-side-encryption' header in S3 request."""
 
     aws_kms_key_arn: Optional[str] = None
-    """
+    """Optional. The ARN of the SSE-KMS key used with the S3 location, when algorithm = "SSE-KMS". Sets
+    the value of the 'x-amz-server-side-encryption-aws-kms-key-id' header."""
 
     def as_dict(self) -> dict:
         """Serializes the SseEncryptionDetails into a dictionary suitable for use as a JSON request body."""
@@ -7493,7 +8080,6 @@ class SseEncryptionDetails:
 
 
 class SseEncryptionDetailsAlgorithm(Enum):
-    """The type of key encryption to use (affects headers from s3 client)."""
 
     AWS_SSE_KMS = "AWS_SSE_KMS"
     AWS_SSE_S3 = "AWS_SSE_S3"
@@ -7663,14 +8249,193 @@ class StorageCredentialInfo:
         )
 
 
+@dataclass
+class SyncedDatabaseTable:
+    """Next field marker: 10"""
+
+    name: str
+    """Full three-part (catalog, schema, table) name of the table."""
+
+    data_synchronization_status: Optional[OnlineTableStatus] = None
+    """Synced Table data synchronization status"""
+
+    database_instance_name: Optional[str] = None
+    """Name of the target database instance. This is required when creating synced database tables in
+    standard catalogs. This is optional when creating synced database tables in registered catalogs.
+    If this field is specified when creating synced database tables in registered catalogs, the
+    database instance name MUST match that of the registered catalog (or the request will be
+    rejected)."""
+
+    logical_database_name: Optional[str] = None
+    """Target Postgres database object (logical database) name for this table. This field is optional
+    in all scenarios.
+
+    When creating a synced table in a registered Postgres catalog, the target Postgres database name
+    is inferred to be that of the registered catalog. If this field is specified in this scenario,
+    the Postgres database name MUST match that of the registered catalog (or the request will be
+    rejected).
+
+    When creating a synced table in a standard catalog, the target database name is inferred to be
+    that of the standard catalog. In this scenario, specifying this field will allow targeting an
+    arbitrary postgres database."""
+
+    spec: Optional[SyncedTableSpec] = None
+    """Specification of a synced database table."""
+
+    table_serving_url: Optional[str] = None
+    """Data serving REST API URL for this table"""
+
+    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
+    """The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
+    state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+    may be in "PROVISIONING" as it runs asynchronously)."""
+
+    def as_dict(self) -> dict:
+        """Serializes the SyncedDatabaseTable into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.data_synchronization_status:
+            body["data_synchronization_status"] = self.data_synchronization_status.as_dict()
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.logical_database_name is not None:
+            body["logical_database_name"] = self.logical_database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spec:
+            body["spec"] = self.spec.as_dict()
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state.value
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SyncedDatabaseTable into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.data_synchronization_status:
+            body["data_synchronization_status"] = self.data_synchronization_status
+        if self.database_instance_name is not None:
+            body["database_instance_name"] = self.database_instance_name
+        if self.logical_database_name is not None:
+            body["logical_database_name"] = self.logical_database_name
+        if self.name is not None:
+            body["name"] = self.name
+        if self.spec:
+            body["spec"] = self.spec
+        if self.table_serving_url is not None:
+            body["table_serving_url"] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body["unity_catalog_provisioning_state"] = self.unity_catalog_provisioning_state
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> SyncedDatabaseTable:
+        """Deserializes the SyncedDatabaseTable from a dictionary."""
+        return cls(
+            data_synchronization_status=_from_dict(d, "data_synchronization_status", OnlineTableStatus),
+            database_instance_name=d.get("database_instance_name", None),
+            logical_database_name=d.get("logical_database_name", None),
+            name=d.get("name", None),
+            spec=_from_dict(d, "spec", SyncedTableSpec),
+            table_serving_url=d.get("table_serving_url", None),
+            unity_catalog_provisioning_state=_enum(d, "unity_catalog_provisioning_state", ProvisioningInfoState),
+        )
+
+
+class SyncedTableSchedulingPolicy(Enum):
+
+    CONTINUOUS = "CONTINUOUS"
+    SNAPSHOT = "SNAPSHOT"
+    TRIGGERED = "TRIGGERED"
+
+
+@dataclass
+class SyncedTableSpec:
+    """Specification of a synced database table."""
+
+    create_database_objects_if_missing: Optional[bool] = None
+    """If true, the synced table's logical database and schema resources in PG will be created if they
+    do not already exist."""
+
+    new_pipeline_spec: Optional[NewPipelineSpec] = None
+    """Spec of new pipeline. Should be empty if pipeline_id is set"""
+
+    pipeline_id: Optional[str] = None
+    """ID of the associated pipeline. Should be empty if new_pipeline_spec is set"""
+
+    primary_key_columns: Optional[List[str]] = None
+    """Primary Key columns to be used for data insert/update in the destination."""
+
+    scheduling_policy: Optional[SyncedTableSchedulingPolicy] = None
+    """Scheduling policy of the underlying pipeline."""
+
+    source_table_full_name: Optional[str] = None
+    """Three-part (catalog, schema, table) name of the source Delta table."""
+
+    timeseries_key: Optional[str] = None
+    """Time series key to deduplicate (tie-break) rows with the same primary key."""
+
+    def as_dict(self) -> dict:
+        """Serializes the SyncedTableSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.create_database_objects_if_missing is not None:
+            body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.new_pipeline_spec:
+            body["new_pipeline_spec"] = self.new_pipeline_spec.as_dict()
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.primary_key_columns:
+            body["primary_key_columns"] = [v for v in self.primary_key_columns]
+        if self.scheduling_policy is not None:
+            body["scheduling_policy"] = self.scheduling_policy.value
+        if self.source_table_full_name is not None:
+            body["source_table_full_name"] = self.source_table_full_name
+        if self.timeseries_key is not None:
+            body["timeseries_key"] = self.timeseries_key
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the SyncedTableSpec into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.create_database_objects_if_missing is not None:
+            body["create_database_objects_if_missing"] = self.create_database_objects_if_missing
+        if self.new_pipeline_spec:
+            body["new_pipeline_spec"] = self.new_pipeline_spec
+        if self.pipeline_id is not None:
+            body["pipeline_id"] = self.pipeline_id
+        if self.primary_key_columns:
+            body["primary_key_columns"] = self.primary_key_columns
+        if self.scheduling_policy is not None:
+            body["scheduling_policy"] = self.scheduling_policy
+        if self.source_table_full_name is not None:
+            body["source_table_full_name"] = self.source_table_full_name
+        if self.timeseries_key is not None:
+            body["timeseries_key"] = self.timeseries_key
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> SyncedTableSpec:
+        """Deserializes the SyncedTableSpec from a dictionary."""
+        return cls(
+            create_database_objects_if_missing=d.get("create_database_objects_if_missing", None),
+            new_pipeline_spec=_from_dict(d, "new_pipeline_spec", NewPipelineSpec),
+            pipeline_id=d.get("pipeline_id", None),
|
+
primary_key_columns=d.get("primary_key_columns", None),
|
|
8424
|
+
scheduling_policy=_enum(d, "scheduling_policy", SyncedTableSchedulingPolicy),
|
|
8425
|
+
source_table_full_name=d.get("source_table_full_name", None),
|
|
8426
|
+
timeseries_key=d.get("timeseries_key", None),
|
|
8427
|
+
)
|
|
8428
|
+
|
|
8429
|
+
|
|
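For orientation, a minimal sketch of how the synced-table types added above fit together. Field names are taken from this diff; the import path (this file, `databricks.sdk.service.catalog`) and all names and values in the example are illustrative assumptions, not something the diff pins down.

```python
# Hypothetical construction of a synced table, based on the dataclasses added above.
from databricks.sdk.service.catalog import (  # import path assumed
    SyncedDatabaseTable,
    SyncedTableSchedulingPolicy,
    SyncedTableSpec,
)

spec = SyncedTableSpec(
    source_table_full_name="main.sales.orders",  # illustrative three-part source table name
    primary_key_columns=["order_id"],
    timeseries_key="updated_at",
    scheduling_policy=SyncedTableSchedulingPolicy.TRIGGERED,
    create_database_objects_if_missing=True,
)

table = SyncedDatabaseTable(
    name="main.sales.orders_synced",     # illustrative destination name
    database_instance_name="my-instance",  # illustrative instance name
    logical_database_name="sales",
    spec=spec,
)

# as_dict() produces the JSON request body, mirroring the serializers above.
print(table.as_dict())
```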
 @dataclass
 class SystemSchemaInfo:
-    schema:
+    schema: str
     """Name of the system schema."""

-    state:
+    state: str
     """The current state of enablement for the system schema. An empty string means the system schema
-    is available and ready for opt-in.
+    is available and ready for opt-in. Possible values: AVAILABLE | ENABLE_INITIALIZED |
+    ENABLE_COMPLETED | DISABLE_INITIALIZED | UNAVAILABLE"""

     def as_dict(self) -> dict:
         """Serializes the SystemSchemaInfo into a dictionary suitable for use as a JSON request body."""
@@ -7678,7 +8443,7 @@ class SystemSchemaInfo:
         if self.schema is not None:
             body["schema"] = self.schema
         if self.state is not None:
-            body["state"] = self.state
+            body["state"] = self.state
         return body

     def as_shallow_dict(self) -> dict:
@@ -7693,18 +8458,7 @@ class SystemSchemaInfo:
     @classmethod
     def from_dict(cls, d: Dict[str, Any]) -> SystemSchemaInfo:
         """Deserializes the SystemSchemaInfo from a dictionary."""
-        return cls(schema=d.get("schema", None), state=
-
-
-class SystemSchemaInfoState(Enum):
-    """The current state of enablement for the system schema. An empty string means the system schema
-    is available and ready for opt-in."""
-
-    AVAILABLE = "AVAILABLE"
-    DISABLE_INITIALIZED = "DISABLE_INITIALIZED"
-    ENABLE_COMPLETED = "ENABLE_COMPLETED"
-    ENABLE_INITIALIZED = "ENABLE_INITIALIZED"
-    UNAVAILABLE = "UNAVAILABLE"
+        return cls(schema=d.get("schema", None), state=d.get("state", None))


 @dataclass
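Since `state` is now a plain string and the `SystemSchemaInfoState` enum is gone, callers compare against the documented values directly. A small sketch (the `w.system_schemas` accessor follows the SDK's usual service naming and is assumed here; the metastore ID is illustrative):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# list() yields SystemSchemaInfo objects whose `state` is now a plain string
# such as "AVAILABLE" or "ENABLE_COMPLETED" rather than an enum member.
for info in w.system_schemas.list(metastore_id="11111111-2222-3333-4444-555555555555"):
    if info.state == "AVAILABLE":
        print(f"{info.schema} is available and ready for opt-in")
```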
@@ -7843,7 +8597,6 @@ class TableInfo:
     effective_predictive_optimization_flag: Optional[EffectivePredictiveOptimizationFlag] = None

     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
-    """Whether predictive optimization should be enabled for this object and objects under it."""

     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""
@@ -8340,14 +9093,6 @@ class UpdateAssignmentResponse:
         return cls()


-class UpdateBindingsSecurableType(Enum):
-
-    CATALOG = "catalog"
-    CREDENTIAL = "credential"
-    EXTERNAL_LOCATION = "external_location"
-    STORAGE_CREDENTIAL = "storage_credential"
-
-
 @dataclass
 class UpdateCatalog:
     comment: Optional[str] = None
@@ -8431,6 +9176,31 @@ class UpdateCatalog:
         )


+@dataclass
+class UpdateCatalogWorkspaceBindingsResponse:
+    workspaces: Optional[List[int]] = None
+    """A list of workspace IDs"""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateCatalogWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.workspaces:
+            body["workspaces"] = [v for v in self.workspaces]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateCatalogWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.workspaces:
+            body["workspaces"] = self.workspaces
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UpdateCatalogWorkspaceBindingsResponse:
+        """Deserializes the UpdateCatalogWorkspaceBindingsResponse from a dictionary."""
+        return cls(workspaces=d.get("workspaces", None))
+
+
 @dataclass
 class UpdateConnection:
     options: Dict[str, str]
@@ -8601,15 +9371,15 @@ class UpdateCredentialRequest:

 @dataclass
 class UpdateExternalLocation:
-    access_point: Optional[str] = None
-    """The AWS access point to use when accesing s3 for this external location."""
-
     comment: Optional[str] = None
     """User-provided free-form text description."""

     credential_name: Optional[str] = None
     """Name of the storage credential used with this location."""

+    enable_file_events: Optional[bool] = None
+    """[Create:OPT Update:OPT] Whether to enable file events on this external location."""
+
     encryption_details: Optional[EncryptionDetails] = None
     """Encryption options that apply to clients connecting to cloud storage."""

@@ -8618,6 +9388,9 @@ class UpdateExternalLocation:
     enabled, the access to the location falls back to cluster credentials if UC credentials are not
     sufficient."""

+    file_event_queue: Optional[FileEventQueue] = None
+    """[Create:OPT Update:OPT] File event queue settings."""
+
     force: Optional[bool] = None
     """Force update even if changing url invalidates dependent external tables or mounts."""

@@ -8644,16 +9417,18 @@ class UpdateExternalLocation:
     def as_dict(self) -> dict:
         """Serializes the UpdateExternalLocation into a dictionary suitable for use as a JSON request body."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.comment is not None:
             body["comment"] = self.comment
         if self.credential_name is not None:
             body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details.as_dict()
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue.as_dict()
         if self.force is not None:
             body["force"] = self.force
         if self.isolation_mode is not None:
@@ -8675,16 +9450,18 @@ class UpdateExternalLocation:
     def as_shallow_dict(self) -> dict:
         """Serializes the UpdateExternalLocation into a shallow dictionary of its immediate attributes."""
         body = {}
-        if self.access_point is not None:
-            body["access_point"] = self.access_point
         if self.comment is not None:
             body["comment"] = self.comment
         if self.credential_name is not None:
             body["credential_name"] = self.credential_name
+        if self.enable_file_events is not None:
+            body["enable_file_events"] = self.enable_file_events
         if self.encryption_details:
             body["encryption_details"] = self.encryption_details
         if self.fallback is not None:
             body["fallback"] = self.fallback
+        if self.file_event_queue:
+            body["file_event_queue"] = self.file_event_queue
         if self.force is not None:
             body["force"] = self.force
         if self.isolation_mode is not None:
@@ -8707,11 +9484,12 @@ class UpdateExternalLocation:
     def from_dict(cls, d: Dict[str, Any]) -> UpdateExternalLocation:
         """Deserializes the UpdateExternalLocation from a dictionary."""
         return cls(
-            access_point=d.get("access_point", None),
             comment=d.get("comment", None),
             credential_name=d.get("credential_name", None),
+            enable_file_events=d.get("enable_file_events", None),
            encryption_details=_from_dict(d, "encryption_details", EncryptionDetails),
            fallback=d.get("fallback", None),
+            file_event_queue=_from_dict(d, "file_event_queue", FileEventQueue),
            force=d.get("force", None),
            isolation_mode=_enum(d, "isolation_mode", IsolationMode),
            name=d.get("name", None),
@@ -9175,7 +9953,6 @@ class UpdateSchema:
     """User-provided free-form text description."""

     enable_predictive_optimization: Optional[EnablePredictiveOptimization] = None
-    """Whether predictive optimization should be enabled for this object and objects under it."""

     full_name: Optional[str] = None
     """Full name of the schema."""
@@ -9457,16 +10234,17 @@ class UpdateWorkspaceBindings:
 @dataclass
 class UpdateWorkspaceBindingsParameters:
     add: Optional[List[WorkspaceBinding]] = None
-    """List of workspace bindings"""
+    """List of workspace bindings."""

     remove: Optional[List[WorkspaceBinding]] = None
-    """List of workspace bindings"""
+    """List of workspace bindings."""

     securable_name: Optional[str] = None
     """The name of the securable."""

-    securable_type: Optional[
-    """The type of the securable to bind to a workspace
+    securable_type: Optional[str] = None
+    """The type of the securable to bind to a workspace (catalog, storage_credential, credential, or
+    external_location)."""

     def as_dict(self) -> dict:
         """Serializes the UpdateWorkspaceBindingsParameters into a dictionary suitable for use as a JSON request body."""
@@ -9478,7 +10256,7 @@ class UpdateWorkspaceBindingsParameters:
         if self.securable_name is not None:
             body["securable_name"] = self.securable_name
         if self.securable_type is not None:
-            body["securable_type"] = self.securable_type
+            body["securable_type"] = self.securable_type
         return body

     def as_shallow_dict(self) -> dict:
@@ -9501,10 +10279,37 @@ class UpdateWorkspaceBindingsParameters:
             add=_repeated_dict(d, "add", WorkspaceBinding),
             remove=_repeated_dict(d, "remove", WorkspaceBinding),
             securable_name=d.get("securable_name", None),
-            securable_type=
+            securable_type=d.get("securable_type", None),
         )


+@dataclass
+class UpdateWorkspaceBindingsResponse:
+    """A list of workspace IDs that are bound to the securable"""
+
+    bindings: Optional[List[WorkspaceBinding]] = None
+    """List of workspace bindings."""
+
+    def as_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.bindings:
+            body["bindings"] = [v.as_dict() for v in self.bindings]
+        return body
+
+    def as_shallow_dict(self) -> dict:
+        """Serializes the UpdateWorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
+        body = {}
+        if self.bindings:
+            body["bindings"] = self.bindings
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, Any]) -> UpdateWorkspaceBindingsResponse:
+        """Deserializes the UpdateWorkspaceBindingsResponse from a dictionary."""
+        return cls(bindings=_repeated_dict(d, "bindings", WorkspaceBinding))
+
+
 @dataclass
 class ValidateCredentialRequest:
     """Next ID: 17"""
@@ -9990,9 +10795,11 @@ class VolumeType(Enum):

 @dataclass
 class WorkspaceBinding:
-
+    workspace_id: int
+    """Required"""

-
+    binding_type: Optional[WorkspaceBindingBindingType] = None
+    """One of READ_WRITE/READ_ONLY. Default is READ_WRITE."""

     def as_dict(self) -> dict:
         """Serializes the WorkspaceBinding into a dictionary suitable for use as a JSON request body."""
@@ -10021,48 +10828,13 @@ class WorkspaceBinding:


 class WorkspaceBindingBindingType(Enum):
+    """Using `BINDING_TYPE_` prefix here to avoid conflict with `TableOperation` enum in
+    `credentials_common.proto`."""

     BINDING_TYPE_READ_ONLY = "BINDING_TYPE_READ_ONLY"
     BINDING_TYPE_READ_WRITE = "BINDING_TYPE_READ_WRITE"


-@dataclass
-class WorkspaceBindingsResponse:
-    """Currently assigned workspace bindings"""
-
-    bindings: Optional[List[WorkspaceBinding]] = None
-    """List of workspace bindings"""
-
-    next_page_token: Optional[str] = None
-    """Opaque token to retrieve the next page of results. Absent if there are no more pages.
-    __page_token__ should be set to this value for the next request (for the next page of results)."""
-
-    def as_dict(self) -> dict:
-        """Serializes the WorkspaceBindingsResponse into a dictionary suitable for use as a JSON request body."""
-        body = {}
-        if self.bindings:
-            body["bindings"] = [v.as_dict() for v in self.bindings]
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        return body
-
-    def as_shallow_dict(self) -> dict:
-        """Serializes the WorkspaceBindingsResponse into a shallow dictionary of its immediate attributes."""
-        body = {}
-        if self.bindings:
-            body["bindings"] = self.bindings
-        if self.next_page_token is not None:
-            body["next_page_token"] = self.next_page_token
-        return body
-
-    @classmethod
-    def from_dict(cls, d: Dict[str, Any]) -> WorkspaceBindingsResponse:
-        """Deserializes the WorkspaceBindingsResponse from a dictionary."""
-        return cls(
-            bindings=_repeated_dict(d, "bindings", WorkspaceBinding), next_page_token=d.get("next_page_token", None)
-        )
-
-
 class AccountMetastoreAssignmentsAPI:
     """These APIs manage metastore assignments to a workspace."""

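With `workspace_id` now required and `binding_type` optional on `WorkspaceBinding`, a binding is constructed as in the sketch below (the import path is assumed to be this module; the workspace ID is illustrative):

```python
from databricks.sdk.service.catalog import (  # import path assumed
    WorkspaceBinding,
    WorkspaceBindingBindingType,
)

# workspace_id is required; binding_type defaults to READ_WRITE when omitted.
binding = WorkspaceBinding(
    workspace_id=1234567890123456,  # illustrative workspace ID
    binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,
)
```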
@@ -10706,8 +11478,6 @@ class CatalogsAPI:
             "Accept": "application/json",
         }

-        if "max_results" not in query:
-            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/catalogs", query=query, headers=headers)
             if "catalogs" in json:
@@ -11316,6 +12086,241 @@ class CredentialsAPI:
         return ValidateCredentialResponse.from_dict(res)


+class DatabaseInstancesAPI:
+    """Database Instances provide access to a database via REST API or direct SQL."""
+
+    def __init__(self, api_client):
+        self._api = api_client
+
+    def create_database_catalog(self, catalog: DatabaseCatalog) -> DatabaseCatalog:
+        """Create a Database Catalog.
+
+        :param catalog: :class:`DatabaseCatalog`
+
+        :returns: :class:`DatabaseCatalog`
+        """
+        body = catalog.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/database/catalogs", body=body, headers=headers)
+        return DatabaseCatalog.from_dict(res)
+
+    def create_database_instance(self, database_instance: DatabaseInstance) -> DatabaseInstance:
+        """Create a Database Instance.
+
+        :param database_instance: :class:`DatabaseInstance`
+            A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
+
+        :returns: :class:`DatabaseInstance`
+        """
+        body = database_instance.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/database/instances", body=body, headers=headers)
+        return DatabaseInstance.from_dict(res)
+
+    def create_synced_database_table(self, synced_table: SyncedDatabaseTable) -> SyncedDatabaseTable:
+        """Create a Synced Database Table.
+
+        :param synced_table: :class:`SyncedDatabaseTable`
+            Next field marker: 10
+
+        :returns: :class:`SyncedDatabaseTable`
+        """
+        body = synced_table.as_dict()
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("POST", "/api/2.0/database/synced_tables", body=body, headers=headers)
+        return SyncedDatabaseTable.from_dict(res)
+
+    def delete_database_catalog(self, name: str):
+        """Delete a Database Catalog.
+
+        :param name: str
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/database/catalogs/{name}", headers=headers)
+
+    def delete_database_instance(self, name: str, *, force: Optional[bool] = None, purge: Optional[bool] = None):
+        """Delete a Database Instance.
+
+        :param name: str
+            Name of the instance to delete.
+        :param force: bool (optional)
+            By default, a instance cannot be deleted if it has descendant instances created via PITR. If this
+            flag is specified as true, all descendent instances will be deleted as well.
+        :param purge: bool (optional)
+            If false, the database instance is soft deleted. Soft deleted instances behave as if they are
+            deleted, and cannot be used for CRUD operations nor connected to. However they can be undeleted by
+            calling the undelete API for a limited time. If true, the database instance is hard deleted and
+            cannot be undeleted.
+
+
+        """
+
+        query = {}
+        if force is not None:
+            query["force"] = force
+        if purge is not None:
+            query["purge"] = purge
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/database/instances/{name}", query=query, headers=headers)
+
+    def delete_synced_database_table(self, name: str):
+        """Delete a Synced Database Table.
+
+        :param name: str
+
+
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        self._api.do("DELETE", f"/api/2.0/database/synced_tables/{name}", headers=headers)
+
+    def find_database_instance_by_uid(self, *, uid: Optional[str] = None) -> DatabaseInstance:
+        """Find a Database Instance by uid.
+
+        :param uid: str (optional)
+            UID of the cluster to get.
+
+        :returns: :class:`DatabaseInstance`
+        """
+
+        query = {}
+        if uid is not None:
+            query["uid"] = uid
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", "/api/2.0/database/instances:findByUid", query=query, headers=headers)
+        return DatabaseInstance.from_dict(res)
+
+    def get_database_catalog(self, name: str) -> DatabaseCatalog:
+        """Get a Database Catalog.
+
+        :param name: str
+
+        :returns: :class:`DatabaseCatalog`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/database/catalogs/{name}", headers=headers)
+        return DatabaseCatalog.from_dict(res)
+
+    def get_database_instance(self, name: str) -> DatabaseInstance:
+        """Get a Database Instance.
+
+        :param name: str
+            Name of the cluster to get.
+
+        :returns: :class:`DatabaseInstance`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/database/instances/{name}", headers=headers)
+        return DatabaseInstance.from_dict(res)
+
+    def get_synced_database_table(self, name: str) -> SyncedDatabaseTable:
+        """Get a Synced Database Table.
+
+        :param name: str
+
+        :returns: :class:`SyncedDatabaseTable`
+        """
+
+        headers = {
+            "Accept": "application/json",
+        }
+
+        res = self._api.do("GET", f"/api/2.0/database/synced_tables/{name}", headers=headers)
+        return SyncedDatabaseTable.from_dict(res)
+
+    def list_database_instances(
+        self, *, page_size: Optional[int] = None, page_token: Optional[str] = None
+    ) -> Iterator[DatabaseInstance]:
+        """List Database Instances.
+
+        :param page_size: int (optional)
+            Upper bound for items returned.
+        :param page_token: str (optional)
+            Pagination token to go to the next page of Database Instances. Requests first page if absent.
+
+        :returns: Iterator over :class:`DatabaseInstance`
+        """
+
+        query = {}
+        if page_size is not None:
+            query["page_size"] = page_size
+        if page_token is not None:
+            query["page_token"] = page_token
+        headers = {
+            "Accept": "application/json",
+        }
+
+        while True:
+            json = self._api.do("GET", "/api/2.0/database/instances", query=query, headers=headers)
+            if "database_instances" in json:
+                for v in json["database_instances"]:
+                    yield DatabaseInstance.from_dict(v)
+            if "next_page_token" not in json or not json["next_page_token"]:
+                return
+            query["page_token"] = json["next_page_token"]
+
+    def update_database_instance(
+        self, name: str, database_instance: DatabaseInstance, update_mask: str
+    ) -> DatabaseInstance:
+        """Update a Database Instance.
+
+        :param name: str
+            The name of the instance. This is the unique identifier for the instance.
+        :param database_instance: :class:`DatabaseInstance`
+            A DatabaseInstance represents a logical Postgres instance, comprised of both compute and storage.
+        :param update_mask: str
+            The list of fields to update.
+
+        :returns: :class:`DatabaseInstance`
+        """
+        body = database_instance.as_dict()
+        query = {}
+        if update_mask is not None:
+            query["update_mask"] = update_mask
+        headers = {
+            "Accept": "application/json",
+            "Content-Type": "application/json",
+        }
+
+        res = self._api.do("PATCH", f"/api/2.0/database/instances/{name}", query=query, body=body, headers=headers)
+        return DatabaseInstance.from_dict(res)
+
+
 class ExternalLocationsAPI:
     """An external location is an object that combines a cloud storage path with a storage credential that
     authorizes access to the cloud storage path. Each external location is subject to Unity Catalog
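A minimal sketch of driving the new Database Instances service through a workspace client. The `w.database_instances` accessor name is an assumption based on the SDK's usual `SomethingAPI` to `w.something` mapping, and the `DatabaseInstance` import path and its `name` field are likewise assumed (the dataclass is defined elsewhere in this diff); the method names and endpoints come from the code added above.

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import DatabaseInstance  # import path assumed

w = WorkspaceClient()

# Create a logical Postgres instance (the instance name is illustrative).
w.database_instances.create_database_instance(DatabaseInstance(name="my-instance"))

# Page through all instances; the iterator follows next_page_token internally.
for inst in w.database_instances.list_database_instances(page_size=50):
    print(inst.name)

# Soft-delete: purge=False keeps the instance recoverable for a limited time.
w.database_instances.delete_database_instance("my-instance", purge=False)
```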
@@ -11337,10 +12342,11 @@ class ExternalLocationsAPI:
         url: str,
         credential_name: str,
         *,
-        access_point: Optional[str] = None,
         comment: Optional[str] = None,
+        enable_file_events: Optional[bool] = None,
         encryption_details: Optional[EncryptionDetails] = None,
         fallback: Optional[bool] = None,
+        file_event_queue: Optional[FileEventQueue] = None,
         read_only: Optional[bool] = None,
         skip_validation: Optional[bool] = None,
     ) -> ExternalLocationInfo:
@@ -11356,16 +12362,18 @@ class ExternalLocationsAPI:
             Path URL of the external location.
         :param credential_name: str
             Name of the storage credential used with this location.
-        :param access_point: str (optional)
-            The AWS access point to use when accesing s3 for this external location.
         :param comment: str (optional)
             User-provided free-form text description.
+        :param enable_file_events: bool (optional)
+            [Create:OPT Update:OPT] Whether to enable file events on this external location.
         :param encryption_details: :class:`EncryptionDetails` (optional)
             Encryption options that apply to clients connecting to cloud storage.
         :param fallback: bool (optional)
             Indicates whether fallback mode is enabled for this external location. When fallback mode is
             enabled, the access to the location falls back to cluster credentials if UC credentials are not
             sufficient.
+        :param file_event_queue: :class:`FileEventQueue` (optional)
+            [Create:OPT Update:OPT] File event queue settings.
         :param read_only: bool (optional)
             Indicates whether the external location is read-only.
         :param skip_validation: bool (optional)
@@ -11374,16 +12382,18 @@ class ExternalLocationsAPI:
         :returns: :class:`ExternalLocationInfo`
         """
         body = {}
-        if access_point is not None:
-            body["access_point"] = access_point
         if comment is not None:
             body["comment"] = comment
         if credential_name is not None:
             body["credential_name"] = credential_name
+        if enable_file_events is not None:
+            body["enable_file_events"] = enable_file_events
         if encryption_details is not None:
             body["encryption_details"] = encryption_details.as_dict()
         if fallback is not None:
             body["fallback"] = fallback
+        if file_event_queue is not None:
+            body["file_event_queue"] = file_event_queue.as_dict()
         if name is not None:
             body["name"] = name
         if read_only is not None:
@@ -11486,8 +12496,6 @@ class ExternalLocationsAPI:
             "Accept": "application/json",
         }

-        if "max_results" not in query:
-            query["max_results"] = 0
         while True:
             json = self._api.do("GET", "/api/2.1/unity-catalog/external-locations", query=query, headers=headers)
             if "external_locations" in json:
@@ -11501,11 +12509,12 @@ class ExternalLocationsAPI:
         self,
         name: str,
         *,
-        access_point: Optional[str] = None,
         comment: Optional[str] = None,
         credential_name: Optional[str] = None,
+        enable_file_events: Optional[bool] = None,
         encryption_details: Optional[EncryptionDetails] = None,
         fallback: Optional[bool] = None,
+        file_event_queue: Optional[FileEventQueue] = None,
         force: Optional[bool] = None,
         isolation_mode: Optional[IsolationMode] = None,
         new_name: Optional[str] = None,
@@ -11522,18 +12531,20 @@ class ExternalLocationsAPI:

         :param name: str
             Name of the external location.
-        :param access_point: str (optional)
-            The AWS access point to use when accesing s3 for this external location.
         :param comment: str (optional)
             User-provided free-form text description.
         :param credential_name: str (optional)
             Name of the storage credential used with this location.
+        :param enable_file_events: bool (optional)
+            [Create:OPT Update:OPT] Whether to enable file events on this external location.
         :param encryption_details: :class:`EncryptionDetails` (optional)
             Encryption options that apply to clients connecting to cloud storage.
         :param fallback: bool (optional)
             Indicates whether fallback mode is enabled for this external location. When fallback mode is
             enabled, the access to the location falls back to cluster credentials if UC credentials are not
             sufficient.
+        :param file_event_queue: :class:`FileEventQueue` (optional)
+            [Create:OPT Update:OPT] File event queue settings.
         :param force: bool (optional)
             Force update even if changing url invalidates dependent external tables or mounts.
         :param isolation_mode: :class:`IsolationMode` (optional)
@@ -11551,16 +12562,18 @@ class ExternalLocationsAPI:
         :returns: :class:`ExternalLocationInfo`
         """
         body = {}
-        if access_point is not None:
-            body["access_point"] = access_point
         if comment is not None:
             body["comment"] = comment
         if credential_name is not None:
             body["credential_name"] = credential_name
+        if enable_file_events is not None:
+            body["enable_file_events"] = enable_file_events
         if encryption_details is not None:
             body["encryption_details"] = encryption_details.as_dict()
         if fallback is not None:
             body["fallback"] = fallback
+        if file_event_queue is not None:
+            body["file_event_queue"] = file_event_queue.as_dict()
         if force is not None:
             body["force"] = force
         if isolation_mode is not None:
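The create/update methods above drop `access_point` and gain `enable_file_events` and `file_event_queue`. A hedged sketch of opting an external location into file events (names, URL and credential are illustrative; `file_event_queue` is omitted because the `FileEventQueue` composition is defined elsewhere in this file):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Names, URL and credential are illustrative.
loc = w.external_locations.create(
    name="landing_zone",
    url="s3://my-bucket/landing",
    credential_name="my_storage_credential",
    enable_file_events=True,  # new parameter in this release
)

# Later, turn file events off again via the new update parameter.
w.external_locations.update("landing_zone", enable_file_events=False)
```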
@@ -13429,7 +14442,6 @@ class SchemasAPI:
         :param comment: str (optional)
             User-provided free-form text description.
         :param enable_predictive_optimization: :class:`EnablePredictiveOptimization` (optional)
-            Whether predictive optimization should be enabled for this object and objects under it.
         :param new_name: str (optional)
             New name for the schema.
         :param owner: str (optional)
@@ -13808,7 +14820,7 @@ class SystemSchemasAPI:
             "DELETE", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}", headers=headers
         )

-    def enable(self, metastore_id: str, schema_name: str):
+    def enable(self, metastore_id: str, schema_name: str, *, catalog_name: Optional[str] = None):
         """Enable a system schema.

         Enables the system schema and adds it to the system catalog. The caller must be an account admin or a
@@ -13818,16 +14830,24 @@ class SystemSchemasAPI:
             The metastore ID under which the system schema lives.
         :param schema_name: str
             Full name of the system schema.
+        :param catalog_name: str (optional)
+            the catalog for which the system schema is to enabled in


         """
-
+        body = {}
+        if catalog_name is not None:
+            body["catalog_name"] = catalog_name
         headers = {
             "Accept": "application/json",
+            "Content-Type": "application/json",
         }

         self._api.do(
-            "PUT",
+            "PUT",
+            f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas/{schema_name}",
+            body=body,
+            headers=headers,
         )

     def list(
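`enable` now accepts an optional `catalog_name` and sends it in a JSON request body. A small sketch under the same assumptions as before (the `w.system_schemas` accessor name follows the SDK's usual naming; the metastore ID, schema and catalog names are illustrative):

```python
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# Enable a system schema, targeting a specific catalog via the new parameter.
w.system_schemas.enable(
    metastore_id="11111111-2222-3333-4444-555555555555",
    schema_name="access",
    catalog_name="system",  # optional, new in this release
)
```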
@@ -13860,8 +14880,6 @@ class SystemSchemasAPI:
             "Accept": "application/json",
         }

-        if "max_results" not in query:
-            query["max_results"] = 0
         while True:
             json = self._api.do(
                 "GET", f"/api/2.1/unity-catalog/metastores/{metastore_id}/systemschemas", query=query, headers=headers
@@ -14539,12 +15557,12 @@ class WorkspaceBindingsAPI:
     the new path (/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}) which introduces the
     ability to bind a securable in READ_ONLY mode (catalogs only).

-    Securable types that support binding: - catalog - storage_credential - external_location"""
+    Securable types that support binding: - catalog - storage_credential - credential - external_location"""

     def __init__(self, api_client):
         self._api = api_client

-    def get(self, name: str) ->
+    def get(self, name: str) -> GetCatalogWorkspaceBindingsResponse:
         """Get catalog workspace bindings.

         Gets workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
@@ -14553,7 +15571,7 @@ class WorkspaceBindingsAPI:
         :param name: str
             The name of the catalog.

-        :returns: :class:`
+        :returns: :class:`GetCatalogWorkspaceBindingsResponse`
         """

         headers = {
@@ -14561,11 +15579,11 @@ class WorkspaceBindingsAPI:
         }

         res = self._api.do("GET", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", headers=headers)
-        return
+        return GetCatalogWorkspaceBindingsResponse.from_dict(res)

     def get_bindings(
         self,
-        securable_type:
+        securable_type: str,
         securable_name: str,
         *,
         max_results: Optional[int] = None,
@@ -14576,8 +15594,9 @@ class WorkspaceBindingsAPI:
         Gets workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.

-        :param securable_type:
-            The type of the securable to bind to a workspace
+        :param securable_type: str
+            The type of the securable to bind to a workspace (catalog, storage_credential, credential, or
+            external_location).
         :param securable_name: str
             The name of the securable.
         :param max_results: int (optional)
@@ -14603,7 +15622,7 @@ class WorkspaceBindingsAPI:
         while True:
             json = self._api.do(
                 "GET",
-                f"/api/2.1/unity-catalog/bindings/{securable_type
+                f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}",
                 query=query,
                 headers=headers,
             )
@@ -14620,7 +15639,7 @@ class WorkspaceBindingsAPI:
         *,
         assign_workspaces: Optional[List[int]] = None,
         unassign_workspaces: Optional[List[int]] = None,
-    ) ->
+    ) -> UpdateCatalogWorkspaceBindingsResponse:
         """Update catalog workspace bindings.

         Updates workspace bindings of the catalog. The caller must be a metastore admin or an owner of the
@@ -14633,7 +15652,7 @@ class WorkspaceBindingsAPI:
         :param unassign_workspaces: List[int] (optional)
             A list of workspace IDs.

-        :returns: :class:`
+        :returns: :class:`UpdateCatalogWorkspaceBindingsResponse`
         """
         body = {}
         if assign_workspaces is not None:
@@ -14648,31 +15667,32 @@ class WorkspaceBindingsAPI:
         res = self._api.do(
             "PATCH", f"/api/2.1/unity-catalog/workspace-bindings/catalogs/{name}", body=body, headers=headers
         )
-        return
+        return UpdateCatalogWorkspaceBindingsResponse.from_dict(res)

     def update_bindings(
         self,
-        securable_type:
+        securable_type: str,
         securable_name: str,
         *,
         add: Optional[List[WorkspaceBinding]] = None,
         remove: Optional[List[WorkspaceBinding]] = None,
-    ) ->
+    ) -> UpdateWorkspaceBindingsResponse:
         """Update securable workspace bindings.

         Updates workspace bindings of the securable. The caller must be a metastore admin or an owner of the
         securable.

-        :param securable_type:
-            The type of the securable to bind to a workspace
+        :param securable_type: str
+            The type of the securable to bind to a workspace (catalog, storage_credential, credential, or
+            external_location).
         :param securable_name: str
             The name of the securable.
         :param add: List[:class:`WorkspaceBinding`] (optional)
-            List of workspace bindings
+            List of workspace bindings.
         :param remove: List[:class:`WorkspaceBinding`] (optional)
-            List of workspace bindings
+            List of workspace bindings.

-        :returns: :class:`
+        :returns: :class:`UpdateWorkspaceBindingsResponse`
         """
         body = {}
         if add is not None:
@@ -14685,9 +15705,6 @@ class WorkspaceBindingsAPI:
         }

         res = self._api.do(
-            "PATCH",
-            f"/api/2.1/unity-catalog/bindings/{securable_type.value}/{securable_name}",
-            body=body,
-            headers=headers,
+            "PATCH", f"/api/2.1/unity-catalog/bindings/{securable_type}/{securable_name}", body=body, headers=headers
         )
-        return
+        return UpdateWorkspaceBindingsResponse.from_dict(res)
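With the enum parameter replaced by a plain string and the responses now typed, a bindings update looks like the sketch below (the `w.workspace_bindings` accessor name and the import path are assumed from the SDK's usual conventions; the catalog name and workspace ID are illustrative):

```python
from databricks.sdk import WorkspaceClient
from databricks.sdk.service.catalog import (  # import path assumed
    WorkspaceBinding,
    WorkspaceBindingBindingType,
)

w = WorkspaceClient()

resp = w.workspace_bindings.update_bindings(
    securable_type="catalog",  # plain string now: "catalog", "storage_credential", "credential", "external_location"
    securable_name="main",
    add=[
        WorkspaceBinding(
            workspace_id=1234567890123456,  # illustrative workspace ID
            binding_type=WorkspaceBindingBindingType.BINDING_TYPE_READ_ONLY,
        )
    ],
)

# The typed response exposes the resulting bindings directly.
print([b.workspace_id for b in resp.bindings or []])
```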