databricks-sdk 0.27.1-py3-none-any.whl → 0.29.0-py3-none-any.whl
This diff shows the changes between two publicly released versions of the package, as they appear in their respective public registries. It is provided for informational purposes only.
Potentially problematic release: this version of databricks-sdk has been flagged as potentially problematic.
- databricks/sdk/__init__.py +16 -12
- databricks/sdk/azure.py +0 -27
- databricks/sdk/config.py +71 -19
- databricks/sdk/core.py +27 -0
- databricks/sdk/credentials_provider.py +121 -44
- databricks/sdk/dbutils.py +81 -3
- databricks/sdk/environments.py +34 -1
- databricks/sdk/errors/__init__.py +1 -0
- databricks/sdk/errors/mapper.py +4 -0
- databricks/sdk/errors/private_link.py +60 -0
- databricks/sdk/oauth.py +8 -6
- databricks/sdk/service/catalog.py +774 -632
- databricks/sdk/service/compute.py +91 -116
- databricks/sdk/service/dashboards.py +707 -2
- databricks/sdk/service/jobs.py +126 -163
- databricks/sdk/service/marketplace.py +145 -31
- databricks/sdk/service/oauth2.py +22 -0
- databricks/sdk/service/pipelines.py +119 -4
- databricks/sdk/service/serving.py +217 -64
- databricks/sdk/service/settings.py +1 -0
- databricks/sdk/service/sharing.py +36 -2
- databricks/sdk/service/sql.py +103 -24
- databricks/sdk/service/vectorsearch.py +263 -1
- databricks/sdk/service/workspace.py +8 -4
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/METADATA +2 -1
- databricks_sdk-0.29.0.dist-info/RECORD +57 -0
- databricks_sdk-0.27.1.dist-info/RECORD +0 -56
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/NOTICE +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/WHEEL +0 -0
- {databricks_sdk-0.27.1.dist-info → databricks_sdk-0.29.0.dist-info}/top_level.txt +0 -0
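The databricks/sdk/version.py change (+1 -1) is the version bump itself. As a quick sanity check, the installed wheel can be identified at runtime through the same __version__ symbol that config.py imports in the diff below; a minimal sketch:

# Minimal sketch: confirm which of the two wheels is installed.
from databricks.sdk.version import __version__

print(__version__)  # "0.27.1" before the upgrade, "0.29.0" after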
databricks/sdk/__init__.py
CHANGED
@@ -1,7 +1,7 @@
 import databricks.sdk.core as client
 import databricks.sdk.dbutils as dbutils
 from databricks.sdk import azure
-from databricks.sdk.credentials_provider import
+from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
@@ -13,9 +13,9 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
                                             ArtifactAllowlistsAPI, CatalogsAPI,
                                             ConnectionsAPI,
                                             ExternalLocationsAPI, FunctionsAPI,
-                                            GrantsAPI,
-
-
+                                            GrantsAPI, MetastoresAPI,
+                                            ModelVersionsAPI, OnlineTablesAPI,
+                                            QualityMonitorsAPI,
                                             RegisteredModelsAPI, SchemasAPI,
                                             StorageCredentialsAPI,
                                             SystemSchemasAPI,
@@ -131,7 +131,8 @@ class WorkspaceClient:
                  debug_headers: bool = None,
                  product="unknown",
                  product_version="0.0.0",
-
+                 credentials_strategy: CredentialsStrategy = None,
+                 credentials_provider: CredentialsStrategy = None,
                  config: client.Config = None):
         if not config:
             config = client.Config(host=host,
@@ -152,6 +153,7 @@ class WorkspaceClient:
                                    cluster_id=cluster_id,
                                    google_credentials=google_credentials,
                                    google_service_account=google_service_account,
+                                   credentials_strategy=credentials_strategy,
                                    credentials_provider=credentials_provider,
                                    debug_truncate_bytes=debug_truncate_bytes,
                                    debug_headers=debug_headers,
@@ -194,7 +196,6 @@ class WorkspaceClient:
         self._instance_profiles = InstanceProfilesAPI(self._api_client)
         self._ip_access_lists = IpAccessListsAPI(self._api_client)
         self._jobs = JobsAPI(self._api_client)
-        self._lakehouse_monitors = LakehouseMonitorsAPI(self._api_client)
         self._lakeview = LakeviewAPI(self._api_client)
         self._libraries = LibrariesAPI(self._api_client)
         self._metastores = MetastoresAPI(self._api_client)
@@ -214,6 +215,7 @@ class WorkspaceClient:
             self._api_client)
         self._provider_providers = ProviderProvidersAPI(self._api_client)
         self._providers = ProvidersAPI(self._api_client)
+        self._quality_monitors = QualityMonitorsAPI(self._api_client)
         self._queries = QueriesAPI(self._api_client)
         self._query_history = QueryHistoryAPI(self._api_client)
         self._query_visualizations = QueryVisualizationsAPI(self._api_client)
@@ -425,11 +427,6 @@ class WorkspaceClient:
         """The Jobs API allows you to create, edit, and delete jobs."""
         return self._jobs

-    @property
-    def lakehouse_monitors(self) -> LakehouseMonitorsAPI:
-        """A monitor computes and monitors data or model quality metrics for a table over time."""
-        return self._lakehouse_monitors
-
     @property
     def lakeview(self) -> LakeviewAPI:
         """These APIs provide specific management operations for Lakeview dashboards."""
@@ -520,6 +517,11 @@ class WorkspaceClient:
         """A data provider is an object representing the organization in the real world who shares the data."""
         return self._providers

+    @property
+    def quality_monitors(self) -> QualityMonitorsAPI:
+        """A monitor computes and monitors data or model quality metrics for a table over time."""
+        return self._quality_monitors
+
     @property
     def queries(self) -> QueriesAPI:
         """These endpoints are used for CRUD operations on query definitions."""
@@ -700,7 +702,8 @@ class AccountClient:
                  debug_headers: bool = None,
                  product="unknown",
                  product_version="0.0.0",
-
+                 credentials_strategy: CredentialsStrategy = None,
+                 credentials_provider: CredentialsStrategy = None,
                  config: client.Config = None):
         if not config:
             config = client.Config(host=host,
@@ -721,6 +724,7 @@ class AccountClient:
                                    cluster_id=cluster_id,
                                    google_credentials=google_credentials,
                                    google_service_account=google_service_account,
+                                   credentials_strategy=credentials_strategy,
                                    credentials_provider=credentials_provider,
                                    debug_truncate_bytes=debug_truncate_bytes,
                                    debug_headers=debug_headers,
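The __init__.py hunks above add a credentials_strategy keyword to both WorkspaceClient and AccountClient (the old credentials_provider keyword is still forwarded, but Config now warns that it is deprecated) and swap the lakehouse_monitors property for quality_monitors, backed by QualityMonitorsAPI. A hedged caller-side sketch of the rename; the table name is made up and the get(table_name=...) call shape is an assumption, not something shown in this diff:

from databricks.sdk import WorkspaceClient

# 0.27.1: WorkspaceClient(credentials_provider=...) and w.lakehouse_monitors.get(...)
# 0.29.0: prefer credentials_strategy=...; the monitor APIs live under quality_monitors.
w = WorkspaceClient()  # credentials resolved from the environment or ~/.databrickscfg
monitor = w.quality_monitors.get(table_name="main.default.my_table")  # hypothetical table
print(monitor)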
databricks/sdk/azure.py
CHANGED
@@ -1,36 +1,9 @@
-from dataclasses import dataclass
 from typing import Dict

 from .oauth import TokenSource
 from .service.provisioning import Workspace


-@dataclass
-class AzureEnvironment:
-    name: str
-    service_management_endpoint: str
-    resource_manager_endpoint: str
-    active_directory_endpoint: str
-
-
-ARM_DATABRICKS_RESOURCE_ID = "2ff814a6-3304-4ab8-85cb-cd0e6f879c1d"
-
-ENVIRONMENTS = dict(
-    PUBLIC=AzureEnvironment(name="PUBLIC",
-                            service_management_endpoint="https://management.core.windows.net/",
-                            resource_manager_endpoint="https://management.azure.com/",
-                            active_directory_endpoint="https://login.microsoftonline.com/"),
-    USGOVERNMENT=AzureEnvironment(name="USGOVERNMENT",
-                                  service_management_endpoint="https://management.core.usgovcloudapi.net/",
-                                  resource_manager_endpoint="https://management.usgovcloudapi.net/",
-                                  active_directory_endpoint="https://login.microsoftonline.us/"),
-    CHINA=AzureEnvironment(name="CHINA",
-                           service_management_endpoint="https://management.core.chinacloudapi.cn/",
-                           resource_manager_endpoint="https://management.chinacloudapi.cn/",
-                           active_directory_endpoint="https://login.chinacloudapi.cn/"),
-)
-
-
 def add_workspace_id_header(cfg: 'Config', headers: Dict[str, str]):
     if cfg.azure_workspace_resource_id:
         headers["X-Databricks-Azure-Workspace-Resource-Id"] = cfg.azure_workspace_resource_id
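azure.py drops the AzureEnvironment dataclass together with ARM_DATABRICKS_RESOURCE_ID and the ENVIRONMENTS map; the config.py diff below now imports AzureEnvironment, Cloud and ALL_ENVS from .environments instead (environments.py grows by +34 lines in this release). A sketch of what the import move looks like from downstream code, assuming only the symbols that config.py itself imports:

# 0.27.1: from databricks.sdk.azure import AzureEnvironment, ENVIRONMENTS
# 0.29.0: the same metadata is reachable via the environments module.
from databricks.sdk.environments import ALL_ENVS, AzureEnvironment, Cloud

# DatabricksEnvironment exposes .cloud and .dns_zone, as config.py relies on below.
azure_zones = [env.dns_zone for env in ALL_ENVS if env.cloud == Cloud.AZURE]
print(azure_zones)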
databricks/sdk/config.py
CHANGED
@@ -6,16 +6,15 @@ import pathlib
 import platform
 import sys
 import urllib.parse
-from typing import Dict, Iterable, Optional
+from typing import Dict, Iterable, List, Optional, Tuple

 import requests

-from .azure import AzureEnvironment
 from .clock import Clock, RealClock
-from .credentials_provider import
-from .environments import (ALL_ENVS,
-                           DatabricksEnvironment)
-from .oauth import OidcEndpoints
+from .credentials_provider import CredentialsStrategy, DefaultCredentials
+from .environments import (ALL_ENVS, AzureEnvironment, Cloud,
+                           DatabricksEnvironment, get_environment_for_hostname)
+from .oauth import OidcEndpoints, Token
 from .version import __version__

 logger = logging.getLogger('databricks.sdk')
@@ -45,6 +44,32 @@ class ConfigAttribute:
         return f"<ConfigAttribute '{self.name}' {self.transform.__name__}>"


+_DEFAULT_PRODUCT_NAME = 'unknown'
+_DEFAULT_PRODUCT_VERSION = '0.0.0'
+_STATIC_USER_AGENT: Tuple[str, str, List[str]] = (_DEFAULT_PRODUCT_NAME, _DEFAULT_PRODUCT_VERSION, [])
+
+
+def with_product(product: str, product_version: str):
+    """[INTERNAL API] Change the product name and version used in the User-Agent header."""
+    global _STATIC_USER_AGENT
+    prev_product, prev_version, prev_other_info = _STATIC_USER_AGENT
+    logger.debug(f'Changing product from {prev_product}/{prev_version} to {product}/{product_version}')
+    _STATIC_USER_AGENT = product, product_version, prev_other_info
+
+
+def with_user_agent_extra(key: str, value: str):
+    """[INTERNAL API] Add extra metadata to the User-Agent header when developing a library."""
+    global _STATIC_USER_AGENT
+    product_name, product_version, other_info = _STATIC_USER_AGENT
+    for item in other_info:
+        if item.startswith(f"{key}/"):
+            # ensure that we don't have duplicates
+            other_info.remove(item)
+            break
+    other_info.append(f"{key}/{value}")
+    _STATIC_USER_AGENT = product_name, product_version, other_info
+
+
 class Config:
     host: str = ConfigAttribute(env='DATABRICKS_HOST')
     account_id: str = ConfigAttribute(env='DATABRICKS_ACCOUNT_ID')
@@ -67,6 +92,7 @@ class Config:
     auth_type: str = ConfigAttribute(env='DATABRICKS_AUTH_TYPE')
     cluster_id: str = ConfigAttribute(env='DATABRICKS_CLUSTER_ID')
     warehouse_id: str = ConfigAttribute(env='DATABRICKS_WAREHOUSE_ID')
+    serverless_compute_id: str = ConfigAttribute(env='DATABRICKS_SERVERLESS_COMPUTE_ID')
     skip_verify: bool = ConfigAttribute()
     http_timeout_seconds: float = ConfigAttribute()
     debug_truncate_bytes: int = ConfigAttribute(env='DATABRICKS_DEBUG_TRUNCATE_BYTES')
@@ -82,15 +108,34 @@ class Config:

     def __init__(self,
                  *,
-
-
-
+                 # Deprecated. Use credentials_strategy instead.
+                 credentials_provider: CredentialsStrategy = None,
+                 credentials_strategy: CredentialsStrategy = None,
+                 product=_DEFAULT_PRODUCT_NAME,
+                 product_version=_DEFAULT_PRODUCT_VERSION,
                  clock: Clock = None,
                  **kwargs):
         self._header_factory = None
         self._inner = {}
+        # as in SDK for Go, pull information from global static user agent context,
+        # so that we can track additional metadata for mid-stream libraries, as well
+        # as for cases, when the downstream product is used as a library and is not
+        # configured with a proper product name and version.
+        static_product, static_version, _ = _STATIC_USER_AGENT
+        if product == _DEFAULT_PRODUCT_NAME:
+            product = static_product
+        if product_version == _DEFAULT_PRODUCT_VERSION:
+            product_version = static_version
         self._user_agent_other_info = []
-
+        if credentials_strategy and credentials_provider:
+            raise ValueError(
+                "When providing `credentials_strategy` field, `credential_provider` cannot be specified.")
+        if credentials_provider:
+            logger.warning(
+                "parameter 'credentials_provider' is deprecated. Use 'credentials_strategy' instead.")
+        self._credentials_strategy = next(
+            s for s in [credentials_strategy, credentials_provider,
+                        DefaultCredentials()] if s is not None)
         if 'databricks_environment' in kwargs:
             self.databricks_environment = kwargs['databricks_environment']
             del kwargs['databricks_environment']
@@ -108,6 +153,9 @@ class Config:
             message = self.wrap_debug_info(str(e))
             raise ValueError(message) from e

+    def oauth_token(self) -> Token:
+        return self._credentials_strategy.oauth_token(self)
+
     def wrap_debug_info(self, message: str) -> str:
         debug_string = self.debug_string()
         if debug_string:
@@ -154,11 +202,7 @@ class Config:
         """Returns the environment based on configuration."""
         if self.databricks_environment:
             return self.databricks_environment
-        if self.host:
-            for environment in ALL_ENVS:
-                if self.host.endswith(environment.dns_zone):
-                    return environment
-        if self.azure_workspace_resource_id:
+        if not self.host and self.azure_workspace_resource_id:
             azure_env = self._get_azure_environment_name()
             for environment in ALL_ENVS:
                 if environment.cloud != Cloud.AZURE:
@@ -168,10 +212,12 @@ class Config:
                 if environment.dns_zone.startswith(".dev") or environment.dns_zone.startswith(".staging"):
                     continue
                 return environment
-        return
+        return get_environment_for_hostname(self.host)

     @property
     def is_azure(self) -> bool:
+        if self.azure_workspace_resource_id:
+            return True
         return self.environment.cloud == Cloud.AZURE

     @property
@@ -223,6 +269,12 @@ class Config:
         ]
         if len(self._user_agent_other_info) > 0:
             ua.append(' '.join(self._user_agent_other_info))
+        # as in SDK for Go, pull information from global static user agent context,
+        # so that we can track additional metadata for mid-stream libraries. this value
+        # is shared across all instances of Config objects intentionally.
+        _, _, static_info = _STATIC_USER_AGENT
+        if len(static_info) > 0:
+            ua.append(' '.join(static_info))
         if len(self._upstream_user_agent) > 0:
             ua.append(self._upstream_user_agent)
         if 'DATABRICKS_RUNTIME_VERSION' in os.environ:
@@ -439,12 +491,12 @@ class Config:

     def init_auth(self):
         try:
-            self._header_factory = self.
-            self.auth_type = self.
+            self._header_factory = self._credentials_strategy(self)
+            self.auth_type = self._credentials_strategy.auth_type()
             if not self._header_factory:
                 raise ValueError('not configured')
         except ValueError as e:
-            raise ValueError(f'{self.
+            raise ValueError(f'{self._credentials_strategy.auth_type()} auth: {e}') from e

     def __repr__(self):
         return f'<{self.debug_string()}>'
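config.py gains a process-wide user-agent context: with_product and with_user_agent_extra mutate the _STATIC_USER_AGENT tuple, and every later Config picks those values up (mirroring the Go SDK, per the inline comments). It also adds serverless_compute_id, an oauth_token() passthrough to the credentials strategy, and precedence logic between credentials_strategy and the deprecated credentials_provider. A short usage sketch for the user-agent helpers; both are documented as [INTERNAL API] above, and the user_agent property name plus the token-based construction are assumptions rather than part of this hunk:

from databricks.sdk.config import Config, with_product, with_user_agent_extra

# Register library metadata once, before any Config is constructed.
with_product("my-integration", "1.2.3")    # replaces the default unknown/0.0.0
with_user_agent_extra("build", "nightly")  # appended as "build/nightly"; calling again replaces it

cfg = Config(host="https://example.cloud.databricks.com", token="dapiXXXXXXXX")  # placeholder values
print(cfg.user_agent)  # expected to contain "my-integration/1.2.3" and "build/nightly"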
databricks/sdk/core.py
CHANGED
@@ -4,6 +4,7 @@ from datetime import timedelta
 from json import JSONDecodeError
 from types import TracebackType
 from typing import Any, BinaryIO, Iterator, Type
+from urllib.parse import urlencode

 from requests.adapters import HTTPAdapter

@@ -12,12 +13,18 @@ from .config import *
 # To preserve backwards compatibility (as these definitions were previously in this module)
 from .credentials_provider import *
 from .errors import DatabricksError, error_mapper
+from .errors.private_link import _is_private_link_redirect
+from .oauth import retrieve_token
 from .retries import retried

 __all__ = ['Config', 'DatabricksError']

 logger = logging.getLogger('databricks.sdk')

+URL_ENCODED_CONTENT_TYPE = "application/x-www-form-urlencoded"
+JWT_BEARER_GRANT_TYPE = "urn:ietf:params:oauth:grant-type:jwt-bearer"
+OIDC_TOKEN_PATH = "/oidc/v1/token"
+

 class ApiClient:
     _cfg: Config
@@ -108,6 +115,22 @@ class ApiClient:
         flattened = dict(flatten_dict(with_fixed_bools))
         return flattened

+    def get_oauth_token(self, auth_details: str) -> Token:
+        if not self._cfg.auth_type:
+            self._cfg.authenticate()
+        original_token = self._cfg.oauth_token()
+        headers = {"Content-Type": URL_ENCODED_CONTENT_TYPE}
+        params = urlencode({
+            "grant_type": JWT_BEARER_GRANT_TYPE,
+            "authorization_details": auth_details,
+            "assertion": original_token.access_token
+        })
+        return retrieve_token(client_id=self._cfg.client_id,
+                              client_secret=self._cfg.client_secret,
+                              token_url=self._cfg.host + OIDC_TOKEN_PATH,
+                              params=params,
+                              headers=headers)
+
     def do(self,
            method: str,
            path: str,
@@ -239,6 +262,10 @@ class ApiClient:
                 # See https://stackoverflow.com/a/58821552/277035
                 payload = response.json()
                 raise self._make_nicer_error(response=response, **payload) from None
+            # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
+            # is successful, but the response is not what we expect. We need to handle this case separately.
+            if _is_private_link_redirect(response):
+                raise self._make_nicer_error(response=response) from None
             return response
         except requests.exceptions.JSONDecodeError:
             message = self._make_sense_from_html(response.text)