databricks-sdk 0.17.0__py3-none-any.whl → 0.19.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release: this version of databricks-sdk might be problematic.

Files changed (36)
  1. databricks/sdk/__init__.py +41 -5
  2. databricks/sdk/azure.py +17 -7
  3. databricks/sdk/clock.py +49 -0
  4. databricks/sdk/config.py +459 -0
  5. databricks/sdk/core.py +7 -1026
  6. databricks/sdk/credentials_provider.py +628 -0
  7. databricks/sdk/environments.py +72 -0
  8. databricks/sdk/errors/__init__.py +1 -1
  9. databricks/sdk/errors/mapper.py +5 -5
  10. databricks/sdk/mixins/workspace.py +3 -3
  11. databricks/sdk/oauth.py +2 -1
  12. databricks/sdk/retries.py +9 -5
  13. databricks/sdk/service/_internal.py +1 -1
  14. databricks/sdk/service/catalog.py +946 -82
  15. databricks/sdk/service/compute.py +106 -41
  16. databricks/sdk/service/files.py +145 -31
  17. databricks/sdk/service/iam.py +44 -40
  18. databricks/sdk/service/jobs.py +199 -20
  19. databricks/sdk/service/ml.py +33 -42
  20. databricks/sdk/service/oauth2.py +3 -4
  21. databricks/sdk/service/pipelines.py +51 -31
  22. databricks/sdk/service/serving.py +1 -2
  23. databricks/sdk/service/settings.py +377 -72
  24. databricks/sdk/service/sharing.py +3 -4
  25. databricks/sdk/service/sql.py +27 -19
  26. databricks/sdk/service/vectorsearch.py +13 -17
  27. databricks/sdk/service/workspace.py +20 -11
  28. databricks/sdk/version.py +1 -1
  29. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/METADATA +4 -4
  30. databricks_sdk-0.19.0.dist-info/RECORD +53 -0
  31. databricks_sdk-0.17.0.dist-info/RECORD +0 -49
  32. /databricks/sdk/errors/{mapping.py → platform.py} +0 -0
  33. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/LICENSE +0 -0
  34. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/NOTICE +0 -0
  35. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/WHEEL +0 -0
  36. {databricks_sdk-0.17.0.dist-info → databricks_sdk-0.19.0.dist-info}/top_level.txt +0 -0
databricks/sdk/environments.py ADDED
@@ -0,0 +1,72 @@
+ from dataclasses import dataclass
+ from enum import Enum
+ from typing import Optional
+
+ from .azure import ARM_DATABRICKS_RESOURCE_ID, ENVIRONMENTS, AzureEnvironment
+
+
+ class Cloud(Enum):
+     AWS = "AWS"
+     AZURE = "AZURE"
+     GCP = "GCP"
+
+
+ @dataclass
+ class DatabricksEnvironment:
+     cloud: Cloud
+     dns_zone: str
+     azure_application_id: Optional[str] = None
+     azure_environment: Optional[AzureEnvironment] = None
+
+     def deployment_url(self, name: str) -> str:
+         return f"https://{name}{self.dns_zone}"
+
+     @property
+     def azure_service_management_endpoint(self) -> Optional[str]:
+         if self.azure_environment is None:
+             return None
+         return self.azure_environment.service_management_endpoint
+
+     @property
+     def azure_resource_manager_endpoint(self) -> Optional[str]:
+         if self.azure_environment is None:
+             return None
+         return self.azure_environment.resource_manager_endpoint
+
+     @property
+     def azure_active_directory_endpoint(self) -> Optional[str]:
+         if self.azure_environment is None:
+             return None
+         return self.azure_environment.active_directory_endpoint
+
+
+ DEFAULT_ENVIRONMENT = DatabricksEnvironment(Cloud.AWS, ".cloud.databricks.com")
+
+ ALL_ENVS = [
+     DatabricksEnvironment(Cloud.AWS, ".dev.databricks.com"),
+     DatabricksEnvironment(Cloud.AWS, ".staging.cloud.databricks.com"),
+     DatabricksEnvironment(Cloud.AWS, ".cloud.databricks.us"), DEFAULT_ENVIRONMENT,
+     DatabricksEnvironment(Cloud.AZURE,
+                           ".dev.azuredatabricks.net",
+                           azure_application_id="62a912ac-b58e-4c1d-89ea-b2dbfc7358fc",
+                           azure_environment=ENVIRONMENTS["PUBLIC"]),
+     DatabricksEnvironment(Cloud.AZURE,
+                           ".staging.azuredatabricks.net",
+                           azure_application_id="4a67d088-db5c-48f1-9ff2-0aace800ae68",
+                           azure_environment=ENVIRONMENTS["PUBLIC"]),
+     DatabricksEnvironment(Cloud.AZURE,
+                           ".azuredatabricks.net",
+                           azure_application_id=ARM_DATABRICKS_RESOURCE_ID,
+                           azure_environment=ENVIRONMENTS["PUBLIC"]),
+     DatabricksEnvironment(Cloud.AZURE,
+                           ".databricks.azure.us",
+                           azure_application_id=ARM_DATABRICKS_RESOURCE_ID,
+                           azure_environment=ENVIRONMENTS["USGOVERNMENT"]),
+     DatabricksEnvironment(Cloud.AZURE,
+                           ".databricks.azure.cn",
+                           azure_application_id=ARM_DATABRICKS_RESOURCE_ID,
+                           azure_environment=ENVIRONMENTS["CHINA"]),
+     DatabricksEnvironment(Cloud.GCP, ".dev.gcp.databricks.com"),
+     DatabricksEnvironment(Cloud.GCP, ".staging.gcp.databricks.com"),
+     DatabricksEnvironment(Cloud.GCP, ".gcp.databricks.com")
+ ]
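
The new environments.py module centralizes the per-cloud deployment metadata that previously lived in core.py. A minimal sketch of how ALL_ENVS can be used to resolve the environment for a workspace host; the helper guess_environment is hypothetical, not part of the SDK:

from databricks.sdk.environments import (ALL_ENVS, DEFAULT_ENVIRONMENT, Cloud,
                                         DatabricksEnvironment)

def guess_environment(host: str) -> DatabricksEnvironment:
    # match the longest dns_zone suffix first, so ".staging.cloud.databricks.com"
    # wins over ".cloud.databricks.com"
    for env in sorted(ALL_ENVS, key=lambda e: len(e.dns_zone), reverse=True):
        if host.endswith(env.dns_zone):
            return env
    return DEFAULT_ENVIRONMENT

env = guess_environment('adb-1234567890123456.7.azuredatabricks.net')
assert env.cloud == Cloud.AZURE
assert env.deployment_url('accounts') == 'https://accounts.azuredatabricks.net'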
databricks/sdk/errors/__init__.py CHANGED
@@ -1,4 +1,4 @@
  from .base import DatabricksError, ErrorDetail
  from .mapper import error_mapper
- from .mapping import *
+ from .platform import *
  from .sdk import *
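
The mapping.py → platform.py rename (file 32 in the list above) is internal: the wildcard re-export keeps the public import surface unchanged, so imports like the following should continue to work:

# unaffected by the rename, since platform's names are re-exported here
from databricks.sdk.errors import DatabricksError, NotFound, PermissionDenied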
databricks/sdk/errors/mapper.py CHANGED
@@ -1,18 +1,18 @@
- from databricks.sdk.errors import mapping
+ from databricks.sdk.errors import platform
  from databricks.sdk.errors.base import DatabricksError


  def error_mapper(status_code: int, raw: dict) -> DatabricksError:
      error_code = raw.get('error_code', None)
-     if error_code in mapping.ERROR_CODE_MAPPING:
+     if error_code in platform.ERROR_CODE_MAPPING:
          # more specific error codes override more generic HTTP status codes
-         return mapping.ERROR_CODE_MAPPING[error_code](**raw)
+         return platform.ERROR_CODE_MAPPING[error_code](**raw)

-     if status_code in mapping.STATUS_CODE_MAPPING:
+     if status_code in platform.STATUS_CODE_MAPPING:
          # more generic HTTP status codes matched after more specific error codes,
          # where there's a default exception class per HTTP status code, and we do
          # rely on Databricks platform exception mapper to do the right thing.
-         return mapping.STATUS_CODE_MAPPING[status_code](**raw)
+         return platform.STATUS_CODE_MAPPING[status_code](**raw)

      # backwards-compatible error creation for cases like using older versions of
      # the SDK on way newer releases of the platform.
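
The precedence is unchanged: a recognized error_code picks a specific exception class, and only then does the HTTP status code choose a generic default. A sketch of both paths (the concrete classes returned depend on the tables in platform.py):

from databricks.sdk.errors.mapper import error_mapper

# a specific error_code wins over the generic 404 default
specific = error_mapper(404, {'error_code': 'RESOURCE_DOES_NOT_EXIST',
                              'message': 'cluster not found'})
# without a recognized error_code, the status code decides
generic = error_mapper(404, {'message': 'cluster not found'})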
databricks/sdk/mixins/workspace.py CHANGED
@@ -1,4 +1,4 @@
- from typing import BinaryIO, Iterator, Optional
+ from typing import BinaryIO, Iterator, Optional, Union

  from ..core import DatabricksError
  from ..service.workspace import (ExportFormat, ImportFormat, Language,
@@ -37,7 +37,7 @@ class WorkspaceExt(WorkspaceAPI):

      def upload(self,
                 path: str,
-                content: BinaryIO,
+                content: Union[bytes, BinaryIO],
                 *,
                 format: Optional[ImportFormat] = None,
                 language: Optional[Language] = None,
@@ -51,7 +51,7 @@ class WorkspaceExt(WorkspaceAPI):
          * `INVALID_PARAMETER_VALUE`: if `format` and `content` values are not compatible.

          :param path: target location of the file on workspace.
-         :param content: file-like `io.BinaryIO` of the `path` contents.
+         :param content: the contents as either raw binary data `bytes` or a file-like `io.BinaryIO` of the `path` contents.
          :param format: By default, `ImportFormat.SOURCE`. If using `ImportFormat.AUTO` the `path`
              is imported or exported as either a workspace file or a notebook, depending
              on an analysis of the `item`’s extension and the header content provided in
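
With the widened signature, callers no longer have to wrap literal content in io.BytesIO. A sketch assuming an authenticated WorkspaceClient and a hypothetical target path:

import io
from databricks.sdk import WorkspaceClient

w = WorkspaceClient()
# raw bytes are now accepted directly...
w.workspace.upload('/Users/me@example.com/hello.py', b"print('hello')")
# ...and file-like objects keep working as before
w.workspace.upload('/Users/me@example.com/hello.py',
                   io.BytesIO(b"print('hello')"), overwrite=True)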
databricks/sdk/oauth.py CHANGED
@@ -357,7 +357,8 @@ class OAuthClient:
                   scopes: List[str] = None,
                   client_secret: str = None):
          # TODO: is it a circular dependency?..
-         from .core import Config, credentials_provider
+         from .core import Config
+         from .credentials_provider import credentials_provider

          @credentials_provider('noop', [])
          def noop_credentials(_: any):
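
The decorator moved with the rest of the credential machinery into the new credentials_provider.py (file 6 above). A minimal sketch of registering a custom provider the same way as the noop_credentials example; the 'static-token' provider and its body are illustrative, not SDK code:

from databricks.sdk.credentials_provider import credentials_provider

@credentials_provider('static-token', ['host', 'token'])
def static_token(cfg):
    # a provider returns a header factory that the client calls per request
    def inner() -> dict:
        return {'Authorization': f'Bearer {cfg.token}'}
    return inner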
databricks/sdk/retries.py CHANGED
@@ -1,30 +1,34 @@
  import functools
  import logging
- import time
  from datetime import timedelta
  from random import random
  from typing import Callable, Optional, Sequence, Type

+ from .clock import Clock, RealClock
+
  logger = logging.getLogger(__name__)


  def retried(*,
              on: Sequence[Type[BaseException]] = None,
              is_retryable: Callable[[BaseException], Optional[str]] = None,
-             timeout=timedelta(minutes=20)):
+             timeout=timedelta(minutes=20),
+             clock: Clock = None):
      has_allowlist = on is not None
      has_callback = is_retryable is not None
      if not (has_allowlist or has_callback) or (has_allowlist and has_callback):
          raise SyntaxError('either on=[Exception] or callback=lambda x: .. is required')
+     if clock is None:
+         clock = RealClock()

      def decorator(func):

          @functools.wraps(func)
          def wrapper(*args, **kwargs):
-             deadline = time.time() + timeout.total_seconds()
+             deadline = clock.time() + timeout.total_seconds()
              attempt = 1
              last_err = None
-             while time.time() < deadline:
+             while clock.time() < deadline:
                  try:
                      return func(*args, **kwargs)
                  except Exception as err:
@@ -50,7 +54,7 @@ def retried(*,
                  raise err

              logger.debug(f'Retrying: {retry_reason} (sleeping ~{sleep}s)')
-             time.sleep(sleep + random())
+             clock.sleep(sleep + random())
              attempt += 1
          raise TimeoutError(f'Timed out after {timeout}') from last_err

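Routing all timing through the injectable Clock makes retry behavior testable without real sleeps. A sketch under the assumption that Clock exposes the time() and sleep() methods used above; FakeClock is hypothetical test code, not part of the SDK:

from datetime import timedelta

from databricks.sdk.clock import Clock
from databricks.sdk.retries import retried

class FakeClock(Clock):
    # advances virtual time instead of blocking the test
    def __init__(self):
        self._now = 0.0

    def time(self) -> float:
        return self._now

    def sleep(self, seconds: float):
        self._now += seconds

@retried(on=[ValueError], timeout=timedelta(minutes=5), clock=FakeClock())
def flaky() -> str:
    return 'ok'

assert flaky() == 'ok'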
databricks/sdk/service/_internal.py CHANGED
@@ -10,7 +10,7 @@ def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:

  def _repeated_dict(d: Dict[str, any], field: str, cls: Type) -> any:
      if field not in d or not d[field]:
-         return None
+         return []
      from_dict = getattr(cls, 'from_dict')
      return [from_dict(v) for v in d[field]]
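
This changes how absent repeated fields deserialize: an empty list instead of None, so generated from_dict code and downstream callers can iterate without a None check. A minimal reproduction of the effect, with the helper body copied from the diff:

from typing import Dict, Type

def _repeated_dict(d: Dict[str, any], field: str, cls: Type) -> any:
    if field not in d or not d[field]:
        return []  # was: return None
    from_dict = getattr(cls, 'from_dict')
    return [from_dict(v) for v in d[field]]

# previously this raised TypeError: 'NoneType' object is not iterable
for cluster in _repeated_dict({}, 'clusters', object):
    pass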