databricks-sdk 0.67.0__py3-none-any.whl → 0.68.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (47) hide show
  1. databricks/sdk/__init__.py +8 -0
  2. databricks/sdk/_base_client.py +4 -1
  3. databricks/sdk/common/lro.py +17 -0
  4. databricks/sdk/common/types/__init__.py +0 -0
  5. databricks/sdk/common/types/fieldmask.py +39 -0
  6. databricks/sdk/credentials_provider.py +61 -12
  7. databricks/sdk/dbutils.py +5 -1
  8. databricks/sdk/errors/parser.py +8 -3
  9. databricks/sdk/mixins/files.py +1 -0
  10. databricks/sdk/oidc_token_supplier.py +80 -0
  11. databricks/sdk/retries.py +102 -2
  12. databricks/sdk/service/_internal.py +93 -1
  13. databricks/sdk/service/agentbricks.py +1 -1
  14. databricks/sdk/service/apps.py +264 -1
  15. databricks/sdk/service/billing.py +2 -3
  16. databricks/sdk/service/catalog.py +1026 -540
  17. databricks/sdk/service/cleanrooms.py +3 -3
  18. databricks/sdk/service/compute.py +21 -33
  19. databricks/sdk/service/dashboards.py +7 -3
  20. databricks/sdk/service/database.py +3 -2
  21. databricks/sdk/service/dataquality.py +1145 -0
  22. databricks/sdk/service/files.py +2 -1
  23. databricks/sdk/service/iam.py +2 -1
  24. databricks/sdk/service/iamv2.py +1 -1
  25. databricks/sdk/service/jobs.py +6 -9
  26. databricks/sdk/service/marketplace.py +3 -1
  27. databricks/sdk/service/ml.py +3 -1
  28. databricks/sdk/service/oauth2.py +1 -1
  29. databricks/sdk/service/pipelines.py +5 -6
  30. databricks/sdk/service/provisioning.py +544 -655
  31. databricks/sdk/service/qualitymonitorv2.py +1 -1
  32. databricks/sdk/service/serving.py +3 -1
  33. databricks/sdk/service/settings.py +5 -2
  34. databricks/sdk/service/settingsv2.py +1 -1
  35. databricks/sdk/service/sharing.py +12 -3
  36. databricks/sdk/service/sql.py +305 -70
  37. databricks/sdk/service/tags.py +1 -1
  38. databricks/sdk/service/vectorsearch.py +3 -1
  39. databricks/sdk/service/workspace.py +70 -17
  40. databricks/sdk/version.py +1 -1
  41. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.68.0.dist-info}/METADATA +4 -2
  42. databricks_sdk-0.68.0.dist-info/RECORD +83 -0
  43. databricks_sdk-0.67.0.dist-info/RECORD +0 -79
  44. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.68.0.dist-info}/WHEEL +0 -0
  45. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.68.0.dist-info}/licenses/LICENSE +0 -0
  46. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.68.0.dist-info}/licenses/NOTICE +0 -0
  47. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.68.0.dist-info}/top_level.txt +0 -0
@@ -21,6 +21,7 @@ from databricks.sdk.service import cleanrooms as pkg_cleanrooms
21
21
  from databricks.sdk.service import compute as pkg_compute
22
22
  from databricks.sdk.service import dashboards as pkg_dashboards
23
23
  from databricks.sdk.service import database as pkg_database
24
+ from databricks.sdk.service import dataquality as pkg_dataquality
24
25
  from databricks.sdk.service import files as pkg_files
25
26
  from databricks.sdk.service import iam as pkg_iam
26
27
  from databricks.sdk.service import iamv2 as pkg_iamv2
@@ -79,6 +80,7 @@ from databricks.sdk.service.compute import (ClusterPoliciesAPI, ClustersAPI,
79
80
  from databricks.sdk.service.dashboards import (GenieAPI, LakeviewAPI,
80
81
  LakeviewEmbeddedAPI)
81
82
  from databricks.sdk.service.database import DatabaseAPI
83
+ from databricks.sdk.service.dataquality import DataQualityAPI
82
84
  from databricks.sdk.service.files import DbfsAPI, FilesAPI
83
85
  from databricks.sdk.service.iam import (AccessControlAPI,
84
86
  AccountAccessControlAPI,
@@ -282,6 +284,7 @@ class WorkspaceClient:
282
284
  self._current_user = pkg_iam.CurrentUserAPI(self._api_client)
283
285
  self._dashboard_widgets = pkg_sql.DashboardWidgetsAPI(self._api_client)
284
286
  self._dashboards = pkg_sql.DashboardsAPI(self._api_client)
287
+ self._data_quality = pkg_dataquality.DataQualityAPI(self._api_client)
285
288
  self._data_sources = pkg_sql.DataSourcesAPI(self._api_client)
286
289
  self._database = pkg_database.DatabaseAPI(self._api_client)
287
290
  self._dbfs = DbfsExt(self._api_client)
@@ -540,6 +543,11 @@ class WorkspaceClient:
540
543
  """In general, there is little need to modify dashboards using the API."""
541
544
  return self._dashboards
542
545
 
546
+ @property
547
+ def data_quality(self) -> pkg_dataquality.DataQualityAPI:
548
+ """Manage the data quality of Unity Catalog objects (currently support `schema` and `table`)."""
549
+ return self._data_quality
550
+
543
551
  @property
544
552
  def data_sources(self) -> pkg_sql.DataSourcesAPI:
545
553
  """This API is provided to assist you in making new query objects."""
@@ -99,7 +99,10 @@ class _BaseClient:
99
99
  # Default to 60 seconds
100
100
  self._http_timeout_seconds = http_timeout_seconds or 60
101
101
 
102
- self._error_parser = _Parser(extra_error_customizers=extra_error_customizers)
102
+ self._error_parser = _Parser(
103
+ extra_error_customizers=extra_error_customizers,
104
+ debug_headers=debug_headers,
105
+ )
103
106
 
104
107
  def _authenticate(self, r: requests.PreparedRequest) -> requests.PreparedRequest:
105
108
  if self._header_factory:
@@ -0,0 +1,17 @@
1
+ from datetime import timedelta
2
+ from typing import Optional
3
+
4
+
5
class LroOptions:
    """Options for Long Running Operations (LRO).

    DO NOT USE THIS OPTION. This option is still under development
    and can be updated in the future without notice.
    """

    def __init__(self, *, timeout: Optional[timedelta] = None):
        """
        Args:
            timeout: The timeout for the Long Running Operations.
                If not set, the default timeout is 20 minutes.
        """
        # Compare against None explicitly: `timeout or default` would silently
        # replace an explicit zero timeout with the default, because
        # timedelta(0) is falsy.
        self.timeout = timeout if timeout is not None else timedelta(minutes=20)
File without changes
@@ -0,0 +1,39 @@
1
class FieldMask(object):
    """Class for FieldMask message type.

    This is based on the base implementation from protobuf.
    https://pigweed.googlesource.com/third_party/github/protocolbuffers/protobuf/+/HEAD/python/google/protobuf/internal/field_mask.py
    The original implementation only works with proto generated classes.
    Since our classes are not generated from proto files, we need to implement it manually.
    """

    def __init__(self, field_mask=None):
        """Initializes the FieldMask.

        Args:
            field_mask: Optional iterable of path strings. When omitted,
                the mask starts out empty.
        """
        # Always initialize `paths`: leaving it unset for a falsy argument
        # made FieldMask().ToJsonString() (and __eq__/__hash__/__repr__)
        # raise AttributeError. `list(...)` also avoids aliasing the
        # caller's list.
        self.paths = list(field_mask) if field_mask else []

    def ToJsonString(self) -> str:
        """Converts FieldMask to string."""
        return ",".join(self.paths)

    def FromJsonString(self, value: str) -> None:
        """Converts string to FieldMask.

        Raises:
            ValueError: If ``value`` is not a string.
        """
        if not isinstance(value, str):
            raise ValueError("FieldMask JSON value not a string: {!r}".format(value))
        if value:
            self.paths = value.split(",")
        else:
            self.paths = []

    def __eq__(self, other) -> bool:
        """Check equality based on paths."""
        if not isinstance(other, FieldMask):
            return False
        return self.paths == other.paths

    def __hash__(self) -> int:
        """Hash based on paths tuple."""
        return hash(tuple(self.paths))

    def __repr__(self) -> str:
        """String representation for debugging."""
        return f"FieldMask(paths={self.paths})"
@@ -12,7 +12,7 @@ import sys
12
12
  import threading
13
13
  import time
14
14
  from datetime import datetime
15
- from typing import Callable, Dict, List, Optional, Tuple, Union
15
+ from typing import Any, Callable, Dict, List, Optional, Tuple, Union
16
16
 
17
17
  import google.auth # type: ignore
18
18
  import requests
@@ -89,7 +89,6 @@ def credentials_strategy(name: str, require: List[str]):
89
89
  @functools.wraps(func)
90
90
  def wrapper(cfg: "Config") -> Optional[CredentialsProvider]:
91
91
  for attr in require:
92
- getattr(cfg, attr)
93
92
  if not getattr(cfg, attr):
94
93
  return None
95
94
  return func(cfg)
@@ -103,7 +102,12 @@ def credentials_strategy(name: str, require: List[str]):
103
102
  def oauth_credentials_strategy(name: str, require: List[str]):
104
103
  """Given the function that receives a Config and returns an OauthHeaderFactory,
105
104
  create an OauthCredentialsProvider with a given name and required configuration
106
- attribute names to be present for this function to be called."""
105
+ attribute names to be present for this function to be called.
106
+
107
+ Args:
108
+ name: The name of the authentication strategy
109
+ require: List of config attributes that must be present
110
+ """
107
111
 
108
112
  def inner(
109
113
  func: Callable[["Config"], OAuthCredentialsProvider],
@@ -356,33 +360,47 @@ def oidc_credentials_provider(cfg, id_token_source: oidc.IdTokenSource) -> Optio
356
360
  return OAuthCredentialsProvider(refreshed_headers, token)
357
361
 
358
362
 
359
- @oauth_credentials_strategy("github-oidc", ["host", "client_id"])
360
- def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
363
+ def _oidc_credentials_provider(
364
+ cfg: "Config", supplier_factory: Callable[[], Any], provider_name: str
365
+ ) -> Optional[CredentialsProvider]:
361
366
  """
362
- DatabricksWIFCredentials uses a Token Supplier to get a JWT Token and exchanges
363
- it for a Databricks Token.
367
+ Generic OIDC credentials provider that works with any OIDC token supplier.
368
+
369
+ Args:
370
+ cfg: Databricks configuration
371
+ supplier_factory: Callable that returns an OIDC token supplier instance
372
+ provider_name: Human-readable name (e.g., "GitHub OIDC", "Azure DevOps OIDC")
364
373
 
365
- Supported suppliers:
366
- - GitHub OIDC
374
+ Returns:
375
+ OAuthCredentialsProvider if successful, None if supplier unavailable or token retrieval fails
367
376
  """
368
- supplier = oidc_token_supplier.GitHubOIDCTokenSupplier()
377
+ # Try to create the supplier
378
+ try:
379
+ supplier = supplier_factory()
380
+ except Exception as e:
381
+ logger.debug(f"{provider_name}: {str(e)}")
382
+ return None
369
383
 
384
+ # Determine the audience for token exchange
370
385
  audience = cfg.token_audience
371
386
  if audience is None and cfg.is_account_client:
372
387
  audience = cfg.account_id
373
388
  if audience is None and not cfg.is_account_client:
374
389
  audience = cfg.oidc_endpoints.token_endpoint
375
390
 
376
- # Try to get an idToken. If no supplier returns a token, we cannot use this authentication mode.
391
+ # Try to get an OIDC token. If no supplier returns a token, we cannot use this authentication mode.
377
392
  id_token = supplier.get_oidc_token(audience)
378
393
  if not id_token:
394
+ logger.debug(f"{provider_name}: no token available, skipping authentication method")
379
395
  return None
380
396
 
397
+ logger.info(f"Configured {provider_name} authentication")
398
+
381
399
  def token_source_for(audience: str) -> oauth.TokenSource:
382
400
  id_token = supplier.get_oidc_token(audience)
383
401
  if not id_token:
384
402
  # Should not happen, since we checked it above.
385
- raise Exception("Cannot get OIDC token")
403
+ raise Exception(f"Cannot get {provider_name} token")
386
404
 
387
405
  return oauth.ClientCredentials(
388
406
  client_id=cfg.client_id,
@@ -408,6 +426,36 @@ def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
408
426
  return OAuthCredentialsProvider(refreshed_headers, token)
409
427
 
410
428
 
429
+ @oauth_credentials_strategy("github-oidc", ["host", "client_id"])
430
+ def github_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
431
+ """
432
+ GitHub OIDC authentication uses a Token Supplier to get a JWT Token and exchanges
433
+ it for a Databricks Token.
434
+
435
+ Supported in GitHub Actions with OIDC service connections.
436
+ """
437
+ return _oidc_credentials_provider(
438
+ cfg=cfg,
439
+ supplier_factory=lambda: oidc_token_supplier.GitHubOIDCTokenSupplier(),
440
+ provider_name="GitHub OIDC",
441
+ )
442
+
443
+
444
+ @oauth_credentials_strategy("azure-devops-oidc", ["host", "client_id"])
445
+ def azure_devops_oidc(cfg: "Config") -> Optional[CredentialsProvider]:
446
+ """
447
+ Azure DevOps OIDC authentication uses a Token Supplier to get a JWT Token
448
+ and exchanges it for a Databricks Token.
449
+
450
+ Supported in Azure DevOps pipelines with OIDC service connections.
451
+ """
452
+ return _oidc_credentials_provider(
453
+ cfg=cfg,
454
+ supplier_factory=lambda: oidc_token_supplier.AzureDevOpsOIDCTokenSupplier(),
455
+ provider_name="Azure DevOps OIDC",
456
+ )
457
+
458
+
411
459
  @oauth_credentials_strategy("github-oidc-azure", ["host", "azure_client_id"])
412
460
  def github_oidc_azure(cfg: "Config") -> Optional[CredentialsProvider]:
413
461
  if "ACTIONS_ID_TOKEN_REQUEST_TOKEN" not in os.environ:
@@ -1019,6 +1067,7 @@ class DefaultCredentials:
1019
1067
  azure_service_principal,
1020
1068
  github_oidc_azure,
1021
1069
  azure_cli,
1070
+ azure_devops_oidc,
1022
1071
  external_browser,
1023
1072
  databricks_cli,
1024
1073
  runtime_native_auth,
databricks/sdk/dbutils.py CHANGED
@@ -210,7 +210,11 @@ class _JobsUtil:
210
210
  class RemoteDbUtils:
211
211
 
212
212
  def __init__(self, config: "Config" = None):
213
- self._config = Config() if not config else config
213
+ # Create a shallow copy of the config to allow the use of a custom
214
+ # user-agent while avoiding modifying the original config.
215
+ self._config = Config() if not config else config.copy()
216
+ self._config.with_user_agent_extra("dbutils", "remote")
217
+
214
218
  self._client = ApiClient(self._config)
215
219
  self._clusters = compute_ext.ClustersExt(self._client)
216
220
  self._commands = compute.CommandExecutionAPI(self._client)
@@ -31,12 +31,15 @@ _error_customizers = [
31
31
  ]
32
32
 
33
33
 
34
- def _unknown_error(response: requests.Response) -> str:
34
+ def _unknown_error(response: requests.Response, debug_headers: bool = False) -> str:
35
35
  """A standard error message that can be shown when an API response cannot be parsed.
36
36
 
37
37
  This error message includes a link to the issue tracker for the SDK for users to report the issue to us.
38
+
39
+ :param response: The response object from the API request.
40
+ :param debug_headers: Whether to include headers in the request log. Defaults to False to defensively handle cases where request headers might contain sensitive data (e.g. tokens).
38
41
  """
39
- request_log = RoundTrip(response, debug_headers=True, debug_truncate_bytes=10 * 1024).generate()
42
+ request_log = RoundTrip(response, debug_headers=debug_headers, debug_truncate_bytes=10 * 1024).generate()
40
43
  return (
41
44
  "This is likely a bug in the Databricks SDK for Python or the underlying "
42
45
  "API. Please report this issue with the following debugging information to the SDK issue tracker at "
@@ -56,11 +59,13 @@ class _Parser:
56
59
  self,
57
60
  extra_error_parsers: List[_ErrorDeserializer] = [],
58
61
  extra_error_customizers: List[_ErrorCustomizer] = [],
62
+ debug_headers: bool = False,
59
63
  ):
60
64
  self._error_parsers = _error_deserializers + (extra_error_parsers if extra_error_parsers is not None else [])
61
65
  self._error_customizers = _error_customizers + (
62
66
  extra_error_customizers if extra_error_customizers is not None else []
63
67
  )
68
+ self._debug_headers = debug_headers
64
69
 
65
70
  def get_api_error(self, response: requests.Response) -> Optional[DatabricksError]:
66
71
  """
@@ -84,7 +89,7 @@ class _Parser:
84
89
  )
85
90
  return _error_mapper(
86
91
  response,
87
- {"message": "unable to parse response. " + _unknown_error(response)},
92
+ {"message": "unable to parse response. " + _unknown_error(response, self._debug_headers)},
88
93
  )
89
94
 
90
95
  # Private link failures happen via a redirect to the login page. From a requests-perspective, the request
@@ -1337,6 +1337,7 @@ class FilesExt(files.FilesAPI):
1337
1337
  # where we believe request didn't reach the server
1338
1338
  is_retryable=extended_is_retryable,
1339
1339
  before_retry=before_retry,
1340
+ clock=self._config.clock,
1340
1341
  )(delegate)()
1341
1342
 
1342
1343
  def _open_download_stream(
@@ -1,9 +1,13 @@
1
+ import logging
1
2
  import os
2
3
  from typing import Optional
3
4
 
4
5
  import requests
5
6
 
7
+ logger = logging.getLogger("databricks.sdk")
6
8
 
9
+
10
+ # TODO: Check the required environment variables while creating the instance rather than in the get_oidc_token method to allow early return.
7
11
  class GitHubOIDCTokenSupplier:
8
12
  """
9
13
  Supplies OIDC tokens from GitHub Actions.
@@ -26,3 +30,79 @@ class GitHubOIDCTokenSupplier:
26
30
  return None
27
31
 
28
32
  return response_json["value"]
33
+
34
+
35
class AzureDevOpsOIDCTokenSupplier:
    """
    Supplies OIDC tokens from Azure DevOps pipelines.

    Constructs the OIDC token request URL using official Azure DevOps predefined variables.
    See: https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables
    """

    def __init__(self):
        """Initialize and validate Azure DevOps environment variables.

        Raises:
            ValueError: If any of the required pipeline environment
                variables is missing or empty.
        """
        # Read the Azure DevOps predefined pipeline variables from the environment.
        env = os.environ.get
        self.access_token = env("SYSTEM_ACCESSTOKEN")
        self.collection_uri = env("SYSTEM_TEAMFOUNDATIONCOLLECTIONURI")
        self.project_id = env("SYSTEM_TEAMPROJECTID")
        self.plan_id = env("SYSTEM_PLANID")
        self.job_id = env("SYSTEM_JOBID")
        self.hub_name = env("SYSTEM_HOSTTYPE")

        # Collect the missing required variables in a stable order so the
        # resulting error message is deterministic.
        required = (
            ("SYSTEM_ACCESSTOKEN", self.access_token),
            ("SYSTEM_TEAMFOUNDATIONCOLLECTIONURI", self.collection_uri),
            ("SYSTEM_TEAMPROJECTID", self.project_id),
            ("SYSTEM_PLANID", self.plan_id),
            ("SYSTEM_JOBID", self.job_id),
            ("SYSTEM_HOSTTYPE", self.hub_name),
        )
        missing_vars = [name for name, value in required if not value]
        if not missing_vars:
            return

        # SYSTEM_ACCESSTOKEN gets a dedicated message because it must be
        # mapped into the job explicitly by the pipeline author.
        if "SYSTEM_ACCESSTOKEN" in missing_vars:
            error_msg = "Azure DevOps OIDC: SYSTEM_ACCESSTOKEN env var not found. If calling from Azure DevOps Pipeline, please set this env var following https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken"
        else:
            error_msg = f"Azure DevOps OIDC: missing required environment variables: {', '.join(missing_vars)}"
        raise ValueError(error_msg)

    def get_oidc_token(self, audience: str) -> Optional[str]:
        # Note: Azure DevOps OIDC tokens have a fixed audience of "api://AzureADTokenExchange".
        # The audience parameter is ignored but kept for interface compatibility with other OIDC suppliers.
        try:
            # Construct the OIDC token request URL.
            # Format: {collection_uri}{project_id}/_apis/distributedtask/hubs/{hubName}/plans/{planId}/jobs/{jobId}/oidctoken.
            request_url = f"{self.collection_uri}{self.project_id}/_apis/distributedtask/hubs/{self.hub_name}/plans/{self.plan_id}/jobs/{self.job_id}/oidctoken"

            # Add API version (audience is fixed to "api://AzureADTokenExchange" by Azure DevOps).
            endpoint = f"{request_url}?api-version=7.2-preview.1"
            headers = {
                "Authorization": f"Bearer {self.access_token}",
                "Content-Type": "application/json",
                "Content-Length": "0",
            }

            # Azure DevOps OIDC endpoint requires POST request with empty body.
            response = requests.post(endpoint, headers=headers)
            if not response.ok:
                logger.debug(f"Azure DevOps OIDC: token request failed with status {response.status_code}")
                return None

            # Azure DevOps returns the token in 'oidcToken' field.
            payload = response.json()
            if "oidcToken" not in payload:
                logger.debug("Azure DevOps OIDC: response missing 'oidcToken' field")
                return None

            logger.debug("Azure DevOps OIDC: successfully obtained token")
            return payload["oidcToken"]
        except Exception as e:
            logger.debug(f"Azure DevOps OIDC: failed to get token: {e}")
            return None
databricks/sdk/retries.py CHANGED
@@ -1,13 +1,15 @@
1
1
  import functools
2
2
  import logging
3
3
  from datetime import timedelta
4
- from random import random
5
- from typing import Callable, Optional, Sequence, Type
4
+ from random import random, uniform
5
+ from typing import Callable, Optional, Sequence, Tuple, Type, TypeVar
6
6
 
7
7
  from .clock import Clock, RealClock
8
8
 
9
9
  logger = logging.getLogger(__name__)
10
10
 
11
+ T = TypeVar("T")
12
+
11
13
 
12
14
  def retried(
13
15
  *,
@@ -67,3 +69,101 @@ def retried(
67
69
  return wrapper
68
70
 
69
71
  return decorator
72
+
73
+
74
class RetryError(Exception):
    """Error returned from poll functions to control retry behavior.

    Wraps an underlying exception (``err``) together with a ``halt`` flag:
    a halting error stops polling immediately, a non-halting one asks the
    poller to try again.
    """

    def __init__(self, err: Exception, halt: bool = False):
        super().__init__(str(err))
        # The underlying cause of the retry decision.
        self.err = err
        # True when polling should stop instead of retrying. Note that this
        # instance attribute shadows the `halt` factory on instances;
        # `RetryError.halt(...)` remains callable on the class itself.
        self.halt = halt

    @staticmethod
    def continues(msg: str) -> "RetryError":
        """Create a non-halting retry error with a message."""
        return RetryError(Exception(msg), halt=False)

    @staticmethod
    def halt(err: Exception) -> "RetryError":
        """Create a halting retry error."""
        return RetryError(err, halt=True)
91
+
92
+
93
+ def _backoff(attempt: int) -> float:
94
+ """Calculate backoff time with jitter.
95
+
96
+ Linear backoff: attempt * 1 second, capped at 10 seconds
97
+ Plus random jitter between 50ms and 750ms.
98
+ """
99
+ wait = min(10, attempt)
100
+ jitter = uniform(0.05, 0.75)
101
+ return wait + jitter
102
+
103
+
104
def poll(
    fn: Callable[[], Tuple[Optional[T], Optional[RetryError]]],
    timeout: timedelta = timedelta(minutes=20),
    clock: Optional[Clock] = None,
) -> T:
    """Poll a function until it succeeds or times out.

    The backoff is linear backoff and jitter.

    This function is not meant to be used directly by users.
    It is used internally by the SDK to poll for the result of an operation.
    It can be changed in the future without any notice.

    :param fn: Function that returns (result, error).
        Return (None, RetryError.continues("msg")) to continue polling.
        Return (None, RetryError.halt(err)) to stop with error.
        Return (result, None) on success.
    :param timeout: Maximum time to poll (default: 20 minutes)
    :param clock: Clock implementation for testing (default: RealClock)
    :returns: The result of the successful function call
    :raises TimeoutError: If the timeout is reached
    :raises Exception: If a halting error is encountered

    Example:
        def check_operation():
            op = get_operation()
            if not op.done:
                return None, RetryError.continues("operation still in progress")
            if op.error:
                return None, RetryError.halt(Exception(f"operation failed: {op.error}"))
            return op.result, None

        result = poll(check_operation, timeout=timedelta(minutes=5))
    """
    if clock is None:
        clock = RealClock()

    deadline = clock.time() + timeout.total_seconds()
    attempt = 0
    last_err = None

    while clock.time() < deadline:
        attempt += 1

        # Any exception raised by fn() itself (as opposed to a RetryError
        # *returned* by it) propagates unchanged: the previous
        # `except RetryError: raise` / `except Exception as e: raise e`
        # handlers were no-ops that only added a traceback frame.
        result, err = fn()

        if err is None:
            return result

        if err.halt:
            raise err.err

        # Non-halting error: remember it for the timeout message, back off,
        # then poll again.
        last_err = err.err
        wait = _backoff(attempt)
        logger.debug(f"{str(err.err).rstrip('.')}. Sleeping {wait:.3f}s")
        clock.sleep(wait)

    raise TimeoutError(f"Timed out after {timeout}") from last_err
@@ -1,6 +1,11 @@
1
1
  import datetime
2
2
  import urllib.parse
3
- from typing import Callable, Dict, Generic, Optional, Type, TypeVar
3
+ from typing import Callable, Dict, Generic, List, Optional, Type, TypeVar
4
+
5
+ from google.protobuf.duration_pb2 import Duration
6
+ from google.protobuf.timestamp_pb2 import Timestamp
7
+
8
+ from databricks.sdk.common.types.fieldmask import FieldMask
4
9
 
5
10
 
6
11
  def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:
@@ -46,6 +51,93 @@ def _escape_multi_segment_path_parameter(param: str) -> str:
46
51
  return urllib.parse.quote(param)
47
52
 
48
53
 
54
def _timestamp(d: Dict[str, any], field: str) -> Optional[Timestamp]:
    """Parse the string stored under ``d[field]`` into a Timestamp.

    Returns None when the key is absent or its value is empty.
    """
    raw = d.get(field)
    if not raw:
        return None
    parsed = Timestamp()
    parsed.FromJsonString(raw)
    return parsed
65
+
66
+
67
def _repeated_timestamp(d: Dict[str, any], field: str) -> Optional[List[Timestamp]]:
    """Parse each string in the list stored under ``d[field]`` into a Timestamp.

    Returns None when the key is absent or its value is empty.
    """
    raw_values = d.get(field)
    if not raw_values:
        return None
    parsed_list = []
    for raw in raw_values:
        parsed = Timestamp()
        parsed.FromJsonString(raw)
        parsed_list.append(parsed)
    return parsed_list
81
+
82
+
83
def _duration(d: Dict[str, any], field: str) -> Optional[Duration]:
    """Parse the string stored under ``d[field]`` into a Duration.

    Returns None when the key is absent or its value is empty.
    """
    raw = d.get(field)
    if not raw:
        return None
    parsed = Duration()
    parsed.FromJsonString(raw)
    return parsed
94
+
95
+
96
def _repeated_duration(d: Dict[str, any], field: str) -> Optional[List[Duration]]:
    """Parse each string in the list stored under ``d[field]`` into a Duration.

    Returns None when the key is absent or its value is empty.
    """
    raw_values = d.get(field)
    if not raw_values:
        return None
    parsed_list = []
    for raw in raw_values:
        parsed = Duration()
        parsed.FromJsonString(raw)
        parsed_list.append(parsed)
    return parsed_list
110
+
111
+
112
def _fieldmask(d: Dict[str, any], field: str) -> Optional[FieldMask]:
    """Parse the string stored under ``d[field]`` into a FieldMask.

    Returns None when the key is absent or its value is empty.
    """
    raw = d.get(field)
    if not raw:
        return None
    mask = FieldMask()
    mask.FromJsonString(raw)
    return mask
123
+
124
+
125
def _repeated_fieldmask(d: Dict[str, any], field: str) -> Optional[List[FieldMask]]:
    """Parse each string in the list stored under ``d[field]`` into a FieldMask.

    Returns None when the key is absent or its value is empty.
    """
    raw_values = d.get(field)
    if not raw_values:
        return None
    parsed_list = []
    for raw in raw_values:
        mask = FieldMask()
        mask.FromJsonString(raw)
        parsed_list.append(mask)
    return parsed_list
139
+
140
+
49
141
  ReturnType = TypeVar("ReturnType")
50
142
 
51
143
 
@@ -7,7 +7,7 @@ from dataclasses import dataclass
7
7
  from enum import Enum
8
8
  from typing import Any, Dict, List, Optional
9
9
 
10
- from ._internal import _enum, _from_dict, _repeated_dict
10
+ from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict
11
11
 
12
12
  _LOG = logging.getLogger("databricks.sdk")
13
13