databricks-sdk 0.67.0__py3-none-any.whl → 0.69.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.


Files changed (49)
  1. databricks/sdk/__init__.py +14 -10
  2. databricks/sdk/_base_client.py +4 -1
  3. databricks/sdk/common/lro.py +17 -0
  4. databricks/sdk/common/types/__init__.py +0 -0
  5. databricks/sdk/common/types/fieldmask.py +39 -0
  6. databricks/sdk/config.py +62 -14
  7. databricks/sdk/credentials_provider.py +61 -12
  8. databricks/sdk/dbutils.py +5 -1
  9. databricks/sdk/errors/parser.py +8 -3
  10. databricks/sdk/mixins/files.py +1156 -111
  11. databricks/sdk/mixins/files_utils.py +293 -0
  12. databricks/sdk/oidc_token_supplier.py +80 -0
  13. databricks/sdk/retries.py +102 -2
  14. databricks/sdk/service/_internal.py +93 -1
  15. databricks/sdk/service/agentbricks.py +1 -1
  16. databricks/sdk/service/apps.py +264 -1
  17. databricks/sdk/service/billing.py +2 -3
  18. databricks/sdk/service/catalog.py +1026 -540
  19. databricks/sdk/service/cleanrooms.py +3 -3
  20. databricks/sdk/service/compute.py +21 -33
  21. databricks/sdk/service/dashboards.py +7 -3
  22. databricks/sdk/service/database.py +3 -2
  23. databricks/sdk/service/dataquality.py +1145 -0
  24. databricks/sdk/service/files.py +2 -1
  25. databricks/sdk/service/iam.py +2 -1
  26. databricks/sdk/service/iamv2.py +1 -1
  27. databricks/sdk/service/jobs.py +6 -9
  28. databricks/sdk/service/marketplace.py +3 -1
  29. databricks/sdk/service/ml.py +3 -1
  30. databricks/sdk/service/oauth2.py +1 -1
  31. databricks/sdk/service/pipelines.py +5 -6
  32. databricks/sdk/service/provisioning.py +544 -655
  33. databricks/sdk/service/qualitymonitorv2.py +1 -1
  34. databricks/sdk/service/serving.py +3 -1
  35. databricks/sdk/service/settings.py +5 -2
  36. databricks/sdk/service/settingsv2.py +1 -1
  37. databricks/sdk/service/sharing.py +12 -3
  38. databricks/sdk/service/sql.py +305 -70
  39. databricks/sdk/service/tags.py +1 -1
  40. databricks/sdk/service/vectorsearch.py +3 -1
  41. databricks/sdk/service/workspace.py +70 -17
  42. databricks/sdk/version.py +1 -1
  43. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/METADATA +4 -2
  44. databricks_sdk-0.69.0.dist-info/RECORD +84 -0
  45. databricks_sdk-0.67.0.dist-info/RECORD +0 -79
  46. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/WHEEL +0 -0
  47. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/licenses/LICENSE +0 -0
  48. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/licenses/NOTICE +0 -0
  49. {databricks_sdk-0.67.0.dist-info → databricks_sdk-0.69.0.dist-info}/top_level.txt +0 -0
databricks/sdk/mixins/files_utils.py ADDED
@@ -0,0 +1,293 @@
+from __future__ import annotations
+
+import os
+import threading
+from dataclasses import dataclass
+from typing import Any, BinaryIO, Callable, Iterable, Optional
+
+
+@dataclass
+class CreateDownloadUrlResponse:
+    """Response from the download URL API call."""
+
+    url: str
+    """The presigned URL to download the file."""
+    headers: dict[str, str]
+    """Headers to use when making the download request."""
+
+    @classmethod
+    def from_dict(cls, data: dict[str, Any]) -> CreateDownloadUrlResponse:
+        """Create an instance from a dictionary."""
+        if "url" not in data:
+            raise ValueError("Missing 'url' in response data")
+        headers = data["headers"] if "headers" in data else {}
+        parsed_headers = {x["name"]: x["value"] for x in headers}
+        return cls(url=data["url"], headers=parsed_headers)
+
+
+class _ConcatenatedInputStream(BinaryIO):
+    """This class joins two input streams into one."""
+
+    def __init__(self, head_stream: BinaryIO, tail_stream: BinaryIO):
+        if not head_stream.readable():
+            raise ValueError("head_stream is not readable")
+        if not tail_stream.readable():
+            raise ValueError("tail_stream is not readable")
+
+        self._head_stream = head_stream
+        self._tail_stream = tail_stream
+        self._head_size = None
+        self._tail_size = None
+
+    def close(self) -> None:
+        try:
+            self._head_stream.close()
+        finally:
+            self._tail_stream.close()
+
+    def fileno(self) -> int:
+        raise AttributeError()
+
+    def flush(self) -> None:
+        raise NotImplementedError("Stream is not writable")
+
+    def isatty(self) -> bool:
+        raise NotImplementedError()
+
+    def read(self, __n: int = -1) -> bytes:
+        head = self._head_stream.read(__n)
+        remaining_bytes = __n - len(head) if __n >= 0 else __n
+        tail = self._tail_stream.read(remaining_bytes)
+        return head + tail
+
+    def readable(self) -> bool:
+        return True
+
+    def readline(self, __limit: int = -1) -> bytes:
+        # Read and return one line from the stream.
+        # If __limit is specified, at most __limit bytes will be read.
+        # The line terminator is always b'\n' for binary files.
+        head = self._head_stream.readline(__limit)
+        if len(head) > 0 and head[-1:] == b"\n":
+            # end of line happened before (or at) the limit
+            return head
+
+        # if __limit >= 0, len(head) can't exceed limit
+        remaining_bytes = __limit - len(head) if __limit >= 0 else __limit
+        tail = self._tail_stream.readline(remaining_bytes)
+        return head + tail
+
+    def readlines(self, __hint: int = -1) -> list[bytes]:
+        # Read and return a list of lines from the stream.
+        # Hint can be specified to control the number of lines read: no more lines will be read
+        # if the total size (in bytes/characters) of all lines so far exceeds hint.
+
+        # In fact, BytesIO(bytes) will not read the next line if the total size of all lines
+        # *equals or* exceeds hint.
+
+        head_result = self._head_stream.readlines(__hint)
+        head_total_bytes = sum(len(line) for line in head_result)
+
+        if 0 < __hint <= head_total_bytes and head_total_bytes > 0:
+            # We reached (or passed) the hint by reading from head_stream, or exhausted head_stream.
+
+            if head_result[-1][-1:] == b"\n":
+                # If we reached/passed the hint and also stopped at the line break, return.
+                return head_result
+
+            # Reading from head_stream could have stopped only because the stream was exhausted
+            if len(self._head_stream.read(1)) > 0:
+                raise ValueError(
+                    f"Stream reading finished prematurely after reading {head_total_bytes} bytes, reaching or exceeding hint {__hint}"
+                )
+
+            # We need to finish reading the current line, now from tail_stream.
+
+            tail_result = self._tail_stream.readlines(1)  # We will only read the first line from tail_stream.
+            assert len(tail_result) <= 1
+            if len(tail_result) > 0:
+                # We will then append the tail as the last line of the result.
+                return head_result[:-1] + [head_result[-1] + tail_result[0]]
+            else:
+                return head_result
+
+        # We did not reach the hint by reading head_stream but exhausted it, continue reading from tail_stream
+        # with an adjusted hint
+        if __hint >= 0:
+            remaining_bytes = __hint - head_total_bytes
+        else:
+            remaining_bytes = __hint
+
+        tail_result = self._tail_stream.readlines(remaining_bytes)
+
+        if head_total_bytes > 0 and head_result[-1][-1:] != b"\n" and len(tail_result) > 0:
+            # If head stream does not end with the line break, we need to concatenate
+            # the last line of the head result and the first line of tail result
+            return head_result[:-1] + [head_result[-1] + tail_result[0]] + tail_result[1:]
+        else:
+            # Otherwise, just append two lists of lines.
+            return head_result + tail_result
+
+    def _get_stream_size(self, stream: BinaryIO) -> int:
+        prev_offset = stream.tell()
+        try:
+            stream.seek(0, os.SEEK_END)
+            return stream.tell()
+        finally:
+            stream.seek(prev_offset, os.SEEK_SET)
+
+    def _get_head_size(self) -> int:
+        if self._head_size is None:
+            self._head_size = self._get_stream_size(self._head_stream)
+        return self._head_size
+
+    def _get_tail_size(self) -> int:
+        if self._tail_size is None:
+            self._tail_size = self._get_stream_size(self._tail_stream)
+        return self._tail_size
+
+    def seek(self, __offset: int, __whence: int = os.SEEK_SET) -> int:
+        if not self.seekable():
+            raise NotImplementedError("Stream is not seekable")
+
+        if __whence == os.SEEK_SET:
+            if __offset < 0:
+                # Follow native buffer behavior
+                raise ValueError(f"Negative seek value: {__offset}")
+
+            head_size = self._get_head_size()
+
+            if __offset <= head_size:
+                self._head_stream.seek(__offset, os.SEEK_SET)
+                self._tail_stream.seek(0, os.SEEK_SET)
+            else:
+                self._head_stream.seek(0, os.SEEK_END)  # move head stream to the end
+                self._tail_stream.seek(__offset - head_size, os.SEEK_SET)
+
+        elif __whence == os.SEEK_CUR:
+            current_offset = self.tell()
+            new_offset = current_offset + __offset
+            if new_offset < 0:
+                # gracefully don't seek before start
+                new_offset = 0
+            self.seek(new_offset, os.SEEK_SET)
+
+        elif __whence == os.SEEK_END:
+            if __offset > 0:
+                # Python allows to seek beyond the end of stream.
+
+                # Move head to EOF and tail to (EOF + offset), so subsequent tell()
+                # returns len(head) + len(tail) + offset, same as for native buffer
+                self._head_stream.seek(0, os.SEEK_END)
+                self._tail_stream.seek(__offset, os.SEEK_END)
+            else:
+                self._tail_stream.seek(__offset, os.SEEK_END)
+                tail_pos = self._tail_stream.tell()
+                if tail_pos > 0:
+                    # target position lies within the tail, move head to EOF
+                    self._head_stream.seek(0, os.SEEK_END)
+                else:
+                    tail_size = self._get_tail_size()
+                    self._head_stream.seek(__offset + tail_size, os.SEEK_END)
+        else:
+            raise ValueError(__whence)
+        return self.tell()
+
+    def seekable(self) -> bool:
+        return self._head_stream.seekable() and self._tail_stream.seekable()
+
+    def __getattribute__(self, name: str) -> Any:
+        if name == "fileno":
+            raise AttributeError()
+        elif name in ["tell", "seek"] and not self.seekable():
+            raise AttributeError()
+
+        return super().__getattribute__(name)
+
+    def tell(self) -> int:
+        if not self.seekable():
+            raise NotImplementedError()
+
+        # Assuming that tail stream stays at 0 until head stream is exhausted
+        return self._head_stream.tell() + self._tail_stream.tell()
+
+    def truncate(self, __size: Optional[int] = None) -> int:
+        raise NotImplementedError("Stream is not writable")
+
+    def writable(self) -> bool:
+        return False
+
+    def write(self, __s: bytes) -> int:
+        raise NotImplementedError("Stream is not writable")
+
+    def writelines(self, __lines: Iterable[bytes]) -> None:
+        raise NotImplementedError("Stream is not writable")
+
+    def __next__(self) -> bytes:
+        # IOBase [...] supports the iterator protocol, meaning that an IOBase object can be
+        # iterated over yielding the lines in a stream. [...] See readline().
+        result = self.readline()
+        if len(result) == 0:
+            raise StopIteration
+        return result
+
+    def __iter__(self) -> "BinaryIO":
+        return self
+
+    def __enter__(self) -> "BinaryIO":
+        self._head_stream.__enter__()
+        self._tail_stream.__enter__()
+        return self
+
+    def __exit__(self, __type, __value, __traceback) -> None:
+        self._head_stream.__exit__(__type, __value, __traceback)
+        self._tail_stream.__exit__(__type, __value, __traceback)
+
+    def __str__(self) -> str:
+        return f"Concat: [{self._head_stream}, {self._tail_stream}]"
+
+
+class _PresignedUrlDistributor:
+    """
+    Distributes and manages presigned URLs for downloading files.
+
+    This class ensures thread-safe access to a presigned URL, allowing retrieval and invalidation.
+    When the URL is invalidated, a new one will be fetched using the provided function.
+    """
+
+    def __init__(self, get_new_url_func: Callable[[], CreateDownloadUrlResponse]):
+        """
+        Initialize the distributor.
+
+        Args:
+            get_new_url_func: A callable that returns a new presigned URL response.
+        """
+        self._get_new_url_func = get_new_url_func
+        self._current_url = None
+        self.current_version = 0
+        self.lock = threading.RLock()
+
+    def get_url(self) -> tuple[CreateDownloadUrlResponse, int]:
+        """
+        Get the current presigned URL and its version.
+
+        Returns:
+            A tuple containing the current presigned URL response and its version.
+        """
+        with self.lock:
+            if self._current_url is None:
+                self._current_url = self._get_new_url_func()
+            return self._current_url, self.current_version
+
+    def invalidate_url(self, version: int) -> None:
+        """
+        Invalidate the current presigned URL if the version matches. If the version does not match,
+        the URL remains unchanged. This ensures that only the most recent version can invalidate the URL.
+
+        Args:
+            version: The version to check before invalidating the URL.
+        """
+        with self.lock:
+            if version == self.current_version:
+                self._current_url = None
+                self.current_version += 1
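A short usage sketch of the versioned-invalidation pattern, with a hypothetical fetcher standing in for the real download-URL API call; the version check means a reader holding a stale version cannot force a second refresh:

counter = {"n": 0}

def fetch_url() -> CreateDownloadUrlResponse:
    # Hypothetical fetcher; the SDK would call the download-URL API here.
    counter["n"] += 1
    return CreateDownloadUrlResponse(url=f"https://example.com/presigned/{counter['n']}", headers={})

distributor = _PresignedUrlDistributor(fetch_url)
url1, v1 = distributor.get_url()
url2, v2 = distributor.get_url()  # cached: same response and version, no second fetch
assert url1 is url2 and v1 == v2

distributor.invalidate_url(v1)  # first invalidation wins and bumps the version
distributor.invalidate_url(v1)  # stale version: no-op, avoids a double refresh
url3, v3 = distributor.get_url()  # next call fetches a fresh URL
assert v3 == v1 + 1 and url3.url != url1.url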
databricks/sdk/oidc_token_supplier.py CHANGED
@@ -1,9 +1,13 @@
+import logging
 import os
 from typing import Optional
 
 import requests
 
+logger = logging.getLogger("databricks.sdk")
 
+
+# TODO: Check the required environment variables while creating the instance rather than in the get_oidc_token method to allow early return.
 class GitHubOIDCTokenSupplier:
     """
     Supplies OIDC tokens from GitHub Actions.
@@ -26,3 +30,79 @@ class GitHubOIDCTokenSupplier:
             return None
 
         return response_json["value"]
+
+
+class AzureDevOpsOIDCTokenSupplier:
+    """
+    Supplies OIDC tokens from Azure DevOps pipelines.
+
+    Constructs the OIDC token request URL using official Azure DevOps predefined variables.
+    See: https://docs.microsoft.com/en-us/azure/devops/pipelines/build/variables
+    """
+
+    def __init__(self):
+        """Initialize and validate Azure DevOps environment variables."""
+        # Get Azure DevOps environment variables.
+        self.access_token = os.environ.get("SYSTEM_ACCESSTOKEN")
+        self.collection_uri = os.environ.get("SYSTEM_TEAMFOUNDATIONCOLLECTIONURI")
+        self.project_id = os.environ.get("SYSTEM_TEAMPROJECTID")
+        self.plan_id = os.environ.get("SYSTEM_PLANID")
+        self.job_id = os.environ.get("SYSTEM_JOBID")
+        self.hub_name = os.environ.get("SYSTEM_HOSTTYPE")
+
+        # Check for required variables with specific error messages.
+        missing_vars = []
+        if not self.access_token:
+            missing_vars.append("SYSTEM_ACCESSTOKEN")
+        if not self.collection_uri:
+            missing_vars.append("SYSTEM_TEAMFOUNDATIONCOLLECTIONURI")
+        if not self.project_id:
+            missing_vars.append("SYSTEM_TEAMPROJECTID")
+        if not self.plan_id:
+            missing_vars.append("SYSTEM_PLANID")
+        if not self.job_id:
+            missing_vars.append("SYSTEM_JOBID")
+        if not self.hub_name:
+            missing_vars.append("SYSTEM_HOSTTYPE")
+
+        if missing_vars:
+            if "SYSTEM_ACCESSTOKEN" in missing_vars:
+                error_msg = "Azure DevOps OIDC: SYSTEM_ACCESSTOKEN env var not found. If calling from Azure DevOps Pipeline, please set this env var following https://learn.microsoft.com/en-us/azure/devops/pipelines/build/variables?view=azure-devops&tabs=yaml#systemaccesstoken"
+            else:
+                error_msg = f"Azure DevOps OIDC: missing required environment variables: {', '.join(missing_vars)}"
+            raise ValueError(error_msg)
+
+    def get_oidc_token(self, audience: str) -> Optional[str]:
+        # Note: Azure DevOps OIDC tokens have a fixed audience of "api://AzureADTokenExchange".
+        # The audience parameter is ignored but kept for interface compatibility with other OIDC suppliers.
+
+        try:
+            # Construct the OIDC token request URL.
+            # Format: {collection_uri}{project_id}/_apis/distributedtask/hubs/{hubName}/plans/{planId}/jobs/{jobId}/oidctoken.
+            request_url = f"{self.collection_uri}{self.project_id}/_apis/distributedtask/hubs/{self.hub_name}/plans/{self.plan_id}/jobs/{self.job_id}/oidctoken"
+
+            # Add API version (audience is fixed to "api://AzureADTokenExchange" by Azure DevOps).
+            endpoint = f"{request_url}?api-version=7.2-preview.1"
+            headers = {
+                "Authorization": f"Bearer {self.access_token}",
+                "Content-Type": "application/json",
+                "Content-Length": "0",
+            }
+
+            # Azure DevOps OIDC endpoint requires POST request with empty body.
+            response = requests.post(endpoint, headers=headers)
+            if not response.ok:
+                logger.debug(f"Azure DevOps OIDC: token request failed with status {response.status_code}")
+                return None
+
+            # Azure DevOps returns the token in 'oidcToken' field.
+            response_json = response.json()
+            if "oidcToken" not in response_json:
+                logger.debug("Azure DevOps OIDC: response missing 'oidcToken' field")
+                return None
+
+            logger.debug("Azure DevOps OIDC: successfully obtained token")
+            return response_json["oidcToken"]
+        except Exception as e:
+            logger.debug(f"Azure DevOps OIDC: failed to get token: {e}")
+            return None
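A hedged usage sketch: inside an Azure DevOps pipeline step (with SYSTEM_ACCESSTOKEN mapped into the environment), the supplier can be constructed and queried directly; outside a pipeline, construction raises ValueError naming the missing variables.

try:
    supplier = AzureDevOpsOIDCTokenSupplier()
    # The audience argument is ignored; Azure DevOps fixes it to "api://AzureADTokenExchange".
    token = supplier.get_oidc_token("api://AzureADTokenExchange")
    if token is None:
        print("Token request failed; see debug logs for details")
except ValueError as e:
    print(f"Not running in Azure DevOps, or variables missing: {e}")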
databricks/sdk/retries.py CHANGED
@@ -1,13 +1,15 @@
 import functools
 import logging
 from datetime import timedelta
-from random import random
-from typing import Callable, Optional, Sequence, Type
+from random import random, uniform
+from typing import Callable, Optional, Sequence, Tuple, Type, TypeVar
 
 from .clock import Clock, RealClock
 
 logger = logging.getLogger(__name__)
 
+T = TypeVar("T")
+
 
 def retried(
     *,
@@ -67,3 +69,101 @@ def retried(
         return wrapper
 
     return decorator
+
+
+class RetryError(Exception):
+    """Error that can be returned from poll functions to control retry behavior."""
+
+    def __init__(self, err: Exception, halt: bool = False):
+        self.err = err
+        self.halt = halt
+        super().__init__(str(err))
+
+    @staticmethod
+    def continues(msg: str) -> "RetryError":
+        """Create a non-halting retry error with a message."""
+        return RetryError(Exception(msg), halt=False)
+
+    @staticmethod
+    def halt(err: Exception) -> "RetryError":
+        """Create a halting retry error."""
+        return RetryError(err, halt=True)
+
+
+def _backoff(attempt: int) -> float:
+    """Calculate backoff time with jitter.
+
+    Linear backoff: attempt * 1 second, capped at 10 seconds,
+    plus random jitter between 50ms and 750ms.
+    """
+    wait = min(10, attempt)
+    jitter = uniform(0.05, 0.75)
+    return wait + jitter
+
+
+def poll(
+    fn: Callable[[], Tuple[Optional[T], Optional[RetryError]]],
+    timeout: timedelta = timedelta(minutes=20),
+    clock: Optional[Clock] = None,
+) -> T:
+    """Poll a function until it succeeds or times out.
+
+    The backoff is linear with jitter.
+
+    This function is not meant to be used directly by users.
+    It is used internally by the SDK to poll for the result of an operation.
+    It can be changed in the future without any notice.
+
+    :param fn: Function that returns (result, error).
+        Return (None, RetryError.continues("msg")) to continue polling.
+        Return (None, RetryError.halt(err)) to stop with error.
+        Return (result, None) on success.
+    :param timeout: Maximum time to poll (default: 20 minutes)
+    :param clock: Clock implementation for testing (default: RealClock)
+    :returns: The result of the successful function call
+    :raises TimeoutError: If the timeout is reached
+    :raises Exception: If a halting error is encountered
+
+    Example:
+        def check_operation():
+            op = get_operation()
+            if not op.done:
+                return None, RetryError.continues("operation still in progress")
+            if op.error:
+                return None, RetryError.halt(Exception(f"operation failed: {op.error}"))
+            return op.result, None
+
+        result = poll(check_operation, timeout=timedelta(minutes=5))
+    """
+    if clock is None:
+        clock = RealClock()
+
+    deadline = clock.time() + timeout.total_seconds()
+    attempt = 0
+    last_err = None
+
+    while clock.time() < deadline:
+        attempt += 1
+
+        try:
+            result, err = fn()
+
+            if err is None:
+                return result
+
+            if err.halt:
+                raise err.err
+
+            # Continue polling.
+            last_err = err.err
+            wait = _backoff(attempt)
+            logger.debug(f"{str(err.err).rstrip('.')}. Sleeping {wait:.3f}s")
+            clock.sleep(wait)
+
+        except RetryError:
+            raise
+        except Exception as e:
+            # Unexpected error, halt immediately.
+            raise e
+
+    raise TimeoutError(f"Timed out after {timeout}") from last_err
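A minimal sketch of exercising poll deterministically, assuming a duck-typed stand-in for the Clock abstraction (the real Clock/RealClock live in databricks.sdk.clock and expose time() and sleep()):

class FakeClock:
    # Hypothetical test double: time advances only when sleep() is called,
    # so the test completes instantly instead of actually waiting.
    def __init__(self):
        self._now = 0.0

    def time(self) -> float:
        return self._now

    def sleep(self, seconds: float) -> None:
        self._now += seconds

attempts = {"n": 0}

def flaky():
    attempts["n"] += 1
    if attempts["n"] < 3:
        return None, RetryError.continues("not ready yet")
    return "done", None

assert poll(flaky, timeout=timedelta(minutes=1), clock=FakeClock()) == "done"
assert attempts["n"] == 3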
databricks/sdk/service/_internal.py CHANGED
@@ -1,6 +1,11 @@
 import datetime
 import urllib.parse
-from typing import Callable, Dict, Generic, Optional, Type, TypeVar
+from typing import Callable, Dict, Generic, List, Optional, Type, TypeVar
+
+from google.protobuf.duration_pb2 import Duration
+from google.protobuf.timestamp_pb2 import Timestamp
+
+from databricks.sdk.common.types.fieldmask import FieldMask
 
 
 def _from_dict(d: Dict[str, any], field: str, cls: Type) -> any:
@@ -46,6 +51,93 @@ def _escape_multi_segment_path_parameter(param: str) -> str:
     return urllib.parse.quote(param)
 
 
+def _timestamp(d: Dict[str, any], field: str) -> Optional[Timestamp]:
+    """
+    Helper function to convert a timestamp string to a Timestamp object.
+    It takes a dictionary and a field name, and returns a Timestamp object.
+    The field name is the key in the dictionary that contains the timestamp string.
+    """
+    if field not in d or not d[field]:
+        return None
+    ts = Timestamp()
+    ts.FromJsonString(d[field])
+    return ts
+
+
+def _repeated_timestamp(d: Dict[str, any], field: str) -> Optional[List[Timestamp]]:
+    """
+    Helper function to convert a list of timestamp strings to a list of Timestamp objects.
+    It takes a dictionary and a field name, and returns a list of Timestamp objects.
+    The field name is the key in the dictionary that contains the list of timestamp strings.
+    """
+    if field not in d or not d[field]:
+        return None
+    result = []
+    for v in d[field]:
+        ts = Timestamp()
+        ts.FromJsonString(v)
+        result.append(ts)
+    return result
+
+
+def _duration(d: Dict[str, any], field: str) -> Optional[Duration]:
+    """
+    Helper function to convert a duration string to a Duration object.
+    It takes a dictionary and a field name, and returns a Duration object.
+    The field name is the key in the dictionary that contains the duration string.
+    """
+    if field not in d or not d[field]:
+        return None
+    dur = Duration()
+    dur.FromJsonString(d[field])
+    return dur
+
+
+def _repeated_duration(d: Dict[str, any], field: str) -> Optional[List[Duration]]:
+    """
+    Helper function to convert a list of duration strings to a list of Duration objects.
+    It takes a dictionary and a field name, and returns a list of Duration objects.
+    The field name is the key in the dictionary that contains the list of duration strings.
+    """
+    if field not in d or not d[field]:
+        return None
+    result = []
+    for v in d[field]:
+        dur = Duration()
+        dur.FromJsonString(v)
+        result.append(dur)
+    return result
+
+
+def _fieldmask(d: Dict[str, any], field: str) -> Optional[FieldMask]:
+    """
+    Helper function to convert a fieldmask string to a FieldMask object.
+    It takes a dictionary and a field name, and returns a FieldMask object.
+    The field name is the key in the dictionary that contains the fieldmask string.
+    """
+    if field not in d or not d[field]:
+        return None
+    fm = FieldMask()
+    fm.FromJsonString(d[field])
+    return fm
+
+
+def _repeated_fieldmask(d: Dict[str, any], field: str) -> Optional[List[FieldMask]]:
+    """
+    Helper function to convert a list of fieldmask strings to a list of FieldMask objects.
+    It takes a dictionary and a field name, and returns a list of FieldMask objects.
+    The field name is the key in the dictionary that contains the list of fieldmask strings.
+    """
+    if field not in d or not d[field]:
+        return None
+    result = []
+    for v in d[field]:
+        fm = FieldMask()
+        fm.FromJsonString(v)
+        result.append(fm)
+    return result
+
+
 ReturnType = TypeVar("ReturnType")
 
 
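A quick illustration of what these helpers parse, assuming the proto JSON encodings (RFC 3339 timestamps, "s"-suffixed durations, comma-separated field masks), hypothetical field names, and that the SDK's FieldMask shim mirrors protobuf's FieldMask (FromJsonString plus a paths list):

ts = _timestamp({"create_time": "2024-01-15T10:30:00Z"}, "create_time")
print(ts.ToDatetime())  # 2024-01-15 10:30:00

dur = _duration({"timeout": "3.5s"}, "timeout")
print(dur.ToTimedelta())  # 0:00:03.500000

fm = _fieldmask({"update_mask": "display_name,comment"}, "update_mask")
print(fm.paths)  # ['display_name', 'comment'] (assumed paths attribute)

print(_timestamp({}, "create_time"))  # None: absent or empty fields are skipped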
databricks/sdk/service/agentbricks.py CHANGED
@@ -7,7 +7,7 @@ from dataclasses import dataclass
 from enum import Enum
 from typing import Any, Dict, List, Optional
 
-from ._internal import _enum, _from_dict, _repeated_dict
+from databricks.sdk.service._internal import _enum, _from_dict, _repeated_dict
 
 _LOG = logging.getLogger("databricks.sdk")
 