databricks-sdk 0.36.0__tar.gz → 0.38.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. More details are provided below.

Files changed (97) hide show
  1. {databricks_sdk-0.36.0/databricks_sdk.egg-info → databricks_sdk-0.38.0}/PKG-INFO +1 -1
  2. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/__init__.py +22 -29
  3. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/_base_client.py +61 -14
  4. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/config.py +10 -9
  5. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/credentials_provider.py +6 -5
  6. databricks_sdk-0.38.0/databricks/sdk/mixins/jobs.py +49 -0
  7. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/apps.py +50 -186
  8. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/billing.py +1 -1
  9. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/catalog.py +952 -45
  10. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/compute.py +23 -20
  11. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/dashboards.py +31 -281
  12. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/iam.py +6 -4
  13. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/jobs.py +93 -76
  14. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/marketplace.py +1 -0
  15. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/ml.py +4 -3
  16. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/oauth2.py +29 -8
  17. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/pipelines.py +94 -20
  18. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/provisioning.py +68 -0
  19. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/serving.py +2 -2
  20. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/settings.py +322 -2
  21. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/sharing.py +2 -618
  22. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/sql.py +7 -7
  23. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/workspace.py +7 -4
  24. databricks_sdk-0.38.0/databricks/sdk/version.py +1 -0
  25. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0/databricks_sdk.egg-info}/PKG-INFO +1 -1
  26. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks_sdk.egg-info/SOURCES.txt +2 -0
  27. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_base_client.py +169 -0
  28. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_core.py +14 -8
  29. databricks_sdk-0.38.0/tests/test_jobs_mixin.py +123 -0
  30. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_model_serving_auth.py +16 -7
  31. databricks_sdk-0.36.0/databricks/sdk/version.py +0 -1
  32. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/LICENSE +0 -0
  33. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/NOTICE +0 -0
  34. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/README.md +0 -0
  35. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/__init__.py +0 -0
  36. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/_property.py +0 -0
  37. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/_widgets/__init__.py +0 -0
  38. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  39. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  40. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/azure.py +0 -0
  41. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/casing.py +0 -0
  42. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/clock.py +0 -0
  43. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/core.py +0 -0
  44. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/data_plane.py +0 -0
  45. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/dbutils.py +0 -0
  46. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/environments.py +0 -0
  47. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/__init__.py +0 -0
  48. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/base.py +0 -0
  49. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/customizer.py +0 -0
  50. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/deserializer.py +0 -0
  51. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/mapper.py +0 -0
  52. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/overrides.py +0 -0
  53. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/parser.py +0 -0
  54. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/platform.py +0 -0
  55. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/private_link.py +0 -0
  56. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/errors/sdk.py +0 -0
  57. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/logger/__init__.py +0 -0
  58. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  59. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/mixins/__init__.py +0 -0
  60. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/mixins/compute.py +0 -0
  61. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/mixins/files.py +0 -0
  62. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  63. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/mixins/workspace.py +0 -0
  64. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/oauth.py +0 -0
  65. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/py.typed +0 -0
  66. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/retries.py +0 -0
  67. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/runtime/__init__.py +0 -0
  68. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  69. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/__init__.py +0 -0
  70. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/_internal.py +0 -0
  71. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/files.py +0 -0
  72. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/service/vectorsearch.py +0 -0
  73. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks/sdk/useragent.py +0 -0
  74. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  75. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks_sdk.egg-info/requires.txt +0 -0
  76. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  77. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/setup.cfg +0 -0
  78. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/setup.py +0 -0
  79. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_auth.py +0 -0
  80. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_auth_manual_tests.py +0 -0
  81. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_client.py +0 -0
  82. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_compute_mixins.py +0 -0
  83. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_config.py +0 -0
  84. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_data_plane.py +0 -0
  85. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_dbfs_mixins.py +0 -0
  86. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_dbutils.py +0 -0
  87. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_environments.py +0 -0
  88. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_errors.py +0 -0
  89. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_init_file.py +0 -0
  90. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_internal.py +0 -0
  91. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_jobs.py +0 -0
  92. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_metadata_service_auth.py +0 -0
  93. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_misc.py +0 -0
  94. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_oauth.py +0 -0
  95. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_open_ai_mixin.py +0 -0
  96. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_retries.py +0 -0
  97. {databricks_sdk-0.36.0 → databricks_sdk-0.38.0}/tests/test_user_agent.py +0 -0
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: databricks-sdk
3
- Version: 0.36.0
3
+ Version: 0.38.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Home-page: https://databricks-sdk-py.readthedocs.io
6
6
  Author: Serge Smertin
@@ -6,6 +6,7 @@ from databricks.sdk import azure
6
6
  from databricks.sdk.credentials_provider import CredentialsStrategy
7
7
  from databricks.sdk.mixins.compute import ClustersExt
8
8
  from databricks.sdk.mixins.files import DbfsExt
9
+ from databricks.sdk.mixins.jobs import JobsExt
9
10
  from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
10
11
  from databricks.sdk.mixins.workspace import WorkspaceExt
11
12
  from databricks.sdk.service.apps import AppsAPI
@@ -15,7 +16,7 @@ from databricks.sdk.service.catalog import (AccountMetastoreAssignmentsAPI,
15
16
  AccountMetastoresAPI,
16
17
  AccountStorageCredentialsAPI,
17
18
  ArtifactAllowlistsAPI, CatalogsAPI,
18
- ConnectionsAPI,
19
+ ConnectionsAPI, CredentialsAPI,
19
20
  ExternalLocationsAPI, FunctionsAPI,
20
21
  GrantsAPI, MetastoresAPI,
21
22
  ModelVersionsAPI, OnlineTablesAPI,
@@ -64,26 +65,18 @@ from databricks.sdk.service.provisioning import (CredentialsAPI,
64
65
  Workspace, WorkspacesAPI)
65
66
  from databricks.sdk.service.serving import (ServingEndpointsAPI,
66
67
  ServingEndpointsDataPlaneAPI)
67
- from databricks.sdk.service.settings import (AccountIpAccessListsAPI,
68
- AccountSettingsAPI,
69
- AutomaticClusterUpdateAPI,
70
- ComplianceSecurityProfileAPI,
71
- CredentialsManagerAPI,
72
- CspEnablementAccountAPI,
73
- DefaultNamespaceAPI,
74
- DisableLegacyAccessAPI,
75
- DisableLegacyDbfsAPI,
76
- DisableLegacyFeaturesAPI,
77
- EnhancedSecurityMonitoringAPI,
78
- EsmEnablementAccountAPI,
79
- IpAccessListsAPI,
80
- NetworkConnectivityAPI,
81
- NotificationDestinationsAPI,
82
- PersonalComputeAPI,
83
- RestrictWorkspaceAdminsAPI,
84
- SettingsAPI, TokenManagementAPI,
85
- TokensAPI, WorkspaceConfAPI)
86
- from databricks.sdk.service.sharing import (CleanRoomsAPI, ProvidersAPI,
68
+ from databricks.sdk.service.settings import (
69
+ AccountIpAccessListsAPI, AccountSettingsAPI,
70
+ AibiDashboardEmbeddingAccessPolicyAPI,
71
+ AibiDashboardEmbeddingApprovedDomainsAPI, AutomaticClusterUpdateAPI,
72
+ ComplianceSecurityProfileAPI, CredentialsManagerAPI,
73
+ CspEnablementAccountAPI, DefaultNamespaceAPI, DisableLegacyAccessAPI,
74
+ DisableLegacyDbfsAPI, DisableLegacyFeaturesAPI,
75
+ EnhancedSecurityMonitoringAPI, EsmEnablementAccountAPI, IpAccessListsAPI,
76
+ NetworkConnectivityAPI, NotificationDestinationsAPI, PersonalComputeAPI,
77
+ RestrictWorkspaceAdminsAPI, SettingsAPI, TokenManagementAPI, TokensAPI,
78
+ WorkspaceConfAPI)
79
+ from databricks.sdk.service.sharing import (ProvidersAPI,
87
80
  RecipientActivationAPI,
88
81
  RecipientsAPI, SharesAPI)
89
82
  from databricks.sdk.service.sql import (AlertsAPI, AlertsLegacyAPI,
@@ -183,7 +176,6 @@ class WorkspaceClient:
183
176
  self._apps = AppsAPI(self._api_client)
184
177
  self._artifact_allowlists = ArtifactAllowlistsAPI(self._api_client)
185
178
  self._catalogs = CatalogsAPI(self._api_client)
186
- self._clean_rooms = CleanRoomsAPI(self._api_client)
187
179
  self._cluster_policies = ClusterPoliciesAPI(self._api_client)
188
180
  self._clusters = ClustersExt(self._api_client)
189
181
  self._command_execution = CommandExecutionAPI(self._api_client)
@@ -193,6 +185,7 @@ class WorkspaceClient:
193
185
  self._consumer_listings = ConsumerListingsAPI(self._api_client)
194
186
  self._consumer_personalization_requests = ConsumerPersonalizationRequestsAPI(self._api_client)
195
187
  self._consumer_providers = ConsumerProvidersAPI(self._api_client)
188
+ self._credentials = CredentialsAPI(self._api_client)
196
189
  self._credentials_manager = CredentialsManagerAPI(self._api_client)
197
190
  self._current_user = CurrentUserAPI(self._api_client)
198
191
  self._dashboard_widgets = DashboardWidgetsAPI(self._api_client)
@@ -212,7 +205,7 @@ class WorkspaceClient:
212
205
  self._instance_pools = InstancePoolsAPI(self._api_client)
213
206
  self._instance_profiles = InstanceProfilesAPI(self._api_client)
214
207
  self._ip_access_lists = IpAccessListsAPI(self._api_client)
215
- self._jobs = JobsAPI(self._api_client)
208
+ self._jobs = JobsExt(self._api_client)
216
209
  self._lakeview = LakeviewAPI(self._api_client)
217
210
  self._libraries = LibrariesAPI(self._api_client)
218
211
  self._metastores = MetastoresAPI(self._api_client)
@@ -312,11 +305,6 @@ class WorkspaceClient:
312
305
  """A catalog is the first layer of Unity Catalog’s three-level namespace."""
313
306
  return self._catalogs
314
307
 
315
- @property
316
- def clean_rooms(self) -> CleanRoomsAPI:
317
- """A clean room is a secure, privacy-protecting environment where two or more parties can share sensitive enterprise data, including customer data, for measurements, insights, activation and other use cases."""
318
- return self._clean_rooms
319
-
320
308
  @property
321
309
  def cluster_policies(self) -> ClusterPoliciesAPI:
322
310
  """You can use cluster policies to control users' ability to configure clusters based on a set of rules."""
@@ -362,6 +350,11 @@ class WorkspaceClient:
362
350
  """Providers are the entities that publish listings to the Marketplace."""
363
351
  return self._consumer_providers
364
352
 
353
+ @property
354
+ def credentials(self) -> CredentialsAPI:
355
+ """A credential represents an authentication and authorization mechanism for accessing services on your cloud tenant."""
356
+ return self._credentials
357
+
365
358
  @property
366
359
  def credentials_manager(self) -> CredentialsManagerAPI:
367
360
  """Credentials manager interacts with Identity Providers to perform token exchanges using stored credentials and refresh tokens."""
@@ -458,7 +451,7 @@ class WorkspaceClient:
458
451
  return self._ip_access_lists
459
452
 
460
453
  @property
461
- def jobs(self) -> JobsAPI:
454
+ def jobs(self) -> JobsExt:
462
455
  """The Jobs API allows you to create, edit, and delete jobs."""
463
456
  return self._jobs
464
457
 
@@ -1,3 +1,4 @@
1
+ import io
1
2
  import logging
2
3
  import urllib.parse
3
4
  from datetime import timedelta
@@ -50,7 +51,8 @@ class _BaseClient:
50
51
  http_timeout_seconds: float = None,
51
52
  extra_error_customizers: List[_ErrorCustomizer] = None,
52
53
  debug_headers: bool = False,
53
- clock: Clock = None):
54
+ clock: Clock = None,
55
+ streaming_buffer_size: int = 1024 * 1024): # 1MB
54
56
  """
55
57
  :param debug_truncate_bytes:
56
58
  :param retry_timeout_seconds:
@@ -68,6 +70,7 @@ class _BaseClient:
68
70
  :param extra_error_customizers:
69
71
  :param debug_headers: Whether to include debug headers in the request log.
70
72
  :param clock: Clock object to use for time-related operations.
73
+ :param streaming_buffer_size: The size of the buffer to use for streaming responses.
71
74
  """
72
75
 
73
76
  self._debug_truncate_bytes = debug_truncate_bytes or 96
@@ -78,6 +81,7 @@ class _BaseClient:
78
81
  self._clock = clock or RealClock()
79
82
  self._session = requests.Session()
80
83
  self._session.auth = self._authenticate
84
+ self._streaming_buffer_size = streaming_buffer_size
81
85
 
82
86
  # We don't use `max_retries` from HTTPAdapter to align with a more production-ready
83
87
  # retry strategy established in the Databricks SDK for Go. See _is_retryable and
@@ -127,6 +131,14 @@ class _BaseClient:
127
131
  flattened = dict(flatten_dict(with_fixed_bools))
128
132
  return flattened
129
133
 
134
+ @staticmethod
135
+ def _is_seekable_stream(data) -> bool:
136
+ if data is None:
137
+ return False
138
+ if not isinstance(data, io.IOBase):
139
+ return False
140
+ return data.seekable()
141
+
130
142
  def do(self,
131
143
  method: str,
132
144
  url: str,
@@ -141,24 +153,39 @@ class _BaseClient:
141
153
  if headers is None:
142
154
  headers = {}
143
155
  headers['User-Agent'] = self._user_agent_base
144
- retryable = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
145
- is_retryable=self._is_retryable,
146
- clock=self._clock)
147
- response = retryable(self._perform)(method,
148
- url,
149
- query=query,
150
- headers=headers,
151
- body=body,
152
- raw=raw,
153
- files=files,
154
- data=data,
155
- auth=auth)
156
+
157
+ # Wrap strings and bytes in a seekable stream so that we can rewind them.
158
+ if isinstance(data, (str, bytes)):
159
+ data = io.BytesIO(data.encode('utf-8') if isinstance(data, str) else data)
160
+
161
+ # Only retry if the request is not a stream or if the stream is seekable and
162
+ # we can rewind it. This is necessary to avoid bugs where the retry doesn't
163
+ # re-read already read data from the body.
164
+ if data is not None and not self._is_seekable_stream(data):
165
+ logger.debug(f"Retry disabled for non-seekable stream: type={type(data)}")
166
+ call = self._perform
167
+ else:
168
+ call = retried(timeout=timedelta(seconds=self._retry_timeout_seconds),
169
+ is_retryable=self._is_retryable,
170
+ clock=self._clock)(self._perform)
171
+
172
+ response = call(method,
173
+ url,
174
+ query=query,
175
+ headers=headers,
176
+ body=body,
177
+ raw=raw,
178
+ files=files,
179
+ data=data,
180
+ auth=auth)
156
181
 
157
182
  resp = dict()
158
183
  for header in response_headers if response_headers else []:
159
184
  resp[header] = response.headers.get(Casing.to_header_case(header))
160
185
  if raw:
161
- resp["contents"] = _StreamingResponse(response)
186
+ streaming_response = _StreamingResponse(response)
187
+ streaming_response.set_chunk_size(self._streaming_buffer_size)
188
+ resp["contents"] = streaming_response
162
189
  return resp
163
190
  if not len(response.content):
164
191
  return resp
@@ -221,6 +248,12 @@ class _BaseClient:
221
248
  files=None,
222
249
  data=None,
223
250
  auth: Callable[[requests.PreparedRequest], requests.PreparedRequest] = None):
251
+ # Keep track of the initial position of the stream so that we can rewind it if
252
+ # we need to retry the request.
253
+ initial_data_position = 0
254
+ if self._is_seekable_stream(data):
255
+ initial_data_position = data.tell()
256
+
224
257
  response = self._session.request(method,
225
258
  url,
226
259
  params=self._fix_query_string(query),
@@ -232,9 +265,18 @@ class _BaseClient:
232
265
  stream=raw,
233
266
  timeout=self._http_timeout_seconds)
234
267
  self._record_request_log(response, raw=raw or data is not None or files is not None)
268
+
235
269
  error = self._error_parser.get_api_error(response)
236
270
  if error is not None:
271
+ # If the request body is a seekable stream, rewind it so that it is ready
272
+ # to be read again in case of a retry.
273
+ #
274
+ # TODO: This should be moved into a "before-retry" hook to avoid one
275
+ # unnecessary seek on the last failed retry before aborting.
276
+ if self._is_seekable_stream(data):
277
+ data.seek(initial_data_position)
237
278
  raise error from None
279
+
238
280
  return response
239
281
 
240
282
  def _record_request_log(self, response: requests.Response, raw: bool = False) -> None:
@@ -283,6 +325,11 @@ class _StreamingResponse(BinaryIO):
283
325
  return False
284
326
 
285
327
  def read(self, n: int = -1) -> bytes:
328
+ """
329
+ Read up to n bytes from the response stream. If n is negative, read
330
+ until the end of the stream.
331
+ """
332
+
286
333
  self._open()
287
334
  read_everything = n < 0
288
335
  remaining_bytes = n
@@ -92,15 +92,16 @@ class Config:
92
92
  max_connections_per_pool: int = ConfigAttribute()
93
93
  databricks_environment: Optional[DatabricksEnvironment] = None
94
94
 
95
- def __init__(self,
96
- *,
97
- # Deprecated. Use credentials_strategy instead.
98
- credentials_provider: Optional[CredentialsStrategy] = None,
99
- credentials_strategy: Optional[CredentialsStrategy] = None,
100
- product=None,
101
- product_version=None,
102
- clock: Optional[Clock] = None,
103
- **kwargs):
95
+ def __init__(
96
+ self,
97
+ *,
98
+ # Deprecated. Use credentials_strategy instead.
99
+ credentials_provider: Optional[CredentialsStrategy] = None,
100
+ credentials_strategy: Optional[CredentialsStrategy] = None,
101
+ product=None,
102
+ product_version=None,
103
+ clock: Optional[Clock] = None,
104
+ **kwargs):
104
105
  self._header_factory = None
105
106
  self._inner = {}
106
107
  self._user_agent_other_info = []
@@ -304,11 +304,12 @@ def github_oidc_azure(cfg: 'Config') -> Optional[CredentialsProvider]:
304
304
  # detect Azure AD Tenant ID if it's not specified directly
305
305
  token_endpoint = cfg.oidc_endpoints.token_endpoint
306
306
  cfg.azure_tenant_id = token_endpoint.replace(aad_endpoint, '').split('/')[0]
307
- inner = ClientCredentials(client_id=cfg.azure_client_id,
308
- client_secret="", # we have no (rotatable) secrets in OIDC flow
309
- token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
310
- endpoint_params=params,
311
- use_params=True)
307
+ inner = ClientCredentials(
308
+ client_id=cfg.azure_client_id,
309
+ client_secret="", # we have no (rotatable) secrets in OIDC flow
310
+ token_url=f"{aad_endpoint}{cfg.azure_tenant_id}/oauth2/token",
311
+ endpoint_params=params,
312
+ use_params=True)
312
313
 
313
314
  def refreshed_headers() -> Dict[str, str]:
314
315
  token = inner.token()
@@ -0,0 +1,49 @@
1
+ from typing import Optional
2
+
3
+ from databricks.sdk.service import jobs
4
+
5
+
6
+ class JobsExt(jobs.JobsAPI):
7
+
8
+ def get_run(self,
9
+ run_id: int,
10
+ *,
11
+ include_history: Optional[bool] = None,
12
+ include_resolved_values: Optional[bool] = None,
13
+ page_token: Optional[str] = None) -> jobs.Run:
14
+ """
15
+ This method fetches the details of a run identified by `run_id`. If the run has multiple pages of tasks or iterations,
16
+ it will paginate through all pages and aggregate the results.
17
+ :param run_id: int
18
+ The canonical identifier of the run for which to retrieve the metadata. This field is required.
19
+ :param include_history: bool (optional)
20
+ Whether to include the repair history in the response.
21
+ :param include_resolved_values: bool (optional)
22
+ Whether to include resolved parameter values in the response.
23
+ :param page_token: str (optional)
24
+ To list the next page or the previous page of job tasks, set this field to the value of the
25
+ `next_page_token` or `prev_page_token` returned in the GetJob response.
26
+ :returns: :class:`Run`
27
+ """
28
+ run = super().get_run(run_id,
29
+ include_history=include_history,
30
+ include_resolved_values=include_resolved_values,
31
+ page_token=page_token)
32
+
33
+ # When querying a Job run, a page token is returned when there are more than 100 tasks. No iterations are defined for a Job run. Therefore, the next page in the response only includes the next page of tasks.
34
+ # When querying a ForEach task run, a page token is returned when there are more than 100 iterations. Only a single task is returned, corresponding to the ForEach task itself. Therefore, the client only reads the iterations from the next page and not the tasks.
35
+ is_paginating_iterations = run.iterations is not None and len(run.iterations) > 0
36
+
37
+ while run.next_page_token is not None:
38
+ next_run = super().get_run(run_id,
39
+ include_history=include_history,
40
+ include_resolved_values=include_resolved_values,
41
+ page_token=run.next_page_token)
42
+ if is_paginating_iterations:
43
+ run.iterations.extend(next_run.iterations)
44
+ else:
45
+ run.tasks.extend(next_run.tasks)
46
+ run.next_page_token = next_run.next_page_token
47
+
48
+ run.prev_page_token = None
49
+ return run