databricks-sdk 0.34.0__py3-none-any.whl → 0.35.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- databricks/sdk/__init__.py +3 -2
- databricks/sdk/mixins/open_ai_client.py +52 -0
- databricks/sdk/service/apps.py +1 -1
- databricks/sdk/service/catalog.py +12 -3
- databricks/sdk/service/dashboards.py +8 -1
- databricks/sdk/service/jobs.py +52 -1
- databricks/sdk/service/pipelines.py +53 -3
- databricks/sdk/service/sql.py +20 -0
- databricks/sdk/version.py +1 -1
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/METADATA +8 -1
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/NOTICE +14 -0
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/RECORD +15 -14
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/WHEEL +1 -1
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/LICENSE +0 -0
- {databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/top_level.txt +0 -0
databricks/sdk/__init__.py
CHANGED
@@ -6,6 +6,7 @@ from databricks.sdk import azure
 from databricks.sdk.credentials_provider import CredentialsStrategy
 from databricks.sdk.mixins.compute import ClustersExt
 from databricks.sdk.mixins.files import DbfsExt
+from databricks.sdk.mixins.open_ai_client import ServingEndpointsExt
 from databricks.sdk.mixins.workspace import WorkspaceExt
 from databricks.sdk.service.apps import AppsAPI
 from databricks.sdk.service.billing import (BillableUsageAPI, BudgetsAPI,
@@ -175,7 +176,7 @@ class WorkspaceClient:
         self._config = config.copy()
         self._dbutils = _make_dbutils(self._config)
         self._api_client = client.ApiClient(self._config)
-        serving_endpoints =
+        serving_endpoints = ServingEndpointsExt(self._api_client)
         self._account_access_control_proxy = AccountAccessControlProxyAPI(self._api_client)
         self._alerts = AlertsAPI(self._api_client)
         self._alerts_legacy = AlertsLegacyAPI(self._api_client)
@@ -637,7 +638,7 @@ class WorkspaceClient:
         return self._service_principals

     @property
-    def serving_endpoints(self) ->
+    def serving_endpoints(self) -> ServingEndpointsExt:
         """The Serving Endpoints API allows you to create, update, and delete model serving endpoints."""
         return self._serving_endpoints
databricks/sdk/mixins/open_ai_client.py
ADDED
@@ -0,0 +1,52 @@
+from databricks.sdk.service.serving import ServingEndpointsAPI
+
+
+class ServingEndpointsExt(ServingEndpointsAPI):
+
+    # Using the HTTP Client to pass in the databricks authorization
+    # This method will be called on every invocation, so when using with model serving will always get the refreshed token
+    def _get_authorized_http_client(self):
+        import httpx
+
+        class BearerAuth(httpx.Auth):
+
+            def __init__(self, get_headers_func):
+                self.get_headers_func = get_headers_func
+
+            def auth_flow(self, request: httpx.Request) -> httpx.Request:
+                auth_headers = self.get_headers_func()
+                request.headers["Authorization"] = auth_headers["Authorization"]
+                yield request
+
+        databricks_token_auth = BearerAuth(self._api._cfg.authenticate)
+
+        # Create an HTTP client with Bearer Token authentication
+        http_client = httpx.Client(auth=databricks_token_auth)
+        return http_client
+
+    def get_open_ai_client(self):
+        try:
+            from openai import OpenAI
+        except Exception:
+            raise ImportError(
+                "Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]`"
+            )
+
+        return OpenAI(
+            base_url=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
+
+    def get_langchain_chat_open_ai_client(self, model):
+        try:
+            from langchain_openai import ChatOpenAI
+        except Exception:
+            raise ImportError(
+                "Langchain Open AI is not installed. Please install the Databricks SDK with the following command `pip isntall databricks-sdk[openai]` and ensure you are using python>3.7"
+            )
+
+        return ChatOpenAI(
+            model=model,
+            openai_api_base=self._api._cfg.host + "/serving-endpoints",
+            api_key="no-token",  # Passing in a placeholder to pass validations, this will not be used
+            http_client=self._get_authorized_http_client())
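After this change, `WorkspaceClient.serving_endpoints` exposes the `ServingEndpointsExt` mixin above, so an OpenAI-compatible client pointed at a workspace's serving endpoints can be obtained roughly as follows. This is a minimal sketch: it assumes the optional `openai`/`httpx` (and, for the second helper, `langchain-openai`) dependencies are installed, and the endpoint name is only a placeholder.

from databricks.sdk import WorkspaceClient

w = WorkspaceClient()

# OpenAI SDK client whose requests are re-authenticated on every call via the
# httpx BearerAuth hook defined in the mixin above.
openai_client = w.serving_endpoints.get_open_ai_client()
response = openai_client.chat.completions.create(
    model="my-serving-endpoint",  # placeholder endpoint name
    messages=[{"role": "user", "content": "Hello"}],
)
print(response.choices[0].message.content)

# LangChain flavour of the same helper.
chat = w.serving_endpoints.get_langchain_chat_open_ai_client(model="my-serving-endpoint")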
databricks/sdk/service/apps.py
CHANGED
@@ -787,7 +787,7 @@ class AppsAPI:
                  callback: Optional[Callable[[App], None]] = None) -> App:
         deadline = time.time() + timeout.total_seconds()
         target_states = (ComputeState.ACTIVE, )
-        failure_states = (ComputeState.ERROR, )
+        failure_states = (ComputeState.ERROR, ComputeState.STOPPED, )
         status_message = 'polling...'
         attempt = 1
         while time.time() < deadline:
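For context, the changed line sits inside the blocking wait helper on `AppsAPI`, so treating `ComputeState.STOPPED` as terminal makes the waiter fail immediately instead of polling until the deadline when app compute stops. A simplified sketch of that polling pattern (not the SDK's exact code):

import time
from datetime import timedelta

def wait_until_active(get_state, timeout=timedelta(minutes=20)):
    deadline = time.time() + timeout.total_seconds()
    target_states = ("ACTIVE",)
    failure_states = ("ERROR", "STOPPED")  # STOPPED is now treated as a failure state
    while time.time() < deadline:
        state = get_state()
        if state in target_states:
            return state
        if state in failure_states:
            raise RuntimeError(f"app compute reached failure state {state}")
        time.sleep(10)  # the real helper backs off and reports progress via a callback
    raise TimeoutError("timed out waiting for ACTIVE")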
databricks/sdk/service/catalog.py
CHANGED
@@ -3865,11 +3865,16 @@ class OnlineTable:
     """Specification of the online table."""

     status: Optional[OnlineTableStatus] = None
-    """Online Table status"""
+    """Online Table data synchronization status"""

     table_serving_url: Optional[str] = None
     """Data serving REST API URL for this table"""

+    unity_catalog_provisioning_state: Optional[ProvisioningInfoState] = None
+    """The provisioning state of the online table entity in Unity Catalog. This is distinct from the
+    state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
+    may be in "PROVISIONING" as it runs asynchronously)."""
+
     def as_dict(self) -> dict:
         """Serializes the OnlineTable into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -3877,6 +3882,8 @@ class OnlineTable:
         if self.spec: body['spec'] = self.spec.as_dict()
         if self.status: body['status'] = self.status.as_dict()
         if self.table_serving_url is not None: body['table_serving_url'] = self.table_serving_url
+        if self.unity_catalog_provisioning_state is not None:
+            body['unity_catalog_provisioning_state'] = self.unity_catalog_provisioning_state.value
         return body

     @classmethod
@@ -3885,7 +3892,9 @@ class OnlineTable:
         return cls(name=d.get('name', None),
                    spec=_from_dict(d, 'spec', OnlineTableSpec),
                    status=_from_dict(d, 'status', OnlineTableStatus),
-                   table_serving_url=d.get('table_serving_url', None)
+                   table_serving_url=d.get('table_serving_url', None),
+                   unity_catalog_provisioning_state=_enum(d, 'unity_catalog_provisioning_state',
+                                                          ProvisioningInfoState))


 @dataclass
@@ -4244,7 +4253,7 @@ class ProvisioningInfoState(Enum):
     DELETING = 'DELETING'
     FAILED = 'FAILED'
     PROVISIONING = 'PROVISIONING'
-
+    UPDATING = 'UPDATING'


 @dataclass
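The new field round-trips through the `as_dict`/`from_dict` helpers shown above, so a response payload can be inspected roughly like this (a sketch; the table name is a placeholder):

from databricks.sdk.service.catalog import OnlineTable, ProvisioningInfoState

payload = {
    "name": "main.default.my_online_table",
    "unity_catalog_provisioning_state": "PROVISIONING",
}
table = OnlineTable.from_dict(payload)

# The UC entity state is tracked separately from the data-sync pipeline state,
# so the table may be ACTIVE while this is still PROVISIONING (or vice versa).
if table.unity_catalog_provisioning_state == ProvisioningInfoState.PROVISIONING:
    print("Online table entity is still provisioning in Unity Catalog")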
databricks/sdk/service/dashboards.py
CHANGED
@@ -607,6 +607,7 @@ class MessageErrorType(Enum):
     LOCAL_CONTEXT_EXCEEDED_EXCEPTION = 'LOCAL_CONTEXT_EXCEEDED_EXCEPTION'
     MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_DELETED_WHILE_EXECUTING_EXCEPTION'
     MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION = 'MESSAGE_UPDATED_WHILE_EXECUTING_EXCEPTION'
+    NO_QUERY_TO_VISUALIZE_EXCEPTION = 'NO_QUERY_TO_VISUALIZE_EXCEPTION'
     NO_TABLES_TO_QUERY_EXCEPTION = 'NO_TABLES_TO_QUERY_EXCEPTION'
     RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION = 'RATE_LIMIT_EXCEEDED_GENERIC_EXCEPTION'
     RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION = 'RATE_LIMIT_EXCEEDED_SPECIFIED_WAIT_EXCEPTION'
@@ -784,6 +785,9 @@ class QueryAttachment:

 @dataclass
 class Result:
+    is_truncated: Optional[bool] = None
+    """If result is truncated"""
+
     row_count: Optional[int] = None
     """Row count of the result"""

@@ -794,6 +798,7 @@ class Result:
     def as_dict(self) -> dict:
         """Serializes the Result into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.is_truncated is not None: body['is_truncated'] = self.is_truncated
         if self.row_count is not None: body['row_count'] = self.row_count
         if self.statement_id is not None: body['statement_id'] = self.statement_id
         return body
@@ -801,7 +806,9 @@ class Result:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> Result:
         """Deserializes the Result from a dictionary."""
-        return cls(
+        return cls(is_truncated=d.get('is_truncated', None),
+                   row_count=d.get('row_count', None),
+                   statement_id=d.get('statement_id', None))


 @dataclass
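A small sketch of how the new flag surfaces when deserializing a query result (field names taken from the hunk above; the statement id is a placeholder):

from databricks.sdk.service.dashboards import Result

result = Result.from_dict({
    "statement_id": "01ef-0000-0000",  # placeholder id
    "row_count": 1000,
    "is_truncated": True,
})
if result.is_truncated:
    print(f"Result truncated after {result.row_count} rows")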
databricks/sdk/service/jobs.py
CHANGED
@@ -29,6 +29,12 @@ class BaseJob:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""

+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""

@@ -41,6 +47,8 @@ class BaseJob:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.settings: body['settings'] = self.settings.as_dict()
         return body
@@ -50,6 +58,7 @@ class BaseJob:
         """Deserializes the BaseJob from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    settings=_from_dict(d, 'settings', JobSettings))

@@ -484,6 +493,11 @@ class CreateJob:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""

+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -591,6 +605,7 @@ class CreateJob:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -619,6 +634,7 @@ class CreateJob:
     def from_dict(cls, d: Dict[str, any]) -> CreateJob:
         """Deserializes the CreateJob from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
@@ -1261,6 +1277,12 @@ class Job:
     """The creator user name. This field won’t be included in the response if the user has already
     been deleted."""

+    effective_budget_policy_id: Optional[str] = None
+    """The id of the budget policy used by this job for cost attribution purposes. This may be set
+    through (in order of precedence): 1. Budget admins through the account or workspace console 2.
+    Jobs UI in the job details page and Jobs API using `budget_policy_id` 3. Inferred default based
+    on accessible budget policies of the run_as identity on job creation or modification."""
+
     job_id: Optional[int] = None
     """The canonical identifier for this job."""

@@ -1282,6 +1304,8 @@ class Job:
         body = {}
         if self.created_time is not None: body['created_time'] = self.created_time
         if self.creator_user_name is not None: body['creator_user_name'] = self.creator_user_name
+        if self.effective_budget_policy_id is not None:
+            body['effective_budget_policy_id'] = self.effective_budget_policy_id
         if self.job_id is not None: body['job_id'] = self.job_id
         if self.run_as_user_name is not None: body['run_as_user_name'] = self.run_as_user_name
         if self.settings: body['settings'] = self.settings.as_dict()
@@ -1292,6 +1316,7 @@ class Job:
         """Deserializes the Job from a dictionary."""
         return cls(created_time=d.get('created_time', None),
                    creator_user_name=d.get('creator_user_name', None),
+                   effective_budget_policy_id=d.get('effective_budget_policy_id', None),
                    job_id=d.get('job_id', None),
                    run_as_user_name=d.get('run_as_user_name', None),
                    settings=_from_dict(d, 'settings', JobSettings))
@@ -1755,6 +1780,11 @@ class JobRunAs:

 @dataclass
 class JobSettings:
+    budget_policy_id: Optional[str] = None
+    """The id of the user specified budget policy to use for this job. If not specified, a default
+    budget policy may be applied when creating or modifying the job. See
+    `effective_budget_policy_id` for the budget policy used by this workload."""
+
     continuous: Optional[Continuous] = None
     """An optional continuous property for this job. The continuous property will ensure that there is
     always one run executing. Only one of `schedule` and `continuous` can be used."""
@@ -1860,6 +1890,7 @@ class JobSettings:
     def as_dict(self) -> dict:
         """Serializes the JobSettings into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.continuous: body['continuous'] = self.continuous.as_dict()
         if self.deployment: body['deployment'] = self.deployment.as_dict()
         if self.description is not None: body['description'] = self.description
@@ -1887,7 +1918,8 @@ class JobSettings:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> JobSettings:
         """Deserializes the JobSettings from a dictionary."""
-        return cls(
+        return cls(budget_policy_id=d.get('budget_policy_id', None),
+                   continuous=_from_dict(d, 'continuous', Continuous),
                    deployment=_from_dict(d, 'deployment', JobDeployment),
                    description=d.get('description', None),
                    edit_mode=_enum(d, 'edit_mode', JobEditMode),
@@ -4507,6 +4539,10 @@ class SubmitRun:
     access_control_list: Optional[List[JobAccessControlRequest]] = None
     """List of permissions to set on the job."""

+    budget_policy_id: Optional[str] = None
+    """The user specified id of the budget policy to use for this one-time run. If not specified, the
+    run will be not be attributed to any budget policy."""
+
     email_notifications: Optional[JobEmailNotifications] = None
     """An optional set of email addresses notified when the run begins or completes."""

@@ -4567,6 +4603,7 @@ class SubmitRun:
         body = {}
         if self.access_control_list:
             body['access_control_list'] = [v.as_dict() for v in self.access_control_list]
+        if self.budget_policy_id is not None: body['budget_policy_id'] = self.budget_policy_id
         if self.email_notifications: body['email_notifications'] = self.email_notifications.as_dict()
         if self.environments: body['environments'] = [v.as_dict() for v in self.environments]
         if self.git_source: body['git_source'] = self.git_source.as_dict()
@@ -4585,6 +4622,7 @@ class SubmitRun:
     def from_dict(cls, d: Dict[str, any]) -> SubmitRun:
         """Deserializes the SubmitRun from a dictionary."""
         return cls(access_control_list=_repeated_dict(d, 'access_control_list', JobAccessControlRequest),
+                   budget_policy_id=d.get('budget_policy_id', None),
                    email_notifications=_from_dict(d, 'email_notifications', JobEmailNotifications),
                    environments=_repeated_dict(d, 'environments', JobEnvironment),
                    git_source=_from_dict(d, 'git_source', GitSource),
@@ -5619,6 +5657,7 @@ class JobsAPI:
     def create(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
                continuous: Optional[Continuous] = None,
                deployment: Optional[JobDeployment] = None,
                description: Optional[str] = None,
@@ -5647,6 +5686,10 @@ class JobsAPI:

         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The id of the user specified budget policy to use for this job. If not specified, a default budget
+          policy may be applied when creating or modifying the job. See `effective_budget_policy_id` for the
+          budget policy used by this workload.
         :param continuous: :class:`Continuous` (optional)
           An optional continuous property for this job. The continuous property will ensure that there is
           always one run executing. Only one of `schedule` and `continuous` can be used.
@@ -5731,6 +5774,7 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if continuous is not None: body['continuous'] = continuous.as_dict()
         if deployment is not None: body['deployment'] = deployment.as_dict()
         if description is not None: body['description'] = description
@@ -6398,6 +6442,7 @@ class JobsAPI:
     def submit(self,
                *,
                access_control_list: Optional[List[JobAccessControlRequest]] = None,
+               budget_policy_id: Optional[str] = None,
               email_notifications: Optional[JobEmailNotifications] = None,
               environments: Optional[List[JobEnvironment]] = None,
               git_source: Optional[GitSource] = None,
@@ -6418,6 +6463,9 @@ class JobsAPI:

         :param access_control_list: List[:class:`JobAccessControlRequest`] (optional)
           List of permissions to set on the job.
+        :param budget_policy_id: str (optional)
+          The user specified id of the budget policy to use for this one-time run. If not specified, the run
+          will be not be attributed to any budget policy.
         :param email_notifications: :class:`JobEmailNotifications` (optional)
           An optional set of email addresses notified when the run begins or completes.
         :param environments: List[:class:`JobEnvironment`] (optional)
@@ -6469,6 +6517,7 @@ class JobsAPI:
         body = {}
         if access_control_list is not None:
             body['access_control_list'] = [v.as_dict() for v in access_control_list]
+        if budget_policy_id is not None: body['budget_policy_id'] = budget_policy_id
         if email_notifications is not None: body['email_notifications'] = email_notifications.as_dict()
         if environments is not None: body['environments'] = [v.as_dict() for v in environments]
         if git_source is not None: body['git_source'] = git_source.as_dict()
@@ -6492,6 +6541,7 @@ class JobsAPI:
         self,
         *,
         access_control_list: Optional[List[JobAccessControlRequest]] = None,
+        budget_policy_id: Optional[str] = None,
         email_notifications: Optional[JobEmailNotifications] = None,
         environments: Optional[List[JobEnvironment]] = None,
         git_source: Optional[GitSource] = None,
@@ -6506,6 +6556,7 @@ class JobsAPI:
         webhook_notifications: Optional[WebhookNotifications] = None,
         timeout=timedelta(minutes=20)) -> Run:
         return self.submit(access_control_list=access_control_list,
+                           budget_policy_id=budget_policy_id,
                            email_notifications=email_notifications,
                            environments=environments,
                            git_source=git_source,
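With `budget_policy_id` threaded through `JobsAPI.create` and `JobsAPI.submit`, a budget policy can be attached when the job is created, roughly as follows. A sketch only: the job name, notebook path, cluster id, and policy id are placeholders.

from databricks.sdk import WorkspaceClient
from databricks.sdk.service import jobs

w = WorkspaceClient()

created = w.jobs.create(
    name="nightly-etl",                                   # placeholder job name
    budget_policy_id="1234-5678-budget-policy",           # placeholder policy id
    tasks=[
        jobs.Task(
            task_key="main",
            existing_cluster_id="0123-456789-abcdefgh",   # placeholder cluster id
            notebook_task=jobs.NotebookTask(notebook_path="/Workspace/etl/nightly"),
        )
    ],
)

# The policy that actually applies (explicit, UI-set, or inferred default) is
# reported back as effective_budget_policy_id on the job object.
print(w.jobs.get(job_id=created.job_id).effective_budget_policy_id)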
databricks/sdk/service/pipelines.py
CHANGED
@@ -587,6 +587,9 @@ class GetUpdateResponse:

 @dataclass
 class IngestionConfig:
+    report: Optional[ReportSpec] = None
+    """Select tables from a specific source report."""
+
     schema: Optional[SchemaSpec] = None
     """Select tables from a specific source schema."""

@@ -596,6 +599,7 @@ class IngestionConfig:
     def as_dict(self) -> dict:
         """Serializes the IngestionConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
+        if self.report: body['report'] = self.report.as_dict()
         if self.schema: body['schema'] = self.schema.as_dict()
         if self.table: body['table'] = self.table.as_dict()
         return body
@@ -603,7 +607,9 @@ class IngestionConfig:
     @classmethod
     def from_dict(cls, d: Dict[str, any]) -> IngestionConfig:
         """Deserializes the IngestionConfig from a dictionary."""
-        return cls(
+        return cls(report=_from_dict(d, 'report', ReportSpec),
+                   schema=_from_dict(d, 'schema', SchemaSpec),
+                   table=_from_dict(d, 'table', TableSpec))


 @dataclass
@@ -1624,6 +1630,44 @@ class PipelineTrigger:
         return cls(cron=_from_dict(d, 'cron', CronTrigger), manual=_from_dict(d, 'manual', ManualTrigger))


+@dataclass
+class ReportSpec:
+    destination_catalog: Optional[str] = None
+    """Required. Destination catalog to store table."""
+
+    destination_schema: Optional[str] = None
+    """Required. Destination schema to store table."""
+
+    destination_table: Optional[str] = None
+    """Required. Destination table name. The pipeline fails if a table with that name already exists."""
+
+    source_url: Optional[str] = None
+    """Required. Report URL in the source system."""
+
+    table_configuration: Optional[TableSpecificConfig] = None
+    """Configuration settings to control the ingestion of tables. These settings override the
+    table_configuration defined in the IngestionPipelineDefinition object."""
+
+    def as_dict(self) -> dict:
+        """Serializes the ReportSpec into a dictionary suitable for use as a JSON request body."""
+        body = {}
+        if self.destination_catalog is not None: body['destination_catalog'] = self.destination_catalog
+        if self.destination_schema is not None: body['destination_schema'] = self.destination_schema
+        if self.destination_table is not None: body['destination_table'] = self.destination_table
+        if self.source_url is not None: body['source_url'] = self.source_url
+        if self.table_configuration: body['table_configuration'] = self.table_configuration.as_dict()
+        return body
+
+    @classmethod
+    def from_dict(cls, d: Dict[str, any]) -> ReportSpec:
+        """Deserializes the ReportSpec from a dictionary."""
+        return cls(destination_catalog=d.get('destination_catalog', None),
+                   destination_schema=d.get('destination_schema', None),
+                   destination_table=d.get('destination_table', None),
+                   source_url=d.get('source_url', None),
+                   table_configuration=_from_dict(d, 'table_configuration', TableSpecificConfig))
+
+
 @dataclass
 class SchemaSpec:
     destination_catalog: Optional[str] = None
@@ -1841,7 +1885,7 @@ class TableSpec:
     """Required. Destination schema to store table."""

     destination_table: Optional[str] = None
-    """Optional. Destination table name. The pipeline fails
+    """Optional. Destination table name. The pipeline fails if a table with that name already exists.
     If not set, the source table name is used."""

     source_catalog: Optional[str] = None
@@ -1893,6 +1937,10 @@ class TableSpecificConfig:
     scd_type: Optional[TableSpecificConfigScdType] = None
     """The SCD type to use to ingest the table."""

+    sequence_by: Optional[List[str]] = None
+    """The column names specifying the logical order of events in the source data. Delta Live Tables
+    uses this sequencing to handle change events that arrive out of order."""
+
     def as_dict(self) -> dict:
         """Serializes the TableSpecificConfig into a dictionary suitable for use as a JSON request body."""
         body = {}
@@ -1900,6 +1948,7 @@ class TableSpecificConfig:
         if self.salesforce_include_formula_fields is not None:
             body['salesforce_include_formula_fields'] = self.salesforce_include_formula_fields
         if self.scd_type is not None: body['scd_type'] = self.scd_type.value
+        if self.sequence_by: body['sequence_by'] = [v for v in self.sequence_by]
         return body

     @classmethod
@@ -1907,7 +1956,8 @@ class TableSpecificConfig:
         """Deserializes the TableSpecificConfig from a dictionary."""
         return cls(primary_keys=d.get('primary_keys', None),
                    salesforce_include_formula_fields=d.get('salesforce_include_formula_fields', None),
-                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType)
+                   scd_type=_enum(d, 'scd_type', TableSpecificConfigScdType),
+                   sequence_by=d.get('sequence_by', None))


 class TableSpecificConfigScdType(Enum):
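A sketch of how the new ingestion objects compose (all values are placeholders, and the `SCD_TYPE_2` member name is assumed from the existing `TableSpecificConfigScdType` enum rather than shown in this diff):

from databricks.sdk.service.pipelines import (IngestionConfig, ReportSpec,
                                              TableSpecificConfig,
                                              TableSpecificConfigScdType)

config = IngestionConfig(report=ReportSpec(
    source_url="https://example.com/reports/sales",   # placeholder report URL
    destination_catalog="main",
    destination_schema="ingest",
    destination_table="sales_report",
    table_configuration=TableSpecificConfig(
        scd_type=TableSpecificConfigScdType.SCD_TYPE_2,  # assumed member name
        sequence_by=["event_ts"],  # column(s) ordering out-of-order change events
    ),
))
print(config.as_dict())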
databricks/sdk/service/sql.py
CHANGED
@@ -72,6 +72,9 @@ class Alert:
     lifecycle_state: Optional[LifecycleState] = None
     """The workspace state of the alert. Used for tracking trashed status."""

+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""

@@ -105,6 +108,7 @@ class Alert:
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
         if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.query_id is not None: body['query_id'] = self.query_id
@@ -124,6 +128,7 @@ class Alert:
                    display_name=d.get('display_name', None),
                    id=d.get('id', None),
                    lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    parent_path=d.get('parent_path', None),
                    query_id=d.get('query_id', None),
@@ -652,6 +657,9 @@ class CreateAlertRequestAlert:
     display_name: Optional[str] = None
     """The display name of the alert."""

+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     parent_path: Optional[str] = None
     """The workspace path of the folder containing the alert."""

@@ -669,6 +677,7 @@ class CreateAlertRequestAlert:
         if self.custom_body is not None: body['custom_body'] = self.custom_body
         if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
         if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.parent_path is not None: body['parent_path'] = self.parent_path
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -681,6 +690,7 @@ class CreateAlertRequestAlert:
                    custom_body=d.get('custom_body', None),
                    custom_subject=d.get('custom_subject', None),
                    display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    parent_path=d.get('parent_path', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None))
@@ -2696,6 +2706,9 @@ class ListAlertsResponseAlert:
     lifecycle_state: Optional[LifecycleState] = None
     """The workspace state of the alert. Used for tracking trashed status."""

+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""

@@ -2726,6 +2739,7 @@ class ListAlertsResponseAlert:
         if self.display_name is not None: body['display_name'] = self.display_name
         if self.id is not None: body['id'] = self.id
         if self.lifecycle_state is not None: body['lifecycle_state'] = self.lifecycle_state.value
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -2744,6 +2758,7 @@ class ListAlertsResponseAlert:
                    display_name=d.get('display_name', None),
                    id=d.get('id', None),
                    lifecycle_state=_enum(d, 'lifecycle_state', LifecycleState),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None),
@@ -4561,6 +4576,9 @@ class UpdateAlertRequestAlert:
     display_name: Optional[str] = None
     """The display name of the alert."""

+    notify_on_ok: Optional[bool] = None
+    """Whether to notify alert subscribers when alert returns back to normal."""
+
     owner_user_name: Optional[str] = None
     """The owner's username. This field is set to "Unavailable" if the user has been deleted."""

@@ -4578,6 +4596,7 @@ class UpdateAlertRequestAlert:
         if self.custom_body is not None: body['custom_body'] = self.custom_body
         if self.custom_subject is not None: body['custom_subject'] = self.custom_subject
         if self.display_name is not None: body['display_name'] = self.display_name
+        if self.notify_on_ok is not None: body['notify_on_ok'] = self.notify_on_ok
         if self.owner_user_name is not None: body['owner_user_name'] = self.owner_user_name
         if self.query_id is not None: body['query_id'] = self.query_id
         if self.seconds_to_retrigger is not None: body['seconds_to_retrigger'] = self.seconds_to_retrigger
@@ -4590,6 +4609,7 @@ class UpdateAlertRequestAlert:
                    custom_body=d.get('custom_body', None),
                    custom_subject=d.get('custom_subject', None),
                    display_name=d.get('display_name', None),
+                   notify_on_ok=d.get('notify_on_ok', None),
                    owner_user_name=d.get('owner_user_name', None),
                    query_id=d.get('query_id', None),
                    seconds_to_retrigger=d.get('seconds_to_retrigger', None))
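The flag is plumbed through all four alert models (`Alert`, `CreateAlertRequestAlert`, `ListAlertsResponseAlert`, `UpdateAlertRequestAlert`). A minimal sketch using only fields shown in this diff (the display name and query id are placeholders):

from databricks.sdk.service.sql import CreateAlertRequestAlert

alert = CreateAlertRequestAlert(
    display_name="Revenue dropped",  # placeholder
    query_id="abc123",               # placeholder query id
    notify_on_ok=True,               # also notify subscribers when the alert recovers
)
print(alert.as_dict())  # only the fields that were set are serialized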
databricks/sdk/version.py
CHANGED
@@ -1 +1 @@
-__version__ = '0.34.0'
+__version__ = '0.35.0'
{databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/METADATA
CHANGED
@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: databricks-sdk
-Version: 0.34.0
+Version: 0.35.0
 Summary: Databricks SDK for Python (Beta)
 Home-page: https://databricks-sdk-py.readthedocs.io
 Author: Serge Smertin
@@ -40,9 +40,16 @@ Requires-Dist: requests-mock ; extra == 'dev'
 Requires-Dist: pyfakefs ; extra == 'dev'
 Requires-Dist: databricks-connect ; extra == 'dev'
 Requires-Dist: pytest-rerunfailures ; extra == 'dev'
+Requires-Dist: openai ; extra == 'dev'
+Requires-Dist: httpx ; extra == 'dev'
+Requires-Dist: langchain-openai ; (python_version > "3.7") and extra == 'dev'
 Provides-Extra: notebook
 Requires-Dist: ipython (<9,>=8) ; extra == 'notebook'
 Requires-Dist: ipywidgets (<9,>=8) ; extra == 'notebook'
+Provides-Extra: openai
+Requires-Dist: openai ; extra == 'openai'
+Requires-Dist: httpx ; extra == 'openai'
+Requires-Dist: langchain-openai ; (python_version > "3.7") and extra == 'openai'

 # Databricks SDK for Python (Beta)

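The new `openai` extra bundles the optional dependencies used by the `ServingEndpointsExt` helpers added in this release (`openai`, `httpx`, and `langchain-openai` on Python > 3.7); it can be pulled in with, for example, `pip install 'databricks-sdk[openai]==0.35.0'`.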
{databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/NOTICE
CHANGED
@@ -12,8 +12,22 @@ googleapis/google-auth-library-python - https://github.com/googleapis/google-auth-library-python
 Copyright google-auth-library-python authors
 License - https://github.com/googleapis/google-auth-library-python/blob/main/LICENSE

+openai/openai-python - https://github.com/openai/openai-python
+Copyright 2024 OpenAI
+License - https://github.com/openai/openai-python/blob/main/LICENSE
+
 This software contains code from the following open source projects, licensed under the BSD (3-clause) license.

 x/oauth2 - https://cs.opensource.google/go/x/oauth2/+/master:oauth2.go
 Copyright 2014 The Go Authors. All rights reserved.
 License - https://cs.opensource.google/go/x/oauth2/+/master:LICENSE
+
+encode/httpx - https://github.com/encode/httpx
+Copyright 2019, Encode OSS Ltd
+License - https://github.com/encode/httpx/blob/master/LICENSE.md
+
+This software contains code from the following open source projects, licensed under the MIT license:
+
+langchain-ai/langchain - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai
+Copyright 2023 LangChain, Inc.
+License - https://github.com/langchain-ai/langchain/blob/master/libs/partners/openai/LICENSE
{databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/RECORD
CHANGED
@@ -1,5 +1,5 @@
 databricks/__init__.py,sha256=CF2MJcZFwbpn9TwQER8qnCDhkPooBGQNVkX4v7g6p3g,537
-databricks/sdk/__init__.py,sha256=
+databricks/sdk/__init__.py,sha256=A-5aOiuEgJPKounKicgO8gBLNc8e69cOECWcxJPaM1s,49057
 databricks/sdk/_base_client.py,sha256=5fe2Yw6hWedoUGhSf0lR8W7a80Uv081Jg-NUknnI2WM,13419
 databricks/sdk/_property.py,sha256=sGjsipeFrjMBSVPjtIb0HNCRcMIhFpVx6wq4BkC3LWs,1636
 databricks/sdk/azure.py,sha256=8P7nEdun0hbQCap9Ojo7yZse_JHxnhYsE6ApojnPz7Q,1009
@@ -15,7 +15,7 @@ databricks/sdk/oauth.py,sha256=KzcJPYLL3JL6RDvf_Q8SDAaF9xSaoYNCRD4rYInZDuo,18319
 databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
 databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
 databricks/sdk/useragent.py,sha256=I2-VnJSE6cg9QV4GXkoQSkHsEB3bDvRGgkawbBNl4G0,5540
-databricks/sdk/version.py,sha256=
+databricks/sdk/version.py,sha256=v-SUCw8aYFwwUo18pb8PUZgCvB3GcqP_vOda81M_gxI,23
 databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
 databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
 databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
@@ -34,33 +34,34 @@ databricks/sdk/logger/round_trip_logger.py,sha256=SMtHDfdqy5Noge2iZO-LpuEm92rz3A
 databricks/sdk/mixins/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/mixins/compute.py,sha256=khb00BzBckc4RLUF4-GnNMCSO5lXKt_XYMM3IhiUxlA,11237
 databricks/sdk/mixins/files.py,sha256=bLGFu1kVIQECTmuc_9jUf-n_Cth4COBMbmKqAYxkEkM,20542
+databricks/sdk/mixins/open_ai_client.py,sha256=rwHJUB6v0V4CVmtMZ4MkNACAlM3JWdJOPR6-kroORSw,2204
 databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR-eJzY,4896
 databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
 databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
 databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
 databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
-databricks/sdk/service/apps.py,sha256=
+databricks/sdk/service/apps.py,sha256=2l2m9CtmlF5A5XybsGEnAKADAT4ys1x8wvBT8jMts1E,48902
 databricks/sdk/service/billing.py,sha256=Ru6GumI-M4_X71HTMj2VSVBQ7tRMTrwKzhdwNyiC3fA,69733
-databricks/sdk/service/catalog.py,sha256=
+databricks/sdk/service/catalog.py,sha256=jUs3tX-XhjQjo-yVqBYShs9zkdmE5KuuT27eUpMKG8Q,448457
 databricks/sdk/service/compute.py,sha256=5vVDW2F8bh5E-EeIomWIRr3Mk_rhIAMnlOEG3IH30DQ,436163
-databricks/sdk/service/dashboards.py,sha256=
+databricks/sdk/service/dashboards.py,sha256=y9a2cBl6-NU9YrCNzEZ-PvJAOQDum3hOxMvl4DC7d-s,78274
 databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
 databricks/sdk/service/iam.py,sha256=P_2k7_MDV-Iw4heUD78i3XQriSoYZX1Jhhfnn4gS4Zk,148548
-databricks/sdk/service/jobs.py,sha256=
+databricks/sdk/service/jobs.py,sha256=hUOnr78B0Hh_EHzY9VHUA_6mxxidtFoPXA-G8RbVfgM,337787
 databricks/sdk/service/marketplace.py,sha256=Fgk_8V9zbQ8QcNPUw-yZehHv8LgnDtFJUe-YixjxkYo,136405
 databricks/sdk/service/ml.py,sha256=KG5nG9ap1IJejha2JFhX13f61C6tShO0AnHvLNDz0KE,236858
 databricks/sdk/service/oauth2.py,sha256=67pr6gUnYwO6BaGNQfjW1qvcEB3ejdNbI9Pmvqs5bSE,39928
-databricks/sdk/service/pipelines.py,sha256
+databricks/sdk/service/pipelines.py,sha256=-cI6VvG2VXrjwMec1avCxXoGLa0tGbp3UoI-eB_iZH0,124987
 databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
 databricks/sdk/service/serving.py,sha256=IBDul9fW1dYSINYkV4lOFamM6SF7Wy3ru5dUT_B2t0w,156476
 databricks/sdk/service/settings.py,sha256=pYIm3yjamDoiZ2z54AtuQDS-Wj2dIEdTJBmZkBocKNE,221129
 databricks/sdk/service/sharing.py,sha256=R6MoLh8BZ01OrezAsss53rY2mhbJKGkq-fm7nrYtFkQ,113261
-databricks/sdk/service/sql.py,sha256=
+databricks/sdk/service/sql.py,sha256=UjOtKxlVRrxL7TA3vfsXSoWWxWzzcYvdJrHoRfrqZ90,326240
 databricks/sdk/service/vectorsearch.py,sha256=a5Y4vrS_oAJJqa69XwKMANhGuZi5glS0PSXBXz1bKGU,62961
 databricks/sdk/service/workspace.py,sha256=b5EWqWB2fVX15eGw_Dkl554Jug0tuNag5ZpkDFn53ec,106659
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
-databricks_sdk-0.
+databricks_sdk-0.35.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
+databricks_sdk-0.35.0.dist-info/METADATA,sha256=xD1j7lAueJNimnhHIYqKz14UX6Jplaaclq7tI8G_Rt4,38309
+databricks_sdk-0.35.0.dist-info/NOTICE,sha256=tkRcQYA1k68wDLcnOWbg2xJDsUOJw8G8DGBhb8dnI3w,1588
+databricks_sdk-0.35.0.dist-info/WHEEL,sha256=OVMc5UfuAQiSplgO0_WdW7vXVGAt9Hdd6qtN4HotdyA,91
+databricks_sdk-0.35.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
+databricks_sdk-0.35.0.dist-info/RECORD,,
{databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/LICENSE
File without changes
{databricks_sdk-0.34.0.dist-info → databricks_sdk-0.35.0.dist-info}/top_level.txt
File without changes