databricks-sdk 0.22.0__py3-none-any.whl → 0.24.0__py3-none-any.whl

This diff shows the changes between two publicly released versions of the package as they appear in their respective public registries. The information is provided for informational purposes only.

Potentially problematic release.


This version of databricks-sdk might be problematic; see the advisory details below for more information.

@@ -35,6 +35,51 @@ class Ai21LabsConfig:
35
35
  return cls(ai21labs_api_key=d.get('ai21labs_api_key', None))
36
36
 
37
37
 
38
+ @dataclass
39
+ class AmazonBedrockConfig:
40
+ aws_region: str
41
+ """The AWS region to use. Bedrock has to be enabled there."""
42
+
43
+ aws_access_key_id: str
44
+ """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with
45
+ Bedrock services."""
46
+
47
+ aws_secret_access_key: str
48
+ """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID,
49
+ with permissions to interact with Bedrock services."""
50
+
51
+ bedrock_provider: AmazonBedrockConfigBedrockProvider
52
+ """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
53
+ Anthropic, Cohere, AI21Labs, Amazon."""
54
+
55
+ def as_dict(self) -> dict:
56
+ """Serializes the AmazonBedrockConfig into a dictionary suitable for use as a JSON request body."""
57
+ body = {}
58
+ if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
59
+ if self.aws_region is not None: body['aws_region'] = self.aws_region
60
+ if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
61
+ if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
62
+ return body
63
+
64
+ @classmethod
65
+ def from_dict(cls, d: Dict[str, any]) -> AmazonBedrockConfig:
66
+ """Deserializes the AmazonBedrockConfig from a dictionary."""
67
+ return cls(aws_access_key_id=d.get('aws_access_key_id', None),
68
+ aws_region=d.get('aws_region', None),
69
+ aws_secret_access_key=d.get('aws_secret_access_key', None),
70
+ bedrock_provider=_enum(d, 'bedrock_provider', AmazonBedrockConfigBedrockProvider))
71
+
72
+
73
+ class AmazonBedrockConfigBedrockProvider(Enum):
74
+ """The underlying provider in Amazon Bedrock. Supported values (case insensitive) include:
75
+ Anthropic, Cohere, AI21Labs, Amazon."""
76
+
77
+ AI21LABS = 'ai21labs'
78
+ AMAZON = 'amazon'
79
+ ANTHROPIC = 'anthropic'
80
+ COHERE = 'cohere'
81
+
82
+
38
83
  @dataclass
39
84
  class AnthropicConfig:
40
85
  anthropic_api_key: str
@@ -243,51 +288,6 @@ class AutoCaptureState:
243
288
  return cls(payload_table=_from_dict(d, 'payload_table', PayloadTable))
244
289
 
245
290
 
246
- @dataclass
247
- class AwsBedrockConfig:
248
- aws_region: str
249
- """The AWS region to use. Bedrock has to be enabled there."""
250
-
251
- aws_access_key_id: str
252
- """The Databricks secret key reference for an AWS Access Key ID with permissions to interact with
253
- Bedrock services."""
254
-
255
- aws_secret_access_key: str
256
- """The Databricks secret key reference for an AWS Secret Access Key paired with the access key ID,
257
- with permissions to interact with Bedrock services."""
258
-
259
- bedrock_provider: AwsBedrockConfigBedrockProvider
260
- """The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic,
261
- Cohere, AI21Labs, Amazon."""
262
-
263
- def as_dict(self) -> dict:
264
- """Serializes the AwsBedrockConfig into a dictionary suitable for use as a JSON request body."""
265
- body = {}
266
- if self.aws_access_key_id is not None: body['aws_access_key_id'] = self.aws_access_key_id
267
- if self.aws_region is not None: body['aws_region'] = self.aws_region
268
- if self.aws_secret_access_key is not None: body['aws_secret_access_key'] = self.aws_secret_access_key
269
- if self.bedrock_provider is not None: body['bedrock_provider'] = self.bedrock_provider.value
270
- return body
271
-
272
- @classmethod
273
- def from_dict(cls, d: Dict[str, any]) -> AwsBedrockConfig:
274
- """Deserializes the AwsBedrockConfig from a dictionary."""
275
- return cls(aws_access_key_id=d.get('aws_access_key_id', None),
276
- aws_region=d.get('aws_region', None),
277
- aws_secret_access_key=d.get('aws_secret_access_key', None),
278
- bedrock_provider=_enum(d, 'bedrock_provider', AwsBedrockConfigBedrockProvider))
279
-
280
-
281
- class AwsBedrockConfigBedrockProvider(Enum):
282
- """The underlying provider in AWS Bedrock. Supported values (case insensitive) include: Anthropic,
283
- Cohere, AI21Labs, Amazon."""
284
-
285
- AI21LABS = 'ai21labs'
286
- AMAZON = 'amazon'
287
- ANTHROPIC = 'anthropic'
288
- COHERE = 'cohere'
289
-
290
-
291
291
  @dataclass
292
292
  class BuildLogsResponse:
293
293
  logs: str
@@ -661,6 +661,10 @@ class EndpointCoreConfigSummary:
661
661
 
662
662
  @dataclass
663
663
  class EndpointPendingConfig:
664
+ auto_capture_config: Optional[AutoCaptureConfigOutput] = None
665
+ """Configuration for Inference Tables which automatically logs requests and responses to Unity
666
+ Catalog."""
667
+
664
668
  config_version: Optional[int] = None
665
669
  """The config version that the serving endpoint is currently serving."""
666
670
 
@@ -680,6 +684,7 @@ class EndpointPendingConfig:
680
684
  def as_dict(self) -> dict:
681
685
  """Serializes the EndpointPendingConfig into a dictionary suitable for use as a JSON request body."""
682
686
  body = {}
687
+ if self.auto_capture_config: body['auto_capture_config'] = self.auto_capture_config.as_dict()
683
688
  if self.config_version is not None: body['config_version'] = self.config_version
684
689
  if self.served_entities: body['served_entities'] = [v.as_dict() for v in self.served_entities]
685
690
  if self.served_models: body['served_models'] = [v.as_dict() for v in self.served_models]
@@ -690,7 +695,8 @@ class EndpointPendingConfig:
690
695
  @classmethod
691
696
  def from_dict(cls, d: Dict[str, any]) -> EndpointPendingConfig:
692
697
  """Deserializes the EndpointPendingConfig from a dictionary."""
693
- return cls(config_version=d.get('config_version', None),
698
+ return cls(auto_capture_config=_from_dict(d, 'auto_capture_config', AutoCaptureConfigOutput),
699
+ config_version=d.get('config_version', None),
694
700
  served_entities=_repeated_dict(d, 'served_entities', ServedEntityOutput),
695
701
  served_models=_repeated_dict(d, 'served_models', ServedModelOutput),
696
702
  start_time=d.get('start_time', None),
@@ -783,7 +789,7 @@ class ExportMetricsResponse:
783
789
  class ExternalModel:
784
790
  provider: ExternalModelProvider
785
791
  """The name of the provider for the external model. Currently, the supported providers are
786
- 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
792
+ 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
787
793
  'palm'.","""
788
794
 
789
795
  name: str
@@ -795,12 +801,12 @@ class ExternalModel:
795
801
  ai21labs_config: Optional[Ai21LabsConfig] = None
796
802
  """AI21Labs Config. Only required if the provider is 'ai21labs'."""
797
803
 
804
+ amazon_bedrock_config: Optional[AmazonBedrockConfig] = None
805
+ """Amazon Bedrock Config. Only required if the provider is 'amazon-bedrock'."""
806
+
798
807
  anthropic_config: Optional[AnthropicConfig] = None
799
808
  """Anthropic Config. Only required if the provider is 'anthropic'."""
800
809
 
801
- aws_bedrock_config: Optional[AwsBedrockConfig] = None
802
- """AWS Bedrock Config. Only required if the provider is 'aws-bedrock'."""
803
-
804
810
  cohere_config: Optional[CohereConfig] = None
805
811
  """Cohere Config. Only required if the provider is 'cohere'."""
806
812
 
@@ -817,8 +823,8 @@ class ExternalModel:
817
823
  """Serializes the ExternalModel into a dictionary suitable for use as a JSON request body."""
818
824
  body = {}
819
825
  if self.ai21labs_config: body['ai21labs_config'] = self.ai21labs_config.as_dict()
826
+ if self.amazon_bedrock_config: body['amazon_bedrock_config'] = self.amazon_bedrock_config.as_dict()
820
827
  if self.anthropic_config: body['anthropic_config'] = self.anthropic_config.as_dict()
821
- if self.aws_bedrock_config: body['aws_bedrock_config'] = self.aws_bedrock_config.as_dict()
822
828
  if self.cohere_config: body['cohere_config'] = self.cohere_config.as_dict()
823
829
  if self.databricks_model_serving_config:
824
830
  body['databricks_model_serving_config'] = self.databricks_model_serving_config.as_dict()
@@ -833,8 +839,8 @@ class ExternalModel:
833
839
  def from_dict(cls, d: Dict[str, any]) -> ExternalModel:
834
840
  """Deserializes the ExternalModel from a dictionary."""
835
841
  return cls(ai21labs_config=_from_dict(d, 'ai21labs_config', Ai21LabsConfig),
842
+ amazon_bedrock_config=_from_dict(d, 'amazon_bedrock_config', AmazonBedrockConfig),
836
843
  anthropic_config=_from_dict(d, 'anthropic_config', AnthropicConfig),
837
- aws_bedrock_config=_from_dict(d, 'aws_bedrock_config', AwsBedrockConfig),
838
844
  cohere_config=_from_dict(d, 'cohere_config', CohereConfig),
839
845
  databricks_model_serving_config=_from_dict(d, 'databricks_model_serving_config',
840
846
  DatabricksModelServingConfig),
@@ -847,12 +853,12 @@ class ExternalModel:
847
853
 
848
854
  class ExternalModelProvider(Enum):
849
855
  """The name of the provider for the external model. Currently, the supported providers are
850
- 'ai21labs', 'anthropic', 'aws-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
856
+ 'ai21labs', 'anthropic', 'amazon-bedrock', 'cohere', 'databricks-model-serving', 'openai', and
851
857
  'palm'.","""
852
858
 
853
859
  AI21LABS = 'ai21labs'
860
+ AMAZON_BEDROCK = 'amazon-bedrock'
854
861
  ANTHROPIC = 'anthropic'
855
- AWS_BEDROCK = 'aws-bedrock'
856
862
  COHERE = 'cohere'
857
863
  DATABRICKS_MODEL_SERVING = 'databricks-model-serving'
858
864
  OPENAI = 'openai'
@@ -1287,7 +1287,12 @@ class SharedDataObject:
1287
1287
  comment: Optional[str] = None
1288
1288
  """A user-provided comment when adding the data object to the share. [Update:OPT]"""
1289
1289
 
1290
- data_object_type: Optional[str] = None
1290
+ content: Optional[str] = None
1291
+ """The content of the notebook file when the data object type is NOTEBOOK_FILE. This should be
1292
+ base64 encoded. Required for adding a NOTEBOOK_FILE, optional for updating, ignored for other
1293
+ types."""
1294
+
1295
+ data_object_type: Optional[SharedDataObjectDataObjectType] = None
1291
1296
  """The type of the data object."""
1292
1297
 
1293
1298
  history_data_sharing_status: Optional[SharedDataObjectHistoryDataSharingStatus] = None
@@ -1326,7 +1331,8 @@ class SharedDataObject:
1326
1331
  if self.added_by is not None: body['added_by'] = self.added_by
1327
1332
  if self.cdf_enabled is not None: body['cdf_enabled'] = self.cdf_enabled
1328
1333
  if self.comment is not None: body['comment'] = self.comment
1329
- if self.data_object_type is not None: body['data_object_type'] = self.data_object_type
1334
+ if self.content is not None: body['content'] = self.content
1335
+ if self.data_object_type is not None: body['data_object_type'] = self.data_object_type.value
1330
1336
  if self.history_data_sharing_status is not None:
1331
1337
  body['history_data_sharing_status'] = self.history_data_sharing_status.value
1332
1338
  if self.name is not None: body['name'] = self.name
@@ -1344,7 +1350,8 @@ class SharedDataObject:
1344
1350
  added_by=d.get('added_by', None),
1345
1351
  cdf_enabled=d.get('cdf_enabled', None),
1346
1352
  comment=d.get('comment', None),
1347
- data_object_type=d.get('data_object_type', None),
1353
+ content=d.get('content', None),
1354
+ data_object_type=_enum(d, 'data_object_type', SharedDataObjectDataObjectType),
1348
1355
  history_data_sharing_status=_enum(d, 'history_data_sharing_status',
1349
1356
  SharedDataObjectHistoryDataSharingStatus),
1350
1357
  name=d.get('name', None),
@@ -1355,6 +1362,18 @@ class SharedDataObject:
1355
1362
  string_shared_as=d.get('string_shared_as', None))
1356
1363
 
1357
1364
 
1365
+ class SharedDataObjectDataObjectType(Enum):
1366
+ """The type of the data object."""
1367
+
1368
+ MATERIALIZED_VIEW = 'MATERIALIZED_VIEW'
1369
+ MODEL = 'MODEL'
1370
+ NOTEBOOK_FILE = 'NOTEBOOK_FILE'
1371
+ SCHEMA = 'SCHEMA'
1372
+ STREAMING_TABLE = 'STREAMING_TABLE'
1373
+ TABLE = 'TABLE'
1374
+ VIEW = 'VIEW'
1375
+
1376
+
1358
1377
  class SharedDataObjectHistoryDataSharingStatus(Enum):
1359
1378
  """Whether to enable or disable sharing of data history. If not specified, the default is
1360
1379
  **DISABLED**."""
@@ -320,6 +320,9 @@ class DeltaSyncVectorIndexSpecResponse:
320
320
 
321
321
  @dataclass
322
322
  class DirectAccessVectorIndexSpec:
323
+ embedding_source_columns: Optional[List[EmbeddingSourceColumn]] = None
324
+ """Contains the optional model endpoint to use during query time."""
325
+
323
326
  embedding_vector_columns: Optional[List[EmbeddingVectorColumn]] = None
324
327
 
325
328
  schema_json: Optional[str] = None
@@ -333,6 +336,8 @@ class DirectAccessVectorIndexSpec:
333
336
  def as_dict(self) -> dict:
334
337
  """Serializes the DirectAccessVectorIndexSpec into a dictionary suitable for use as a JSON request body."""
335
338
  body = {}
339
+ if self.embedding_source_columns:
340
+ body['embedding_source_columns'] = [v.as_dict() for v in self.embedding_source_columns]
336
341
  if self.embedding_vector_columns:
337
342
  body['embedding_vector_columns'] = [v.as_dict() for v in self.embedding_vector_columns]
338
343
  if self.schema_json is not None: body['schema_json'] = self.schema_json
@@ -341,7 +346,9 @@ class DirectAccessVectorIndexSpec:
341
346
  @classmethod
342
347
  def from_dict(cls, d: Dict[str, any]) -> DirectAccessVectorIndexSpec:
343
348
  """Deserializes the DirectAccessVectorIndexSpec from a dictionary."""
344
- return cls(embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
349
+ return cls(embedding_source_columns=_repeated_dict(d, 'embedding_source_columns',
350
+ EmbeddingSourceColumn),
351
+ embedding_vector_columns=_repeated_dict(d, 'embedding_vector_columns',
345
352
  EmbeddingVectorColumn),
346
353
  schema_json=d.get('schema_json', None))
347
354
 
@@ -71,10 +71,18 @@ class CreateCredentials:
71
71
  gitLabEnterpriseEdition and awsCodeCommit."""
72
72
 
73
73
  git_username: Optional[str] = None
74
- """Git username."""
74
+ """The username or email provided with your Git provider account, depending on which provider you
75
+ are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
76
+ username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
77
+ CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
78
+ see your provider's Personal Access Token authentication documentation to see what is supported."""
75
79
 
76
80
  personal_access_token: Optional[str] = None
77
- """The personal access token used to authenticate to the corresponding Git provider."""
81
+ """The personal access token used to authenticate to the corresponding Git provider. For certain
82
+ providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
83
+ access token used to authenticate to the corresponding Git
84
+
85
+ [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
78
86
 
79
87
  def as_dict(self) -> dict:
80
88
  """Serializes the CreateCredentials into a dictionary suitable for use as a JSON request body."""
@@ -103,7 +111,11 @@ class CreateCredentialsResponse:
103
111
  gitLabEnterpriseEdition and awsCodeCommit."""
104
112
 
105
113
  git_username: Optional[str] = None
106
- """Git username."""
114
+ """The username or email provided with your Git provider account, depending on which provider you
115
+ are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
116
+ username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
117
+ CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
118
+ see your provider's Personal Access Token authentication documentation to see what is supported."""
107
119
 
108
120
  def as_dict(self) -> dict:
109
121
  """Serializes the CreateCredentialsResponse into a dictionary suitable for use as a JSON request body."""
@@ -215,7 +227,11 @@ class CredentialInfo:
215
227
  gitLabEnterpriseEdition and awsCodeCommit."""
216
228
 
217
229
  git_username: Optional[str] = None
218
- """Git username."""
230
+ """The username or email provided with your Git provider account, depending on which provider you
231
+ are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
232
+ username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
233
+ CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
234
+ see your provider's Personal Access Token authentication documentation to see what is supported."""
219
235
 
220
236
  def as_dict(self) -> dict:
221
237
  """Serializes the CredentialInfo into a dictionary suitable for use as a JSON request body."""
@@ -1171,10 +1187,18 @@ class UpdateCredentials:
1171
1187
  gitLabEnterpriseEdition and awsCodeCommit."""
1172
1188
 
1173
1189
  git_username: Optional[str] = None
1174
- """Git username."""
1190
+ """The username or email provided with your Git provider account, depending on which provider you
1191
+ are using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or
1192
+ username may be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS
1193
+ CodeCommit, BitBucket or BitBucket Server, username must be used. For all other providers please
1194
+ see your provider's Personal Access Token authentication documentation to see what is supported."""
1175
1195
 
1176
1196
  personal_access_token: Optional[str] = None
1177
- """The personal access token used to authenticate to the corresponding Git provider."""
1197
+ """The personal access token used to authenticate to the corresponding Git provider. For certain
1198
+ providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
1199
+ access token used to authenticate to the corresponding Git
1200
+
1201
+ [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html"""
1178
1202
 
1179
1203
  def as_dict(self) -> dict:
1180
1204
  """Serializes the UpdateCredentials into a dictionary suitable for use as a JSON request body."""
@@ -1449,9 +1473,17 @@ class GitCredentialsAPI:
1449
1473
  bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
1450
1474
  gitLabEnterpriseEdition and awsCodeCommit.
1451
1475
  :param git_username: str (optional)
1452
- Git username.
1476
+ The username or email provided with your Git provider account, depending on which provider you are
1477
+ using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
1478
+ be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
1479
+ BitBucket Server, username must be used. For all other providers please see your provider's Personal
1480
+ Access Token authentication documentation to see what is supported.
1453
1481
  :param personal_access_token: str (optional)
1454
- The personal access token used to authenticate to the corresponding Git provider.
1482
+ The personal access token used to authenticate to the corresponding Git provider. For certain
1483
+ providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
1484
+ access token used to authenticate to the corresponding Git
1485
+
1486
+ [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
1455
1487
 
1456
1488
  :returns: :class:`CreateCredentialsResponse`
1457
1489
  """
@@ -1526,9 +1558,17 @@ class GitCredentialsAPI:
1526
1558
  bitbucketCloud, gitLab, azureDevOpsServices, gitHubEnterprise, bitbucketServer,
1527
1559
  gitLabEnterpriseEdition and awsCodeCommit.
1528
1560
  :param git_username: str (optional)
1529
- Git username.
1561
+ The username or email provided with your Git provider account, depending on which provider you are
1562
+ using. For GitHub, GitHub Enterprise Server, or Azure DevOps Services, either email or username may
1563
+ be used. For GitLab, GitLab Enterprise Edition, email must be used. For AWS CodeCommit, BitBucket or
1564
+ BitBucket Server, username must be used. For all other providers please see your provider's Personal
1565
+ Access Token authentication documentation to see what is supported.
1530
1566
  :param personal_access_token: str (optional)
1531
- The personal access token used to authenticate to the corresponding Git provider.
1567
+ The personal access token used to authenticate to the corresponding Git provider. For certain
1568
+ providers, support may exist for other types of scoped access tokens. [Learn more]. The personal
1569
+ access token used to authenticate to the corresponding Git
1570
+
1571
+ [Learn more]: https://docs.databricks.com/repos/get-access-tokens-from-git-provider.html
1532
1572
 
1533
1573
 
1534
1574
  """
databricks/sdk/version.py CHANGED
@@ -1 +1 @@
1
- __version__ = '0.22.0'
1
+ __version__ = '0.24.0'
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.1
2
2
  Name: databricks-sdk
3
- Version: 0.22.0
3
+ Version: 0.24.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Home-page: https://databricks-sdk-py.readthedocs.io
6
6
  Author: Serge Smertin
@@ -11,7 +11,7 @@ databricks/sdk/environments.py,sha256=gStDfgI07ECd6Pb82Rf-nRjf48NH6hOY3UfTXm4YNZ
11
11
  databricks/sdk/oauth.py,sha256=jqe0yrrTUfRL8kpR21Odwn4R_X6Ns-hTLu3dKYDI1EM,18313
12
12
  databricks/sdk/py.typed,sha256=pSvaHpbY1UPNEXyVFUjlgBhjPFZMmVC_UNrPC7eMOHI,74
13
13
  databricks/sdk/retries.py,sha256=WgLh12bwdBc6fCQlaig3kKu18cVhPzFDGsspvq629Ew,2454
14
- databricks/sdk/version.py,sha256=h1iZs_ySY6is5xPKVypDpl9q8RFwvTXfv3Urt9ZSKSQ,23
14
+ databricks/sdk/version.py,sha256=lbgaUA9yL_X3wnFgvRc5nIlmJ4gFm6Ala6TbPHN-5aY,23
15
15
  databricks/sdk/_widgets/__init__.py,sha256=Qm3JB8LmdPgEn_-VgxKkodTO4gn6OdaDPwsYcDmeIRI,2667
16
16
  databricks/sdk/_widgets/default_widgets_utils.py,sha256=Rk59AFzVYVpOektB_yC_7j-vSt5OdtZA85IlG0kw0xA,1202
17
17
  databricks/sdk/_widgets/ipywidgets_utils.py,sha256=P-AyGeahPiX3S59mxpAMgffi4gyJ0irEOY7Ekkn9nQ0,2850
@@ -27,27 +27,27 @@ databricks/sdk/mixins/workspace.py,sha256=dWMNvuEi8jJ5wMhrDt1LiqxNdWSsmEuDTzrcZR
27
27
  databricks/sdk/runtime/__init__.py,sha256=9NnZkBzeZXZRQxcE1qKzAszQEzcpIgpL7lQzW3_kxEU,7266
28
28
  databricks/sdk/runtime/dbutils_stub.py,sha256=UFbRZF-bBcwxjbv_pxma00bjNtktLLaYpo8oHRc4-9g,11421
29
29
  databricks/sdk/service/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
30
- databricks/sdk/service/_internal.py,sha256=VvKT8sYgF8aVYp-nxS2s8QYc8GhhQvI3IcFJZJx1g50,1841
30
+ databricks/sdk/service/_internal.py,sha256=nWbJfW5eJCQgAZ3TmA26xoWb6SNZ5N76ZA8bO1N4AsU,1961
31
31
  databricks/sdk/service/billing.py,sha256=Hbe5bMsBrpebuAl8yj-GwVRGktrzKwiZJj3gq1wUMaI,50625
32
- databricks/sdk/service/catalog.py,sha256=nNgOawD0tAL6WFeYi47-7KnW_KeW-lUFUF3-rq--ZIc,401561
32
+ databricks/sdk/service/catalog.py,sha256=EytdKjPNqN6uhmUMRnQFJu5xaKP2xbQJ9GYsmnhRZLs,402330
33
33
  databricks/sdk/service/compute.py,sha256=fEDhQfFiycLWpOBz6ALhLXGvnNYVFsZvMlPm0j0lXpU,395632
34
- databricks/sdk/service/dashboards.py,sha256=9wsQNgrOxrZ3kBSyOfbWaWid9WJgozAu1qCVJ0H4YVM,15660
35
- databricks/sdk/service/files.py,sha256=myqpOOxwIaGwcepukUgV7QDhhZ-pHJg-SDvHU4NFX30,37599
36
- databricks/sdk/service/iam.py,sha256=Obbtlzl5G1Sd-ShmTDQq93ewDRzXbTH4D_lgNLS46uY,147465
37
- databricks/sdk/service/jobs.py,sha256=BmWC-6_xHZjQmM0BQAlObfe2BHzBsD0nxyS5OykRfCs,285521
34
+ databricks/sdk/service/dashboards.py,sha256=PwhX73El3POXdblc7ZOm2PAkhf5TcSZ5Na73_ne2Zb4,18801
35
+ databricks/sdk/service/files.py,sha256=VCt83YSI9rhQexmxaQdrUXHq2UCYfZcDMLvJx5X6n1M,38162
36
+ databricks/sdk/service/iam.py,sha256=3Pe8ZpP413ys2GsaCEbvEKvv1eEji8OeuKrnGWbtb1E,147612
37
+ databricks/sdk/service/jobs.py,sha256=Pi0tkKKIYqtWwKTucT2ZE0bsh4A4oPPuRQwwq7p8ock,304889
38
38
  databricks/sdk/service/ml.py,sha256=vohBdESClI3EOpO-ZZ44W-CMz1alq5Tw4oJnWa99Z2M,236128
39
39
  databricks/sdk/service/oauth2.py,sha256=zpEA7glY_EsPvMgkk-hmt4eVgrmtcSGgduI7XlShNUo,36215
40
40
  databricks/sdk/service/pipelines.py,sha256=2Kum7q_4hVZcEaww2c0NLJj0Fbe9G0BGsF71JKDCvv8,101546
41
41
  databricks/sdk/service/provisioning.py,sha256=DP4Df4X-p0JEUk4zAJQhjX_wxpMi673OKLXFhxl6YSE,142678
42
- databricks/sdk/service/serving.py,sha256=_jbFNtF3lwkvWEYou__r5oLfQljp1jtPtaYKRuJGcS8,130163
42
+ databricks/sdk/service/serving.py,sha256=TuF7GAN2w_IUWf-ymx1g1f6uf1g3ot83EBZn52edqbI,130618
43
43
  databricks/sdk/service/settings.py,sha256=7KITQbOPbaESxsCssckPdxLAS64auzfij1DLTRd9cQI,176630
44
- databricks/sdk/service/sharing.py,sha256=nlDZPzZvj-FQVFxr1dByu7KY5AkVbAqq7LNqaNBGf_E,97807
44
+ databricks/sdk/service/sharing.py,sha256=21BdjeHQTWvAfkoYfMBwj0hGYGOxRoGGYdz3M-20xV4,98520
45
45
  databricks/sdk/service/sql.py,sha256=FMbmD7h1ZrH5-28-fxt7A0o5KZYphenYzFQbzS8vVEw,256266
46
- databricks/sdk/service/vectorsearch.py,sha256=p8PIx6cKjko-kaHoKXpIvMc56r6qiQUIz1gDskPsrys,51124
47
- databricks/sdk/service/workspace.py,sha256=Cc_fgKbZ5phzdjg7ryM-6uoVQ_ug3h4rhzQJFyk0RBM,96837
48
- databricks_sdk-0.22.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
49
- databricks_sdk-0.22.0.dist-info/METADATA,sha256=8qKdf4CcBME8_cRu1kN-KjNWrmlGmwggILafBdHFx8c,34642
50
- databricks_sdk-0.22.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
51
- databricks_sdk-0.22.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
52
- databricks_sdk-0.22.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
53
- databricks_sdk-0.22.0.dist-info/RECORD,,
46
+ databricks/sdk/service/vectorsearch.py,sha256=R5RACGGpM9w2yUylBTiV5Bk-YC6O2OorKlNUHqKFawg,51588
47
+ databricks/sdk/service/workspace.py,sha256=4BZLbKu6yF9Jztb07uzUnEflA8JKOD9LIOQEJm5BK9c,100855
48
+ databricks_sdk-0.24.0.dist-info/LICENSE,sha256=afBgTZo-JsYqj4VOjnejBetMuHKcFR30YobDdpVFkqY,11411
49
+ databricks_sdk-0.24.0.dist-info/METADATA,sha256=yrgQWRk8_Wgb5AoiwfCXx32MFKu0SG4vN84-53lCGAo,34642
50
+ databricks_sdk-0.24.0.dist-info/NOTICE,sha256=Qnc0m8JjZNTDV80y0h1aJGvsr4GqM63m1nr2VTypg6E,963
51
+ databricks_sdk-0.24.0.dist-info/WHEEL,sha256=G16H4A3IeoQmnOrYV4ueZGKSjhipXx8zc8nu9FGlvMA,92
52
+ databricks_sdk-0.24.0.dist-info/top_level.txt,sha256=7kRdatoSgU0EUurRQJ_3F1Nv4EOSHWAr6ng25tJOJKU,11
53
+ databricks_sdk-0.24.0.dist-info/RECORD,,