databricks-sdk 0.69.0__tar.gz → 0.71.0__tar.gz

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

Files changed (96) hide show
  1. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/CHANGELOG.md +51 -0
  2. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/PKG-INFO +1 -1
  3. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/dbutils.py +17 -0
  4. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/files.py +10 -10
  5. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/agentbricks.py +2 -0
  6. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/apps.py +10 -0
  7. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/billing.py +13 -3
  8. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/catalog.py +131 -47
  9. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/cleanrooms.py +11 -3
  10. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/compute.py +64 -0
  11. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/dashboards.py +10 -0
  12. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/database.py +12 -0
  13. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/dataquality.py +201 -52
  14. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/files.py +7 -72
  15. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/iam.py +26 -36
  16. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/iamv2.py +6 -0
  17. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/jobs.py +86 -154
  18. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/marketplace.py +18 -0
  19. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/ml.py +464 -13
  20. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/oauth2.py +37 -19
  21. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/pipelines.py +25 -2
  22. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/provisioning.py +19 -1
  23. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/qualitymonitorv2.py +2 -0
  24. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/serving.py +16 -21
  25. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/settings.py +45 -72
  26. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/settingsv2.py +2 -0
  27. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/sharing.py +23 -69
  28. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/sql.py +85 -62
  29. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/tags.py +2 -0
  30. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/vectorsearch.py +8 -0
  31. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/workspace.py +18 -91
  32. databricks_sdk-0.71.0/databricks/sdk/version.py +1 -0
  33. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks_sdk.egg-info/PKG-INFO +1 -1
  34. databricks_sdk-0.69.0/databricks/sdk/version.py +0 -1
  35. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/CONTRIBUTING.md +0 -0
  36. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/DCO +0 -0
  37. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/LICENSE +0 -0
  38. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/MANIFEST.in +0 -0
  39. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/Makefile +0 -0
  40. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/NOTICE +0 -0
  41. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/README.md +0 -0
  42. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/SECURITY.md +0 -0
  43. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/__init__.py +0 -0
  44. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/__init__.py +24 -24
  45. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/_base_client.py +0 -0
  46. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/_property.py +0 -0
  47. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/_widgets/__init__.py +0 -0
  48. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/_widgets/default_widgets_utils.py +0 -0
  49. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/_widgets/ipywidgets_utils.py +0 -0
  50. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/azure.py +0 -0
  51. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/casing.py +0 -0
  52. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/clock.py +0 -0
  53. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/common/lro.py +0 -0
  54. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/common/types/__init__.py +0 -0
  55. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/common/types/fieldmask.py +0 -0
  56. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/config.py +0 -0
  57. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/core.py +0 -0
  58. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/credentials_provider.py +0 -0
  59. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/data_plane.py +0 -0
  60. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/environments.py +0 -0
  61. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/__init__.py +0 -0
  62. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/base.py +0 -0
  63. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/customizer.py +0 -0
  64. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/deserializer.py +0 -0
  65. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/details.py +0 -0
  66. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/mapper.py +0 -0
  67. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/overrides.py +0 -0
  68. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/parser.py +0 -0
  69. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/platform.py +0 -0
  70. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/private_link.py +0 -0
  71. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/errors/sdk.py +0 -0
  72. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/logger/__init__.py +0 -0
  73. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/logger/round_trip_logger.py +0 -0
  74. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/__init__.py +0 -0
  75. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/compute.py +0 -0
  76. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/files_utils.py +0 -0
  77. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/jobs.py +0 -0
  78. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/open_ai_client.py +0 -0
  79. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/sharing.py +0 -0
  80. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/mixins/workspace.py +0 -0
  81. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/oauth.py +0 -0
  82. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/oidc.py +0 -0
  83. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/oidc_token_supplier.py +0 -0
  84. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/py.typed +0 -0
  85. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/retries.py +0 -0
  86. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/runtime/__init__.py +0 -0
  87. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/runtime/dbutils_stub.py +0 -0
  88. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/__init__.py +0 -0
  89. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/service/_internal.py +0 -0
  90. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks/sdk/useragent.py +0 -0
  91. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks_sdk.egg-info/SOURCES.txt +0 -0
  92. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks_sdk.egg-info/dependency_links.txt +0 -0
  93. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks_sdk.egg-info/requires.txt +0 -0
  94. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/databricks_sdk.egg-info/top_level.txt +0 -0
  95. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/pyproject.toml +0 -0
  96. {databricks_sdk-0.69.0 → databricks_sdk-0.71.0}/setup.cfg +0 -0
@@ -1,5 +1,56 @@
1
1
  # Version changelog
2
2
 
3
+ ## Release v0.71.0
4
+
5
+ ### Bug Fixes
6
+
7
+ - Fix the issue where `FilesExt.upload`'s second parameter was unintentionally renamed from `contents` to `content`. The interface is now backward compatible with versions prior to 0.69.0.
8
+
9
+ ### API Changes
10
+ * Add `instance_profile_arn` field for `databricks.sdk.service.compute.InstancePoolAwsAttributes`.
11
+ * Add `continuous`, `sliding` and `tumbling` fields for `databricks.sdk.service.ml.TimeWindow`.
12
+ * Add `usage_policy_id` field for `databricks.sdk.service.pipelines.CreatePipeline`.
13
+ * Add `usage_policy_id` field for `databricks.sdk.service.pipelines.EditPipeline`.
14
+ * Add `usage_policy_id` field for `databricks.sdk.service.pipelines.PipelineSpec`.
15
+ * Add `read_files_bytes` field for `databricks.sdk.service.sql.QueryMetrics`.
16
+ * Add `select` enum value for `databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurablePermission`.
17
+ * Add `table` enum value for `databricks.sdk.service.apps.AppManifestAppResourceUcSecurableSpecUcSecurableType`.
18
+ * Add `decommission_started` and `decommission_ended` enum values for `databricks.sdk.service.compute.EventType`.
19
+ * Add `dbr_image_resolution_failure` enum value for `databricks.sdk.service.compute.TerminationReasonCode`.
20
+ * Add `dbr_image_resolution_failure` enum value for `databricks.sdk.service.sql.TerminationReasonCode`.
21
+ * [Breaking] Change `offline_store_config` and `online_store_config` fields for `databricks.sdk.service.ml.MaterializedFeature` to no longer be required.
22
+ * Change `offline_store_config` and `online_store_config` fields for `databricks.sdk.service.ml.MaterializedFeature` to no longer be required.
23
+ * [Breaking] Change `lifecycle_state` field for `databricks.sdk.service.sql.AlertV2` to type `databricks.sdk.service.sql.AlertLifecycleState` dataclass.
24
+ * [Breaking] Remove `table` field for `databricks.sdk.service.jobs.TriggerSettings`.
25
+ * [Breaking] Remove `duration` and `offset` fields for `databricks.sdk.service.ml.TimeWindow`.
26
+
27
+
28
+ ## Release v0.70.0
29
+
30
+ ### Bug Fixes
31
+ - Improved the error message shown when the unsupported `dbutils.credentials.getServiceCredentialsProvider` method is used. This method can only be used inside of a notebook.
32
+
33
+ ### API Changes
34
+ * Add `create_materialized_feature()`, `delete_materialized_feature()`, `get_materialized_feature()`, `list_materialized_features()` and `update_materialized_feature()` methods for [w.feature_engineering](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/ml/feature_engineering.html) workspace-level service.
35
+ * Add `filter_condition` field for `databricks.sdk.service.ml.Feature`.
36
+ * Add `absolute_session_lifetime_in_minutes` and `enable_single_use_refresh_tokens` fields for `databricks.sdk.service.oauth2.TokenAccessPolicy`.
37
+ * Add `network_connectivity_config_id` field for `databricks.sdk.service.provisioning.CreateWorkspaceRequest`.
38
+ * Add `oauth_mtls` enum value for `databricks.sdk.service.catalog.CredentialType`.
39
+ * Add `network_check_nic_failure_due_to_misconfig`, `network_check_dns_server_failure_due_to_misconfig`, `network_check_storage_failure_due_to_misconfig`, `network_check_metadata_endpoint_failure_due_to_misconfig`, `network_check_control_plane_failure_due_to_misconfig` and `network_check_multiple_components_failure_due_to_misconfig` enum values for `databricks.sdk.service.compute.TerminationReasonCode`.
40
+ * Add `creating` and `create_failed` enum values for `databricks.sdk.service.settings.NccPrivateEndpointRulePrivateLinkConnectionState`.
41
+ * Add `network_check_nic_failure_due_to_misconfig`, `network_check_dns_server_failure_due_to_misconfig`, `network_check_storage_failure_due_to_misconfig`, `network_check_metadata_endpoint_failure_due_to_misconfig`, `network_check_control_plane_failure_due_to_misconfig` and `network_check_multiple_components_failure_due_to_misconfig` enum values for `databricks.sdk.service.sql.TerminationReasonCode`.
42
+ * [Breaking] Change `display_name`, `evaluation`, `query_text`, `schedule` and `warehouse_id` fields for `databricks.sdk.service.sql.AlertV2` to be required.
43
+ * Change `display_name`, `evaluation`, `query_text`, `schedule` and `warehouse_id` fields for `databricks.sdk.service.sql.AlertV2` to be required.
44
+ * Change `comparison_operator` and `source` fields for `databricks.sdk.service.sql.AlertV2Evaluation` to be required.
45
+ * [Breaking] Change `comparison_operator` and `source` fields for `databricks.sdk.service.sql.AlertV2Evaluation` to be required.
46
+ * Change `name` field for `databricks.sdk.service.sql.AlertV2OperandColumn` to be required.
47
+ * [Breaking] Change `name` field for `databricks.sdk.service.sql.AlertV2OperandColumn` to be required.
48
+ * [Breaking] Change `quartz_cron_schedule` and `timezone_id` fields for `databricks.sdk.service.sql.CronSchedule` to be required.
49
+ * Change `quartz_cron_schedule` and `timezone_id` fields for `databricks.sdk.service.sql.CronSchedule` to be required.
50
+ * [Breaking] Remove `update()` method for [w.recipient_federation_policies](https://databricks-sdk-py.readthedocs.io/en/latest/workspace/sharing/recipient_federation_policies.html) workspace-level service.
51
+ * [Breaking] Remove `results` field for `databricks.sdk.service.sql.ListAlertsV2Response`.
52
+
53
+
3
54
  ## Release v0.69.0
4
55
 
5
56
  ### New Features and Improvements
@@ -1,6 +1,6 @@
1
1
  Metadata-Version: 2.4
2
2
  Name: databricks-sdk
3
- Version: 0.69.0
3
+ Version: 0.71.0
4
4
  Summary: Databricks SDK for Python (Beta)
5
5
  Project-URL: Documentation, https://databricks-sdk-py.readthedocs.io
6
6
  Keywords: databricks,sdk
@@ -281,10 +281,17 @@ def get_local_notebook_path():
281
281
  return value
282
282
 
283
283
 
284
+ def not_supported_method_err_msg(methodName):
285
+ return f"Method '{methodName}' is not supported in the SDK version of DBUtils"
286
+
287
+
284
288
  class _OverrideProxyUtil:
285
289
 
286
290
  @classmethod
287
291
  def new(cls, path: str):
292
+ if path in cls.not_supported_override_paths:
293
+ raise ValueError(cls.not_supported_override_paths[path])
294
+
288
295
  if len(cls.__get_matching_overrides(path)) > 0:
289
296
  return _OverrideProxyUtil(path)
290
297
  return None
@@ -301,6 +308,16 @@ class _OverrideProxyUtil:
301
308
  "notebook.entry_point.getDbutils().notebook().getContext().notebookPath().get()": get_local_notebook_path,
302
309
  }
303
310
 
311
+ # These paths work the same as 'proxy_override_paths' but instead of using a local implementation we raise an exception.
312
+ not_supported_override_paths = {
313
+ # The object returned by 'credentials.getServiceCredentialsProvider()' can't be serialized to JSON.
314
+ # Without this override, the command would fail with an error 'TypeError: Object of type Session is not JSON serializable'.
315
+ # We override it to show a better error message
316
+ "credentials.getServiceCredentialsProvider": not_supported_method_err_msg(
317
+ "credentials.getServiceCredentialsProvider"
318
+ ),
319
+ }
320
+
304
321
  @classmethod
305
322
  def __get_matching_overrides(cls, path: str):
306
323
  return [x for x in cls.proxy_override_paths.keys() if x.startswith(path)]
@@ -1064,7 +1064,7 @@ class FilesExt(files.FilesAPI):
1064
1064
  def upload(
1065
1065
  self,
1066
1066
  file_path: str,
1067
- content: BinaryIO,
1067
+ contents: BinaryIO,
1068
1068
  *,
1069
1069
  overwrite: Optional[bool] = None,
1070
1070
  part_size: Optional[int] = None,
@@ -1076,7 +1076,7 @@ class FilesExt(files.FilesAPI):
1076
1076
 
1077
1077
  :param file_path: str
1078
1078
  The absolute remote path of the target file, e.g. /Volumes/path/to/your/file
1079
- :param content: BinaryIO
1079
+ :param contents: BinaryIO
1080
1080
  The contents of the file to upload. This must be a BinaryIO stream.
1081
1081
  :param overwrite: bool (optional)
1082
1082
  If true, an existing file will be overwritten. When not specified, assumed True.
@@ -1096,7 +1096,7 @@ class FilesExt(files.FilesAPI):
1096
1096
 
1097
1097
  if self._config.disable_experimental_files_api_client:
1098
1098
  _LOG.info("Disable experimental files API client, will use the original upload method.")
1099
- super().upload(file_path=file_path, contents=content, overwrite=overwrite)
1099
+ super().upload(file_path=file_path, contents=contents, overwrite=overwrite)
1100
1100
  return UploadStreamResult()
1101
1101
 
1102
1102
  _LOG.debug(f"Uploading file from BinaryIO stream")
@@ -1107,12 +1107,12 @@ class FilesExt(files.FilesAPI):
1107
1107
 
1108
1108
  # Determine content length if the stream is seekable
1109
1109
  content_length = None
1110
- if content.seekable():
1110
+ if contents.seekable():
1111
1111
  _LOG.debug(f"Uploading using seekable mode")
1112
1112
  # If the stream is seekable, we can read its size.
1113
- content.seek(0, os.SEEK_END)
1114
- content_length = content.tell()
1115
- content.seek(0)
1113
+ contents.seek(0, os.SEEK_END)
1114
+ content_length = contents.tell()
1115
+ contents.seek(0)
1116
1116
 
1117
1117
  # Get optimized part size and batch size based on content length and provided part size
1118
1118
  optimized_part_size, optimized_batch_size = self._get_optimized_performance_parameters_for_upload(
@@ -1135,17 +1135,17 @@ class FilesExt(files.FilesAPI):
1135
1135
  )
1136
1136
 
1137
1137
  if ctx.use_parallel:
1138
- self._parallel_upload_from_stream(ctx, content)
1138
+ self._parallel_upload_from_stream(ctx, contents)
1139
1139
  return UploadStreamResult()
1140
1140
  elif ctx.content_length is not None:
1141
- self._upload_single_thread_with_known_size(ctx, content)
1141
+ self._upload_single_thread_with_known_size(ctx, contents)
1142
1142
  return UploadStreamResult()
1143
1143
  else:
1144
1144
  _LOG.debug(f"Uploading using non-seekable mode")
1145
1145
  # If the stream is not seekable, we cannot determine its size.
1146
1146
  # We will use a multipart upload.
1147
1147
  _LOG.debug(f"Using multipart upload for non-seekable input stream of unknown size for file {file_path}")
1148
- self._single_thread_multipart_upload(ctx, content)
1148
+ self._single_thread_multipart_upload(ctx, contents)
1149
1149
  return UploadStreamResult()
1150
1150
 
1151
1151
  def upload_from(
@@ -238,6 +238,7 @@ class AgentBricksAPI:
238
238
 
239
239
  :returns: :class:`CustomLlm`
240
240
  """
241
+
241
242
  body = {}
242
243
  if agent_artifact_path is not None:
243
244
  body["agent_artifact_path"] = agent_artifact_path
@@ -328,6 +329,7 @@ class AgentBricksAPI:
328
329
 
329
330
  :returns: :class:`CustomLlm`
330
331
  """
332
+
331
333
  body = {}
332
334
  if custom_llm is not None:
333
335
  body["custom_llm"] = custom_llm.as_dict()
@@ -790,11 +790,13 @@ class AppManifestAppResourceUcSecurableSpecUcSecurablePermission(Enum):
790
790
 
791
791
  MANAGE = "MANAGE"
792
792
  READ_VOLUME = "READ_VOLUME"
793
+ SELECT = "SELECT"
793
794
  WRITE_VOLUME = "WRITE_VOLUME"
794
795
 
795
796
 
796
797
  class AppManifestAppResourceUcSecurableSpecUcSecurableType(Enum):
797
798
 
799
+ TABLE = "TABLE"
798
800
  VOLUME = "VOLUME"
799
801
 
800
802
 
@@ -1867,6 +1869,7 @@ class AppsAPI:
1867
1869
  Long-running operation waiter for :class:`App`.
1868
1870
  See :method:wait_get_app_active for more details.
1869
1871
  """
1872
+
1870
1873
  body = app.as_dict()
1871
1874
  query = {}
1872
1875
  if no_compute is not None:
@@ -1903,6 +1906,7 @@ class AppsAPI:
1903
1906
  Long-running operation waiter for :class:`AppUpdate`.
1904
1907
  See :method:wait_get_update_app_succeeded for more details.
1905
1908
  """
1909
+
1906
1910
  body = {}
1907
1911
  if app is not None:
1908
1912
  body["app"] = app.as_dict()
@@ -1949,6 +1953,7 @@ class AppsAPI:
1949
1953
  Long-running operation waiter for :class:`AppDeployment`.
1950
1954
  See :method:wait_get_deployment_app_succeeded for more details.
1951
1955
  """
1956
+
1952
1957
  body = app_deployment.as_dict()
1953
1958
  headers = {
1954
1959
  "Accept": "application/json",
@@ -2124,6 +2129,7 @@ class AppsAPI:
2124
2129
 
2125
2130
  :returns: :class:`AppPermissions`
2126
2131
  """
2132
+
2127
2133
  body = {}
2128
2134
  if access_control_list is not None:
2129
2135
  body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -2189,6 +2195,7 @@ class AppsAPI:
2189
2195
 
2190
2196
  :returns: :class:`App`
2191
2197
  """
2198
+
2192
2199
  body = app.as_dict()
2193
2200
  headers = {
2194
2201
  "Accept": "application/json",
@@ -2209,6 +2216,7 @@ class AppsAPI:
2209
2216
 
2210
2217
  :returns: :class:`AppPermissions`
2211
2218
  """
2219
+
2212
2220
  body = {}
2213
2221
  if access_control_list is not None:
2214
2222
  body["access_control_list"] = [v.as_dict() for v in access_control_list]
@@ -2234,6 +2242,7 @@ class AppsSettingsAPI:
2234
2242
 
2235
2243
  :returns: :class:`CustomTemplate`
2236
2244
  """
2245
+
2237
2246
  body = template.as_dict()
2238
2247
  headers = {
2239
2248
  "Accept": "application/json",
@@ -2316,6 +2325,7 @@ class AppsSettingsAPI:
2316
2325
 
2317
2326
  :returns: :class:`CustomTemplate`
2318
2327
  """
2328
+
2319
2329
  body = template.as_dict()
2320
2330
  headers = {
2321
2331
  "Accept": "application/json",
@@ -3,6 +3,7 @@
3
3
  from __future__ import annotations
4
4
 
5
5
  import logging
6
+ import uuid
6
7
  from dataclasses import dataclass
7
8
  from enum import Enum
8
9
  from typing import Any, BinaryIO, Dict, Iterator, List, Optional
@@ -1067,9 +1068,6 @@ class LogDeliveryConfiguration:
1067
1068
  [Configuring audit logs]: https://docs.databricks.com/administration-guide/account-settings/audit-logs.html
1068
1069
  [View billable usage]: https://docs.databricks.com/administration-guide/account-settings/usage.html"""
1069
1070
 
1070
- account_id: str
1071
- """Databricks account ID."""
1072
-
1073
1071
  credentials_id: str
1074
1072
  """The ID for a method:credentials/create that represents the AWS IAM role with policy and trust
1075
1073
  relationship as described in the main billable usage documentation page. See [Configure billable
@@ -1083,6 +1081,9 @@ class LogDeliveryConfiguration:
1083
1081
 
1084
1082
  [Configure billable usage delivery]: https://docs.databricks.com/administration-guide/account-settings/billable-usage-delivery.html"""
1085
1083
 
1084
+ account_id: Optional[str] = None
1085
+ """Databricks account ID."""
1086
+
1086
1087
  config_id: Optional[str] = None
1087
1088
  """The unique UUID of log delivery configuration"""
1088
1089
 
@@ -1565,6 +1566,9 @@ class BudgetPolicyAPI:
1565
1566
 
1566
1567
  :returns: :class:`BudgetPolicy`
1567
1568
  """
1569
+
1570
+ if request_id is None or request_id == "":
1571
+ request_id = str(uuid.uuid4())
1568
1572
  body = {}
1569
1573
  if policy is not None:
1570
1574
  body["policy"] = policy.as_dict()
@@ -1679,6 +1683,7 @@ class BudgetPolicyAPI:
1679
1683
 
1680
1684
  :returns: :class:`BudgetPolicy`
1681
1685
  """
1686
+
1682
1687
  body = policy.as_dict()
1683
1688
  query = {}
1684
1689
  if limit_config is not None:
@@ -1715,6 +1720,7 @@ class BudgetsAPI:
1715
1720
 
1716
1721
  :returns: :class:`CreateBudgetConfigurationResponse`
1717
1722
  """
1723
+
1718
1724
  body = {}
1719
1725
  if budget is not None:
1720
1726
  body["budget"] = budget.as_dict()
@@ -1797,6 +1803,7 @@ class BudgetsAPI:
1797
1803
 
1798
1804
  :returns: :class:`UpdateBudgetConfigurationResponse`
1799
1805
  """
1806
+
1800
1807
  body = {}
1801
1808
  if budget is not None:
1802
1809
  body["budget"] = budget.as_dict()
@@ -1895,6 +1902,7 @@ class LogDeliveryAPI:
1895
1902
 
1896
1903
  :returns: :class:`WrappedLogDeliveryConfiguration`
1897
1904
  """
1905
+
1898
1906
  body = {}
1899
1907
  if log_delivery_configuration is not None:
1900
1908
  body["log_delivery_configuration"] = log_delivery_configuration.as_dict()
@@ -1989,6 +1997,7 @@ class LogDeliveryAPI:
1989
1997
 
1990
1998
 
1991
1999
  """
2000
+
1992
2001
  body = {}
1993
2002
  if status is not None:
1994
2003
  body["status"] = status.value
@@ -2026,6 +2035,7 @@ class UsageDashboardsAPI:
2026
2035
 
2027
2036
  :returns: :class:`CreateBillingUsageDashboardResponse`
2028
2037
  """
2038
+
2029
2039
  body = {}
2030
2040
  if dashboard_type is not None:
2031
2041
  body["dashboard_type"] = dashboard_type.value