databricks-sdk 0.32.2__py3-none-any.whl → 0.33.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of databricks-sdk might be problematic. Click here for more details.

@@ -274,6 +274,42 @@ class AssignResponse:
274
274
  return cls()
275
275
 
276
276
 
277
+ @dataclass
278
+ class AwsCredentials:
279
+ """AWS temporary credentials for API authentication. Read more at
280
+ https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
281
+
282
+ access_key_id: Optional[str] = None
283
+ """The access key ID that identifies the temporary credentials."""
284
+
285
+ access_point: Optional[str] = None
286
+ """The Amazon Resource Name (ARN) of the S3 access point for temporary credentials related to the
287
+ external location."""
288
+
289
+ secret_access_key: Optional[str] = None
290
+ """The secret access key that can be used to sign AWS API requests."""
291
+
292
+ session_token: Optional[str] = None
293
+ """The token that users must pass to AWS API to use the temporary credentials."""
294
+
295
+ def as_dict(self) -> dict:
296
+ """Serializes the AwsCredentials into a dictionary suitable for use as a JSON request body."""
297
+ body = {}
298
+ if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
299
+ if self.access_point is not None: body['access_point'] = self.access_point
300
+ if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
301
+ if self.session_token is not None: body['session_token'] = self.session_token
302
+ return body
303
+
304
+ @classmethod
305
+ def from_dict(cls, d: Dict[str, any]) -> AwsCredentials:
306
+ """Deserializes the AwsCredentials from a dictionary."""
307
+ return cls(access_key_id=d.get('access_key_id', None),
308
+ access_point=d.get('access_point', None),
309
+ secret_access_key=d.get('secret_access_key', None),
310
+ session_token=d.get('session_token', None))
311
+
312
+
277
313
  @dataclass
278
314
  class AwsIamRoleRequest:
279
315
  role_arn: str
@@ -405,6 +441,26 @@ class AzureServicePrincipal:
405
441
  directory_id=d.get('directory_id', None))
406
442
 
407
443
 
444
+ @dataclass
445
+ class AzureUserDelegationSas:
446
+ """Azure temporary credentials for API authentication. Read more at
447
+ https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
448
+
449
+ sas_token: Optional[str] = None
450
+ """The signed URI (SAS Token) used to access blob services for a given path"""
451
+
452
+ def as_dict(self) -> dict:
453
+ """Serializes the AzureUserDelegationSas into a dictionary suitable for use as a JSON request body."""
454
+ body = {}
455
+ if self.sas_token is not None: body['sas_token'] = self.sas_token
456
+ return body
457
+
458
+ @classmethod
459
+ def from_dict(cls, d: Dict[str, any]) -> AzureUserDelegationSas:
460
+ """Deserializes the AzureUserDelegationSas from a dictionary."""
461
+ return cls(sas_token=d.get('sas_token', None))
462
+
463
+
408
464
  @dataclass
409
465
  class CancelRefreshResponse:
410
466
 
@@ -1086,9 +1142,6 @@ class CreateFunction:
1086
1142
  full_data_type: str
1087
1143
  """Pretty printed function data type."""
1088
1144
 
1089
- return_params: FunctionParameterInfos
1090
- """Table function return parameters."""
1091
-
1092
1145
  routine_body: CreateFunctionRoutineBody
1093
1146
  """Function language. When **EXTERNAL** is used, the language of the routine function should be
1094
1147
  specified in the __external_language__ field, and the __return_params__ of the function cannot
@@ -1098,9 +1151,6 @@ class CreateFunction:
1098
1151
  routine_definition: str
1099
1152
  """Function body."""
1100
1153
 
1101
- routine_dependencies: DependencyList
1102
- """Function dependencies."""
1103
-
1104
1154
  parameter_style: CreateFunctionParameterStyle
1105
1155
  """Function parameter style. **S** is the value for SQL."""
1106
1156
 
@@ -1131,6 +1181,12 @@ class CreateFunction:
1131
1181
  properties: Optional[str] = None
1132
1182
  """JSON-serialized key-value pair map, encoded (escaped) as a string."""
1133
1183
 
1184
+ return_params: Optional[FunctionParameterInfos] = None
1185
+ """Table function return parameters."""
1186
+
1187
+ routine_dependencies: Optional[DependencyList] = None
1188
+ """Function dependencies."""
1189
+
1134
1190
  sql_path: Optional[str] = None
1135
1191
  """List of schemes whose objects can be referenced without qualification."""
1136
1192
 
@@ -2438,6 +2494,97 @@ class FunctionParameterType(Enum):
2438
2494
  PARAM = 'PARAM'
2439
2495
 
2440
2496
 
2497
+ @dataclass
2498
+ class GcpOauthToken:
2499
+ """GCP temporary credentials for API authentication. Read more at
2500
+ https://developers.google.com/identity/protocols/oauth2/service-account"""
2501
+
2502
+ oauth_token: Optional[str] = None
2503
+
2504
+ def as_dict(self) -> dict:
2505
+ """Serializes the GcpOauthToken into a dictionary suitable for use as a JSON request body."""
2506
+ body = {}
2507
+ if self.oauth_token is not None: body['oauth_token'] = self.oauth_token
2508
+ return body
2509
+
2510
+ @classmethod
2511
+ def from_dict(cls, d: Dict[str, any]) -> GcpOauthToken:
2512
+ """Deserializes the GcpOauthToken from a dictionary."""
2513
+ return cls(oauth_token=d.get('oauth_token', None))
2514
+
2515
+
2516
+ @dataclass
2517
+ class GenerateTemporaryTableCredentialRequest:
2518
+ operation: Optional[TableOperation] = None
2519
+ """The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
2520
+ specified, the credentials returned will have write permissions, otherwise, it will be read
2521
+ only."""
2522
+
2523
+ table_id: Optional[str] = None
2524
+ """UUID of the table to read or write."""
2525
+
2526
+ def as_dict(self) -> dict:
2527
+ """Serializes the GenerateTemporaryTableCredentialRequest into a dictionary suitable for use as a JSON request body."""
2528
+ body = {}
2529
+ if self.operation is not None: body['operation'] = self.operation.value
2530
+ if self.table_id is not None: body['table_id'] = self.table_id
2531
+ return body
2532
+
2533
+ @classmethod
2534
+ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialRequest:
2535
+ """Deserializes the GenerateTemporaryTableCredentialRequest from a dictionary."""
2536
+ return cls(operation=_enum(d, 'operation', TableOperation), table_id=d.get('table_id', None))
2537
+
2538
+
2539
+ @dataclass
2540
+ class GenerateTemporaryTableCredentialResponse:
2541
+ aws_temp_credentials: Optional[AwsCredentials] = None
2542
+ """AWS temporary credentials for API authentication. Read more at
2543
+ https://docs.aws.amazon.com/STS/latest/APIReference/API_Credentials.html."""
2544
+
2545
+ azure_user_delegation_sas: Optional[AzureUserDelegationSas] = None
2546
+ """Azure temporary credentials for API authentication. Read more at
2547
+ https://docs.microsoft.com/en-us/rest/api/storageservices/create-user-delegation-sas"""
2548
+
2549
+ expiration_time: Optional[int] = None
2550
+ """Server time when the credential will expire, in unix epoch milliseconds since January 1, 1970 at
2551
+ 00:00:00 UTC. The API client is advised to cache the credential given this expiration time."""
2552
+
2553
+ gcp_oauth_token: Optional[GcpOauthToken] = None
2554
+ """GCP temporary credentials for API authentication. Read more at
2555
+ https://developers.google.com/identity/protocols/oauth2/service-account"""
2556
+
2557
+ r2_temp_credentials: Optional[R2Credentials] = None
2558
+ """R2 temporary credentials for API authentication. Read more at
2559
+ https://developers.cloudflare.com/r2/api/s3/tokens/."""
2560
+
2561
+ url: Optional[str] = None
2562
+ """The URL of the storage path accessible by the temporary credential."""
2563
+
2564
+ def as_dict(self) -> dict:
2565
+ """Serializes the GenerateTemporaryTableCredentialResponse into a dictionary suitable for use as a JSON request body."""
2566
+ body = {}
2567
+ if self.aws_temp_credentials: body['aws_temp_credentials'] = self.aws_temp_credentials.as_dict()
2568
+ if self.azure_user_delegation_sas:
2569
+ body['azure_user_delegation_sas'] = self.azure_user_delegation_sas.as_dict()
2570
+ if self.expiration_time is not None: body['expiration_time'] = self.expiration_time
2571
+ if self.gcp_oauth_token: body['gcp_oauth_token'] = self.gcp_oauth_token.as_dict()
2572
+ if self.r2_temp_credentials: body['r2_temp_credentials'] = self.r2_temp_credentials.as_dict()
2573
+ if self.url is not None: body['url'] = self.url
2574
+ return body
2575
+
2576
+ @classmethod
2577
+ def from_dict(cls, d: Dict[str, any]) -> GenerateTemporaryTableCredentialResponse:
2578
+ """Deserializes the GenerateTemporaryTableCredentialResponse from a dictionary."""
2579
+ return cls(aws_temp_credentials=_from_dict(d, 'aws_temp_credentials', AwsCredentials),
2580
+ azure_user_delegation_sas=_from_dict(d, 'azure_user_delegation_sas',
2581
+ AzureUserDelegationSas),
2582
+ expiration_time=d.get('expiration_time', None),
2583
+ gcp_oauth_token=_from_dict(d, 'gcp_oauth_token', GcpOauthToken),
2584
+ r2_temp_credentials=_from_dict(d, 'r2_temp_credentials', R2Credentials),
2585
+ url=d.get('url', None))
2586
+
2587
+
2441
2588
  class GetBindingsSecurableType(Enum):
2442
2589
 
2443
2590
  CATALOG = 'catalog'
@@ -2469,6 +2616,9 @@ class GetMetastoreSummaryResponse:
2469
2616
  delta_sharing_scope: Optional[GetMetastoreSummaryResponseDeltaSharingScope] = None
2470
2617
  """The scope of Delta Sharing enabled for the metastore."""
2471
2618
 
2619
+ external_access_enabled: Optional[bool] = None
2620
+ """Whether to allow non-DBR clients to directly access entities under the metastore."""
2621
+
2472
2622
  global_metastore_id: Optional[str] = None
2473
2623
  """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
2474
2624
 
@@ -2516,6 +2666,8 @@ class GetMetastoreSummaryResponse:
2516
2666
  body[
2517
2667
  'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
2518
2668
  if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
2669
+ if self.external_access_enabled is not None:
2670
+ body['external_access_enabled'] = self.external_access_enabled
2519
2671
  if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
2520
2672
  if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
2521
2673
  if self.name is not None: body['name'] = self.name
@@ -2544,6 +2696,7 @@ class GetMetastoreSummaryResponse:
2544
2696
  'delta_sharing_recipient_token_lifetime_in_seconds', None),
2545
2697
  delta_sharing_scope=_enum(d, 'delta_sharing_scope',
2546
2698
  GetMetastoreSummaryResponseDeltaSharingScope),
2699
+ external_access_enabled=d.get('external_access_enabled', None),
2547
2700
  global_metastore_id=d.get('global_metastore_id', None),
2548
2701
  metastore_id=d.get('metastore_id', None),
2549
2702
  name=d.get('name', None),
@@ -2996,6 +3149,9 @@ class MetastoreInfo:
2996
3149
  delta_sharing_scope: Optional[MetastoreInfoDeltaSharingScope] = None
2997
3150
  """The scope of Delta Sharing enabled for the metastore."""
2998
3151
 
3152
+ external_access_enabled: Optional[bool] = None
3153
+ """Whether to allow non-DBR clients to directly access entities under the metastore."""
3154
+
2999
3155
  global_metastore_id: Optional[str] = None
3000
3156
  """Globally unique metastore ID across clouds and regions, of the form `cloud:region:metastore_id`."""
3001
3157
 
@@ -3043,6 +3199,8 @@ class MetastoreInfo:
3043
3199
  body[
3044
3200
  'delta_sharing_recipient_token_lifetime_in_seconds'] = self.delta_sharing_recipient_token_lifetime_in_seconds
3045
3201
  if self.delta_sharing_scope is not None: body['delta_sharing_scope'] = self.delta_sharing_scope.value
3202
+ if self.external_access_enabled is not None:
3203
+ body['external_access_enabled'] = self.external_access_enabled
3046
3204
  if self.global_metastore_id is not None: body['global_metastore_id'] = self.global_metastore_id
3047
3205
  if self.metastore_id is not None: body['metastore_id'] = self.metastore_id
3048
3206
  if self.name is not None: body['name'] = self.name
@@ -3070,6 +3228,7 @@ class MetastoreInfo:
3070
3228
  delta_sharing_recipient_token_lifetime_in_seconds=d.get(
3071
3229
  'delta_sharing_recipient_token_lifetime_in_seconds', None),
3072
3230
  delta_sharing_scope=_enum(d, 'delta_sharing_scope', MetastoreInfoDeltaSharingScope),
3231
+ external_access_enabled=d.get('external_access_enabled', None),
3073
3232
  global_metastore_id=d.get('global_metastore_id', None),
3074
3233
  metastore_id=d.get('metastore_id', None),
3075
3234
  name=d.get('name', None),
@@ -4151,6 +4310,36 @@ class QuotaInfo:
4151
4310
  quota_name=d.get('quota_name', None))
4152
4311
 
4153
4312
 
4313
+ @dataclass
4314
+ class R2Credentials:
4315
+ """R2 temporary credentials for API authentication. Read more at
4316
+ https://developers.cloudflare.com/r2/api/s3/tokens/."""
4317
+
4318
+ access_key_id: Optional[str] = None
4319
+ """The access key ID that identifies the temporary credentials."""
4320
+
4321
+ secret_access_key: Optional[str] = None
4322
+ """The secret access key associated with the access key."""
4323
+
4324
+ session_token: Optional[str] = None
4325
+ """The generated JWT that users must pass to use the temporary credentials."""
4326
+
4327
+ def as_dict(self) -> dict:
4328
+ """Serializes the R2Credentials into a dictionary suitable for use as a JSON request body."""
4329
+ body = {}
4330
+ if self.access_key_id is not None: body['access_key_id'] = self.access_key_id
4331
+ if self.secret_access_key is not None: body['secret_access_key'] = self.secret_access_key
4332
+ if self.session_token is not None: body['session_token'] = self.session_token
4333
+ return body
4334
+
4335
+ @classmethod
4336
+ def from_dict(cls, d: Dict[str, any]) -> R2Credentials:
4337
+ """Deserializes the R2Credentials from a dictionary."""
4338
+ return cls(access_key_id=d.get('access_key_id', None),
4339
+ secret_access_key=d.get('secret_access_key', None),
4340
+ session_token=d.get('session_token', None))
4341
+
4342
+
4154
4343
  @dataclass
4155
4344
  class RegenerateDashboardRequest:
4156
4345
  table_name: Optional[str] = None
@@ -4896,6 +5085,12 @@ class TableInfo:
4896
5085
  view_dependencies=_from_dict(d, 'view_dependencies', DependencyList))
4897
5086
 
4898
5087
 
5088
+ class TableOperation(Enum):
5089
+
5090
+ READ = 'READ'
5091
+ READ_WRITE = 'READ_WRITE'
5092
+
5093
+
4899
5094
  @dataclass
4900
5095
  class TableRowFilter:
4901
5096
  function_name: str
@@ -9135,7 +9330,8 @@ class TablesAPI:
9135
9330
  full_name: str,
9136
9331
  *,
9137
9332
  include_browse: Optional[bool] = None,
9138
- include_delta_metadata: Optional[bool] = None) -> TableInfo:
9333
+ include_delta_metadata: Optional[bool] = None,
9334
+ include_manifest_capabilities: Optional[bool] = None) -> TableInfo:
9139
9335
  """Get a table.
9140
9336
 
9141
9337
  Gets a table from the metastore for a specific catalog and schema. The caller must satisfy one of the
@@ -9151,6 +9347,8 @@ class TablesAPI:
9151
9347
  for
9152
9348
  :param include_delta_metadata: bool (optional)
9153
9349
  Whether delta metadata should be included in the response.
9350
+ :param include_manifest_capabilities: bool (optional)
9351
+ Whether to include a manifest containing capabilities the table has.
9154
9352
 
9155
9353
  :returns: :class:`TableInfo`
9156
9354
  """
@@ -9158,6 +9356,8 @@ class TablesAPI:
9158
9356
  query = {}
9159
9357
  if include_browse is not None: query['include_browse'] = include_browse
9160
9358
  if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
9359
+ if include_manifest_capabilities is not None:
9360
+ query['include_manifest_capabilities'] = include_manifest_capabilities
9161
9361
  headers = {'Accept': 'application/json', }
9162
9362
 
9163
9363
  res = self._api.do('GET', f'/api/2.1/unity-catalog/tables/{full_name}', query=query, headers=headers)
@@ -9169,6 +9369,7 @@ class TablesAPI:
9169
9369
  *,
9170
9370
  include_browse: Optional[bool] = None,
9171
9371
  include_delta_metadata: Optional[bool] = None,
9372
+ include_manifest_capabilities: Optional[bool] = None,
9172
9373
  max_results: Optional[int] = None,
9173
9374
  omit_columns: Optional[bool] = None,
9174
9375
  omit_properties: Optional[bool] = None,
@@ -9190,6 +9391,8 @@ class TablesAPI:
9190
9391
  for
9191
9392
  :param include_delta_metadata: bool (optional)
9192
9393
  Whether delta metadata should be included in the response.
9394
+ :param include_manifest_capabilities: bool (optional)
9395
+ Whether to include a manifest containing capabilities the table has.
9193
9396
  :param max_results: int (optional)
9194
9397
  Maximum number of tables to return. If not set, all the tables are returned (not recommended). -
9195
9398
  when set to a value greater than 0, the page length is the minimum of this value and a server
@@ -9209,6 +9412,8 @@ class TablesAPI:
9209
9412
  if catalog_name is not None: query['catalog_name'] = catalog_name
9210
9413
  if include_browse is not None: query['include_browse'] = include_browse
9211
9414
  if include_delta_metadata is not None: query['include_delta_metadata'] = include_delta_metadata
9415
+ if include_manifest_capabilities is not None:
9416
+ query['include_manifest_capabilities'] = include_manifest_capabilities
9212
9417
  if max_results is not None: query['max_results'] = max_results
9213
9418
  if omit_columns is not None: query['omit_columns'] = omit_columns
9214
9419
  if omit_properties is not None: query['omit_properties'] = omit_properties
@@ -9228,6 +9433,7 @@ class TablesAPI:
9228
9433
  def list_summaries(self,
9229
9434
  catalog_name: str,
9230
9435
  *,
9436
+ include_manifest_capabilities: Optional[bool] = None,
9231
9437
  max_results: Optional[int] = None,
9232
9438
  page_token: Optional[str] = None,
9233
9439
  schema_name_pattern: Optional[str] = None,
@@ -9247,6 +9453,8 @@ class TablesAPI:
9247
9453
 
9248
9454
  :param catalog_name: str
9249
9455
  Name of parent catalog for tables of interest.
9456
+ :param include_manifest_capabilities: bool (optional)
9457
+ Whether to include a manifest containing capabilities the table has.
9250
9458
  :param max_results: int (optional)
9251
9459
  Maximum number of summaries for tables to return. If not set, the page length is set to a server
9252
9460
  configured value (10000, as of 1/5/2024). - when set to a value greater than 0, the page length is
@@ -9265,6 +9473,8 @@ class TablesAPI:
9265
9473
 
9266
9474
  query = {}
9267
9475
  if catalog_name is not None: query['catalog_name'] = catalog_name
9476
+ if include_manifest_capabilities is not None:
9477
+ query['include_manifest_capabilities'] = include_manifest_capabilities
9268
9478
  if max_results is not None: query['max_results'] = max_results
9269
9479
  if page_token is not None: query['page_token'] = page_token
9270
9480
  if schema_name_pattern is not None: query['schema_name_pattern'] = schema_name_pattern
@@ -9301,6 +9511,55 @@ class TablesAPI:
9301
9511
  self._api.do('PATCH', f'/api/2.1/unity-catalog/tables/{full_name}', body=body, headers=headers)
9302
9512
 
9303
9513
 
9514
+ class TemporaryTableCredentialsAPI:
9515
+ """Temporary Table Credentials refer to short-lived, downscoped credentials used to access cloud storage
9516
+ locations where table data is stored in Databricks. These credentials are employed to provide secure and
9517
+ time-limited access to data in cloud environments such as AWS, Azure, and Google Cloud. Each cloud provider
9518
+ has its own type of credentials: AWS uses temporary session tokens via AWS Security Token Service (STS),
9519
+ Azure utilizes Shared Access Signatures (SAS) for its data storage services, and Google Cloud supports
9520
+ temporary credentials through OAuth 2.0. Temporary table credentials ensure that data access is limited in
9521
+ scope and duration, reducing the risk of unauthorized access or misuse. To use the temporary table
9522
+ credentials API, a metastore admin needs to enable the external_access_enabled flag (off by default) at
9523
+ the metastore level, and user needs to be granted the EXTERNAL USE SCHEMA permission at the schema level
9524
+ by catalog admin. Note that EXTERNAL USE SCHEMA is a schema level permission that can only be granted by
9525
+ catalog admin explicitly and is not included in schema ownership or ALL PRIVILEGES on the schema for
9526
+ security reason."""
9527
+
9528
+ def __init__(self, api_client):
9529
+ self._api = api_client
9530
+
9531
+ def generate_temporary_table_credentials(
9532
+ self,
9533
+ *,
9534
+ operation: Optional[TableOperation] = None,
9535
+ table_id: Optional[str] = None) -> GenerateTemporaryTableCredentialResponse:
9536
+ """Generate a temporary table credential.
9537
+
9538
+ Get a short-lived credential for directly accessing the table data on cloud storage. The metastore
9539
+ must have external_access_enabled flag set to true (default false). The caller must have
9540
+ EXTERNAL_USE_SCHEMA privilege on the parent schema and this privilege can only be granted by catalog
9541
+ owners.
9542
+
9543
+ :param operation: :class:`TableOperation` (optional)
9544
+ The operation performed against the table data, either READ or READ_WRITE. If READ_WRITE is
9545
+ specified, the credentials returned will have write permissions, otherwise, it will be read only.
9546
+ :param table_id: str (optional)
9547
+ UUID of the table to read or write.
9548
+
9549
+ :returns: :class:`GenerateTemporaryTableCredentialResponse`
9550
+ """
9551
+ body = {}
9552
+ if operation is not None: body['operation'] = operation.value
9553
+ if table_id is not None: body['table_id'] = table_id
9554
+ headers = {'Accept': 'application/json', 'Content-Type': 'application/json', }
9555
+
9556
+ res = self._api.do('POST',
9557
+ '/api/2.0/unity-catalog/temporary-table-credentials',
9558
+ body=body,
9559
+ headers=headers)
9560
+ return GenerateTemporaryTableCredentialResponse.from_dict(res)
9561
+
9562
+
9304
9563
  class VolumesAPI:
9305
9564
  """Volumes are a Unity Catalog (UC) capability for accessing, storing, governing, organizing and processing
9306
9565
  files. Use cases include running machine learning on unstructured data such as image, audio, video, or PDF
@@ -598,8 +598,13 @@ class ClusterAttributes:
598
598
  """The ID of the cluster policy used to create the cluster if applicable."""
599
599
 
600
600
  runtime_engine: Optional[RuntimeEngine] = None
601
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
602
- engine is inferred from spark_version."""
601
+ """Determines the cluster's runtime engine, either standard or Photon.
602
+
603
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
604
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
605
+
606
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
607
+ -photon-, in which case Photon will be used."""
603
608
 
604
609
  single_user_name: Optional[str] = None
605
610
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -882,8 +887,13 @@ class ClusterDetails:
882
887
  """The ID of the cluster policy used to create the cluster if applicable."""
883
888
 
884
889
  runtime_engine: Optional[RuntimeEngine] = None
885
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
886
- engine is inferred from spark_version."""
890
+ """Determines the cluster's runtime engine, either standard or Photon.
891
+
892
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
893
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
894
+
895
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
896
+ -photon-, in which case Photon will be used."""
887
897
 
888
898
  single_user_name: Optional[str] = None
889
899
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1596,8 +1606,13 @@ class ClusterSpec:
1596
1606
  """The ID of the cluster policy used to create the cluster if applicable."""
1597
1607
 
1598
1608
  runtime_engine: Optional[RuntimeEngine] = None
1599
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
1600
- engine is inferred from spark_version."""
1609
+ """Determines the cluster's runtime engine, either standard or Photon.
1610
+
1611
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
1612
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
1613
+
1614
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
1615
+ -photon-, in which case Photon will be used."""
1601
1616
 
1602
1617
  single_user_name: Optional[str] = None
1603
1618
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -1912,8 +1927,13 @@ class CreateCluster:
1912
1927
  """The ID of the cluster policy used to create the cluster if applicable."""
1913
1928
 
1914
1929
  runtime_engine: Optional[RuntimeEngine] = None
1915
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
1916
- engine is inferred from spark_version."""
1930
+ """Determines the cluster's runtime engine, either standard or Photon.
1931
+
1932
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
1933
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
1934
+
1935
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
1936
+ -photon-, in which case Photon will be used."""
1917
1937
 
1918
1938
  single_user_name: Optional[str] = None
1919
1939
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -2759,8 +2779,13 @@ class EditCluster:
2759
2779
  """The ID of the cluster policy used to create the cluster if applicable."""
2760
2780
 
2761
2781
  runtime_engine: Optional[RuntimeEngine] = None
2762
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
2763
- engine is inferred from spark_version."""
2782
+ """Determines the cluster's runtime engine, either standard or Photon.
2783
+
2784
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
2785
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
2786
+
2787
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
2788
+ -photon-, in which case Photon will be used."""
2764
2789
 
2765
2790
  single_user_name: Optional[str] = None
2766
2791
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -5647,8 +5672,13 @@ class Results:
5647
5672
 
5648
5673
 
5649
5674
  class RuntimeEngine(Enum):
5650
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
5651
- engine is inferred from spark_version."""
5675
+ """Determines the cluster's runtime engine, either standard or Photon.
5676
+
5677
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
5678
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
5679
+
5680
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
5681
+ -photon-, in which case Photon will be used."""
5652
5682
 
5653
5683
  NULL = 'NULL'
5654
5684
  PHOTON = 'PHOTON'
@@ -6181,8 +6211,13 @@ class UpdateClusterResource:
6181
6211
  """The ID of the cluster policy used to create the cluster if applicable."""
6182
6212
 
6183
6213
  runtime_engine: Optional[RuntimeEngine] = None
6184
- """Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime
6185
- engine is inferred from spark_version."""
6214
+ """Determines the cluster's runtime engine, either standard or Photon.
6215
+
6216
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
6217
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
6218
+
6219
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
6220
+ -photon-, in which case Photon will be used."""
6186
6221
 
6187
6222
  single_user_name: Optional[str] = None
6188
6223
  """Single user name if data_security_mode is `SINGLE_USER`"""
@@ -6805,6 +6840,11 @@ class ClustersAPI:
6805
6840
  If Databricks acquires at least 85% of the requested on-demand nodes, cluster creation will succeed.
6806
6841
  Otherwise the cluster will terminate with an informative error message.
6807
6842
 
6843
+ Rather than authoring the cluster's JSON definition from scratch, Databricks recommends filling out
6844
+ the [create compute UI] and then copying the generated JSON definition from the UI.
6845
+
6846
+ [create compute UI]: https://docs.databricks.com/compute/configure.html
6847
+
6808
6848
  :param spark_version: str
6809
6849
  The Spark version of the cluster, e.g. `3.3.x-scala2.11`. A list of available Spark versions can be
6810
6850
  retrieved by using the :method:clusters/sparkVersions API call.
@@ -6900,8 +6940,13 @@ class ClustersAPI:
6900
6940
  :param policy_id: str (optional)
6901
6941
  The ID of the cluster policy used to create the cluster if applicable.
6902
6942
  :param runtime_engine: :class:`RuntimeEngine` (optional)
6903
- Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
6904
- is inferred from spark_version.
6943
+ Determines the cluster's runtime engine, either standard or Photon.
6944
+
6945
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
6946
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
6947
+
6948
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
6949
+ -photon-, in which case Photon will be used.
6905
6950
  :param single_user_name: str (optional)
6906
6951
  Single user name if data_security_mode is `SINGLE_USER`
6907
6952
  :param spark_conf: Dict[str,str] (optional)
@@ -7194,8 +7239,13 @@ class ClustersAPI:
7194
7239
  :param policy_id: str (optional)
7195
7240
  The ID of the cluster policy used to create the cluster if applicable.
7196
7241
  :param runtime_engine: :class:`RuntimeEngine` (optional)
7197
- Decides which runtime engine to be use, e.g. Standard vs. Photon. If unspecified, the runtime engine
7198
- is inferred from spark_version.
7242
+ Determines the cluster's runtime engine, either standard or Photon.
7243
+
7244
+ This field is not compatible with legacy `spark_version` values that contain `-photon-`. Remove
7245
+ `-photon-` from the `spark_version` and set `runtime_engine` to `PHOTON`.
7246
+
7247
+ If left unspecified, the runtime engine defaults to standard unless the spark_version contains
7248
+ -photon-, in which case Photon will be used.
7199
7249
  :param single_user_name: str (optional)
7200
7250
  Single user name if data_security_mode is `SINGLE_USER`
7201
7251
  :param spark_conf: Dict[str,str] (optional)