pulumi-databricks 1.77.0a1760375482__py3-none-any.whl → 1.79.0a1762839813__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (176)
  1. pulumi_databricks/__init__.py +54 -46
  2. pulumi_databricks/_inputs.py +26079 -32918
  3. pulumi_databricks/account_federation_policy.py +44 -16
  4. pulumi_databricks/account_network_policy.py +56 -2
  5. pulumi_databricks/account_setting_v2.py +46 -83
  6. pulumi_databricks/alert_v2.py +168 -134
  7. pulumi_databricks/app.py +153 -0
  8. pulumi_databricks/apps_settings_custom_template.py +75 -52
  9. pulumi_databricks/budget_policy.py +6 -2
  10. pulumi_databricks/catalog.py +65 -23
  11. pulumi_databricks/cluster.py +47 -168
  12. pulumi_databricks/cluster_policy.py +120 -0
  13. pulumi_databricks/config/__init__.pyi +4 -0
  14. pulumi_databricks/config/vars.py +8 -0
  15. pulumi_databricks/data_quality_monitor.py +453 -0
  16. pulumi_databricks/data_quality_refresh.py +492 -0
  17. pulumi_databricks/database_database_catalog.py +9 -52
  18. pulumi_databricks/database_instance.py +271 -120
  19. pulumi_databricks/database_synced_database_table.py +239 -52
  20. pulumi_databricks/entitlements.py +21 -21
  21. pulumi_databricks/entity_tag_assignment.py +83 -52
  22. pulumi_databricks/external_location.py +131 -7
  23. pulumi_databricks/external_metadata.py +16 -59
  24. pulumi_databricks/feature_engineering_feature.py +480 -0
  25. pulumi_databricks/feature_engineering_materialized_feature.py +397 -0
  26. pulumi_databricks/file.py +2 -2
  27. pulumi_databricks/get_account_federation_policies.py +22 -3
  28. pulumi_databricks/get_account_federation_policy.py +12 -27
  29. pulumi_databricks/get_account_network_policies.py +4 -0
  30. pulumi_databricks/get_account_network_policy.py +9 -18
  31. pulumi_databricks/get_account_setting_v2.py +36 -141
  32. pulumi_databricks/get_alert_v2.py +41 -107
  33. pulumi_databricks/get_alerts_v2.py +29 -31
  34. pulumi_databricks/get_app.py +21 -5
  35. pulumi_databricks/get_apps.py +22 -6
  36. pulumi_databricks/get_apps_settings_custom_template.py +11 -55
  37. pulumi_databricks/get_apps_settings_custom_templates.py +25 -21
  38. pulumi_databricks/get_aws_assume_role_policy.py +14 -14
  39. pulumi_databricks/get_aws_bucket_policy.py +10 -10
  40. pulumi_databricks/get_aws_unity_catalog_assume_role_policy.py +10 -10
  41. pulumi_databricks/get_aws_unity_catalog_policy.py +10 -10
  42. pulumi_databricks/get_budget_policies.py +70 -6
  43. pulumi_databricks/get_budget_policy.py +9 -32
  44. pulumi_databricks/get_catalog.py +21 -4
  45. pulumi_databricks/get_catalogs.py +23 -4
  46. pulumi_databricks/get_cluster.py +75 -4
  47. pulumi_databricks/get_cluster_policy.py +23 -4
  48. pulumi_databricks/get_clusters.py +21 -4
  49. pulumi_databricks/get_current_config.py +4 -4
  50. pulumi_databricks/get_current_metastore.py +21 -4
  51. pulumi_databricks/get_dashboards.py +20 -4
  52. pulumi_databricks/get_data_quality_monitor.py +210 -0
  53. pulumi_databricks/get_data_quality_monitors.py +143 -0
  54. pulumi_databricks/get_data_quality_refresh.py +270 -0
  55. pulumi_databricks/get_data_quality_refreshes.py +207 -0
  56. pulumi_databricks/get_database_database_catalog.py +10 -41
  57. pulumi_databricks/get_database_database_catalogs.py +37 -18
  58. pulumi_databricks/get_database_instance.py +93 -99
  59. pulumi_databricks/get_database_instances.py +20 -16
  60. pulumi_databricks/get_database_synced_database_table.py +11 -49
  61. pulumi_databricks/get_database_synced_database_tables.py +42 -23
  62. pulumi_databricks/get_directory.py +20 -1
  63. pulumi_databricks/get_entity_tag_assignment.py +63 -30
  64. pulumi_databricks/get_entity_tag_assignments.py +69 -23
  65. pulumi_databricks/get_external_location.py +21 -4
  66. pulumi_databricks/get_external_locations.py +23 -4
  67. pulumi_databricks/get_external_metadata.py +18 -73
  68. pulumi_databricks/get_external_metadatas.py +22 -16
  69. pulumi_databricks/get_feature_engineering_feature.py +179 -0
  70. pulumi_databricks/get_feature_engineering_features.py +103 -0
  71. pulumi_databricks/get_feature_engineering_materialized_feature.py +180 -0
  72. pulumi_databricks/get_feature_engineering_materialized_features.py +123 -0
  73. pulumi_databricks/get_functions.py +16 -1
  74. pulumi_databricks/get_instance_profiles.py +21 -4
  75. pulumi_databricks/get_jobs.py +23 -4
  76. pulumi_databricks/get_materialized_features_feature_tag.py +7 -32
  77. pulumi_databricks/get_materialized_features_feature_tags.py +50 -18
  78. pulumi_databricks/get_metastore.py +6 -6
  79. pulumi_databricks/get_mlflow_experiment.py +18 -1
  80. pulumi_databricks/get_mlflow_model.py +18 -1
  81. pulumi_databricks/get_mlflow_models.py +23 -4
  82. pulumi_databricks/get_node_type.py +42 -5
  83. pulumi_databricks/get_notebook.py +20 -1
  84. pulumi_databricks/get_notification_destinations.py +17 -1
  85. pulumi_databricks/get_online_store.py +11 -38
  86. pulumi_databricks/get_online_stores.py +20 -18
  87. pulumi_databricks/get_pipelines.py +23 -4
  88. pulumi_databricks/get_policy_info.py +21 -109
  89. pulumi_databricks/get_policy_infos.py +50 -25
  90. pulumi_databricks/get_quality_monitor_v2.py +8 -21
  91. pulumi_databricks/get_quality_monitors_v2.py +23 -25
  92. pulumi_databricks/get_registered_model.py +19 -4
  93. pulumi_databricks/get_registered_model_versions.py +19 -4
  94. pulumi_databricks/get_rfa_access_request_destinations.py +126 -0
  95. pulumi_databricks/get_schema.py +18 -1
  96. pulumi_databricks/get_schemas.py +23 -4
  97. pulumi_databricks/get_service_principal_federation_policies.py +23 -4
  98. pulumi_databricks/get_service_principal_federation_policy.py +15 -24
  99. pulumi_databricks/get_service_principals.py +64 -0
  100. pulumi_databricks/get_serving_endpoints.py +19 -4
  101. pulumi_databricks/get_share.py +117 -18
  102. pulumi_databricks/get_shares.py +22 -3
  103. pulumi_databricks/get_spark_version.py +20 -1
  104. pulumi_databricks/get_sql_warehouse.py +16 -1
  105. pulumi_databricks/get_sql_warehouses.py +20 -1
  106. pulumi_databricks/get_storage_credential.py +18 -1
  107. pulumi_databricks/get_storage_credentials.py +23 -4
  108. pulumi_databricks/get_table.py +18 -1
  109. pulumi_databricks/get_tables.py +20 -1
  110. pulumi_databricks/get_tag_policies.py +55 -23
  111. pulumi_databricks/get_tag_policy.py +60 -40
  112. pulumi_databricks/get_views.py +20 -1
  113. pulumi_databricks/get_volume.py +18 -1
  114. pulumi_databricks/get_volumes.py +20 -1
  115. pulumi_databricks/get_workspace_network_option.py +8 -16
  116. pulumi_databricks/get_workspace_setting_v2.py +41 -163
  117. pulumi_databricks/get_zones.py +20 -1
  118. pulumi_databricks/git_credential.py +54 -7
  119. pulumi_databricks/group.py +21 -21
  120. pulumi_databricks/instance_profile.py +0 -182
  121. pulumi_databricks/job.py +47 -0
  122. pulumi_databricks/library.py +165 -0
  123. pulumi_databricks/materialized_features_feature_tag.py +9 -52
  124. pulumi_databricks/metastore.py +81 -7
  125. pulumi_databricks/metastore_data_access.py +48 -0
  126. pulumi_databricks/mlflow_webhook.py +4 -4
  127. pulumi_databricks/mws_credentials.py +10 -10
  128. pulumi_databricks/mws_customer_managed_keys.py +0 -288
  129. pulumi_databricks/mws_log_delivery.py +146 -0
  130. pulumi_databricks/mws_storage_configurations.py +16 -16
  131. pulumi_databricks/mws_vpc_endpoint.py +56 -56
  132. pulumi_databricks/mws_workspaces.py +146 -65
  133. pulumi_databricks/notebook.py +49 -0
  134. pulumi_databricks/online_store.py +9 -52
  135. pulumi_databricks/outputs.py +11885 -13474
  136. pulumi_databricks/permission_assignment.py +299 -8
  137. pulumi_databricks/permissions.py +6 -6
  138. pulumi_databricks/pipeline.py +37 -3
  139. pulumi_databricks/policy_info.py +9 -52
  140. pulumi_databricks/provider.py +36 -1
  141. pulumi_databricks/pulumi-plugin.json +1 -1
  142. pulumi_databricks/quality_monitor.py +47 -0
  143. pulumi_databricks/quality_monitor_v2.py +9 -52
  144. pulumi_databricks/recipient.py +74 -0
  145. pulumi_databricks/registered_model.py +308 -36
  146. pulumi_databricks/rfa_access_request_destinations.py +286 -0
  147. pulumi_databricks/schema.py +7 -7
  148. pulumi_databricks/service_principal_federation_policy.py +48 -16
  149. pulumi_databricks/share.py +71 -84
  150. pulumi_databricks/sql_table.py +42 -14
  151. pulumi_databricks/storage_credential.py +15 -2
  152. pulumi_databricks/tag_policy.py +111 -54
  153. pulumi_databricks/user.py +21 -21
  154. pulumi_databricks/volume.py +7 -7
  155. pulumi_databricks/workspace_binding.py +0 -48
  156. pulumi_databricks/workspace_network_option.py +6 -2
  157. pulumi_databricks/workspace_setting_v2.py +49 -133
  158. {pulumi_databricks-1.77.0a1760375482.dist-info → pulumi_databricks-1.79.0a1762839813.dist-info}/METADATA +1 -1
  159. pulumi_databricks-1.79.0a1762839813.dist-info/RECORD +250 -0
  160. pulumi_databricks/clean_room_asset.py +0 -938
  161. pulumi_databricks/clean_room_auto_approval_rule.py +0 -473
  162. pulumi_databricks/clean_rooms_clean_room.py +0 -569
  163. pulumi_databricks/get_clean_room_asset.py +0 -416
  164. pulumi_databricks/get_clean_room_asset_revisions_clean_room_asset.py +0 -392
  165. pulumi_databricks/get_clean_room_asset_revisions_clean_room_assets.py +0 -121
  166. pulumi_databricks/get_clean_room_assets.py +0 -146
  167. pulumi_databricks/get_clean_room_auto_approval_rule.py +0 -217
  168. pulumi_databricks/get_clean_room_auto_approval_rules.py +0 -101
  169. pulumi_databricks/get_clean_rooms_clean_room.py +0 -289
  170. pulumi_databricks/get_clean_rooms_clean_rooms.py +0 -125
  171. pulumi_databricks/get_recipient_federation_policies.py +0 -101
  172. pulumi_databricks/get_recipient_federation_policy.py +0 -182
  173. pulumi_databricks/recipient_federation_policy.py +0 -393
  174. pulumi_databricks-1.77.0a1760375482.dist-info/RECORD +0 -250
  175. {pulumi_databricks-1.77.0a1760375482.dist-info → pulumi_databricks-1.79.0a1762839813.dist-info}/WHEEL +0 -0
  176. {pulumi_databricks-1.77.0a1760375482.dist-info → pulumi_databricks-1.79.0a1762839813.dist-info}/top_level.txt +0 -0
@@ -24,8 +24,7 @@ class DatabaseSyncedDatabaseTableArgs:
24
24
  database_instance_name: Optional[pulumi.Input[_builtins.str]] = None,
25
25
  logical_database_name: Optional[pulumi.Input[_builtins.str]] = None,
26
26
  name: Optional[pulumi.Input[_builtins.str]] = None,
27
- spec: Optional[pulumi.Input['DatabaseSyncedDatabaseTableSpecArgs']] = None,
28
- workspace_id: Optional[pulumi.Input[_builtins.str]] = None):
27
+ spec: Optional[pulumi.Input['DatabaseSyncedDatabaseTableSpecArgs']] = None):
29
28
  """
30
29
  The set of arguments for constructing a DatabaseSyncedDatabaseTable resource.
31
30
  :param pulumi.Input[_builtins.str] database_instance_name: Name of the target database instance. This is required when creating synced database tables in standard catalogs.
@@ -43,7 +42,6 @@ class DatabaseSyncedDatabaseTableArgs:
43
42
  In this scenario, specifying this field will allow targeting an arbitrary postgres database.
44
43
  Note that this has implications for the `create_database_objects_if_missing` field in `spec`
45
44
  :param pulumi.Input[_builtins.str] name: Full three-part (catalog, schema, table) name of the table
46
- :param pulumi.Input[_builtins.str] workspace_id: Workspace ID of the resource
47
45
  """
48
46
  if database_instance_name is not None:
49
47
  pulumi.set(__self__, "database_instance_name", database_instance_name)
@@ -53,8 +51,6 @@ class DatabaseSyncedDatabaseTableArgs:
53
51
  pulumi.set(__self__, "name", name)
54
52
  if spec is not None:
55
53
  pulumi.set(__self__, "spec", spec)
56
- if workspace_id is not None:
57
- pulumi.set(__self__, "workspace_id", workspace_id)
58
54
 
59
55
  @_builtins.property
60
56
  @pulumi.getter(name="databaseInstanceName")
@@ -113,18 +109,6 @@ class DatabaseSyncedDatabaseTableArgs:
113
109
  def spec(self, value: Optional[pulumi.Input['DatabaseSyncedDatabaseTableSpecArgs']]):
114
110
  pulumi.set(self, "spec", value)
115
111
 
116
- @_builtins.property
117
- @pulumi.getter(name="workspaceId")
118
- def workspace_id(self) -> Optional[pulumi.Input[_builtins.str]]:
119
- """
120
- Workspace ID of the resource
121
- """
122
- return pulumi.get(self, "workspace_id")
123
-
124
- @workspace_id.setter
125
- def workspace_id(self, value: Optional[pulumi.Input[_builtins.str]]):
126
- pulumi.set(self, "workspace_id", value)
127
-
128
112
 
129
113
  @pulumi.input_type
130
114
  class _DatabaseSyncedDatabaseTableState:
@@ -136,8 +120,7 @@ class _DatabaseSyncedDatabaseTableState:
136
120
  logical_database_name: Optional[pulumi.Input[_builtins.str]] = None,
137
121
  name: Optional[pulumi.Input[_builtins.str]] = None,
138
122
  spec: Optional[pulumi.Input['DatabaseSyncedDatabaseTableSpecArgs']] = None,
139
- unity_catalog_provisioning_state: Optional[pulumi.Input[_builtins.str]] = None,
140
- workspace_id: Optional[pulumi.Input[_builtins.str]] = None):
123
+ unity_catalog_provisioning_state: Optional[pulumi.Input[_builtins.str]] = None):
141
124
  """
142
125
  Input properties used for looking up and filtering DatabaseSyncedDatabaseTable resources.
143
126
  :param pulumi.Input['DatabaseSyncedDatabaseTableDataSynchronizationStatusArgs'] data_synchronization_status: (SyncedTableStatus) - Synced Table data synchronization status
@@ -162,7 +145,6 @@ class _DatabaseSyncedDatabaseTableState:
162
145
  :param pulumi.Input[_builtins.str] unity_catalog_provisioning_state: (string) - The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
163
146
  state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
164
147
  may be in "PROVISIONING" as it runs asynchronously). Possible values are: `ACTIVE`, `DEGRADED`, `DELETING`, `FAILED`, `PROVISIONING`, `UPDATING`
165
- :param pulumi.Input[_builtins.str] workspace_id: Workspace ID of the resource
166
148
  """
167
149
  if data_synchronization_status is not None:
168
150
  pulumi.set(__self__, "data_synchronization_status", data_synchronization_status)
@@ -180,8 +162,6 @@ class _DatabaseSyncedDatabaseTableState:
180
162
  pulumi.set(__self__, "spec", spec)
181
163
  if unity_catalog_provisioning_state is not None:
182
164
  pulumi.set(__self__, "unity_catalog_provisioning_state", unity_catalog_provisioning_state)
183
- if workspace_id is not None:
184
- pulumi.set(__self__, "workspace_id", workspace_id)
185
165
 
186
166
  @_builtins.property
187
167
  @pulumi.getter(name="dataSynchronizationStatus")
@@ -291,18 +271,6 @@ class _DatabaseSyncedDatabaseTableState:
291
271
  def unity_catalog_provisioning_state(self, value: Optional[pulumi.Input[_builtins.str]]):
292
272
  pulumi.set(self, "unity_catalog_provisioning_state", value)
293
273
 
294
- @_builtins.property
295
- @pulumi.getter(name="workspaceId")
296
- def workspace_id(self) -> Optional[pulumi.Input[_builtins.str]]:
297
- """
298
- Workspace ID of the resource
299
- """
300
- return pulumi.get(self, "workspace_id")
301
-
302
- @workspace_id.setter
303
- def workspace_id(self, value: Optional[pulumi.Input[_builtins.str]]):
304
- pulumi.set(self, "workspace_id", value)
305
-
306
274
 
307
275
  @pulumi.type_token("databricks:index/databaseSyncedDatabaseTable:DatabaseSyncedDatabaseTable")
308
276
  class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
@@ -314,9 +282,10 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
314
282
  logical_database_name: Optional[pulumi.Input[_builtins.str]] = None,
315
283
  name: Optional[pulumi.Input[_builtins.str]] = None,
316
284
  spec: Optional[pulumi.Input[Union['DatabaseSyncedDatabaseTableSpecArgs', 'DatabaseSyncedDatabaseTableSpecArgsDict']]] = None,
317
- workspace_id: Optional[pulumi.Input[_builtins.str]] = None,
318
285
  __props__=None):
319
286
  """
287
+ [![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types)
288
+
320
289
  Lakebase Synced Database Tables are Postgres tables automatically synced from a source table inside Unity Catalog.
321
290
  They can be used to serve realtime queries without the operational overhead of managing ETL pipelines.
322
291
 
@@ -329,15 +298,130 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
329
298
 
330
299
  This example creates a Synced Database Table inside a Database Catalog.
331
300
 
301
+ ```python
302
+ import pulumi
303
+ import pulumi_databricks as databricks
304
+
305
+ this = databricks.DatabaseSyncedDatabaseTable("this",
306
+ name="my_database_catalog.public.synced_table",
307
+ logical_database_name="databricks_postgres",
308
+ spec={
309
+ "scheduling_policy": "SNAPSHOT",
310
+ "source_table_full_name": "source_delta.tpch.customer",
311
+ "primary_key_columns": ["c_custkey"],
312
+ "create_database_objects_if_missing": True,
313
+ "new_pipeline_spec": {
314
+ "storage_catalog": "source_delta",
315
+ "storage_schema": "tpch",
316
+ },
317
+ })
318
+ ```
319
+
332
320
  ### Creating a Synced Database Table inside a Standard Catalog
333
321
 
334
322
  This example creates a Synced Database Table inside a Standard Catalog.
335
323
 
324
+ ```python
325
+ import pulumi
326
+ import pulumi_databricks as databricks
327
+
328
+ this = databricks.DatabaseSyncedDatabaseTable("this",
329
+ name="my_standard_catalog.public.synced_table",
330
+ logical_database_name="databricks_postgres",
331
+ database_instance_name="my-database-instance",
332
+ spec={
333
+ "scheduling_policy": "SNAPSHOT",
334
+ "source_table_full_name": "source_delta.tpch.customer",
335
+ "primary_key_columns": ["c_custkey"],
336
+ "create_database_objects_if_missing": True,
337
+ "new_pipeline_spec": {
338
+ "storage_catalog": "source_delta",
339
+ "storage_schema": "tpch",
340
+ },
341
+ })
342
+ ```
343
+
336
344
  ### Creating multiple Synced Database Tables and bin packing them into a single pipeline
337
345
 
338
346
  This example creates two Synced Database Tables. The first one specifies a new pipeline spec,
339
347
  which generates a new pipeline. The second one utilizes the pipeline ID of the first table.
340
348
 
349
+ ```python
350
+ import pulumi
351
+ import pulumi_databricks as databricks
352
+
353
+ instance = databricks.DatabaseInstance("instance",
354
+ name="my-database-instance",
355
+ capacity="CU_1")
356
+ synced_table1 = databricks.DatabaseSyncedDatabaseTable("synced_table_1",
357
+ name="my_standard_catalog.public.synced_table1",
358
+ logical_database_name="databricks_postgres",
359
+ database_instance_name=instance.name,
360
+ spec={
361
+ "scheduling_policy": "SNAPSHOT",
362
+ "source_table_full_name": "source_delta.tpch.customer",
363
+ "primary_key_columns": ["c_custkey"],
364
+ "create_database_objects_if_missing": True,
365
+ "new_pipeline_spec": {
366
+ "storage_catalog": "source_delta",
367
+ "storage_schema": "tpch",
368
+ },
369
+ })
370
+ synced_table2 = databricks.DatabaseSyncedDatabaseTable("synced_table_2",
371
+ name="my_standard_catalog.public.synced_table2",
372
+ logical_database_name="databricks_postgres",
373
+ database_instance_name=instance.name,
374
+ spec={
375
+ "scheduling_policy": "SNAPSHOT",
376
+ "source_table_full_name": "source_delta.tpch.customer",
377
+ "primary_key_columns": ["c_custkey"],
378
+ "create_database_objects_if_missing": True,
379
+ "existing_pipeline_id": synced_table1.data_synchronization_status.pipeline_id,
380
+ })
381
+ ```
382
+
383
+ ### Creating a Synced Database Table with a custom Jobs schedule
384
+
385
+ This example creates a Synced Database Table and customizes the pipeline schedule. It assumes you already have
386
+
387
+ - A database instance named `"my-database-instance"`
388
+ - A standard catalog named `"my_standard_catalog"`
389
+ - A schema in the standard catalog named `"default"`
390
+ - A source delta table named `"source_delta.schema.customer"` with the primary key `"c_custkey"`
391
+
392
+ ```python
393
+ import pulumi
394
+ import pulumi_databricks as databricks
395
+
396
+ synced_table = databricks.DatabaseSyncedDatabaseTable("synced_table",
397
+ name="my_standard_catalog.default.my_synced_table",
398
+ logical_database_name="terraform_test_db",
399
+ database_instance_name="my-database-instance",
400
+ spec={
401
+ "scheduling_policy": "SNAPSHOT",
402
+ "source_table_full_name": "source_delta.schema.customer",
403
+ "primary_key_columns": ["c_custkey"],
404
+ "create_database_objects_if_missing": True,
405
+ "new_pipeline_spec": {
406
+ "storage_catalog": "source_delta",
407
+ "storage_schema": "schema",
408
+ },
409
+ })
410
+ sync_pipeline_schedule_job = databricks.Job("sync_pipeline_schedule_job",
411
+ name="Synced Pipeline Refresh",
412
+ description="Job to schedule synced database table pipeline. ",
413
+ tasks=[{
414
+ "task_key": "synced-table-pipeline",
415
+ "pipeline_task": {
416
+ "pipeline_id": synced_table.data_synchronization_status.pipeline_id,
417
+ },
418
+ }],
419
+ schedule={
420
+ "quartz_cron_expression": "0 0 0 * * ?",
421
+ "timezone_id": "Europe/Helsinki",
422
+ })
423
+ ```
424
+
341
425
  ## Import
342
426
 
343
427
  As of Pulumi v1.5, resources can be imported through configuration.
@@ -355,7 +439,7 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
355
439
  If you are using an older version of Pulumi, import the resource using the `pulumi import` command as follows:
356
440
 
357
441
  ```sh
358
- $ pulumi import databricks:index/databaseSyncedDatabaseTable:DatabaseSyncedDatabaseTable databricks_database_synced_database_table "name"
442
+ $ pulumi import databricks:index/databaseSyncedDatabaseTable:DatabaseSyncedDatabaseTable this "name"
359
443
  ```
360
444
 
361
445
  :param str resource_name: The name of the resource.
@@ -375,7 +459,6 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
375
459
  In this scenario, specifying this field will allow targeting an arbitrary postgres database.
376
460
  Note that this has implications for the `create_database_objects_if_missing` field in `spec`
377
461
  :param pulumi.Input[_builtins.str] name: Full three-part (catalog, schema, table) name of the table
378
- :param pulumi.Input[_builtins.str] workspace_id: Workspace ID of the resource
379
462
  """
380
463
  ...
381
464
  @overload
@@ -384,6 +467,8 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
384
467
  args: Optional[DatabaseSyncedDatabaseTableArgs] = None,
385
468
  opts: Optional[pulumi.ResourceOptions] = None):
386
469
  """
470
+ [![Private Preview](https://img.shields.io/badge/Release_Stage-Private_Preview-blueviolet)](https://docs.databricks.com/aws/en/release-notes/release-types)
471
+
387
472
  Lakebase Synced Database Tables are Postgres tables automatically synced from a source table inside Unity Catalog.
388
473
  They can be used to serve realtime queries without the operational overhead of managing ETL pipelines.
389
474
 
@@ -396,15 +481,130 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
396
481
 
397
482
  This example creates a Synced Database Table inside a Database Catalog.
398
483
 
484
+ ```python
485
+ import pulumi
486
+ import pulumi_databricks as databricks
487
+
488
+ this = databricks.DatabaseSyncedDatabaseTable("this",
489
+ name="my_database_catalog.public.synced_table",
490
+ logical_database_name="databricks_postgres",
491
+ spec={
492
+ "scheduling_policy": "SNAPSHOT",
493
+ "source_table_full_name": "source_delta.tpch.customer",
494
+ "primary_key_columns": ["c_custkey"],
495
+ "create_database_objects_if_missing": True,
496
+ "new_pipeline_spec": {
497
+ "storage_catalog": "source_delta",
498
+ "storage_schema": "tpch",
499
+ },
500
+ })
501
+ ```
502
+
399
503
  ### Creating a Synced Database Table inside a Standard Catalog
400
504
 
401
505
  This example creates a Synced Database Table inside a Standard Catalog.
402
506
 
507
+ ```python
508
+ import pulumi
509
+ import pulumi_databricks as databricks
510
+
511
+ this = databricks.DatabaseSyncedDatabaseTable("this",
512
+ name="my_standard_catalog.public.synced_table",
513
+ logical_database_name="databricks_postgres",
514
+ database_instance_name="my-database-instance",
515
+ spec={
516
+ "scheduling_policy": "SNAPSHOT",
517
+ "source_table_full_name": "source_delta.tpch.customer",
518
+ "primary_key_columns": ["c_custkey"],
519
+ "create_database_objects_if_missing": True,
520
+ "new_pipeline_spec": {
521
+ "storage_catalog": "source_delta",
522
+ "storage_schema": "tpch",
523
+ },
524
+ })
525
+ ```
526
+
403
527
  ### Creating multiple Synced Database Tables and bin packing them into a single pipeline
404
528
 
405
529
  This example creates two Synced Database Tables. The first one specifies a new pipeline spec,
406
530
  which generates a new pipeline. The second one utilizes the pipeline ID of the first table.
407
531
 
532
+ ```python
533
+ import pulumi
534
+ import pulumi_databricks as databricks
535
+
536
+ instance = databricks.DatabaseInstance("instance",
537
+ name="my-database-instance",
538
+ capacity="CU_1")
539
+ synced_table1 = databricks.DatabaseSyncedDatabaseTable("synced_table_1",
540
+ name="my_standard_catalog.public.synced_table1",
541
+ logical_database_name="databricks_postgres",
542
+ database_instance_name=instance.name,
543
+ spec={
544
+ "scheduling_policy": "SNAPSHOT",
545
+ "source_table_full_name": "source_delta.tpch.customer",
546
+ "primary_key_columns": ["c_custkey"],
547
+ "create_database_objects_if_missing": True,
548
+ "new_pipeline_spec": {
549
+ "storage_catalog": "source_delta",
550
+ "storage_schema": "tpch",
551
+ },
552
+ })
553
+ synced_table2 = databricks.DatabaseSyncedDatabaseTable("synced_table_2",
554
+ name="my_standard_catalog.public.synced_table2",
555
+ logical_database_name="databricks_postgres",
556
+ database_instance_name=instance.name,
557
+ spec={
558
+ "scheduling_policy": "SNAPSHOT",
559
+ "source_table_full_name": "source_delta.tpch.customer",
560
+ "primary_key_columns": ["c_custkey"],
561
+ "create_database_objects_if_missing": True,
562
+ "existing_pipeline_id": synced_table1.data_synchronization_status.pipeline_id,
563
+ })
564
+ ```
565
+
566
+ ### Creating a Synced Database Table with a custom Jobs schedule
567
+
568
+ This example creates a Synced Database Table and customizes the pipeline schedule. It assumes you already have
569
+
570
+ - A database instance named `"my-database-instance"`
571
+ - A standard catalog named `"my_standard_catalog"`
572
+ - A schema in the standard catalog named `"default"`
573
+ - A source delta table named `"source_delta.schema.customer"` with the primary key `"c_custkey"`
574
+
575
+ ```python
576
+ import pulumi
577
+ import pulumi_databricks as databricks
578
+
579
+ synced_table = databricks.DatabaseSyncedDatabaseTable("synced_table",
580
+ name="my_standard_catalog.default.my_synced_table",
581
+ logical_database_name="terraform_test_db",
582
+ database_instance_name="my-database-instance",
583
+ spec={
584
+ "scheduling_policy": "SNAPSHOT",
585
+ "source_table_full_name": "source_delta.schema.customer",
586
+ "primary_key_columns": ["c_custkey"],
587
+ "create_database_objects_if_missing": True,
588
+ "new_pipeline_spec": {
589
+ "storage_catalog": "source_delta",
590
+ "storage_schema": "schema",
591
+ },
592
+ })
593
+ sync_pipeline_schedule_job = databricks.Job("sync_pipeline_schedule_job",
594
+ name="Synced Pipeline Refresh",
595
+ description="Job to schedule synced database table pipeline. ",
596
+ tasks=[{
597
+ "task_key": "synced-table-pipeline",
598
+ "pipeline_task": {
599
+ "pipeline_id": synced_table.data_synchronization_status.pipeline_id,
600
+ },
601
+ }],
602
+ schedule={
603
+ "quartz_cron_expression": "0 0 0 * * ?",
604
+ "timezone_id": "Europe/Helsinki",
605
+ })
606
+ ```
607
+
408
608
  ## Import
409
609
 
410
610
  As of Pulumi v1.5, resources can be imported through configuration.
@@ -422,7 +622,7 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
422
622
  If you are using an older version of Pulumi, import the resource using the `pulumi import` command as follows:
423
623
 
424
624
  ```sh
425
- $ pulumi import databricks:index/databaseSyncedDatabaseTable:DatabaseSyncedDatabaseTable databricks_database_synced_database_table "name"
625
+ $ pulumi import databricks:index/databaseSyncedDatabaseTable:DatabaseSyncedDatabaseTable this "name"
426
626
  ```
427
627
 
428
628
  :param str resource_name: The name of the resource.
@@ -444,7 +644,6 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
444
644
  logical_database_name: Optional[pulumi.Input[_builtins.str]] = None,
445
645
  name: Optional[pulumi.Input[_builtins.str]] = None,
446
646
  spec: Optional[pulumi.Input[Union['DatabaseSyncedDatabaseTableSpecArgs', 'DatabaseSyncedDatabaseTableSpecArgsDict']]] = None,
447
- workspace_id: Optional[pulumi.Input[_builtins.str]] = None,
448
647
  __props__=None):
449
648
  opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
450
649
  if not isinstance(opts, pulumi.ResourceOptions):
@@ -458,7 +657,6 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
458
657
  __props__.__dict__["logical_database_name"] = logical_database_name
459
658
  __props__.__dict__["name"] = name
460
659
  __props__.__dict__["spec"] = spec
461
- __props__.__dict__["workspace_id"] = workspace_id
462
660
  __props__.__dict__["data_synchronization_status"] = None
463
661
  __props__.__dict__["effective_database_instance_name"] = None
464
662
  __props__.__dict__["effective_logical_database_name"] = None
@@ -480,8 +678,7 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
480
678
  logical_database_name: Optional[pulumi.Input[_builtins.str]] = None,
481
679
  name: Optional[pulumi.Input[_builtins.str]] = None,
482
680
  spec: Optional[pulumi.Input[Union['DatabaseSyncedDatabaseTableSpecArgs', 'DatabaseSyncedDatabaseTableSpecArgsDict']]] = None,
483
- unity_catalog_provisioning_state: Optional[pulumi.Input[_builtins.str]] = None,
484
- workspace_id: Optional[pulumi.Input[_builtins.str]] = None) -> 'DatabaseSyncedDatabaseTable':
681
+ unity_catalog_provisioning_state: Optional[pulumi.Input[_builtins.str]] = None) -> 'DatabaseSyncedDatabaseTable':
485
682
  """
486
683
  Get an existing DatabaseSyncedDatabaseTable resource's state with the given name, id, and optional extra
487
684
  properties used to qualify the lookup.
@@ -511,7 +708,6 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
511
708
  :param pulumi.Input[_builtins.str] unity_catalog_provisioning_state: (string) - The provisioning state of the synced table entity in Unity Catalog. This is distinct from the
512
709
  state of the data synchronization pipeline (i.e. the table may be in "ACTIVE" but the pipeline
513
710
  may be in "PROVISIONING" as it runs asynchronously). Possible values are: `ACTIVE`, `DEGRADED`, `DELETING`, `FAILED`, `PROVISIONING`, `UPDATING`
514
- :param pulumi.Input[_builtins.str] workspace_id: Workspace ID of the resource
515
711
  """
516
712
  opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
517
713
 
@@ -525,7 +721,6 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
525
721
  __props__.__dict__["name"] = name
526
722
  __props__.__dict__["spec"] = spec
527
723
  __props__.__dict__["unity_catalog_provisioning_state"] = unity_catalog_provisioning_state
528
- __props__.__dict__["workspace_id"] = workspace_id
529
724
  return DatabaseSyncedDatabaseTable(resource_name, opts=opts, __props__=__props__)
530
725
 
531
726
  @_builtins.property
@@ -604,11 +799,3 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
604
799
  """
605
800
  return pulumi.get(self, "unity_catalog_provisioning_state")
606
801
 
607
- @_builtins.property
608
- @pulumi.getter(name="workspaceId")
609
- def workspace_id(self) -> pulumi.Output[Optional[_builtins.str]]:
610
- """
611
- Workspace ID of the resource
612
- """
613
- return pulumi.get(self, "workspace_id")
614
-
@@ -31,14 +31,14 @@ class EntitlementsArgs:
31
31
  The set of arguments for constructing a Entitlements resource.
32
32
  :param pulumi.Input[_builtins.bool] allow_cluster_create: Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and `cluster_id` argument. Everyone without `allow_cluster_create` argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
33
33
  :param pulumi.Input[_builtins.bool] allow_instance_pool_create: Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and instance_pool_id argument.
34
- :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
34
+ :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
35
35
  :param pulumi.Input[_builtins.str] group_id: Canonical unique identifier for the group.
36
36
  :param pulumi.Input[_builtins.str] service_principal_id: Canonical unique identifier for the service principal.
37
37
 
38
38
  The following entitlements are available.
39
39
  :param pulumi.Input[_builtins.str] user_id: Canonical unique identifier for the user.
40
- :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace.
41
- :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
40
+ :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
41
+ :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
42
42
  """
43
43
  if allow_cluster_create is not None:
44
44
  pulumi.set(__self__, "allow_cluster_create", allow_cluster_create)
@@ -85,7 +85,7 @@ class EntitlementsArgs:
85
85
  @pulumi.getter(name="databricksSqlAccess")
86
86
  def databricks_sql_access(self) -> Optional[pulumi.Input[_builtins.bool]]:
87
87
  """
88
- This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
88
+ This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
89
89
  """
90
90
  return pulumi.get(self, "databricks_sql_access")
91
91
 
@@ -135,7 +135,7 @@ class EntitlementsArgs:
135
135
  @pulumi.getter(name="workspaceAccess")
136
136
  def workspace_access(self) -> Optional[pulumi.Input[_builtins.bool]]:
137
137
  """
138
- This is a field to allow the principal to have access to a Databricks Workspace.
138
+ This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
139
139
  """
140
140
  return pulumi.get(self, "workspace_access")
141
141
 
@@ -147,7 +147,7 @@ class EntitlementsArgs:
147
147
  @pulumi.getter(name="workspaceConsume")
148
148
  def workspace_consume(self) -> Optional[pulumi.Input[_builtins.bool]]:
149
149
  """
150
- This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
150
+ This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
151
151
  """
152
152
  return pulumi.get(self, "workspace_consume")
153
153
 
@@ -171,14 +171,14 @@ class _EntitlementsState:
171
171
  Input properties used for looking up and filtering Entitlements resources.
172
172
  :param pulumi.Input[_builtins.bool] allow_cluster_create: Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and `cluster_id` argument. Everyone without `allow_cluster_create` argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
173
173
  :param pulumi.Input[_builtins.bool] allow_instance_pool_create: Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and instance_pool_id argument.
174
- :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
174
+ :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
175
175
  :param pulumi.Input[_builtins.str] group_id: Canonical unique identifier for the group.
176
176
  :param pulumi.Input[_builtins.str] service_principal_id: Canonical unique identifier for the service principal.
177
177
 
178
178
  The following entitlements are available.
179
179
  :param pulumi.Input[_builtins.str] user_id: Canonical unique identifier for the user.
180
- :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace.
181
- :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
180
+ :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
181
+ :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
182
182
  """
183
183
  if allow_cluster_create is not None:
184
184
  pulumi.set(__self__, "allow_cluster_create", allow_cluster_create)
@@ -225,7 +225,7 @@ class _EntitlementsState:
225
225
  @pulumi.getter(name="databricksSqlAccess")
226
226
  def databricks_sql_access(self) -> Optional[pulumi.Input[_builtins.bool]]:
227
227
  """
228
- This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
228
+ This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
229
229
  """
230
230
  return pulumi.get(self, "databricks_sql_access")
231
231
 
@@ -275,7 +275,7 @@ class _EntitlementsState:
275
275
  @pulumi.getter(name="workspaceAccess")
276
276
  def workspace_access(self) -> Optional[pulumi.Input[_builtins.bool]]:
277
277
  """
278
- This is a field to allow the principal to have access to a Databricks Workspace.
278
+ This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
279
279
  """
280
280
  return pulumi.get(self, "workspace_access")
281
281
 
@@ -287,7 +287,7 @@ class _EntitlementsState:
287
287
  @pulumi.getter(name="workspaceConsume")
288
288
  def workspace_consume(self) -> Optional[pulumi.Input[_builtins.bool]]:
289
289
  """
290
- This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
290
+ This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
291
291
  """
292
292
  return pulumi.get(self, "workspace_consume")
293
293
 
@@ -403,14 +403,14 @@ class Entitlements(pulumi.CustomResource):
403
403
  :param pulumi.ResourceOptions opts: Options for the resource.
404
404
  :param pulumi.Input[_builtins.bool] allow_cluster_create: Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and `cluster_id` argument. Everyone without `allow_cluster_create` argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
405
405
  :param pulumi.Input[_builtins.bool] allow_instance_pool_create: Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and instance_pool_id argument.
406
- :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
406
+ :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
407
407
  :param pulumi.Input[_builtins.str] group_id: Canonical unique identifier for the group.
408
408
  :param pulumi.Input[_builtins.str] service_principal_id: Canonical unique identifier for the service principal.
409
409
 
410
410
  The following entitlements are available.
411
411
  :param pulumi.Input[_builtins.str] user_id: Canonical unique identifier for the user.
412
- :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace.
413
- :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
412
+ :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
413
+ :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
414
414
  """
415
415
  ...
416
416
  @overload
@@ -573,14 +573,14 @@ class Entitlements(pulumi.CustomResource):
573
573
  :param pulumi.ResourceOptions opts: Options for the resource.
574
574
  :param pulumi.Input[_builtins.bool] allow_cluster_create: Allow the principal to have cluster create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and `cluster_id` argument. Everyone without `allow_cluster_create` argument set, but with permission to use Cluster Policy would be able to create clusters, but within boundaries of that specific policy.
575
575
  :param pulumi.Input[_builtins.bool] allow_instance_pool_create: Allow the principal to have instance pool create privileges. Defaults to false. More fine grained permissions could be assigned with Permissions and instance_pool_id argument.
576
- :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
576
+ :param pulumi.Input[_builtins.bool] databricks_sql_access: This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
577
577
  :param pulumi.Input[_builtins.str] group_id: Canonical unique identifier for the group.
578
578
  :param pulumi.Input[_builtins.str] service_principal_id: Canonical unique identifier for the service principal.
579
579
 
580
580
  The following entitlements are available.
581
581
  :param pulumi.Input[_builtins.str] user_id: Canonical unique identifier for the user.
582
- :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace.
583
- :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
582
+ :param pulumi.Input[_builtins.bool] workspace_access: This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
583
+ :param pulumi.Input[_builtins.bool] workspace_consume: This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
584
584
  """
585
585
  opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
586
586
 
@@ -616,7 +616,7 @@ class Entitlements(pulumi.CustomResource):
616
616
  @pulumi.getter(name="databricksSqlAccess")
617
617
  def databricks_sql_access(self) -> pulumi.Output[Optional[_builtins.bool]]:
618
618
  """
619
- This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) feature in User Interface and through databricks_sql_endpoint.
619
+ This is a field to allow the principal to have access to [Databricks SQL](https://databricks.com/product/databricks-sql) UI, [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one) and through databricks_sql_endpoint.
620
620
  """
621
621
  return pulumi.get(self, "databricks_sql_access")
622
622
 
@@ -650,7 +650,7 @@ class Entitlements(pulumi.CustomResource):
650
650
  @pulumi.getter(name="workspaceAccess")
651
651
  def workspace_access(self) -> pulumi.Output[Optional[_builtins.bool]]:
652
652
  """
653
- This is a field to allow the principal to have access to a Databricks Workspace.
653
+ This is a field to allow the principal to have access to a Databricks Workspace UI and [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one).
654
654
  """
655
655
  return pulumi.get(self, "workspace_access")
656
656
 
@@ -658,7 +658,7 @@ class Entitlements(pulumi.CustomResource):
658
658
  @pulumi.getter(name="workspaceConsume")
659
659
  def workspace_consume(self) -> pulumi.Output[Optional[_builtins.bool]]:
660
660
  """
661
- This is a field to allow the principal to have access to a Databricks Workspace as consumer, with limited access to workspace UI. Couldn't be used with `workspace_access` or `databricks_sql_access`.
661
+ This is a field to allow the principal to have access only to [Databricks One](https://docs.databricks.com/aws/en/workspace/databricks-one#who-can-access-databricks-one). Couldn't be used with `workspace_access` or `databricks_sql_access`.
662
662
  """
663
663
  return pulumi.get(self, "workspace_consume")
664
664