pulumi-databricks 1.78.0a1762407761__py3-none-any.whl → 1.79.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (86)
  1. pulumi_databricks/__init__.py +23 -0
  2. pulumi_databricks/_inputs.py +1630 -181
  3. pulumi_databricks/access_control_rule_set.py +81 -0
  4. pulumi_databricks/account_federation_policy.py +24 -0
  5. pulumi_databricks/account_network_policy.py +50 -0
  6. pulumi_databricks/account_setting_v2.py +181 -12
  7. pulumi_databricks/alert_v2.py +74 -2
  8. pulumi_databricks/app.py +71 -7
  9. pulumi_databricks/apps_settings_custom_template.py +102 -0
  10. pulumi_databricks/catalog.py +8 -8
  11. pulumi_databricks/cluster_policy.py +120 -0
  12. pulumi_databricks/config/__init__.pyi +4 -0
  13. pulumi_databricks/config/vars.py +8 -0
  14. pulumi_databricks/credential.py +7 -7
  15. pulumi_databricks/dashboard.py +94 -0
  16. pulumi_databricks/data_quality_monitor.py +2 -2
  17. pulumi_databricks/data_quality_refresh.py +78 -2
  18. pulumi_databricks/database_synced_database_table.py +212 -0
  19. pulumi_databricks/external_location.py +131 -7
  20. pulumi_databricks/feature_engineering_feature.py +52 -19
  21. pulumi_databricks/feature_engineering_kafka_config.py +463 -0
  22. pulumi_databricks/feature_engineering_materialized_feature.py +47 -0
  23. pulumi_databricks/file.py +2 -2
  24. pulumi_databricks/get_account_setting_v2.py +16 -16
  25. pulumi_databricks/get_alert_v2.py +2 -2
  26. pulumi_databricks/get_alerts_v2.py +2 -2
  27. pulumi_databricks/get_aws_assume_role_policy.py +14 -14
  28. pulumi_databricks/get_aws_bucket_policy.py +10 -10
  29. pulumi_databricks/get_aws_unity_catalog_assume_role_policy.py +10 -10
  30. pulumi_databricks/get_aws_unity_catalog_policy.py +10 -10
  31. pulumi_databricks/get_cluster.py +54 -0
  32. pulumi_databricks/get_current_config.py +4 -4
  33. pulumi_databricks/get_dashboards.py +32 -0
  34. pulumi_databricks/get_data_quality_monitor.py +2 -2
  35. pulumi_databricks/get_data_quality_monitors.py +2 -2
  36. pulumi_databricks/get_data_quality_refresh.py +2 -2
  37. pulumi_databricks/get_data_quality_refreshes.py +2 -2
  38. pulumi_databricks/get_feature_engineering_feature.py +12 -1
  39. pulumi_databricks/get_feature_engineering_kafka_config.py +182 -0
  40. pulumi_databricks/get_feature_engineering_kafka_configs.py +103 -0
  41. pulumi_databricks/get_feature_engineering_materialized_feature.py +16 -2
  42. pulumi_databricks/get_metastore.py +6 -6
  43. pulumi_databricks/get_notebook.py +20 -1
  44. pulumi_databricks/get_policy_info.py +36 -2
  45. pulumi_databricks/get_policy_infos.py +34 -2
  46. pulumi_databricks/get_service_principals.py +93 -7
  47. pulumi_databricks/get_spark_version.py +2 -2
  48. pulumi_databricks/get_tag_policies.py +2 -2
  49. pulumi_databricks/get_tag_policy.py +2 -2
  50. pulumi_databricks/get_users.py +194 -0
  51. pulumi_databricks/get_workspace_entity_tag_assignment.py +180 -0
  52. pulumi_databricks/get_workspace_entity_tag_assignments.py +171 -0
  53. pulumi_databricks/get_workspace_setting_v2.py +16 -16
  54. pulumi_databricks/instance_profile.py +0 -182
  55. pulumi_databricks/lakehouse_monitor.py +2 -2
  56. pulumi_databricks/metastore.py +81 -7
  57. pulumi_databricks/metastore_data_access.py +48 -0
  58. pulumi_databricks/mlflow_webhook.py +4 -4
  59. pulumi_databricks/mws_credentials.py +10 -10
  60. pulumi_databricks/mws_customer_managed_keys.py +0 -288
  61. pulumi_databricks/mws_log_delivery.py +146 -0
  62. pulumi_databricks/mws_storage_configurations.py +16 -16
  63. pulumi_databricks/mws_vpc_endpoint.py +56 -56
  64. pulumi_databricks/mws_workspaces.py +115 -55
  65. pulumi_databricks/notebook.py +49 -0
  66. pulumi_databricks/outputs.py +2017 -240
  67. pulumi_databricks/permission_assignment.py +49 -0
  68. pulumi_databricks/permissions.py +6 -6
  69. pulumi_databricks/pipeline.py +7 -7
  70. pulumi_databricks/policy_info.py +122 -2
  71. pulumi_databricks/provider.py +36 -1
  72. pulumi_databricks/pulumi-plugin.json +1 -1
  73. pulumi_databricks/recipient.py +74 -0
  74. pulumi_databricks/registered_model.py +7 -7
  75. pulumi_databricks/rfa_access_request_destinations.py +86 -19
  76. pulumi_databricks/schema.py +7 -7
  77. pulumi_databricks/service_principal_federation_policy.py +28 -0
  78. pulumi_databricks/sql_table.py +7 -7
  79. pulumi_databricks/tag_policy.py +2 -2
  80. pulumi_databricks/volume.py +7 -7
  81. pulumi_databricks/workspace_entity_tag_assignment.py +375 -0
  82. pulumi_databricks/workspace_setting_v2.py +181 -12
  83. {pulumi_databricks-1.78.0a1762407761.dist-info → pulumi_databricks-1.79.0.dist-info}/METADATA +1 -1
  84. {pulumi_databricks-1.78.0a1762407761.dist-info → pulumi_databricks-1.79.0.dist-info}/RECORD +86 -79
  85. {pulumi_databricks-1.78.0a1762407761.dist-info → pulumi_databricks-1.79.0.dist-info}/WHEEL +0 -0
  86. {pulumi_databricks-1.78.0a1762407761.dist-info → pulumi_databricks-1.79.0.dist-info}/top_level.txt +0 -0
@@ -25,6 +25,8 @@ class DashboardArgs:
25
25
  create_time: Optional[pulumi.Input[_builtins.str]] = None,
26
26
  dashboard_change_detected: Optional[pulumi.Input[_builtins.bool]] = None,
27
27
  dashboard_id: Optional[pulumi.Input[_builtins.str]] = None,
28
+ dataset_catalog: Optional[pulumi.Input[_builtins.str]] = None,
29
+ dataset_schema: Optional[pulumi.Input[_builtins.str]] = None,
28
30
  embed_credentials: Optional[pulumi.Input[_builtins.bool]] = None,
29
31
  etag: Optional[pulumi.Input[_builtins.str]] = None,
30
32
  file_path: Optional[pulumi.Input[_builtins.str]] = None,
@@ -38,6 +40,8 @@ class DashboardArgs:
38
40
  :param pulumi.Input[_builtins.str] display_name: The display name of the dashboard.
39
41
  :param pulumi.Input[_builtins.str] parent_path: The workspace path of the folder containing the dashboard. Includes leading slash and no trailing slash. If folder doesn't exist, it will be created.
40
42
  :param pulumi.Input[_builtins.str] warehouse_id: The warehouse ID used to run the dashboard.
43
+ :param pulumi.Input[_builtins.str] dataset_catalog: Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
44
+ :param pulumi.Input[_builtins.str] dataset_schema: Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
41
45
  :param pulumi.Input[_builtins.bool] embed_credentials: Whether to embed credentials in the dashboard. Default is `true`.
42
46
  :param pulumi.Input[_builtins.str] file_path: The path to the dashboard JSON file. Conflicts with `serialized_dashboard`.
43
47
  :param pulumi.Input[_builtins.str] serialized_dashboard: The contents of the dashboard in serialized string form. Conflicts with `file_path`.
@@ -51,6 +55,10 @@ class DashboardArgs:
51
55
  pulumi.set(__self__, "dashboard_change_detected", dashboard_change_detected)
52
56
  if dashboard_id is not None:
53
57
  pulumi.set(__self__, "dashboard_id", dashboard_id)
58
+ if dataset_catalog is not None:
59
+ pulumi.set(__self__, "dataset_catalog", dataset_catalog)
60
+ if dataset_schema is not None:
61
+ pulumi.set(__self__, "dataset_schema", dataset_schema)
54
62
  if embed_credentials is not None:
55
63
  pulumi.set(__self__, "embed_credentials", embed_credentials)
56
64
  if etag is not None:
@@ -131,6 +139,30 @@ class DashboardArgs:
131
139
  def dashboard_id(self, value: Optional[pulumi.Input[_builtins.str]]):
132
140
  pulumi.set(self, "dashboard_id", value)
133
141
 
142
+ @_builtins.property
143
+ @pulumi.getter(name="datasetCatalog")
144
+ def dataset_catalog(self) -> Optional[pulumi.Input[_builtins.str]]:
145
+ """
146
+ Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
147
+ """
148
+ return pulumi.get(self, "dataset_catalog")
149
+
150
+ @dataset_catalog.setter
151
+ def dataset_catalog(self, value: Optional[pulumi.Input[_builtins.str]]):
152
+ pulumi.set(self, "dataset_catalog", value)
153
+
154
+ @_builtins.property
155
+ @pulumi.getter(name="datasetSchema")
156
+ def dataset_schema(self) -> Optional[pulumi.Input[_builtins.str]]:
157
+ """
158
+ Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
159
+ """
160
+ return pulumi.get(self, "dataset_schema")
161
+
162
+ @dataset_schema.setter
163
+ def dataset_schema(self, value: Optional[pulumi.Input[_builtins.str]]):
164
+ pulumi.set(self, "dataset_schema", value)
165
+
134
166
  @_builtins.property
135
167
  @pulumi.getter(name="embedCredentials")
136
168
  def embed_credentials(self) -> Optional[pulumi.Input[_builtins.bool]]:
@@ -219,6 +251,8 @@ class _DashboardState:
219
251
  create_time: Optional[pulumi.Input[_builtins.str]] = None,
220
252
  dashboard_change_detected: Optional[pulumi.Input[_builtins.bool]] = None,
221
253
  dashboard_id: Optional[pulumi.Input[_builtins.str]] = None,
254
+ dataset_catalog: Optional[pulumi.Input[_builtins.str]] = None,
255
+ dataset_schema: Optional[pulumi.Input[_builtins.str]] = None,
222
256
  display_name: Optional[pulumi.Input[_builtins.str]] = None,
223
257
  embed_credentials: Optional[pulumi.Input[_builtins.bool]] = None,
224
258
  etag: Optional[pulumi.Input[_builtins.str]] = None,
@@ -232,6 +266,8 @@ class _DashboardState:
232
266
  warehouse_id: Optional[pulumi.Input[_builtins.str]] = None):
233
267
  """
234
268
  Input properties used for looking up and filtering Dashboard resources.
269
+ :param pulumi.Input[_builtins.str] dataset_catalog: Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
270
+ :param pulumi.Input[_builtins.str] dataset_schema: Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
235
271
  :param pulumi.Input[_builtins.str] display_name: The display name of the dashboard.
236
272
  :param pulumi.Input[_builtins.bool] embed_credentials: Whether to embed credentials in the dashboard. Default is `true`.
237
273
  :param pulumi.Input[_builtins.str] file_path: The path to the dashboard JSON file. Conflicts with `serialized_dashboard`.
@@ -245,6 +281,10 @@ class _DashboardState:
245
281
  pulumi.set(__self__, "dashboard_change_detected", dashboard_change_detected)
246
282
  if dashboard_id is not None:
247
283
  pulumi.set(__self__, "dashboard_id", dashboard_id)
284
+ if dataset_catalog is not None:
285
+ pulumi.set(__self__, "dataset_catalog", dataset_catalog)
286
+ if dataset_schema is not None:
287
+ pulumi.set(__self__, "dataset_schema", dataset_schema)
248
288
  if display_name is not None:
249
289
  pulumi.set(__self__, "display_name", display_name)
250
290
  if embed_credentials is not None:
@@ -295,6 +335,30 @@ class _DashboardState:
295
335
  def dashboard_id(self, value: Optional[pulumi.Input[_builtins.str]]):
296
336
  pulumi.set(self, "dashboard_id", value)
297
337
 
338
+ @_builtins.property
339
+ @pulumi.getter(name="datasetCatalog")
340
+ def dataset_catalog(self) -> Optional[pulumi.Input[_builtins.str]]:
341
+ """
342
+ Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
343
+ """
344
+ return pulumi.get(self, "dataset_catalog")
345
+
346
+ @dataset_catalog.setter
347
+ def dataset_catalog(self, value: Optional[pulumi.Input[_builtins.str]]):
348
+ pulumi.set(self, "dataset_catalog", value)
349
+
350
+ @_builtins.property
351
+ @pulumi.getter(name="datasetSchema")
352
+ def dataset_schema(self) -> Optional[pulumi.Input[_builtins.str]]:
353
+ """
354
+ Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
355
+ """
356
+ return pulumi.get(self, "dataset_schema")
357
+
358
+ @dataset_schema.setter
359
+ def dataset_schema(self, value: Optional[pulumi.Input[_builtins.str]]):
360
+ pulumi.set(self, "dataset_schema", value)
361
+
298
362
  @_builtins.property
299
363
  @pulumi.getter(name="displayName")
300
364
  def display_name(self) -> Optional[pulumi.Input[_builtins.str]]:
@@ -422,6 +486,8 @@ class Dashboard(pulumi.CustomResource):
422
486
  create_time: Optional[pulumi.Input[_builtins.str]] = None,
423
487
  dashboard_change_detected: Optional[pulumi.Input[_builtins.bool]] = None,
424
488
  dashboard_id: Optional[pulumi.Input[_builtins.str]] = None,
489
+ dataset_catalog: Optional[pulumi.Input[_builtins.str]] = None,
490
+ dataset_schema: Optional[pulumi.Input[_builtins.str]] = None,
425
491
  display_name: Optional[pulumi.Input[_builtins.str]] = None,
426
492
  embed_credentials: Optional[pulumi.Input[_builtins.bool]] = None,
427
493
  etag: Optional[pulumi.Input[_builtins.str]] = None,
@@ -482,6 +548,8 @@ class Dashboard(pulumi.CustomResource):
482
548
 
483
549
  :param str resource_name: The name of the resource.
484
550
  :param pulumi.ResourceOptions opts: Options for the resource.
551
+ :param pulumi.Input[_builtins.str] dataset_catalog: Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
552
+ :param pulumi.Input[_builtins.str] dataset_schema: Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
485
553
  :param pulumi.Input[_builtins.str] display_name: The display name of the dashboard.
486
554
  :param pulumi.Input[_builtins.bool] embed_credentials: Whether to embed credentials in the dashboard. Default is `true`.
487
555
  :param pulumi.Input[_builtins.str] file_path: The path to the dashboard JSON file. Conflicts with `serialized_dashboard`.
@@ -559,6 +627,8 @@ class Dashboard(pulumi.CustomResource):
559
627
  create_time: Optional[pulumi.Input[_builtins.str]] = None,
560
628
  dashboard_change_detected: Optional[pulumi.Input[_builtins.bool]] = None,
561
629
  dashboard_id: Optional[pulumi.Input[_builtins.str]] = None,
630
+ dataset_catalog: Optional[pulumi.Input[_builtins.str]] = None,
631
+ dataset_schema: Optional[pulumi.Input[_builtins.str]] = None,
562
632
  display_name: Optional[pulumi.Input[_builtins.str]] = None,
563
633
  embed_credentials: Optional[pulumi.Input[_builtins.bool]] = None,
564
634
  etag: Optional[pulumi.Input[_builtins.str]] = None,
@@ -582,6 +652,8 @@ class Dashboard(pulumi.CustomResource):
582
652
  __props__.__dict__["create_time"] = create_time
583
653
  __props__.__dict__["dashboard_change_detected"] = dashboard_change_detected
584
654
  __props__.__dict__["dashboard_id"] = dashboard_id
655
+ __props__.__dict__["dataset_catalog"] = dataset_catalog
656
+ __props__.__dict__["dataset_schema"] = dataset_schema
585
657
  if display_name is None and not opts.urn:
586
658
  raise TypeError("Missing required property 'display_name'")
587
659
  __props__.__dict__["display_name"] = display_name
@@ -612,6 +684,8 @@ class Dashboard(pulumi.CustomResource):
612
684
  create_time: Optional[pulumi.Input[_builtins.str]] = None,
613
685
  dashboard_change_detected: Optional[pulumi.Input[_builtins.bool]] = None,
614
686
  dashboard_id: Optional[pulumi.Input[_builtins.str]] = None,
687
+ dataset_catalog: Optional[pulumi.Input[_builtins.str]] = None,
688
+ dataset_schema: Optional[pulumi.Input[_builtins.str]] = None,
615
689
  display_name: Optional[pulumi.Input[_builtins.str]] = None,
616
690
  embed_credentials: Optional[pulumi.Input[_builtins.bool]] = None,
617
691
  etag: Optional[pulumi.Input[_builtins.str]] = None,
@@ -630,6 +704,8 @@ class Dashboard(pulumi.CustomResource):
630
704
  :param str resource_name: The unique name of the resulting resource.
631
705
  :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
632
706
  :param pulumi.ResourceOptions opts: Options for the resource.
707
+ :param pulumi.Input[_builtins.str] dataset_catalog: Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
708
+ :param pulumi.Input[_builtins.str] dataset_schema: Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
633
709
  :param pulumi.Input[_builtins.str] display_name: The display name of the dashboard.
634
710
  :param pulumi.Input[_builtins.bool] embed_credentials: Whether to embed credentials in the dashboard. Default is `true`.
635
711
  :param pulumi.Input[_builtins.str] file_path: The path to the dashboard JSON file. Conflicts with `serialized_dashboard`.
@@ -644,6 +720,8 @@ class Dashboard(pulumi.CustomResource):
644
720
  __props__.__dict__["create_time"] = create_time
645
721
  __props__.__dict__["dashboard_change_detected"] = dashboard_change_detected
646
722
  __props__.__dict__["dashboard_id"] = dashboard_id
723
+ __props__.__dict__["dataset_catalog"] = dataset_catalog
724
+ __props__.__dict__["dataset_schema"] = dataset_schema
647
725
  __props__.__dict__["display_name"] = display_name
648
726
  __props__.__dict__["embed_credentials"] = embed_credentials
649
727
  __props__.__dict__["etag"] = etag
@@ -672,6 +750,22 @@ class Dashboard(pulumi.CustomResource):
672
750
  def dashboard_id(self) -> pulumi.Output[_builtins.str]:
673
751
  return pulumi.get(self, "dashboard_id")
674
752
 
753
+ @_builtins.property
754
+ @pulumi.getter(name="datasetCatalog")
755
+ def dataset_catalog(self) -> pulumi.Output[Optional[_builtins.str]]:
756
+ """
757
+ Sets the default catalog for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
758
+ """
759
+ return pulumi.get(self, "dataset_catalog")
760
+
761
+ @_builtins.property
762
+ @pulumi.getter(name="datasetSchema")
763
+ def dataset_schema(self) -> pulumi.Output[Optional[_builtins.str]]:
764
+ """
765
+ Sets the default schema for all datasets in this dashboard. Does not impact table references that use fully qualified catalog names (ex: samples.nyctaxi.trips).
766
+ """
767
+ return pulumi.get(self, "dataset_schema")
768
+
675
769
  @_builtins.property
676
770
  @pulumi.getter(name="displayName")
677
771
  def display_name(self) -> pulumi.Output[_builtins.str]:
@@ -208,7 +208,7 @@ class DataQualityMonitor(pulumi.CustomResource):
208
208
  object_type: Optional[pulumi.Input[_builtins.str]] = None,
209
209
  __props__=None):
210
210
  """
211
- [![Public Beta](https://img.shields.io/badge/Release_Stage-Public_Beta-orange)](https://docs.databricks.com/aws/en/release-notes/release-types)
211
+ [![Public Preview](https://img.shields.io/badge/Release_Stage-Public_Preview-yellowgreen)](https://docs.databricks.com/aws/en/release-notes/release-types)
212
212
 
213
213
  This resource allows you to set up data quality monitoring checks for Unity Catalog objects, currently schema and table.
214
214
 
@@ -280,7 +280,7 @@ class DataQualityMonitor(pulumi.CustomResource):
280
280
  args: DataQualityMonitorArgs,
281
281
  opts: Optional[pulumi.ResourceOptions] = None):
282
282
  """
283
- [![Public Beta](https://img.shields.io/badge/Release_Stage-Public_Beta-orange)](https://docs.databricks.com/aws/en/release-notes/release-types)
283
+ [![Public Preview](https://img.shields.io/badge/Release_Stage-Public_Preview-yellowgreen)](https://docs.databricks.com/aws/en/release-notes/release-types)
284
284
 
285
285
  This resource allows you to set up data quality monitoring checks for Unity Catalog objects, currently schema and table.
286
286
 
@@ -232,7 +232,7 @@ class DataQualityRefresh(pulumi.CustomResource):
232
232
  object_type: Optional[pulumi.Input[_builtins.str]] = None,
233
233
  __props__=None):
234
234
  """
235
- [![Public Beta](https://img.shields.io/badge/Release_Stage-Public_Beta-orange)](https://docs.databricks.com/aws/en/release-notes/release-types)
235
+ [![Public Preview](https://img.shields.io/badge/Release_Stage-Public_Preview-yellowgreen)](https://docs.databricks.com/aws/en/release-notes/release-types)
236
236
 
237
237
  This resource allows you to refresh the data quality monitoring checks on Unity Catalog tables.
238
238
 
@@ -248,6 +248,44 @@ class DataQualityRefresh(pulumi.CustomResource):
248
248
 
249
249
  ## Example Usage
250
250
 
251
+ ```python
252
+ import pulumi
253
+ import pulumi_databricks as databricks
254
+
255
+ sandbox = databricks.Catalog("sandbox",
256
+ name="sandbox",
257
+ comment="this catalog is managed by terraform",
258
+ properties={
259
+ "purpose": "testing",
260
+ })
261
+ my_test_schema = databricks.Schema("myTestSchema",
262
+ catalog_name=sandbox.id,
263
+ name="myTestSchema",
264
+ comment="this database is managed by terraform",
265
+ properties={
266
+ "kind": "various",
267
+ })
268
+ my_test_table = databricks.SqlTable("myTestTable",
269
+ catalog_name="main",
270
+ schema_name=my_test_schema.name,
271
+ name="bar",
272
+ table_type="MANAGED",
273
+ data_source_format="DELTA",
274
+ columns=[{
275
+ "name": "timestamp",
276
+ "type": "int",
277
+ }])
278
+ this = databricks.DataQualityMonitor("this",
279
+ object_type="table",
280
+ object_id=my_test_table.id,
281
+ data_profiling_config={
282
+ "output_schema": my_test_schema.schema_id,
283
+ })
284
+ this_data_quality_refresh = databricks.DataQualityRefresh("this",
285
+ object_type="table",
286
+ object_id=my_test_table.id)
287
+ ```
288
+
251
289
  ## Import
252
290
 
253
291
  As of Pulumi v1.5, resources can be imported through configuration.
@@ -288,7 +326,7 @@ class DataQualityRefresh(pulumi.CustomResource):
288
326
  args: DataQualityRefreshArgs,
289
327
  opts: Optional[pulumi.ResourceOptions] = None):
290
328
  """
291
- [![Public Beta](https://img.shields.io/badge/Release_Stage-Public_Beta-orange)](https://docs.databricks.com/aws/en/release-notes/release-types)
329
+ [![Public Preview](https://img.shields.io/badge/Release_Stage-Public_Preview-yellowgreen)](https://docs.databricks.com/aws/en/release-notes/release-types)
292
330
 
293
331
  This resource allows you to refresh the data quality monitoring checks on Unity Catalog tables.
294
332
 
@@ -304,6 +342,44 @@ class DataQualityRefresh(pulumi.CustomResource):
304
342
 
305
343
  ## Example Usage
306
344
 
345
+ ```python
346
+ import pulumi
347
+ import pulumi_databricks as databricks
348
+
349
+ sandbox = databricks.Catalog("sandbox",
350
+ name="sandbox",
351
+ comment="this catalog is managed by terraform",
352
+ properties={
353
+ "purpose": "testing",
354
+ })
355
+ my_test_schema = databricks.Schema("myTestSchema",
356
+ catalog_name=sandbox.id,
357
+ name="myTestSchema",
358
+ comment="this database is managed by terraform",
359
+ properties={
360
+ "kind": "various",
361
+ })
362
+ my_test_table = databricks.SqlTable("myTestTable",
363
+ catalog_name="main",
364
+ schema_name=my_test_schema.name,
365
+ name="bar",
366
+ table_type="MANAGED",
367
+ data_source_format="DELTA",
368
+ columns=[{
369
+ "name": "timestamp",
370
+ "type": "int",
371
+ }])
372
+ this = databricks.DataQualityMonitor("this",
373
+ object_type="table",
374
+ object_id=my_test_table.id,
375
+ data_profiling_config={
376
+ "output_schema": my_test_schema.schema_id,
377
+ })
378
+ this_data_quality_refresh = databricks.DataQualityRefresh("this",
379
+ object_type="table",
380
+ object_id=my_test_table.id)
381
+ ```
382
+
307
383
  ## Import
308
384
 
309
385
  As of Pulumi v1.5, resources can be imported through configuration.
@@ -298,15 +298,88 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
298
298
 
299
299
  This example creates a Synced Database Table inside a Database Catalog.
300
300
 
301
+ ```python
302
+ import pulumi
303
+ import pulumi_databricks as databricks
304
+
305
+ this = databricks.DatabaseSyncedDatabaseTable("this",
306
+ name="my_database_catalog.public.synced_table",
307
+ logical_database_name="databricks_postgres",
308
+ spec={
309
+ "scheduling_policy": "SNAPSHOT",
310
+ "source_table_full_name": "source_delta.tpch.customer",
311
+ "primary_key_columns": ["c_custkey"],
312
+ "create_database_objects_if_missing": True,
313
+ "new_pipeline_spec": {
314
+ "storage_catalog": "source_delta",
315
+ "storage_schema": "tpch",
316
+ },
317
+ })
318
+ ```
319
+
301
320
  ### Creating a Synced Database Table inside a Standard Catalog
302
321
 
303
322
  This example creates a Synced Database Table inside a Standard Catalog.
304
323
 
324
+ ```python
325
+ import pulumi
326
+ import pulumi_databricks as databricks
327
+
328
+ this = databricks.DatabaseSyncedDatabaseTable("this",
329
+ name="my_standard_catalog.public.synced_table",
330
+ logical_database_name="databricks_postgres",
331
+ database_instance_name="my-database-instance",
332
+ spec={
333
+ "scheduling_policy": "SNAPSHOT",
334
+ "source_table_full_name": "source_delta.tpch.customer",
335
+ "primary_key_columns": ["c_custkey"],
336
+ "create_database_objects_if_missing": True,
337
+ "new_pipeline_spec": {
338
+ "storage_catalog": "source_delta",
339
+ "storage_schema": "tpch",
340
+ },
341
+ })
342
+ ```
343
+
305
344
  ### Creating multiple Synced Database Tables and bin packing them into a single pipeline
306
345
 
307
346
  This example creates two Synced Database Tables. The first one specifies a new pipeline spec,
308
347
  which generates a new pipeline. The second one utilizes the pipeline ID of the first table.
309
348
 
349
+ ```python
350
+ import pulumi
351
+ import pulumi_databricks as databricks
352
+
353
+ instance = databricks.DatabaseInstance("instance",
354
+ name="my-database-instance",
355
+ capacity="CU_1")
356
+ synced_table1 = databricks.DatabaseSyncedDatabaseTable("synced_table_1",
357
+ name="my_standard_catalog.public.synced_table1",
358
+ logical_database_name="databricks_postgres",
359
+ database_instance_name=instance.name,
360
+ spec={
361
+ "scheduling_policy": "SNAPSHOT",
362
+ "source_table_full_name": "source_delta.tpch.customer",
363
+ "primary_key_columns": ["c_custkey"],
364
+ "create_database_objects_if_missing": True,
365
+ "new_pipeline_spec": {
366
+ "storage_catalog": "source_delta",
367
+ "storage_schema": "tpch",
368
+ },
369
+ })
370
+ synced_table2 = databricks.DatabaseSyncedDatabaseTable("synced_table_2",
371
+ name="my_standard_catalog.public.synced_table2",
372
+ logical_database_name="databricks_postgres",
373
+ database_instance_name=instance.name,
374
+ spec={
375
+ "scheduling_policy": "SNAPSHOT",
376
+ "source_table_full_name": "source_delta.tpch.customer",
377
+ "primary_key_columns": ["c_custkey"],
378
+ "create_database_objects_if_missing": True,
379
+ "existing_pipeline_id": synced_table1.data_synchronization_status.pipeline_id,
380
+ })
381
+ ```
382
+
310
383
  ### Creating a Synced Database Table with a custom Jobs schedule
311
384
 
312
385
  This example creates a Synced Database Table and customizes the pipeline schedule. It assumes you already have
@@ -316,6 +389,39 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
316
389
  - A schema in the standard catalog named `"default"`
317
390
  - A source delta table named `"source_delta.schema.customer"` with the primary key `"c_custkey"`
318
391
 
392
+ ```python
393
+ import pulumi
394
+ import pulumi_databricks as databricks
395
+
396
+ synced_table = databricks.DatabaseSyncedDatabaseTable("synced_table",
397
+ name="my_standard_catalog.default.my_synced_table",
398
+ logical_database_name="terraform_test_db",
399
+ database_instance_name="my-database-instance",
400
+ spec={
401
+ "scheduling_policy": "SNAPSHOT",
402
+ "source_table_full_name": "source_delta.schema.customer",
403
+ "primary_key_columns": ["c_custkey"],
404
+ "create_database_objects_if_missing": True,
405
+ "new_pipeline_spec": {
406
+ "storage_catalog": "source_delta",
407
+ "storage_schema": "schema",
408
+ },
409
+ })
410
+ sync_pipeline_schedule_job = databricks.Job("sync_pipeline_schedule_job",
411
+ name="Synced Pipeline Refresh",
412
+ description="Job to schedule synced database table pipeline. ",
413
+ tasks=[{
414
+ "task_key": "synced-table-pipeline",
415
+ "pipeline_task": {
416
+ "pipeline_id": synced_table.data_synchronization_status.pipeline_id,
417
+ },
418
+ }],
419
+ schedule={
420
+ "quartz_cron_expression": "0 0 0 * * ?",
421
+ "timezone_id": "Europe/Helsinki",
422
+ })
423
+ ```
424
+
319
425
  ## Import
320
426
 
321
427
  As of Pulumi v1.5, resources can be imported through configuration.
@@ -375,15 +481,88 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
375
481
 
376
482
  This example creates a Synced Database Table inside a Database Catalog.
377
483
 
484
+ ```python
485
+ import pulumi
486
+ import pulumi_databricks as databricks
487
+
488
+ this = databricks.DatabaseSyncedDatabaseTable("this",
489
+ name="my_database_catalog.public.synced_table",
490
+ logical_database_name="databricks_postgres",
491
+ spec={
492
+ "scheduling_policy": "SNAPSHOT",
493
+ "source_table_full_name": "source_delta.tpch.customer",
494
+ "primary_key_columns": ["c_custkey"],
495
+ "create_database_objects_if_missing": True,
496
+ "new_pipeline_spec": {
497
+ "storage_catalog": "source_delta",
498
+ "storage_schema": "tpch",
499
+ },
500
+ })
501
+ ```
502
+
378
503
  ### Creating a Synced Database Table inside a Standard Catalog
379
504
 
380
505
  This example creates a Synced Database Table inside a Standard Catalog.
381
506
 
507
+ ```python
508
+ import pulumi
509
+ import pulumi_databricks as databricks
510
+
511
+ this = databricks.DatabaseSyncedDatabaseTable("this",
512
+ name="my_standard_catalog.public.synced_table",
513
+ logical_database_name="databricks_postgres",
514
+ database_instance_name="my-database-instance",
515
+ spec={
516
+ "scheduling_policy": "SNAPSHOT",
517
+ "source_table_full_name": "source_delta.tpch.customer",
518
+ "primary_key_columns": ["c_custkey"],
519
+ "create_database_objects_if_missing": True,
520
+ "new_pipeline_spec": {
521
+ "storage_catalog": "source_delta",
522
+ "storage_schema": "tpch",
523
+ },
524
+ })
525
+ ```
526
+
382
527
  ### Creating multiple Synced Database Tables and bin packing them into a single pipeline
383
528
 
384
529
  This example creates two Synced Database Tables. The first one specifies a new pipeline spec,
385
530
  which generates a new pipeline. The second one utilizes the pipeline ID of the first table.
386
531
 
532
+ ```python
533
+ import pulumi
534
+ import pulumi_databricks as databricks
535
+
536
+ instance = databricks.DatabaseInstance("instance",
537
+ name="my-database-instance",
538
+ capacity="CU_1")
539
+ synced_table1 = databricks.DatabaseSyncedDatabaseTable("synced_table_1",
540
+ name="my_standard_catalog.public.synced_table1",
541
+ logical_database_name="databricks_postgres",
542
+ database_instance_name=instance.name,
543
+ spec={
544
+ "scheduling_policy": "SNAPSHOT",
545
+ "source_table_full_name": "source_delta.tpch.customer",
546
+ "primary_key_columns": ["c_custkey"],
547
+ "create_database_objects_if_missing": True,
548
+ "new_pipeline_spec": {
549
+ "storage_catalog": "source_delta",
550
+ "storage_schema": "tpch",
551
+ },
552
+ })
553
+ synced_table2 = databricks.DatabaseSyncedDatabaseTable("synced_table_2",
554
+ name="my_standard_catalog.public.synced_table2",
555
+ logical_database_name="databricks_postgres",
556
+ database_instance_name=instance.name,
557
+ spec={
558
+ "scheduling_policy": "SNAPSHOT",
559
+ "source_table_full_name": "source_delta.tpch.customer",
560
+ "primary_key_columns": ["c_custkey"],
561
+ "create_database_objects_if_missing": True,
562
+ "existing_pipeline_id": synced_table1.data_synchronization_status.pipeline_id,
563
+ })
564
+ ```
565
+
387
566
  ### Creating a Synced Database Table with a custom Jobs schedule
388
567
 
389
568
  This example creates a Synced Database Table and customizes the pipeline schedule. It assumes you already have
@@ -393,6 +572,39 @@ class DatabaseSyncedDatabaseTable(pulumi.CustomResource):
393
572
  - A schema in the standard catalog named `"default"`
394
573
  - A source delta table named `"source_delta.schema.customer"` with the primary key `"c_custkey"`
395
574
 
575
+ ```python
576
+ import pulumi
577
+ import pulumi_databricks as databricks
578
+
579
+ synced_table = databricks.DatabaseSyncedDatabaseTable("synced_table",
580
+ name="my_standard_catalog.default.my_synced_table",
581
+ logical_database_name="terraform_test_db",
582
+ database_instance_name="my-database-instance",
583
+ spec={
584
+ "scheduling_policy": "SNAPSHOT",
585
+ "source_table_full_name": "source_delta.schema.customer",
586
+ "primary_key_columns": ["c_custkey"],
587
+ "create_database_objects_if_missing": True,
588
+ "new_pipeline_spec": {
589
+ "storage_catalog": "source_delta",
590
+ "storage_schema": "schema",
591
+ },
592
+ })
593
+ sync_pipeline_schedule_job = databricks.Job("sync_pipeline_schedule_job",
594
+ name="Synced Pipeline Refresh",
595
+ description="Job to schedule synced database table pipeline. ",
596
+ tasks=[{
597
+ "task_key": "synced-table-pipeline",
598
+ "pipeline_task": {
599
+ "pipeline_id": synced_table.data_synchronization_status.pipeline_id,
600
+ },
601
+ }],
602
+ schedule={
603
+ "quartz_cron_expression": "0 0 0 * * ?",
604
+ "timezone_id": "Europe/Helsinki",
605
+ })
606
+ ```
607
+
396
608
  ## Import
397
609
 
398
610
  As of Pulumi v1.5, resources can be imported through configuration.