pulumi-databricks 1.77.0a1762276204__py3-none-any.whl → 1.78.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- pulumi_databricks/_inputs.py +117 -120
- pulumi_databricks/account_federation_policy.py +24 -0
- pulumi_databricks/account_network_policy.py +50 -0
- pulumi_databricks/alert_v2.py +72 -0
- pulumi_databricks/app.py +64 -0
- pulumi_databricks/apps_settings_custom_template.py +66 -0
- pulumi_databricks/catalog.py +7 -7
- pulumi_databricks/cluster_policy.py +120 -0
- pulumi_databricks/config/__init__.pyi +4 -0
- pulumi_databricks/config/vars.py +8 -0
- pulumi_databricks/database_synced_database_table.py +212 -0
- pulumi_databricks/external_location.py +131 -7
- pulumi_databricks/file.py +2 -2
- pulumi_databricks/get_aws_assume_role_policy.py +14 -14
- pulumi_databricks/get_aws_bucket_policy.py +10 -10
- pulumi_databricks/get_aws_unity_catalog_assume_role_policy.py +10 -10
- pulumi_databricks/get_aws_unity_catalog_policy.py +10 -10
- pulumi_databricks/get_cluster.py +54 -0
- pulumi_databricks/get_current_config.py +4 -4
- pulumi_databricks/get_metastore.py +6 -6
- pulumi_databricks/get_notebook.py +20 -1
- pulumi_databricks/get_service_principals.py +64 -0
- pulumi_databricks/instance_profile.py +0 -182
- pulumi_databricks/metastore.py +81 -7
- pulumi_databricks/metastore_data_access.py +48 -0
- pulumi_databricks/mlflow_webhook.py +4 -4
- pulumi_databricks/mws_credentials.py +10 -10
- pulumi_databricks/mws_customer_managed_keys.py +0 -288
- pulumi_databricks/mws_log_delivery.py +146 -0
- pulumi_databricks/mws_storage_configurations.py +16 -16
- pulumi_databricks/mws_vpc_endpoint.py +56 -56
- pulumi_databricks/mws_workspaces.py +85 -51
- pulumi_databricks/notebook.py +49 -0
- pulumi_databricks/outputs.py +99 -76
- pulumi_databricks/permission_assignment.py +49 -0
- pulumi_databricks/permissions.py +6 -6
- pulumi_databricks/provider.py +36 -1
- pulumi_databricks/pulumi-plugin.json +1 -1
- pulumi_databricks/recipient.py +74 -0
- pulumi_databricks/registered_model.py +7 -7
- pulumi_databricks/schema.py +7 -7
- pulumi_databricks/service_principal_federation_policy.py +28 -0
- pulumi_databricks/sql_table.py +7 -7
- pulumi_databricks/volume.py +7 -7
- {pulumi_databricks-1.77.0a1762276204.dist-info → pulumi_databricks-1.78.0.dist-info}/METADATA +1 -1
- {pulumi_databricks-1.77.0a1762276204.dist-info → pulumi_databricks-1.78.0.dist-info}/RECORD +48 -48
- {pulumi_databricks-1.77.0a1762276204.dist-info → pulumi_databricks-1.78.0.dist-info}/WHEEL +0 -0
- {pulumi_databricks-1.77.0a1762276204.dist-info → pulumi_databricks-1.78.0.dist-info}/top_level.txt +0 -0
pulumi_databricks/external_location.py
CHANGED

@@ -39,7 +39,7 @@ class ExternalLocationArgs:
        """
        The set of arguments for constructing a ExternalLocation resource.
        :param pulumi.Input[_builtins.str] credential_name: Name of the StorageCredential to use with this external location.
-       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        :param pulumi.Input[_builtins.str] comment: User-supplied free-form text.
        :param pulumi.Input[_builtins.bool] enable_file_events: indicates if managed file events are enabled for this external location. Requires `file_event_queue` block.
        :param pulumi.Input[_builtins.bool] fallback: Indicates whether fallback mode is enabled for this external location. When fallback mode is enabled (disabled by default), the access to the location falls back to cluster credentials if UC credentials are not sufficient.
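The only change to these docstrings is the note that special characters in `url` must be percent-encoded. A minimal sketch of doing that on the caller side with the standard library, assuming `urllib.parse.quote` is acceptable for the path portion; the provider does not encode the URL for you, and the bucket and credential names below are illustrative:

```python
import urllib.parse

import pulumi_databricks as databricks

prefix = "raw data & exports"  # contains a space and an ampersand

# Percent-encode the path portion (space -> %20, & -> %26) before
# handing it to ExternalLocation, as the 1.78.0 docstrings require.
encoded_prefix = urllib.parse.quote(prefix)

loc = databricks.ExternalLocation(
    "encoded",
    name="encoded-location",
    url=f"s3://my-bucket/{encoded_prefix}",
    credential_name="some-credential",  # assumed to exist already
)
```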
@@ -96,7 +96,7 @@ class ExternalLocationArgs:
    @pulumi.getter
    def url(self) -> pulumi.Input[_builtins.str]:
        """
-       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        return pulumi.get(self, "url")

@@ -294,7 +294,7 @@ class _ExternalLocationState:
        :param pulumi.Input[_builtins.bool] skip_validation: Suppress validation errors if any & force save the external location
        :param pulumi.Input[_builtins.int] updated_at: Time at which external location this was last modified, in epoch milliseconds.
        :param pulumi.Input[_builtins.str] updated_by: Username of user who last modified the external location.
-       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        if browse_only is not None:
            pulumi.set(__self__, "browse_only", browse_only)
@@ -571,7 +571,7 @@ class _ExternalLocationState:
    @pulumi.getter
    def url(self) -> Optional[pulumi.Input[_builtins.str]]:
        """
-       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        return pulumi.get(self, "url")

@@ -642,6 +642,68 @@ class ExternalLocation(pulumi.CustomResource):

        For Azure

+       ```python
+       import pulumi
+       import pulumi_databricks as databricks
+       import pulumi_std as std
+
+       external = databricks.StorageCredential("external",
+           name=ext_cred["displayName"],
+           azure_service_principal={
+               "directory_id": tenant_id,
+               "application_id": ext_cred["applicationId"],
+               "client_secret": ext_cred_azuread_application_password["value"],
+           },
+           comment="Managed by TF",
+           opts = pulumi.ResourceOptions(depends_on=[this]))
+       some = databricks.ExternalLocation("some",
+           name="external",
+           url=std.format(input="abfss://%s@%s.dfs.core.windows.net",
+               args=[
+                   ext_storage["name"],
+                   ext_storage_azurerm_storage_account["name"],
+               ]).result,
+           credential_name=external.id,
+           comment="Managed by TF",
+           opts = pulumi.ResourceOptions(depends_on=[this]))
+       some_grants = databricks.Grants("some",
+           external_location=some.id,
+           grants=[{
+               "principal": "Data Engineers",
+               "privileges": [
+                   "CREATE_EXTERNAL_TABLE",
+                   "READ_FILES",
+               ],
+           }])
+       ```
+
+       For GCP
+
+       ```python
+       import pulumi
+       import pulumi_databricks as databricks
+
+       ext = databricks.StorageCredential("ext",
+           name="the-creds",
+           databricks_gcp_service_account={})
+       some = databricks.ExternalLocation("some",
+           name="the-ext-location",
+           url=f"gs://{ext_bucket['name']}",
+           credential_name=ext.id,
+           comment="Managed by TF")
+       some_grants = databricks.Grants("some",
+           external_location=some.id,
+           grants=[{
+               "principal": "Data Engineers",
+               "privileges": [
+                   "CREATE_EXTERNAL_TABLE",
+                   "READ_FILES",
+               ],
+           }])
+       ```
+
+       Example `encryption_details` specifying SSE_S3 encryption:
+
        ## Import

        This resource can be imported by `name`:
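The added docstring ends with "Example `encryption_details` specifying SSE_S3 encryption:" but the example itself is not part of this hunk. A hedged sketch of what such a block might look like, assuming the `encryption_details` input takes a nested `sse_encryption_details` object with an `algorithm` field as in the upstream Terraform provider; check `_inputs.py` in this release for the exact shape:

```python
import pulumi_databricks as databricks

# Sketch only: the nested field names are assumed, not taken from this diff.
encrypted = databricks.ExternalLocation(
    "encrypted",
    name="sse-s3-location",
    url="s3://some-bucket/some-prefix",   # illustrative values
    credential_name="some-credential",
    encryption_details={
        "sse_encryption_details": {
            "algorithm": "AWS_SSE_S3",
        },
    },
)
```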
@@ -677,7 +739,7 @@ class ExternalLocation(pulumi.CustomResource):
        :param pulumi.Input[_builtins.str] owner: Username/groupname/sp application_id of the external location owner.
        :param pulumi.Input[_builtins.bool] read_only: Indicates whether the external location is read-only.
        :param pulumi.Input[_builtins.bool] skip_validation: Suppress validation errors if any & force save the external location
-       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        ...
    @overload
@@ -725,6 +787,68 @@ class ExternalLocation(pulumi.CustomResource):

        For Azure

+       ```python
+       import pulumi
+       import pulumi_databricks as databricks
+       import pulumi_std as std
+
+       external = databricks.StorageCredential("external",
+           name=ext_cred["displayName"],
+           azure_service_principal={
+               "directory_id": tenant_id,
+               "application_id": ext_cred["applicationId"],
+               "client_secret": ext_cred_azuread_application_password["value"],
+           },
+           comment="Managed by TF",
+           opts = pulumi.ResourceOptions(depends_on=[this]))
+       some = databricks.ExternalLocation("some",
+           name="external",
+           url=std.format(input="abfss://%s@%s.dfs.core.windows.net",
+               args=[
+                   ext_storage["name"],
+                   ext_storage_azurerm_storage_account["name"],
+               ]).result,
+           credential_name=external.id,
+           comment="Managed by TF",
+           opts = pulumi.ResourceOptions(depends_on=[this]))
+       some_grants = databricks.Grants("some",
+           external_location=some.id,
+           grants=[{
+               "principal": "Data Engineers",
+               "privileges": [
+                   "CREATE_EXTERNAL_TABLE",
+                   "READ_FILES",
+               ],
+           }])
+       ```
+
+       For GCP
+
+       ```python
+       import pulumi
+       import pulumi_databricks as databricks
+
+       ext = databricks.StorageCredential("ext",
+           name="the-creds",
+           databricks_gcp_service_account={})
+       some = databricks.ExternalLocation("some",
+           name="the-ext-location",
+           url=f"gs://{ext_bucket['name']}",
+           credential_name=ext.id,
+           comment="Managed by TF")
+       some_grants = databricks.Grants("some",
+           external_location=some.id,
+           grants=[{
+               "principal": "Data Engineers",
+               "privileges": [
+                   "CREATE_EXTERNAL_TABLE",
+                   "READ_FILES",
+               ],
+           }])
+       ```
+
+       Example `encryption_details` specifying SSE_S3 encryption:
+
        ## Import

        This resource can be imported by `name`:
@@ -865,7 +989,7 @@ class ExternalLocation(pulumi.CustomResource):
        :param pulumi.Input[_builtins.bool] skip_validation: Suppress validation errors if any & force save the external location
        :param pulumi.Input[_builtins.int] updated_at: Time at which external location this was last modified, in epoch milliseconds.
        :param pulumi.Input[_builtins.str] updated_by: Username of user who last modified the external location.
-       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       :param pulumi.Input[_builtins.str] url: Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))

@@ -1046,7 +1170,7 @@ class ExternalLocation(pulumi.CustomResource):
    @pulumi.getter
    def url(self) -> pulumi.Output[_builtins.str]:
        """
-       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP).
+       Path URL in cloud storage, of the form: `s3://[bucket-host]/[bucket-dir]` (AWS), `abfss://[user]@[host]/[path]` (Azure), `gs://[bucket-host]/[bucket-dir]` (GCP). If the URL contains special characters, such as space, `&`, etc., they should be percent-encoded (space > `%20`, etc.).
        """
        return pulumi.get(self, "url")

pulumi_databricks/file.py
CHANGED

@@ -270,7 +270,7 @@ class File(pulumi.CustomResource):

        init_script = databricks.File("init_script",
            content_base64=std.base64encode(input=\"\"\"#!/bin/bash
-       echo "Hello World"
+       echo \\"Hello World\\"
        \"\"\").result,
            path=f"{this['volumePath']}/fileName")
        ```
@@ -368,7 +368,7 @@ class File(pulumi.CustomResource):

        init_script = databricks.File("init_script",
            content_base64=std.base64encode(input=\"\"\"#!/bin/bash
-       echo "Hello World"
+       echo \\"Hello World\\"
        \"\"\").result,
            path=f"{this['volumePath']}/fileName")
        ```
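The only change in `file.py` is the re-escaped `echo` line inside the docstring's embedded shell script. For comparison, a minimal sketch that produces the same `content_base64` with only the standard library instead of `pulumi_std`; the volume path below is hypothetical:

```python
import base64

import pulumi_databricks as databricks

init_script_body = """#!/bin/bash
echo "Hello World"
"""

# Base64-encode the init script ourselves rather than via std.base64encode.
init_script = databricks.File(
    "init_script",
    content_base64=base64.b64encode(init_script_body.encode("utf-8")).decode("ascii"),
    path="/Volumes/main/default/my_volume/init.sh",  # hypothetical volume path
)
```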
pulumi_databricks/get_aws_assume_role_policy.py
CHANGED

@@ -121,22 +121,22 @@ def get_aws_assume_role_policy(aws_partition: Optional[_builtins.str] = None,
    # Account Id that could be found in the top right corner of https://accounts.cloud.databricks.com/
    databricks_account_id = config.require_object("databricksAccountId")
    this = databricks.get_aws_cross_account_policy()
-   cross_account_policy = aws.
-       name=f
+   cross_account_policy = aws.index.IamPolicy("cross_account_policy",
+       name=f{prefix}-crossaccount-iam-policy,
        policy=this.json)
    this_get_aws_assume_role_policy = databricks.get_aws_assume_role_policy(external_id=databricks_account_id)
-   cross_account = aws.
-       name=f
+   cross_account = aws.index.IamRole("cross_account",
+       name=f{prefix}-crossaccount-iam-role,
        assume_role_policy=this_get_aws_assume_role_policy.json,
-       description=
-
+       description=Grants Databricks full access to VPC resources)
+   cross_account_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("cross_account",
        policy_arn=cross_account_policy.arn,
        role=cross_account.name)
    # required only in case of multi-workspace setup
    this_mws_credentials = databricks.MwsCredentials("this",
        account_id=databricks_account_id,
        credentials_name=f"{prefix}-creds",
-       role_arn=cross_account
+       role_arn=cross_account["arn"])
    ```

    ## Related Resources
@@ -190,22 +190,22 @@ def get_aws_assume_role_policy_output(aws_partition: Optional[pulumi.Input[Optio
    # Account Id that could be found in the top right corner of https://accounts.cloud.databricks.com/
    databricks_account_id = config.require_object("databricksAccountId")
    this = databricks.get_aws_cross_account_policy()
-   cross_account_policy = aws.
-       name=f
+   cross_account_policy = aws.index.IamPolicy("cross_account_policy",
+       name=f{prefix}-crossaccount-iam-policy,
        policy=this.json)
    this_get_aws_assume_role_policy = databricks.get_aws_assume_role_policy(external_id=databricks_account_id)
-   cross_account = aws.
-       name=f
+   cross_account = aws.index.IamRole("cross_account",
+       name=f{prefix}-crossaccount-iam-role,
        assume_role_policy=this_get_aws_assume_role_policy.json,
-       description=
-
+       description=Grants Databricks full access to VPC resources)
+   cross_account_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("cross_account",
        policy_arn=cross_account_policy.arn,
        role=cross_account.name)
    # required only in case of multi-workspace setup
    this_mws_credentials = databricks.MwsCredentials("this",
        account_id=databricks_account_id,
        credentials_name=f"{prefix}-creds",
-       role_arn=cross_account
+       role_arn=cross_account["arn"])
    ```

    ## Related Resources
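The regenerated examples above still use the converter's `aws.index.*` form with unquoted f-strings, so they are not directly runnable. A hedged sketch of the same cross-account wiring using the typed `pulumi_aws.iam` classes; this is an editorial substitution, not what the package docstring ships, and `prefix`/config keys are illustrative:

```python
import pulumi
import pulumi_aws as aws
import pulumi_databricks as databricks

config = pulumi.Config()
prefix = config.require("prefix")
databricks_account_id = config.require("databricksAccountId")

# Policy document rendered by the Databricks provider.
cross_account_policy_doc = databricks.get_aws_cross_account_policy()
cross_account_policy = aws.iam.Policy(
    "cross_account_policy",
    name=f"{prefix}-crossaccount-iam-policy",
    policy=cross_account_policy_doc.json,
)

assume_role_policy_doc = databricks.get_aws_assume_role_policy(
    external_id=databricks_account_id,
)
cross_account = aws.iam.Role(
    "cross_account",
    name=f"{prefix}-crossaccount-iam-role",
    assume_role_policy=assume_role_policy_doc.json,
    description="Grants Databricks full access to VPC resources",
)
aws.iam.RolePolicyAttachment(
    "cross_account",
    role=cross_account.name,
    policy_arn=cross_account_policy.arn,
)

# Required only in a multi-workspace (account-level) setup.
creds = databricks.MwsCredentials(
    "this",
    account_id=databricks_account_id,
    credentials_name=f"{prefix}-creds",
    role_arn=cross_account.arn,
)
```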
pulumi_databricks/get_aws_bucket_policy.py
CHANGED

@@ -125,12 +125,12 @@ def get_aws_bucket_policy(aws_partition: Optional[_builtins.str] = None,
    import pulumi_aws as aws
    import pulumi_databricks as databricks

-
-       bucket
+   this_s3_bucket = aws.index.S3Bucket("this",
+       bucket=<unique_bucket_name>,
        force_destroy=True)
-   this = databricks.
-
-       bucket=
+   this = databricks.get_aws_bucket_policy(bucket=this_s3_bucket["bucket"])
+   this_s3_bucket_policy = aws.index.S3BucketPolicy("this",
+       bucket=this_s3_bucket.id,
        policy=this.json)
    ```

@@ -177,12 +177,12 @@ def get_aws_bucket_policy_output(aws_partition: Optional[pulumi.Input[Optional[_
    import pulumi_aws as aws
    import pulumi_databricks as databricks

-
-       bucket
+   this_s3_bucket = aws.index.S3Bucket("this",
+       bucket=<unique_bucket_name>,
        force_destroy=True)
-   this = databricks.
-
-       bucket=
+   this = databricks.get_aws_bucket_policy(bucket=this_s3_bucket["bucket"])
+   this_s3_bucket_policy = aws.index.S3BucketPolicy("this",
+       bucket=this_s3_bucket.id,
        policy=this.json)
    ```

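As with the IAM examples, the regenerated `aws.index.S3Bucket` form is not directly runnable (note the literal `<unique_bucket_name>` placeholder). A hedged sketch of the same pattern using the typed `pulumi_aws.s3` classes, again an editorial substitution with an illustrative bucket name:

```python
import pulumi_aws as aws
import pulumi_databricks as databricks

# Hypothetical bucket name; S3 bucket names must be globally unique.
this_bucket = aws.s3.BucketV2(
    "this",
    bucket="my-unique-databricks-root-bucket",
    force_destroy=True,
)

# Let the Databricks provider render the bucket policy document...
policy_doc = databricks.get_aws_bucket_policy_output(bucket=this_bucket.bucket)

# ...and attach it to the bucket.
aws.s3.BucketPolicy(
    "this",
    bucket=this_bucket.id,
    policy=policy_doc.json,
)
```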
pulumi_databricks/get_aws_unity_catalog_assume_role_policy.py
CHANGED

@@ -130,13 +130,13 @@ def get_aws_unity_catalog_assume_role_policy(aws_account_id: Optional[_builtins.
    this_get_aws_unity_catalog_assume_role_policy = databricks.get_aws_unity_catalog_assume_role_policy(aws_account_id=aws_account_id,
        role_name=f"{prefix}-uc-access",
        external_id="12345")
-   unity_metastore = aws.
-       name=f
+   unity_metastore = aws.index.IamPolicy("unity_metastore",
+       name=f{prefix}-unity-catalog-metastore-access-iam-policy,
        policy=this.json)
-   metastore_data_access = aws.
-       name=f
+   metastore_data_access = aws.index.IamRole("metastore_data_access",
+       name=f{prefix}-uc-access,
        assume_role_policy=this_get_aws_unity_catalog_assume_role_policy.json)
-
+   metastore_data_access_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("metastore_data_access",
        role=metastore_data_access.name,
        policy_arn=unity_metastore.arn)
    ```

@@ -192,13 +192,13 @@ def get_aws_unity_catalog_assume_role_policy_output(aws_account_id: Optional[pul
    this_get_aws_unity_catalog_assume_role_policy = databricks.get_aws_unity_catalog_assume_role_policy(aws_account_id=aws_account_id,
        role_name=f"{prefix}-uc-access",
        external_id="12345")
-   unity_metastore = aws.
-       name=f
+   unity_metastore = aws.index.IamPolicy("unity_metastore",
+       name=f{prefix}-unity-catalog-metastore-access-iam-policy,
        policy=this.json)
-   metastore_data_access = aws.
-       name=f
+   metastore_data_access = aws.index.IamRole("metastore_data_access",
+       name=f{prefix}-uc-access,
        assume_role_policy=this_get_aws_unity_catalog_assume_role_policy.json)
-
+   metastore_data_access_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("metastore_data_access",
        role=metastore_data_access.name,
        policy_arn=unity_metastore.arn)
    ```

pulumi_databricks/get_aws_unity_catalog_policy.py
CHANGED

@@ -133,13 +133,13 @@ def get_aws_unity_catalog_policy(aws_account_id: Optional[_builtins.str] = None,
    this_get_aws_unity_catalog_assume_role_policy = databricks.get_aws_unity_catalog_assume_role_policy(aws_account_id=aws_account_id,
        role_name=f"{prefix}-uc-access",
        external_id="12345")
-   unity_metastore = aws.
-       name=f
+   unity_metastore = aws.index.IamPolicy("unity_metastore",
+       name=f{prefix}-unity-catalog-metastore-access-iam-policy,
        policy=this.json)
-   metastore_data_access = aws.
-       name=f
+   metastore_data_access = aws.index.IamRole("metastore_data_access",
+       name=f{prefix}-uc-access,
        assume_role_policy=this_get_aws_unity_catalog_assume_role_policy.json)
-
+   metastore_data_access_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("metastore_data_access",
        role=metastore_data_access.name,
        policy_arn=unity_metastore.arn)
    ```

@@ -195,13 +195,13 @@ def get_aws_unity_catalog_policy_output(aws_account_id: Optional[pulumi.Input[_b
    this_get_aws_unity_catalog_assume_role_policy = databricks.get_aws_unity_catalog_assume_role_policy(aws_account_id=aws_account_id,
        role_name=f"{prefix}-uc-access",
        external_id="12345")
-   unity_metastore = aws.
-       name=f
+   unity_metastore = aws.index.IamPolicy("unity_metastore",
+       name=f{prefix}-unity-catalog-metastore-access-iam-policy,
        policy=this.json)
-   metastore_data_access = aws.
-       name=f
+   metastore_data_access = aws.index.IamRole("metastore_data_access",
+       name=f{prefix}-uc-access,
        assume_role_policy=this_get_aws_unity_catalog_assume_role_policy.json)
-
+   metastore_data_access_iam_role_policy_attachment = aws.index.IamRolePolicyAttachment("metastore_data_access",
        role=metastore_data_access.name,
        policy_arn=unity_metastore.arn)
    ```

pulumi_databricks/get_cluster.py
CHANGED

@@ -116,6 +116,33 @@ def get_cluster(cluster_id: Optional[_builtins.str] = None,
    all_get_cluster = {__key: databricks.get_cluster(cluster_id=__value) for __key, __value in all.ids}
    ```

+   ### Multiple clusters with the same name
+
+   When fetching a cluster whose name is not unique (including terminated but not permanently deleted clusters), you must use the `cluster_id` argument to uniquely identify the cluster. Combine this data source with `get_clusters` to get the `cluster_id` of the cluster you want to fetch.
+
+   ```python
+   import pulumi
+   import pulumi_databricks as databricks
+
+   my_cluster = databricks.get_clusters(cluster_name_contains="my-cluster",
+       filter_by={
+           "cluster_states": ["RUNNING"],
+       })
+   my_cluster_get_cluster = databricks.get_cluster(cluster_id=my_cluster.ids[0])
+   ```
+
+   ## Related Resources
+
+   The following resources are often used in the same context:
+
+   * End to end workspace management guide.
+   * Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
+   * ClusterPolicy to create a Cluster policy, which limits the ability to create clusters based on a set of rules.
+   * InstancePool to manage [instance pools](https://docs.databricks.com/clusters/instance-pools/index.html) to reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances.
+   * Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
+   * Library to install a [library](https://docs.databricks.com/libraries/index.html) on databricks_cluster.
+   * Pipeline to deploy [Lakeflow Declarative Pipelines](https://docs.databricks.com/aws/en/dlt).
+

    :param _builtins.str cluster_id: The id of the cluster.
    :param Union['GetClusterClusterInfoArgs', 'GetClusterClusterInfoArgsDict'] cluster_info: block, consisting of following fields:

@@ -161,6 +188,33 @@ def get_cluster_output(cluster_id: Optional[pulumi.Input[Optional[_builtins.str]
    all_get_cluster = {__key: databricks.get_cluster(cluster_id=__value) for __key, __value in all.ids}
    ```

+   ### Multiple clusters with the same name
+
+   When fetching a cluster whose name is not unique (including terminated but not permanently deleted clusters), you must use the `cluster_id` argument to uniquely identify the cluster. Combine this data source with `get_clusters` to get the `cluster_id` of the cluster you want to fetch.
+
+   ```python
+   import pulumi
+   import pulumi_databricks as databricks
+
+   my_cluster = databricks.get_clusters(cluster_name_contains="my-cluster",
+       filter_by={
+           "cluster_states": ["RUNNING"],
+       })
+   my_cluster_get_cluster = databricks.get_cluster(cluster_id=my_cluster.ids[0])
+   ```
+
+   ## Related Resources
+
+   The following resources are often used in the same context:
+
+   * End to end workspace management guide.
+   * Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
+   * ClusterPolicy to create a Cluster policy, which limits the ability to create clusters based on a set of rules.
+   * InstancePool to manage [instance pools](https://docs.databricks.com/clusters/instance-pools/index.html) to reduce cluster start and auto-scaling times by maintaining a set of idle, ready-to-use instances.
+   * Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
+   * Library to install a [library](https://docs.databricks.com/libraries/index.html) on databricks_cluster.
+   * Pipeline to deploy [Lakeflow Declarative Pipelines](https://docs.databricks.com/aws/en/dlt).
+

    :param _builtins.str cluster_id: The id of the cluster.
    :param Union['GetClusterClusterInfoArgs', 'GetClusterClusterInfoArgsDict'] cluster_info: block, consisting of following fields:
pulumi_databricks/get_current_config.py
CHANGED

@@ -130,10 +130,10 @@ def get_current_config(account_id: Optional[_builtins.str] = None,
        "azure": True,
    }].apply(lambda entries: [{
        "accessConnectorId": cloud_credential_id,
-   } for
+   } for entry2 in entries])),
    databricks_gcp_service_account=single_or_none([{"key": k, "value": v} for k, v in {} if this.cloud_type == "gcp" else {
        "gcp": True,
-   }].apply(lambda entries: [{} for
+   }].apply(lambda entries: [{} for entry3 in entries])),
    name="storage_cred",
    comment="Managed by TF")
    ```

@@ -209,10 +209,10 @@ def get_current_config_output(account_id: Optional[pulumi.Input[Optional[_builti
        "azure": True,
    }].apply(lambda entries: [{
        "accessConnectorId": cloud_credential_id,
-   } for
+   } for entry2 in entries])),
    databricks_gcp_service_account=single_or_none([{"key": k, "value": v} for k, v in {} if this.cloud_type == "gcp" else {
        "gcp": True,
-   }].apply(lambda entries: [{} for
+   }].apply(lambda entries: [{} for entry3 in entries])),
    name="storage_cred",
    comment="Managed by TF")
    ```

pulumi_databricks/get_metastore.py
CHANGED

@@ -113,12 +113,12 @@ def get_metastore(id: Optional[_builtins.str] = None,
    import pulumi_aws as aws
    import pulumi_databricks as databricks

-   metastore = aws.
-       bucket=f
+   metastore = aws.index.S3Bucket("metastore",
+       bucket=f{prefix}-metastore,
        force_destroy=True)
    this_metastore = databricks.Metastore("this",
        name="primary",
-       storage_root=
+       storage_root=f"s3://{metastore['id']}/metastore",
        owner=unity_admin_group,
        force_destroy=True)
    this = databricks.get_metastore_output(metastore_id=this_metastore.id)

@@ -175,12 +175,12 @@ def get_metastore_output(id: Optional[pulumi.Input[Optional[_builtins.str]]] = N
    import pulumi_aws as aws
    import pulumi_databricks as databricks

-   metastore = aws.
-       bucket=f
+   metastore = aws.index.S3Bucket("metastore",
+       bucket=f{prefix}-metastore,
        force_destroy=True)
    this_metastore = databricks.Metastore("this",
        name="primary",
-       storage_root=
+       storage_root=f"s3://{metastore['id']}/metastore",
        owner=unity_admin_group,
        force_destroy=True)
    this = databricks.get_metastore_output(metastore_id=this_metastore.id)
pulumi_databricks/get_notebook.py
CHANGED

@@ -13,6 +13,8 @@ if sys.version_info >= (3, 11):
 else:
     from typing_extensions import NotRequired, TypedDict, TypeAlias
 from . import _utilities
+from . import outputs
+from ._inputs import *

 __all__ = [
     'GetNotebookResult',

@@ -26,7 +28,7 @@ class GetNotebookResult:
     """
     A collection of values returned by getNotebook.
     """
-    def __init__(__self__, content=None, format=None, id=None, language=None, object_id=None, object_type=None, path=None, workspace_path=None):
+    def __init__(__self__, content=None, format=None, id=None, language=None, object_id=None, object_type=None, path=None, provider_config=None, workspace_path=None):
         if content and not isinstance(content, str):
             raise TypeError("Expected argument 'content' to be a str")
         pulumi.set(__self__, "content", content)

@@ -48,6 +50,9 @@ class GetNotebookResult:
         if path and not isinstance(path, str):
             raise TypeError("Expected argument 'path' to be a str")
         pulumi.set(__self__, "path", path)
+        if provider_config and not isinstance(provider_config, dict):
+            raise TypeError("Expected argument 'provider_config' to be a dict")
+        pulumi.set(__self__, "provider_config", provider_config)
         if workspace_path and not isinstance(workspace_path, str):
             raise TypeError("Expected argument 'workspace_path' to be a str")
         pulumi.set(__self__, "workspace_path", workspace_path)

@@ -102,6 +107,11 @@ class GetNotebookResult:
     def path(self) -> _builtins.str:
         return pulumi.get(self, "path")

+    @_builtins.property
+    @pulumi.getter(name="providerConfig")
+    def provider_config(self) -> Optional['outputs.GetNotebookProviderConfigResult']:
+        return pulumi.get(self, "provider_config")
+
     @_builtins.property
     @pulumi.getter(name="workspacePath")
     def workspace_path(self) -> _builtins.str:

@@ -124,6 +134,7 @@ class AwaitableGetNotebookResult(GetNotebookResult):
             object_id=self.object_id,
             object_type=self.object_type,
             path=self.path,
+            provider_config=self.provider_config,
             workspace_path=self.workspace_path)


@@ -132,6 +143,7 @@ def get_notebook(format: Optional[_builtins.str] = None,
                  object_id: Optional[_builtins.int] = None,
                  object_type: Optional[_builtins.str] = None,
                  path: Optional[_builtins.str] = None,
+                 provider_config: Optional[Union['GetNotebookProviderConfigArgs', 'GetNotebookProviderConfigArgsDict']] = None,
                  opts: Optional[pulumi.InvokeOptions] = None) -> AwaitableGetNotebookResult:
     """
     This data source allows to export a notebook from Databricks Workspace.

@@ -154,6 +166,7 @@ def get_notebook(format: Optional[_builtins.str] = None,
     :param _builtins.int object_id: notebook object ID
     :param _builtins.str object_type: notebook object type
     :param _builtins.str path: Notebook path on the workspace
+    :param Union['GetNotebookProviderConfigArgs', 'GetNotebookProviderConfigArgsDict'] provider_config: Configure the provider for management through account provider. This block consists of the following fields:
     """
     __args__ = dict()
     __args__['format'] = format

@@ -161,6 +174,7 @@ def get_notebook(format: Optional[_builtins.str] = None,
     __args__['objectId'] = object_id
     __args__['objectType'] = object_type
     __args__['path'] = path
+    __args__['providerConfig'] = provider_config
     opts = pulumi.InvokeOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
     __ret__ = pulumi.runtime.invoke('databricks:index/getNotebook:getNotebook', __args__, opts=opts, typ=GetNotebookResult).value

@@ -172,12 +186,14 @@ def get_notebook(format: Optional[_builtins.str] = None,
         object_id=pulumi.get(__ret__, 'object_id'),
         object_type=pulumi.get(__ret__, 'object_type'),
         path=pulumi.get(__ret__, 'path'),
+        provider_config=pulumi.get(__ret__, 'provider_config'),
         workspace_path=pulumi.get(__ret__, 'workspace_path'))
 def get_notebook_output(format: Optional[pulumi.Input[_builtins.str]] = None,
                         language: Optional[pulumi.Input[Optional[_builtins.str]]] = None,
                         object_id: Optional[pulumi.Input[Optional[_builtins.int]]] = None,
                         object_type: Optional[pulumi.Input[Optional[_builtins.str]]] = None,
                         path: Optional[pulumi.Input[_builtins.str]] = None,
+                        provider_config: Optional[pulumi.Input[Optional[Union['GetNotebookProviderConfigArgs', 'GetNotebookProviderConfigArgsDict']]]] = None,
                         opts: Optional[Union[pulumi.InvokeOptions, pulumi.InvokeOutputOptions]] = None) -> pulumi.Output[GetNotebookResult]:
     """
     This data source allows to export a notebook from Databricks Workspace.

@@ -200,6 +216,7 @@ def get_notebook_output(format: Optional[pulumi.Input[_builtins.str]] = None,
     :param _builtins.int object_id: notebook object ID
     :param _builtins.str object_type: notebook object type
     :param _builtins.str path: Notebook path on the workspace
+    :param Union['GetNotebookProviderConfigArgs', 'GetNotebookProviderConfigArgsDict'] provider_config: Configure the provider for management through account provider. This block consists of the following fields:
     """
     __args__ = dict()
     __args__['format'] = format

@@ -207,6 +224,7 @@ def get_notebook_output(format: Optional[pulumi.Input[_builtins.str]] = None,
     __args__['objectId'] = object_id
     __args__['objectType'] = object_type
     __args__['path'] = path
+    __args__['providerConfig'] = provider_config
     opts = pulumi.InvokeOutputOptions.merge(_utilities.get_invoke_opts_defaults(), opts)
     __ret__ = pulumi.runtime.invoke_output('databricks:index/getNotebook:getNotebook', __args__, opts=opts, typ=GetNotebookResult)
     return __ret__.apply(lambda __response__: GetNotebookResult(

@@ -217,4 +235,5 @@ def get_notebook_output(format: Optional[pulumi.Input[_builtins.str]] = None,
         object_id=pulumi.get(__response__, 'object_id'),
         object_type=pulumi.get(__response__, 'object_type'),
         path=pulumi.get(__response__, 'path'),
+        provider_config=pulumi.get(__response__, 'provider_config'),
         workspace_path=pulumi.get(__response__, 'workspace_path')))
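`get_notebook` gains an optional `provider_config` input and a matching `provider_config` result field in this release. A minimal usage sketch, assuming (not confirmed by this diff, which truncates the field list) that the block carries a `workspace_id` for account-level provider management:

```python
import pulumi_databricks as databricks

# Plain export, unchanged from earlier releases.
nb = databricks.get_notebook(path="/Shared/example", format="SOURCE")

# New in 1.78.0: optionally route the lookup through an account-level
# provider. The "workspace_id" field below is an assumption; check
# GetNotebookProviderConfigArgs in _inputs.py for the actual shape.
nb_scoped = databricks.get_notebook(
    path="/Shared/example",
    format="SOURCE",
    provider_config={"workspace_id": "1234567890123456"},
)
```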