@pulumi/databricks 0.0.1-alpha.1648473134

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (261) hide show
  1. package/LICENSE +202 -0
  2. package/README.md +303 -0
  3. package/awsS3Mount.d.ts +50 -0
  4. package/awsS3Mount.js +63 -0
  5. package/awsS3Mount.js.map +1 -0
  6. package/azureAdlsGen1Mount.d.ts +65 -0
  7. package/azureAdlsGen1Mount.js +85 -0
  8. package/azureAdlsGen1Mount.js.map +1 -0
  9. package/azureAdlsGen2Mount.d.ts +68 -0
  10. package/azureAdlsGen2Mount.js +93 -0
  11. package/azureAdlsGen2Mount.js.map +1 -0
  12. package/azureBlobMount.d.ts +62 -0
  13. package/azureBlobMount.js +83 -0
  14. package/azureBlobMount.js.map +1 -0
  15. package/catalog.d.ts +124 -0
  16. package/catalog.js +88 -0
  17. package/catalog.js.map +1 -0
  18. package/cluster.d.ts +332 -0
  19. package/cluster.js +121 -0
  20. package/cluster.js.map +1 -0
  21. package/clusterPolicy.d.ts +112 -0
  22. package/clusterPolicy.js +97 -0
  23. package/clusterPolicy.js.map +1 -0
  24. package/config/index.d.ts +1 -0
  25. package/config/index.js +21 -0
  26. package/config/index.js.map +1 -0
  27. package/config/vars.d.ts +20 -0
  28. package/config/vars.js +127 -0
  29. package/config/vars.js.map +1 -0
  30. package/dbfsFile.d.ts +91 -0
  31. package/dbfsFile.js +71 -0
  32. package/dbfsFile.js.map +1 -0
  33. package/directory.d.ts +72 -0
  34. package/directory.js +65 -0
  35. package/directory.js.map +1 -0
  36. package/externalLocation.d.ts +114 -0
  37. package/externalLocation.js +80 -0
  38. package/externalLocation.js.map +1 -0
  39. package/getAwsAssumeRolePolicy.d.ts +47 -0
  40. package/getAwsAssumeRolePolicy.js +24 -0
  41. package/getAwsAssumeRolePolicy.js.map +1 -0
  42. package/getAwsBucketPolicy.d.ts +59 -0
  43. package/getAwsBucketPolicy.js +36 -0
  44. package/getAwsBucketPolicy.js.map +1 -0
  45. package/getAwsCrossAccountPolicy.d.ts +59 -0
  46. package/getAwsCrossAccountPolicy.js +47 -0
  47. package/getAwsCrossAccountPolicy.js.map +1 -0
  48. package/getCatalogs.d.ts +54 -0
  49. package/getCatalogs.js +43 -0
  50. package/getCatalogs.js.map +1 -0
  51. package/getClusters.d.ts +69 -0
  52. package/getClusters.js +57 -0
  53. package/getClusters.js.map +1 -0
  54. package/getCurrentUser.d.ts +39 -0
  55. package/getCurrentUser.js +38 -0
  56. package/getCurrentUser.js.map +1 -0
  57. package/getDbfsFile.d.ts +69 -0
  58. package/getDbfsFile.js +44 -0
  59. package/getDbfsFile.js.map +1 -0
  60. package/getDbfsFilePaths.d.ts +68 -0
  61. package/getDbfsFilePaths.js +46 -0
  62. package/getDbfsFilePaths.js.map +1 -0
  63. package/getGroup.d.ts +166 -0
  64. package/getGroup.js +46 -0
  65. package/getGroup.js.map +1 -0
  66. package/getJobs.d.ts +47 -0
  67. package/getJobs.js +30 -0
  68. package/getJobs.js.map +1 -0
  69. package/getNodeType.d.ts +173 -0
  70. package/getNodeType.js +71 -0
  71. package/getNodeType.js.map +1 -0
  72. package/getNotebook.d.ts +93 -0
  73. package/getNotebook.js +39 -0
  74. package/getNotebook.js.map +1 -0
  75. package/getNotebookPaths.d.ts +58 -0
  76. package/getNotebookPaths.js +36 -0
  77. package/getNotebookPaths.js.map +1 -0
  78. package/getSchemas.d.ts +65 -0
  79. package/getSchemas.js +45 -0
  80. package/getSchemas.js.map +1 -0
  81. package/getSparkVersion.d.ts +150 -0
  82. package/getSparkVersion.js +70 -0
  83. package/getSparkVersion.js.map +1 -0
  84. package/getTables.d.ts +75 -0
  85. package/getTables.js +47 -0
  86. package/getTables.js.map +1 -0
  87. package/getUser.d.ts +78 -0
  88. package/getUser.js +39 -0
  89. package/getUser.js.map +1 -0
  90. package/getZones.d.ts +29 -0
  91. package/getZones.js +26 -0
  92. package/getZones.js.map +1 -0
  93. package/gitCredential.d.ts +48 -0
  94. package/gitCredential.js +64 -0
  95. package/gitCredential.js.map +1 -0
  96. package/globalInitScript.d.ts +99 -0
  97. package/globalInitScript.js +68 -0
  98. package/globalInitScript.js.map +1 -0
  99. package/grants.d.ts +58 -0
  100. package/grants.js +64 -0
  101. package/grants.js.map +1 -0
  102. package/group.d.ts +131 -0
  103. package/group.js +83 -0
  104. package/group.js.map +1 -0
  105. package/groupInstanceProfile.d.ts +96 -0
  106. package/groupInstanceProfile.js +93 -0
  107. package/groupInstanceProfile.js.map +1 -0
  108. package/index.d.ts +81 -0
  109. package/index.js +362 -0
  110. package/index.js.map +1 -0
  111. package/instancePool.d.ts +166 -0
  112. package/instancePool.js +93 -0
  113. package/instancePool.js.map +1 -0
  114. package/instanceProfile.d.ts +180 -0
  115. package/instanceProfile.js +161 -0
  116. package/instanceProfile.js.map +1 -0
  117. package/ipAccessList.d.ts +128 -0
  118. package/ipAccessList.js +108 -0
  119. package/ipAccessList.js.map +1 -0
  120. package/job.d.ts +228 -0
  121. package/job.js +102 -0
  122. package/job.js.map +1 -0
  123. package/library.d.ts +183 -0
  124. package/library.js +189 -0
  125. package/library.js.map +1 -0
  126. package/metastore.d.ts +102 -0
  127. package/metastore.js +75 -0
  128. package/metastore.js.map +1 -0
  129. package/metastoreAssignment.d.ts +94 -0
  130. package/metastoreAssignment.js +81 -0
  131. package/metastoreAssignment.js.map +1 -0
  132. package/metastoreDataAccess.d.ts +85 -0
  133. package/metastoreDataAccess.js +73 -0
  134. package/metastoreDataAccess.js.map +1 -0
  135. package/mlflowExperiment.d.ts +122 -0
  136. package/mlflowExperiment.js +99 -0
  137. package/mlflowExperiment.js.map +1 -0
  138. package/mlflowModel.d.ts +131 -0
  139. package/mlflowModel.js +107 -0
  140. package/mlflowModel.js.map +1 -0
  141. package/mlflowWebhook.d.ts +129 -0
  142. package/mlflowWebhook.js +100 -0
  143. package/mlflowWebhook.js.map +1 -0
  144. package/mount.d.ts +89 -0
  145. package/mount.js +76 -0
  146. package/mount.js.map +1 -0
  147. package/mwsCredentials.d.ts +137 -0
  148. package/mwsCredentials.js +115 -0
  149. package/mwsCredentials.js.map +1 -0
  150. package/mwsCustomerManagedKeys.d.ts +257 -0
  151. package/mwsCustomerManagedKeys.js +226 -0
  152. package/mwsCustomerManagedKeys.js.map +1 -0
  153. package/mwsLogDelivery.d.ts +219 -0
  154. package/mwsLogDelivery.js +144 -0
  155. package/mwsLogDelivery.js.map +1 -0
  156. package/mwsNetworks.d.ts +129 -0
  157. package/mwsNetworks.js +89 -0
  158. package/mwsNetworks.js.map +1 -0
  159. package/mwsPrivateAccessSettings.d.ts +137 -0
  160. package/mwsPrivateAccessSettings.js +74 -0
  161. package/mwsPrivateAccessSettings.js.map +1 -0
  162. package/mwsStorageConfigurations.d.ts +122 -0
  163. package/mwsStorageConfigurations.js +106 -0
  164. package/mwsStorageConfigurations.js.map +1 -0
  165. package/mwsVpcEndpoint.d.ts +122 -0
  166. package/mwsVpcEndpoint.js +79 -0
  167. package/mwsVpcEndpoint.js.map +1 -0
  168. package/mwsWorkspaces.d.ts +222 -0
  169. package/mwsWorkspaces.js +106 -0
  170. package/mwsWorkspaces.js.map +1 -0
  171. package/notebook.d.ts +128 -0
  172. package/notebook.js +77 -0
  173. package/notebook.js.map +1 -0
  174. package/oboToken.d.ts +98 -0
  175. package/oboToken.js +82 -0
  176. package/oboToken.js.map +1 -0
  177. package/package.json +28 -0
  178. package/package.json.bak +28 -0
  179. package/package.json.dev +28 -0
  180. package/permissions.d.ts +211 -0
  181. package/permissions.js +97 -0
  182. package/permissions.js.map +1 -0
  183. package/pipeline.d.ts +200 -0
  184. package/pipeline.js +134 -0
  185. package/pipeline.js.map +1 -0
  186. package/provider.d.ts +61 -0
  187. package/provider.js +64 -0
  188. package/provider.js.map +1 -0
  189. package/repo.d.ts +117 -0
  190. package/repo.js +71 -0
  191. package/repo.js.map +1 -0
  192. package/schema.d.ts +149 -0
  193. package/schema.js +106 -0
  194. package/schema.js.map +1 -0
  195. package/scripts/install-pulumi-plugin.js +21 -0
  196. package/secret.d.ts +115 -0
  197. package/secret.js +99 -0
  198. package/secret.js.map +1 -0
  199. package/secretAcl.d.ts +115 -0
  200. package/secretAcl.js +105 -0
  201. package/secretAcl.js.map +1 -0
  202. package/secretScope.d.ts +85 -0
  203. package/secretScope.js +64 -0
  204. package/secretScope.js.map +1 -0
  205. package/servicePrincipal.d.ts +142 -0
  206. package/servicePrincipal.js +83 -0
  207. package/servicePrincipal.js.map +1 -0
  208. package/sqlDashboard.d.ts +90 -0
  209. package/sqlDashboard.js +99 -0
  210. package/sqlDashboard.js.map +1 -0
  211. package/sqlEndpoint.d.ts +249 -0
  212. package/sqlEndpoint.js +128 -0
  213. package/sqlEndpoint.js.map +1 -0
  214. package/sqlGlobalConfig.d.ts +157 -0
  215. package/sqlGlobalConfig.js +115 -0
  216. package/sqlGlobalConfig.js.map +1 -0
  217. package/sqlPermissions.d.ts +191 -0
  218. package/sqlPermissions.js +139 -0
  219. package/sqlPermissions.js.map +1 -0
  220. package/sqlQuery.d.ts +131 -0
  221. package/sqlQuery.js +139 -0
  222. package/sqlQuery.js.map +1 -0
  223. package/sqlVisualization.d.ts +105 -0
  224. package/sqlVisualization.js +119 -0
  225. package/sqlVisualization.js.map +1 -0
  226. package/sqlWidget.d.ts +109 -0
  227. package/sqlWidget.js +114 -0
  228. package/sqlWidget.js.map +1 -0
  229. package/storageCredential.d.ts +122 -0
  230. package/storageCredential.js +118 -0
  231. package/storageCredential.js.map +1 -0
  232. package/table.d.ts +249 -0
  233. package/table.js +157 -0
  234. package/table.js.map +1 -0
  235. package/token.d.ts +102 -0
  236. package/token.js +84 -0
  237. package/token.js.map +1 -0
  238. package/types/index.d.ts +3 -0
  239. package/types/index.js +11 -0
  240. package/types/index.js.map +1 -0
  241. package/types/input.d.ts +1209 -0
  242. package/types/input.js +5 -0
  243. package/types/input.js.map +1 -0
  244. package/types/output.d.ts +1222 -0
  245. package/types/output.js +5 -0
  246. package/types/output.js.map +1 -0
  247. package/user.d.ts +149 -0
  248. package/user.js +91 -0
  249. package/user.js.map +1 -0
  250. package/userInstanceProfile.d.ts +93 -0
  251. package/userInstanceProfile.js +90 -0
  252. package/userInstanceProfile.js.map +1 -0
  253. package/userRole.d.ts +106 -0
  254. package/userRole.js +103 -0
  255. package/userRole.js.map +1 -0
  256. package/utilities.d.ts +4 -0
  257. package/utilities.js +57 -0
  258. package/utilities.js.map +1 -0
  259. package/workspaceConf.d.ts +76 -0
  260. package/workspaceConf.js +71 -0
  261. package/workspaceConf.js.map +1 -0
package/library.d.ts ADDED
@@ -0,0 +1,183 @@
1
+ import * as pulumi from "@pulumi/pulumi";
2
+ import { input as inputs, output as outputs } from "./types";
3
+ /**
4
+ * Installs a [library](https://docs.databricks.com/libraries/index.html) on databricks_cluster. Each different type of library has a slightly different syntax. It's possible to set only one type of library within one resource. Otherwise, the plan will fail with an error.
5
+ *
6
+ * > **Note** `databricks.Library` resource would always start the associated cluster if it's not running, so make sure to have auto-termination configured. It's not possible to atomically change the version of the same library without cluster restart. Libraries are fully removed from the cluster only after restart.
7
+ *
8
+ * ## Java/Scala JAR
9
+ *
10
+ * ```typescript
11
+ * import * as pulumi from "@pulumi/pulumi";
12
+ * import * as databricks from "@pulumi/databricks";
13
+ *
14
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
15
+ * source: `${path.module}/app-0.0.1.jar`,
16
+ * path: "/FileStore/app-0.0.1.jar",
17
+ * });
18
+ * const appLibrary = new databricks.Library("appLibrary", {
19
+ * clusterId: databricks_cluster["this"].id,
20
+ * jar: appDbfsFile.dbfsPath,
21
+ * });
22
+ * ```
23
+ *
24
+ * ## Java/Scala Maven
25
+ *
26
+ * Installing artifacts from Maven repository. You can also optionally specify a `repo` parameter for custom Maven-style repository, that should be accessible without any authentication. Maven libraries are resolved in Databricks Control Plane, so repo should be accessible from it. It can even be properly configured [maven s3 wagon](https://github.com/seahen/maven-s3-wagon), [AWS CodeArtifact](https://aws.amazon.com/codeartifact/) or [Azure Artifacts](https://azure.microsoft.com/en-us/services/devops/artifacts/).
27
+ *
28
+ * ```typescript
29
+ * import * as pulumi from "@pulumi/pulumi";
30
+ * import * as databricks from "@pulumi/databricks";
31
+ *
32
+ * const deequ = new databricks.Library("deequ", {
33
+ * clusterId: databricks_cluster["this"].id,
34
+ * maven: {
35
+ * coordinates: "com.amazon.deequ:deequ:1.0.4",
36
+ * exclusions: ["org.apache.avro:avro"],
37
+ * },
38
+ * });
39
+ * ```
40
+ *
41
+ * ## Python Wheel
42
+ *
43
+ * ```typescript
44
+ * import * as pulumi from "@pulumi/pulumi";
45
+ * import * as databricks from "@pulumi/databricks";
46
+ *
47
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
48
+ * source: `${path.module}/baz.whl`,
49
+ * path: "/FileStore/baz.whl",
50
+ * });
51
+ * const appLibrary = new databricks.Library("appLibrary", {
52
+ * clusterId: databricks_cluster["this"].id,
53
+ * whl: appDbfsFile.dbfsPath,
54
+ * });
55
+ * ```
56
+ *
57
+ * ## Python PyPI
58
+ *
59
+ * Installing Python PyPI artifacts. You can optionally also specify the `repo` parameter for custom PyPI mirror, which should be accessible without any authentication for the network that cluster runs in.
60
+ *
61
+ * > **Note** `repo` host should be accessible from Internet by Databricks control plane. If connectivity to custom PyPI repositories is required, please modify cluster-node `/etc/pip.conf` through databricks_global_init_script.
62
+ *
63
+ * ```typescript
64
+ * import * as pulumi from "@pulumi/pulumi";
65
+ * import * as databricks from "@pulumi/databricks";
66
+ *
67
+ * const fbprophet = new databricks.Library("fbprophet", {
68
+ * clusterId: databricks_cluster["this"].id,
69
+ * pypi: {
70
+ * "package": "fbprophet==0.6",
71
+ * },
72
+ * });
73
+ * ```
74
+ *
75
+ * ## Python EGG
76
+ *
77
+ * ```typescript
78
+ * import * as pulumi from "@pulumi/pulumi";
79
+ * import * as databricks from "@pulumi/databricks";
80
+ *
81
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
82
+ * source: `${path.module}/foo.egg`,
83
+ * path: "/FileStore/foo.egg",
84
+ * });
85
+ * const appLibrary = new databricks.Library("appLibrary", {
86
+ * clusterId: databricks_cluster["this"].id,
87
+ * egg: appDbfsFile.dbfsPath,
88
+ * });
89
+ * ```
90
+ *
91
+ * ## R CRan
92
+ *
93
+ * Installing artifacts from CRan. You can also optionally specify a `repo` parameter for a custom cran mirror.
94
+ *
95
+ * ```typescript
96
+ * import * as pulumi from "@pulumi/pulumi";
97
+ * import * as databricks from "@pulumi/databricks";
98
+ *
99
+ * const rkeops = new databricks.Library("rkeops", {
100
+ * clusterId: databricks_cluster["this"].id,
101
+ * cran: {
102
+ * "package": "rkeops",
103
+ * },
104
+ * });
105
+ * ```
106
+ *
107
+ * ## Related Resources
108
+ *
109
+ * The following resources are often used in the same context:
110
+ *
111
+ * * End to end workspace management guide.
112
+ * * databricks.getClusters data to retrieve a list of databricks.Cluster ids.
113
+ * * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
114
+ * * databricks.ClusterPolicy to create a databricks.Cluster policy, which limits the ability to create clusters based on a set of rules.
115
+ * * databricks.DbfsFile data to get file content from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
116
+ * * databricks.getDbfsFilePaths data to get list of file names from get file content from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
117
+ * * databricks.DbfsFile to manage relatively small files on [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
118
+ * * databricks.GlobalInitScript to manage [global init scripts](https://docs.databricks.com/clusters/init-scripts.html#global-init-scripts), which are run on all databricks.Cluster and databricks_job.
119
+ * * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
120
+ * * databricks.Mount to [mount your cloud storage](https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs) on `dbfs:/mnt/name`.
121
+ * * databricks.Pipeline to deploy [Delta Live Tables](https://docs.databricks.com/data-engineering/delta-live-tables/index.html).
122
+ * * databricks.Repo to manage [Databricks Repos](https://docs.databricks.com/repos.html).
123
+ *
124
+ * ## Import
125
+ *
126
+ * -> **Note** Importing this resource is not currently supported.
127
+ */
128
+ export declare class Library extends pulumi.CustomResource {
129
+ /**
130
+ * Get an existing Library resource's state with the given name, ID, and optional extra
131
+ * properties used to qualify the lookup.
132
+ *
133
+ * @param name The _unique_ name of the resulting resource.
134
+ * @param id The _unique_ provider ID of the resource to lookup.
135
+ * @param state Any extra arguments used during the lookup.
136
+ * @param opts Optional settings to control the behavior of the CustomResource.
137
+ */
138
+ static get(name: string, id: pulumi.Input<pulumi.ID>, state?: LibraryState, opts?: pulumi.CustomResourceOptions): Library;
139
+ /**
140
+ * Returns true if the given object is an instance of Library. This is designed to work even
141
+ * when multiple copies of the Pulumi SDK have been loaded into the same process.
142
+ */
143
+ static isInstance(obj: any): obj is Library;
144
+ readonly clusterId: pulumi.Output<string>;
145
+ readonly cran: pulumi.Output<outputs.LibraryCran | undefined>;
146
+ readonly egg: pulumi.Output<string | undefined>;
147
+ readonly jar: pulumi.Output<string | undefined>;
148
+ readonly maven: pulumi.Output<outputs.LibraryMaven | undefined>;
149
+ readonly pypi: pulumi.Output<outputs.LibraryPypi | undefined>;
150
+ readonly whl: pulumi.Output<string | undefined>;
151
+ /**
152
+ * Create a Library resource with the given unique name, arguments, and options.
153
+ *
154
+ * @param name The _unique_ name of the resource.
155
+ * @param args The arguments to use to populate this resource's properties.
156
+ * @param opts A bag of options that control this resource's behavior.
157
+ */
158
+ constructor(name: string, args: LibraryArgs, opts?: pulumi.CustomResourceOptions);
159
+ }
160
+ /**
161
+ * Input properties used for looking up and filtering Library resources.
162
+ */
163
+ export interface LibraryState {
164
+ clusterId?: pulumi.Input<string>;
165
+ cran?: pulumi.Input<inputs.LibraryCran>;
166
+ egg?: pulumi.Input<string>;
167
+ jar?: pulumi.Input<string>;
168
+ maven?: pulumi.Input<inputs.LibraryMaven>;
169
+ pypi?: pulumi.Input<inputs.LibraryPypi>;
170
+ whl?: pulumi.Input<string>;
171
+ }
172
+ /**
173
+ * The set of arguments for constructing a Library resource.
174
+ */
175
+ export interface LibraryArgs {
176
+ clusterId: pulumi.Input<string>;
177
+ cran?: pulumi.Input<inputs.LibraryCran>;
178
+ egg?: pulumi.Input<string>;
179
+ jar?: pulumi.Input<string>;
180
+ maven?: pulumi.Input<inputs.LibraryMaven>;
181
+ pypi?: pulumi.Input<inputs.LibraryPypi>;
182
+ whl?: pulumi.Input<string>;
183
+ }
package/library.js ADDED
@@ -0,0 +1,189 @@
1
+ "use strict";
2
+ // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
+ // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
+ Object.defineProperty(exports, "__esModule", { value: true });
5
+ exports.Library = void 0;
6
+ const pulumi = require("@pulumi/pulumi");
7
+ const utilities = require("./utilities");
8
+ /**
9
+ * Installs a [library](https://docs.databricks.com/libraries/index.html) on databricks_cluster. Each different type of library has a slightly different syntax. It's possible to set only one type of library within one resource. Otherwise, the plan will fail with an error.
10
+ *
11
+ * > **Note** `databricks.Library` resource would always start the associated cluster if it's not running, so make sure to have auto-termination configured. It's not possible to atomically change the version of the same library without cluster restart. Libraries are fully removed from the cluster only after restart.
12
+ *
13
+ * ## Java/Scala JAR
14
+ *
15
+ * ```typescript
16
+ * import * as pulumi from "@pulumi/pulumi";
17
+ * import * as databricks from "@pulumi/databricks";
18
+ *
19
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
20
+ * source: `${path.module}/app-0.0.1.jar`,
21
+ * path: "/FileStore/app-0.0.1.jar",
22
+ * });
23
+ * const appLibrary = new databricks.Library("appLibrary", {
24
+ * clusterId: databricks_cluster["this"].id,
25
+ * jar: appDbfsFile.dbfsPath,
26
+ * });
27
+ * ```
28
+ *
29
+ * ## Java/Scala Maven
30
+ *
31
+ * Installing artifacts from Maven repository. You can also optionally specify a `repo` parameter for custom Maven-style repository, that should be accessible without any authentication. Maven libraries are resolved in Databricks Control Plane, so repo should be accessible from it. It can even be properly configured [maven s3 wagon](https://github.com/seahen/maven-s3-wagon), [AWS CodeArtifact](https://aws.amazon.com/codeartifact/) or [Azure Artifacts](https://azure.microsoft.com/en-us/services/devops/artifacts/).
32
+ *
33
+ * ```typescript
34
+ * import * as pulumi from "@pulumi/pulumi";
35
+ * import * as databricks from "@pulumi/databricks";
36
+ *
37
+ * const deequ = new databricks.Library("deequ", {
38
+ * clusterId: databricks_cluster["this"].id,
39
+ * maven: {
40
+ * coordinates: "com.amazon.deequ:deequ:1.0.4",
41
+ * exclusions: ["org.apache.avro:avro"],
42
+ * },
43
+ * });
44
+ * ```
45
+ *
46
+ * ## Python Wheel
47
+ *
48
+ * ```typescript
49
+ * import * as pulumi from "@pulumi/pulumi";
50
+ * import * as databricks from "@pulumi/databricks";
51
+ *
52
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
53
+ * source: `${path.module}/baz.whl`,
54
+ * path: "/FileStore/baz.whl",
55
+ * });
56
+ * const appLibrary = new databricks.Library("appLibrary", {
57
+ * clusterId: databricks_cluster["this"].id,
58
+ * whl: appDbfsFile.dbfsPath,
59
+ * });
60
+ * ```
61
+ *
62
+ * ## Python PyPI
63
+ *
64
+ * Installing Python PyPI artifacts. You can optionally also specify the `repo` parameter for custom PyPI mirror, which should be accessible without any authentication for the network that cluster runs in.
65
+ *
66
+ * > **Note** `repo` host should be accessible from Internet by Databricks control plane. If connectivity to custom PyPI repositories is required, please modify cluster-node `/etc/pip.conf` through databricks_global_init_script.
67
+ *
68
+ * ```typescript
69
+ * import * as pulumi from "@pulumi/pulumi";
70
+ * import * as databricks from "@pulumi/databricks";
71
+ *
72
+ * const fbprophet = new databricks.Library("fbprophet", {
73
+ * clusterId: databricks_cluster["this"].id,
74
+ * pypi: {
75
+ * "package": "fbprophet==0.6",
76
+ * },
77
+ * });
78
+ * ```
79
+ *
80
+ * ## Python EGG
81
+ *
82
+ * ```typescript
83
+ * import * as pulumi from "@pulumi/pulumi";
84
+ * import * as databricks from "@pulumi/databricks";
85
+ *
86
+ * const appDbfsFile = new databricks.DbfsFile("appDbfsFile", {
87
+ * source: `${path.module}/foo.egg`,
88
+ * path: "/FileStore/foo.egg",
89
+ * });
90
+ * const appLibrary = new databricks.Library("appLibrary", {
91
+ * clusterId: databricks_cluster["this"].id,
92
+ * egg: appDbfsFile.dbfsPath,
93
+ * });
94
+ * ```
95
+ *
96
+ * ## R CRan
97
+ *
98
+ * Installing artifacts from CRan. You can also optionally specify a `repo` parameter for a custom cran mirror.
99
+ *
100
+ * ```typescript
101
+ * import * as pulumi from "@pulumi/pulumi";
102
+ * import * as databricks from "@pulumi/databricks";
103
+ *
104
+ * const rkeops = new databricks.Library("rkeops", {
105
+ * clusterId: databricks_cluster["this"].id,
106
+ * cran: {
107
+ * "package": "rkeops",
108
+ * },
109
+ * });
110
+ * ```
111
+ *
112
+ * ## Related Resources
113
+ *
114
+ * The following resources are often used in the same context:
115
+ *
116
+ * * End to end workspace management guide.
117
+ * * databricks.getClusters data to retrieve a list of databricks.Cluster ids.
118
+ * * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
119
+ * * databricks.ClusterPolicy to create a databricks.Cluster policy, which limits the ability to create clusters based on a set of rules.
120
+ * * databricks.DbfsFile data to get file content from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
121
+ * * databricks.getDbfsFilePaths data to get list of file names from get file content from [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
122
+ * * databricks.DbfsFile to manage relatively small files on [Databricks File System (DBFS)](https://docs.databricks.com/data/databricks-file-system.html).
123
+ * * databricks.GlobalInitScript to manage [global init scripts](https://docs.databricks.com/clusters/init-scripts.html#global-init-scripts), which are run on all databricks.Cluster and databricks_job.
124
+ * * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
125
+ * * databricks.Mount to [mount your cloud storage](https://docs.databricks.com/data/databricks-file-system.html#mount-object-storage-to-dbfs) on `dbfs:/mnt/name`.
126
+ * * databricks.Pipeline to deploy [Delta Live Tables](https://docs.databricks.com/data-engineering/delta-live-tables/index.html).
127
+ * * databricks.Repo to manage [Databricks Repos](https://docs.databricks.com/repos.html).
128
+ *
129
+ * ## Import
130
+ *
131
+ * -> **Note** Importing this resource is not currently supported.
132
+ */
133
+ class Library extends pulumi.CustomResource {
134
+ constructor(name, argsOrState, opts) {
135
+ let resourceInputs = {};
136
+ opts = opts || {};
137
+ if (opts.id) {
138
+ const state = argsOrState;
139
+ resourceInputs["clusterId"] = state ? state.clusterId : undefined;
140
+ resourceInputs["cran"] = state ? state.cran : undefined;
141
+ resourceInputs["egg"] = state ? state.egg : undefined;
142
+ resourceInputs["jar"] = state ? state.jar : undefined;
143
+ resourceInputs["maven"] = state ? state.maven : undefined;
144
+ resourceInputs["pypi"] = state ? state.pypi : undefined;
145
+ resourceInputs["whl"] = state ? state.whl : undefined;
146
+ }
147
+ else {
148
+ const args = argsOrState;
149
+ if ((!args || args.clusterId === undefined) && !opts.urn) {
150
+ throw new Error("Missing required property 'clusterId'");
151
+ }
152
+ resourceInputs["clusterId"] = args ? args.clusterId : undefined;
153
+ resourceInputs["cran"] = args ? args.cran : undefined;
154
+ resourceInputs["egg"] = args ? args.egg : undefined;
155
+ resourceInputs["jar"] = args ? args.jar : undefined;
156
+ resourceInputs["maven"] = args ? args.maven : undefined;
157
+ resourceInputs["pypi"] = args ? args.pypi : undefined;
158
+ resourceInputs["whl"] = args ? args.whl : undefined;
159
+ }
160
+ opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
161
+ super(Library.__pulumiType, name, resourceInputs, opts);
162
+ }
163
+ /**
164
+ * Get an existing Library resource's state with the given name, ID, and optional extra
165
+ * properties used to qualify the lookup.
166
+ *
167
+ * @param name The _unique_ name of the resulting resource.
168
+ * @param id The _unique_ provider ID of the resource to lookup.
169
+ * @param state Any extra arguments used during the lookup.
170
+ * @param opts Optional settings to control the behavior of the CustomResource.
171
+ */
172
+ static get(name, id, state, opts) {
173
+ return new Library(name, state, Object.assign(Object.assign({}, opts), { id: id }));
174
+ }
175
+ /**
176
+ * Returns true if the given object is an instance of Library. This is designed to work even
177
+ * when multiple copies of the Pulumi SDK have been loaded into the same process.
178
+ */
179
+ static isInstance(obj) {
180
+ if (obj === undefined || obj === null) {
181
+ return false;
182
+ }
183
+ return obj['__pulumiType'] === Library.__pulumiType;
184
+ }
185
+ }
186
+ exports.Library = Library;
187
+ /** @internal */
188
+ Library.__pulumiType = 'databricks:index/library:Library';
189
+ //# sourceMappingURL=library.js.map
package/library.js.map ADDED
@@ -0,0 +1 @@
1
+ {"version":3,"file":"library.js","sourceRoot":"","sources":["../library.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AAEzC,yCAAyC;AAEzC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4HG;AACH,MAAa,OAAQ,SAAQ,MAAM,CAAC,cAAc;IA4C9C,YAAY,IAAY,EAAE,WAAwC,EAAE,IAAmC;QACnG,IAAI,cAAc,GAAkB,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QAClB,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,MAAM,KAAK,GAAG,WAAuC,CAAC;YACtD,cAAc,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YAC1D,cAAc,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;SACzD;aAAM;YACH,MAAM,IAAI,GAAG,WAAsC,CAAC;YACpD,IAAI,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACtD,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC5D;YACD,cAAc,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAChE,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACpD,cAAc,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACpD,cAAc,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;SACvD;QACD,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAA
C,oBAAoB,EAAE,EAAE,IAAI,CAAC,CAAC;QACnE,KAAK,CAAC,OAAO,CAAC,YAAY,EAAE,IAAI,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;IAC5D,CAAC;IAtED;;;;;;;;OAQG;IACI,MAAM,CAAC,GAAG,CAAC,IAAY,EAAE,EAA2B,EAAE,KAAoB,EAAE,IAAmC;QAClH,OAAO,IAAI,OAAO,CAAC,IAAI,EAAO,KAAK,kCAAO,IAAI,KAAE,EAAE,EAAE,EAAE,IAAG,CAAC;IAC9D,CAAC;IAKD;;;OAGG;IACI,MAAM,CAAC,UAAU,CAAC,GAAQ;QAC7B,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,EAAE;YACnC,OAAO,KAAK,CAAC;SAChB;QACD,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,OAAO,CAAC,YAAY,CAAC;IACxD,CAAC;;AA1BL,0BAwEC;AA1DG,gBAAgB;AACO,oBAAY,GAAG,kCAAkC,CAAC"}
package/metastore.d.ts ADDED
@@ -0,0 +1,102 @@
1
+ import * as pulumi from "@pulumi/pulumi";
2
+ /**
3
+ * > **Private Preview** This feature is in [Private Preview](https://docs.databricks.com/release-notes/release-types.html). Contact your Databricks representative to request access.
4
+ *
5
+ * A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces in order to control which workloads use each metastore.
6
+ *
7
+ * Unity Catalog offers a new metastore with built in security and auditing. This is distinct to the metastore used in previous versions of Databricks (based on the Hive Metastore).
8
+ *
9
+ * ## Import
10
+ *
11
+ * This resource can be imported by IDbash
12
+ *
13
+ * ```sh
14
+ * $ pulumi import databricks:index/metastore:Metastore this <id>
15
+ * ```
16
+ */
17
+ export declare class Metastore extends pulumi.CustomResource {
18
+ /**
19
+ * Get an existing Metastore resource's state with the given name, ID, and optional extra
20
+ * properties used to qualify the lookup.
21
+ *
22
+ * @param name The _unique_ name of the resulting resource.
23
+ * @param id The _unique_ provider ID of the resource to lookup.
24
+ * @param state Any extra arguments used during the lookup.
25
+ * @param opts Optional settings to control the behavior of the CustomResource.
26
+ */
27
+ static get(name: string, id: pulumi.Input<pulumi.ID>, state?: MetastoreState, opts?: pulumi.CustomResourceOptions): Metastore;
28
+ /**
29
+ * Returns true if the given object is an instance of Metastore. This is designed to work even
30
+ * when multiple copies of the Pulumi SDK have been loaded into the same process.
31
+ */
32
+ static isInstance(obj: any): obj is Metastore;
33
+ readonly defaultDataAccessConfigId: pulumi.Output<string | undefined>;
34
+ /**
35
+ * Destroy metastore regardless of its contents.
36
+ */
37
+ readonly forceDestroy: pulumi.Output<boolean | undefined>;
38
+ /**
39
+ * Name of metastore.
40
+ */
41
+ readonly name: pulumi.Output<string>;
42
+ /**
43
+ * Username/groupname of Metastore owner.
44
+ */
45
+ readonly owner: pulumi.Output<string>;
46
+ /**
47
+ * Path on cloud storage account, where managed databricks.Table are stored. Change forces creation of a new resource.
48
+ */
49
+ readonly storageRoot: pulumi.Output<string>;
50
+ /**
51
+ * Create a Metastore resource with the given unique name, arguments, and options.
52
+ *
53
+ * @param name The _unique_ name of the resource.
54
+ * @param args The arguments to use to populate this resource's properties.
55
+ * @param opts A bag of options that control this resource's behavior.
56
+ */
57
+ constructor(name: string, args: MetastoreArgs, opts?: pulumi.CustomResourceOptions);
58
+ }
59
/**
 * Input properties used for looking up and filtering Metastore resources.
 * All fields are optional because a lookup may filter on any subset of them.
 */
export interface MetastoreState {
    /**
     * ID of the default data access configuration for this metastore.
     * NOTE(review): inferred from the property name only — confirm against the Databricks provider docs.
     */
    defaultDataAccessConfigId?: pulumi.Input<string>;
    /**
     * Destroy metastore regardless of its contents.
     */
    forceDestroy?: pulumi.Input<boolean>;
    /**
     * Name of metastore.
     */
    name?: pulumi.Input<string>;
    /**
     * Username/groupname of Metastore owner.
     */
    owner?: pulumi.Input<string>;
    /**
     * Path on cloud storage account, where managed databricks.Table are stored. Change forces creation of a new resource.
     */
    storageRoot?: pulumi.Input<string>;
}
81
/**
 * The set of arguments for constructing a Metastore resource.
 * Only `storageRoot` is required; the provider supplies defaults for the rest.
 */
export interface MetastoreArgs {
    /**
     * ID of the default data access configuration for this metastore.
     * NOTE(review): inferred from the property name only — confirm against the Databricks provider docs.
     */
    defaultDataAccessConfigId?: pulumi.Input<string>;
    /**
     * Destroy metastore regardless of its contents.
     */
    forceDestroy?: pulumi.Input<boolean>;
    /**
     * Name of metastore.
     */
    name?: pulumi.Input<string>;
    /**
     * Username/groupname of Metastore owner.
     */
    owner?: pulumi.Input<string>;
    /**
     * Path on cloud storage account, where managed databricks.Table are stored. Change forces creation of a new resource.
     * Required — the constructor throws if it is missing.
     */
    storageRoot: pulumi.Input<string>;
}
package/metastore.js ADDED
@@ -0,0 +1,75 @@
1
+ "use strict";
2
+ // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
+ // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
+ Object.defineProperty(exports, "__esModule", { value: true });
5
+ exports.Metastore = void 0;
6
+ const pulumi = require("@pulumi/pulumi");
7
+ const utilities = require("./utilities");
8
/**
 * > **Private Preview** This feature is in [Private Preview](https://docs.databricks.com/release-notes/release-types.html). Contact your Databricks representative to request access.
 *
 * A metastore is the top-level container of objects in Unity Catalog. It stores data assets (tables and views) and the permissions that govern access to them. Databricks account admins can create metastores and assign them to Databricks workspaces in order to control which workloads use each metastore.
 *
 * Unity Catalog offers a new metastore with built-in security and auditing. This is distinct from the metastore used in previous versions of Databricks (based on the Hive Metastore).
 *
 * ## Import
 *
 * This resource can be imported by ID:
 *
 * ```sh
 *  $ pulumi import databricks:index/metastore:Metastore this <id>
 * ```
 */
class Metastore extends pulumi.CustomResource {
    /**
     * Create a Metastore resource with the given unique name, arguments, and options.
     * When `opts.id` is set this is a lookup (state-populating) construction used by
     * `Metastore.get`; otherwise `argsOrState` is validated as construction arguments.
     *
     * @param name The _unique_ name of the resource.
     * @param argsOrState MetastoreArgs on create, MetastoreState when rehydrating via `get`.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name, argsOrState, opts) {
        let resourceInputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup path: forward the (possibly partial) recorded state verbatim.
            const state = argsOrState;
            resourceInputs["defaultDataAccessConfigId"] = state ? state.defaultDataAccessConfigId : undefined;
            resourceInputs["forceDestroy"] = state ? state.forceDestroy : undefined;
            resourceInputs["name"] = state ? state.name : undefined;
            resourceInputs["owner"] = state ? state.owner : undefined;
            resourceInputs["storageRoot"] = state ? state.storageRoot : undefined;
        }
        else {
            const args = argsOrState;
            // storageRoot is the only required argument (opts.urn means we are
            // reconstructing an existing resource, so validation is skipped).
            if ((!args || args.storageRoot === undefined) && !opts.urn) {
                throw new Error("Missing required property 'storageRoot'");
            }
            resourceInputs["defaultDataAccessConfigId"] = args ? args.defaultDataAccessConfigId : undefined;
            resourceInputs["forceDestroy"] = args ? args.forceDestroy : undefined;
            resourceInputs["name"] = args ? args.name : undefined;
            resourceInputs["owner"] = args ? args.owner : undefined;
            resourceInputs["storageRoot"] = args ? args.storageRoot : undefined;
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Metastore.__pulumiType, name, resourceInputs, opts);
    }
    /**
     * Get an existing Metastore resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        return new Metastore(name, state, Object.assign(Object.assign({}, opts), { id: id }));
    }
    /**
     * Returns true if the given object is an instance of Metastore. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Tag comparison instead of `instanceof` so it works across SDK copies.
        return obj['__pulumiType'] === Metastore.__pulumiType;
    }
}
exports.Metastore = Metastore;
/** @internal */
Metastore.__pulumiType = 'databricks:index/metastore:Metastore';
//# sourceMappingURL=metastore.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"metastore.js","sourceRoot":"","sources":["../metastore.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AACzC,yCAAyC;AAEzC;;;;;;;;;;;;;;GAcG;AACH,MAAa,SAAU,SAAQ,MAAM,CAAC,cAAc;IAsDhD,YAAY,IAAY,EAAE,WAA4C,EAAE,IAAmC;QACvG,IAAI,cAAc,GAAkB,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QAClB,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,MAAM,KAAK,GAAG,WAAyC,CAAC;YACxD,cAAc,CAAC,2BAA2B,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,yBAAyB,CAAC,CAAC,CAAC,SAAS,CAAC;YAClG,cAAc,CAAC,cAAc,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC;YACxE,cAAc,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YAC1D,cAAc,CAAC,aAAa,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC;SACzE;aAAM;YACH,MAAM,IAAI,GAAG,WAAwC,CAAC;YACtD,IAAI,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,WAAW,KAAK,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACxD,MAAM,IAAI,KAAK,CAAC,yCAAyC,CAAC,CAAC;aAC9D;YACD,cAAc,CAAC,2BAA2B,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,yBAAyB,CAAC,CAAC,CAAC,SAAS,CAAC;YAChG,cAAc,CAAC,cAAc,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC;YACtE,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,aAAa,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC;SACvE;QACD,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,CAAC,CAAC;QACnE,KAAK,CAAC,SAAS,CAAC,YAAY,EAAE,IAAI,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;IAC9D,CAAC;IA5ED;;;;;;;;OAQG;IACI,MAAM,CAAC,GAAG,CAAC,IAAY,EAAE,EAA2B,EAAE,KAAsB,EAAE,IAAmC;QACpH,OAAO,IAAI,SAAS,CAAC,IAAI,EAAO,KAAK,kCAAO,IAAI,KAAE,EAAE,EAAE,EAAE,IAAG,CAAC;IAChE,CAAC;IAKD;;;OAGG;IACI,MAAM,CAAC,UAAU,CAAC,GAAQ;QAC7B,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,EAAE;YACnC,OAAO,KAAK,CAAC;SAChB;QACD,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,SAAS,CAAC,YAAY,CAA
C;IAC1D,CAAC;;AA1BL,8BA8EC;AAhEG,gBAAgB;AACO,sBAAY,GAAG,sCAAsC,CAAC"}
@@ -0,0 +1,94 @@
1
+ import * as pulumi from "@pulumi/pulumi";
2
/**
 * > **Private Preview** This feature is in [Private Preview](https://docs.databricks.com/release-notes/release-types.html). Contact your Databricks representative to request access.
 *
 * A single databricks.Metastore can be shared across Databricks workspaces, and each linked workspace has a consistent view of the data and a single set of access policies. It is only recommended to have multiple metastores when organizations wish to have hard isolation boundaries between data (note that data cannot be easily joined/queried across metastores).
 *
 * ## Example Usage
 *
 * ```typescript
 * import * as pulumi from "@pulumi/pulumi";
 * import * as databricks from "@pulumi/databricks";
 *
 * const thisMetastore = new databricks.Metastore("thisMetastore", {
 *     storageRoot: `s3://${aws_s3_bucket.metastore.id}/metastore`,
 *     owner: "uc admins",
 *     forceDestroy: true,
 * });
 * const thisMetastoreAssignment = new databricks.MetastoreAssignment("thisMetastoreAssignment", {
 *     metastoreId: thisMetastore.id,
 *     workspaceId: local.workspace_id,
 * });
 * ```
 */
export declare class MetastoreAssignment extends pulumi.CustomResource {
    /**
     * Get an existing MetastoreAssignment resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name: string, id: pulumi.Input<pulumi.ID>, state?: MetastoreAssignmentState, opts?: pulumi.CustomResourceOptions): MetastoreAssignment;
    /**
     * Returns true if the given object is an instance of MetastoreAssignment. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj: any): obj is MetastoreAssignment;
    /**
     * Default catalog used for this assignment. Defaults to `hiveMetastore`.
     */
    readonly defaultCatalogName: pulumi.Output<string | undefined>;
    /**
     * Unique identifier of the parent Metastore.
     */
    readonly metastoreId: pulumi.Output<string>;
    /**
     * Numeric ID of the workspace this metastore is assigned to.
     */
    readonly workspaceId: pulumi.Output<number>;
    /**
     * Create a MetastoreAssignment resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: MetastoreAssignmentArgs, opts?: pulumi.CustomResourceOptions);
}
61
/**
 * Input properties used for looking up and filtering MetastoreAssignment resources.
 * All fields are optional because a lookup may filter on any subset of them.
 */
export interface MetastoreAssignmentState {
    /**
     * Default catalog used for this assignment. Defaults to `hiveMetastore`.
     */
    defaultCatalogName?: pulumi.Input<string>;
    /**
     * Unique identifier of the parent Metastore.
     */
    metastoreId?: pulumi.Input<string>;
    /**
     * Numeric ID of the workspace this metastore is assigned to.
     */
    workspaceId?: pulumi.Input<number>;
}
78
+ /**
79
+ * The set of arguments for constructing a MetastoreAssignment resource.
80
+ */
81
+ export interface MetastoreAssignmentArgs {
82
+ /**
83
+ * Default catalog used for this assignment, default to `hiveMetastore`
84
+ */
85
+ defaultCatalogName?: pulumi.Input<string>;
86
+ /**
87
+ * Unique identifier of the parent Metastore
88
+ */
89
+ metastoreId: pulumi.Input<string>;
90
+ /**
91
+ * id of the workspace for the assignment
92
+ */
93
+ workspaceId: pulumi.Input<number>;
94
+ }