@pulumi/databricks 1.48.0-alpha.1721971593 → 1.48.0-alpha.1722058383

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (150) hide show
  1. package/artifactAllowlist.d.ts +7 -0
  2. package/artifactAllowlist.js +7 -0
  3. package/artifactAllowlist.js.map +1 -1
  4. package/catalog.d.ts +6 -0
  5. package/catalog.js +6 -0
  6. package/catalog.js.map +1 -1
  7. package/catalogWorkspaceBinding.d.ts +12 -0
  8. package/catalogWorkspaceBinding.js +12 -0
  9. package/catalogWorkspaceBinding.js.map +1 -1
  10. package/cluster.d.ts +9 -0
  11. package/cluster.js.map +1 -1
  12. package/dashboard.d.ts +24 -0
  13. package/dashboard.js +24 -0
  14. package/dashboard.js.map +1 -1
  15. package/dbfsFile.d.ts +11 -0
  16. package/dbfsFile.js +2 -0
  17. package/dbfsFile.js.map +1 -1
  18. package/directory.d.ts +38 -0
  19. package/directory.js +29 -0
  20. package/directory.js.map +1 -1
  21. package/file.d.ts +69 -0
  22. package/file.js +69 -0
  23. package/file.js.map +1 -1
  24. package/getCatalog.d.ts +12 -0
  25. package/getCatalog.js +12 -0
  26. package/getCatalog.js.map +1 -1
  27. package/getCatalogs.d.ts +12 -0
  28. package/getCatalogs.js +12 -0
  29. package/getCatalogs.js.map +1 -1
  30. package/getClusters.d.ts +8 -0
  31. package/getClusters.js +8 -0
  32. package/getClusters.js.map +1 -1
  33. package/getCurrentUser.d.ts +4 -4
  34. package/getCurrentUser.js +4 -4
  35. package/getExternalLocation.d.ts +8 -0
  36. package/getExternalLocation.js +8 -0
  37. package/getExternalLocation.js.map +1 -1
  38. package/getExternalLocations.d.ts +8 -0
  39. package/getExternalLocations.js +8 -0
  40. package/getExternalLocations.js.map +1 -1
  41. package/getJobs.d.ts +12 -0
  42. package/getJobs.js +12 -0
  43. package/getJobs.js.map +1 -1
  44. package/getMetastore.d.ts +12 -0
  45. package/getMetastore.js +12 -0
  46. package/getMetastore.js.map +1 -1
  47. package/getMetastores.d.ts +12 -0
  48. package/getMetastores.js +12 -0
  49. package/getMetastores.js.map +1 -1
  50. package/getSchema.d.ts +4 -6
  51. package/getSchema.js +4 -6
  52. package/getSchema.js.map +1 -1
  53. package/getSchemas.d.ts +12 -0
  54. package/getSchemas.js +12 -0
  55. package/getSchemas.js.map +1 -1
  56. package/getShare.d.ts +4 -0
  57. package/getShare.js +4 -0
  58. package/getShare.js.map +1 -1
  59. package/getShares.d.ts +4 -0
  60. package/getShares.js +4 -0
  61. package/getShares.js.map +1 -1
  62. package/getSqlWarehouses.d.ts +8 -0
  63. package/getSqlWarehouses.js +8 -0
  64. package/getSqlWarehouses.js.map +1 -1
  65. package/getStorageCredential.d.ts +8 -0
  66. package/getStorageCredential.js +8 -0
  67. package/getStorageCredential.js.map +1 -1
  68. package/getStorageCredentials.d.ts +8 -0
  69. package/getStorageCredentials.js +8 -0
  70. package/getStorageCredentials.js.map +1 -1
  71. package/getTable.d.ts +12 -0
  72. package/getTable.js +12 -0
  73. package/getTable.js.map +1 -1
  74. package/getTables.d.ts +12 -0
  75. package/getTables.js +12 -0
  76. package/getTables.js.map +1 -1
  77. package/getViews.d.ts +4 -4
  78. package/getViews.js +4 -4
  79. package/getVolume.d.ts +4 -6
  80. package/getVolume.js +4 -6
  81. package/getVolume.js.map +1 -1
  82. package/getVolumes.d.ts +8 -0
  83. package/getVolumes.js +8 -0
  84. package/getVolumes.js.map +1 -1
  85. package/gitCredential.d.ts +23 -0
  86. package/gitCredential.js +23 -0
  87. package/gitCredential.js.map +1 -1
  88. package/globalInitScript.d.ts +11 -0
  89. package/globalInitScript.js +2 -0
  90. package/globalInitScript.js.map +1 -1
  91. package/grant.d.ts +396 -0
  92. package/grant.js +396 -0
  93. package/grant.js.map +1 -1
  94. package/grants.d.ts +433 -0
  95. package/grants.js +433 -0
  96. package/grants.js.map +1 -1
  97. package/group.d.ts +9 -0
  98. package/group.js.map +1 -1
  99. package/job.d.ts +69 -0
  100. package/job.js +69 -0
  101. package/job.js.map +1 -1
  102. package/mlflowWebhook.d.ts +1 -1
  103. package/mlflowWebhook.js +1 -1
  104. package/mwsCustomerManagedKeys.d.ts +9 -0
  105. package/mwsCustomerManagedKeys.js +9 -0
  106. package/mwsCustomerManagedKeys.js.map +1 -1
  107. package/mwsVpcEndpoint.d.ts +192 -0
  108. package/mwsVpcEndpoint.js +192 -0
  109. package/mwsVpcEndpoint.js.map +1 -1
  110. package/notebook.d.ts +11 -0
  111. package/notebook.js +2 -0
  112. package/notebook.js.map +1 -1
  113. package/oboToken.d.ts +64 -0
  114. package/oboToken.js +64 -0
  115. package/oboToken.js.map +1 -1
  116. package/package.json +2 -2
  117. package/permissionAssignment.d.ts +9 -0
  118. package/permissionAssignment.js.map +1 -1
  119. package/permissions.d.ts +684 -18
  120. package/permissions.js +684 -18
  121. package/permissions.js.map +1 -1
  122. package/repo.d.ts +32 -0
  123. package/repo.js +32 -0
  124. package/repo.js.map +1 -1
  125. package/servicePrincipal.d.ts +9 -0
  126. package/servicePrincipal.js.map +1 -1
  127. package/servicePrincipalSecret.d.ts +8 -0
  128. package/servicePrincipalSecret.js +8 -0
  129. package/servicePrincipalSecret.js.map +1 -1
  130. package/share.d.ts +93 -0
  131. package/share.js +93 -0
  132. package/share.js.map +1 -1
  133. package/sqlPermissions.d.ts +15 -3
  134. package/sqlPermissions.js +15 -3
  135. package/sqlPermissions.js.map +1 -1
  136. package/token.d.ts +2 -2
  137. package/token.js +2 -2
  138. package/types/input.d.ts +16 -0
  139. package/types/output.d.ts +16 -0
  140. package/user.d.ts +9 -0
  141. package/user.js.map +1 -1
  142. package/workspaceBinding.d.ts +12 -0
  143. package/workspaceBinding.js +12 -0
  144. package/workspaceBinding.js.map +1 -1
  145. package/workspaceConf.d.ts +4 -0
  146. package/workspaceConf.js +4 -0
  147. package/workspaceConf.js.map +1 -1
  148. package/workspaceFile.d.ts +11 -0
  149. package/workspaceFile.js +2 -0
  150. package/workspaceFile.js.map +1 -1
package/permissions.js CHANGED
@@ -6,42 +6,708 @@ exports.Permissions = void 0;
6
6
  const pulumi = require("@pulumi/pulumi");
7
7
  const utilities = require("./utilities");
8
8
  /**
9
- * ## Import
9
+ * This resource allows you to generically manage [access control](https://docs.databricks.com/security/access-control/index.html) in Databricks workspace. It would guarantee that only _admins_, _authenticated principal_ and those declared within `accessControl` blocks would have specified access. It is not possible to remove management rights from _admins_ group.
10
+ *
11
+ * > **Note** Configuring this resource for an object will **OVERWRITE** any existing permissions of the same type unless imported, and changes made outside of Pulumi will be reset unless the changes are also reflected in the configuration.
12
+ *
13
+ * > **Note** It is not possible to lower permissions for `admins` or your own user anywhere from `CAN_MANAGE` level, so Databricks Pulumi Provider removes those `accessControl` blocks automatically.
14
+ *
15
+ * > **Note** If multiple permission levels are specified for an identity (e.g. `CAN_RESTART` and `CAN_MANAGE` for a cluster), only the highest level permission is returned and will cause permanent drift.
16
+ *
17
+ * > **Warning** To manage access control on service principals, use databricks_access_control_rule_set.
18
+ *
19
+ * ## Cluster usage
20
+ *
21
+ * It's possible to separate [cluster access control](https://docs.databricks.com/security/access-control/cluster-acl.html) to three different permission levels: `CAN_ATTACH_TO`, `CAN_RESTART` and `CAN_MANAGE`:
22
+ *
23
+ * ```typescript
24
+ * import * as pulumi from "@pulumi/pulumi";
25
+ * import * as databricks from "@pulumi/databricks";
26
+ *
27
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
28
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
29
+ * const ds = new databricks.Group("ds", {displayName: "Data Science"});
30
+ * const latest = databricks.getSparkVersion({});
31
+ * const smallest = databricks.getNodeType({
32
+ * localDisk: true,
33
+ * });
34
+ * const sharedAutoscaling = new databricks.Cluster("shared_autoscaling", {
35
+ * clusterName: "Shared Autoscaling",
36
+ * sparkVersion: latest.then(latest => latest.id),
37
+ * nodeTypeId: smallest.then(smallest => smallest.id),
38
+ * autoterminationMinutes: 60,
39
+ * autoscale: {
40
+ * minWorkers: 1,
41
+ * maxWorkers: 10,
42
+ * },
43
+ * });
44
+ * const clusterUsage = new databricks.Permissions("cluster_usage", {
45
+ * clusterId: sharedAutoscaling.id,
46
+ * accessControls: [
47
+ * {
48
+ * groupName: auto.displayName,
49
+ * permissionLevel: "CAN_ATTACH_TO",
50
+ * },
51
+ * {
52
+ * groupName: eng.displayName,
53
+ * permissionLevel: "CAN_RESTART",
54
+ * },
55
+ * {
56
+ * groupName: ds.displayName,
57
+ * permissionLevel: "CAN_MANAGE",
58
+ * },
59
+ * ],
60
+ * });
61
+ * ```
62
+ *
63
+ * ## Cluster Policy usage
64
+ *
65
+ * Cluster policies allow creation of clusters, that match [given policy](https://docs.databricks.com/administration-guide/clusters/policies.html). It's possible to assign `CAN_USE` permission to users and groups:
66
+ *
67
+ * ```typescript
68
+ * import * as pulumi from "@pulumi/pulumi";
69
+ * import * as databricks from "@pulumi/databricks";
10
70
  *
11
- * ### Import Example
71
+ * const ds = new databricks.Group("ds", {displayName: "Data Science"});
72
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
73
+ * const somethingSimple = new databricks.ClusterPolicy("something_simple", {
74
+ * name: "Some simple policy",
75
+ * definition: JSON.stringify({
76
+ * "spark_conf.spark.hadoop.javax.jdo.option.ConnectionURL": {
77
+ * type: "forbidden",
78
+ * },
79
+ * "spark_conf.spark.secondkey": {
80
+ * type: "forbidden",
81
+ * },
82
+ * }),
83
+ * });
84
+ * const policyUsage = new databricks.Permissions("policy_usage", {
85
+ * clusterPolicyId: somethingSimple.id,
86
+ * accessControls: [
87
+ * {
88
+ * groupName: ds.displayName,
89
+ * permissionLevel: "CAN_USE",
90
+ * },
91
+ * {
92
+ * groupName: eng.displayName,
93
+ * permissionLevel: "CAN_USE",
94
+ * },
95
+ * ],
96
+ * });
97
+ * ```
98
+ *
99
+ * ## Instance Pool usage
12
100
  *
13
- * Configuration file:
101
+ * Instance Pools access control [allows you to](https://docs.databricks.com/security/access-control/pool-acl.html) assign `CAN_ATTACH_TO` and `CAN_MANAGE` permissions to users, service principals, and groups. It's also possible to grant creation of Instance Pools to individual groups, users, and service principals.
14
102
  *
15
- * hcl
103
+ * ```typescript
104
+ * import * as pulumi from "@pulumi/pulumi";
105
+ * import * as databricks from "@pulumi/databricks";
106
+ *
107
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
108
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
109
+ * const smallest = databricks.getNodeType({
110
+ * localDisk: true,
111
+ * });
112
+ * const _this = new databricks.InstancePool("this", {
113
+ * instancePoolName: "Reserved Instances",
114
+ * idleInstanceAutoterminationMinutes: 60,
115
+ * nodeTypeId: smallest.then(smallest => smallest.id),
116
+ * minIdleInstances: 0,
117
+ * maxCapacity: 10,
118
+ * });
119
+ * const poolUsage = new databricks.Permissions("pool_usage", {
120
+ * instancePoolId: _this.id,
121
+ * accessControls: [
122
+ * {
123
+ * groupName: auto.displayName,
124
+ * permissionLevel: "CAN_ATTACH_TO",
125
+ * },
126
+ * {
127
+ * groupName: eng.displayName,
128
+ * permissionLevel: "CAN_MANAGE",
129
+ * },
130
+ * ],
131
+ * });
132
+ * ```
16
133
  *
17
- * resource "databricks_mlflow_model" "model" {
134
+ * ## Job usage
18
135
  *
19
- * name = "example_model"
136
+ * There are four assignable [permission levels](https://docs.databricks.com/security/access-control/jobs-acl.html#job-permissions) for databricks_job: `CAN_VIEW`, `CAN_MANAGE_RUN`, `IS_OWNER`, and `CAN_MANAGE`. Admins are granted the `CAN_MANAGE` permission by default, and they can assign that permission to non-admin users, and service principals.
20
137
  *
21
- * description = "MLflow registered model"
138
+ * - The creator of a job has `IS_OWNER` permission. Destroying `databricks.Permissions` resource for a job would revert ownership to the creator.
139
+ * - A job must have exactly one owner. If a resource is changed and no owner is specified, the currently authenticated principal would become the new owner of the job. Nothing would change, per se, if the job was created through Pulumi.
140
+ * - A job cannot have a group as an owner.
141
+ * - Jobs triggered through _Run Now_ assume the permissions of the job owner and not the user or service principal who issued Run Now.
142
+ * - Read [main documentation](https://docs.databricks.com/security/access-control/jobs-acl.html) for additional detail.
143
+ *
144
+ * ```typescript
145
+ * import * as pulumi from "@pulumi/pulumi";
146
+ * import * as databricks from "@pulumi/databricks";
147
+ *
148
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
149
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
150
+ * const awsPrincipal = new databricks.ServicePrincipal("aws_principal", {displayName: "main"});
151
+ * const latest = databricks.getSparkVersion({});
152
+ * const smallest = databricks.getNodeType({
153
+ * localDisk: true,
154
+ * });
155
+ * const _this = new databricks.Job("this", {
156
+ * name: "Featurization",
157
+ * maxConcurrentRuns: 1,
158
+ * tasks: [{
159
+ * taskKey: "task1",
160
+ * newCluster: {
161
+ * numWorkers: 300,
162
+ * sparkVersion: latest.then(latest => latest.id),
163
+ * nodeTypeId: smallest.then(smallest => smallest.id),
164
+ * },
165
+ * notebookTask: {
166
+ * notebookPath: "/Production/MakeFeatures",
167
+ * },
168
+ * }],
169
+ * });
170
+ * const jobUsage = new databricks.Permissions("job_usage", {
171
+ * jobId: _this.id,
172
+ * accessControls: [
173
+ * {
174
+ * groupName: "users",
175
+ * permissionLevel: "CAN_VIEW",
176
+ * },
177
+ * {
178
+ * groupName: auto.displayName,
179
+ * permissionLevel: "CAN_MANAGE_RUN",
180
+ * },
181
+ * {
182
+ * groupName: eng.displayName,
183
+ * permissionLevel: "CAN_MANAGE",
184
+ * },
185
+ * {
186
+ * servicePrincipalName: awsPrincipal.applicationId,
187
+ * permissionLevel: "IS_OWNER",
188
+ * },
189
+ * ],
190
+ * });
191
+ * ```
22
192
  *
23
- * }
193
+ * ## Delta Live Tables usage
24
194
  *
25
- * resource "databricks_permissions" "model_usage" {
195
+ * There are four assignable [permission levels](https://docs.databricks.com/security/access-control/dlt-acl.html#delta-live-tables-permissions) for databricks_pipeline: `CAN_VIEW`, `CAN_RUN`, `CAN_MANAGE`, and `IS_OWNER`. Admins are granted the `CAN_MANAGE` permission by default, and they can assign that permission to non-admin users, and service principals.
26
196
  *
27
- * registered_model_id = databricks_mlflow_model.model.registered_model_id
197
+ * - The creator of a DLT Pipeline has `IS_OWNER` permission. Destroying `databricks.Permissions` resource for a pipeline would revert ownership to the creator.
198
+ * - A DLT pipeline must have exactly one owner. If a resource is changed and no owner is specified, the currently authenticated principal would become the new owner of the pipeline. Nothing would change, per se, if the pipeline was created through Pulumi.
199
+ * - A DLT pipeline cannot have a group as an owner.
200
+ * - DLT Pipelines triggered through _Start_ assume the permissions of the pipeline owner and not the user or service principal who issued the start.
201
+ * - Read [main documentation](https://docs.databricks.com/security/access-control/dlt-acl.html) for additional detail.
28
202
  *
29
- * access_control {
203
+ * ```typescript
204
+ * import * as pulumi from "@pulumi/pulumi";
205
+ * import * as databricks from "@pulumi/databricks";
206
+ * import * as std from "@pulumi/std";
30
207
  *
31
- * group_name = "users"
208
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
209
+ * const dltDemo = new databricks.Notebook("dlt_demo", {
210
+ * contentBase64: std.base64encode({
211
+ * input: `import dlt
212
+ * json_path = "/databricks-datasets/wikipedia-datasets/data-001/clickstream/raw-uncompressed-json/2015_2_clickstream.json"
213
+ * @dlt.table(
214
+ * comment="The raw wikipedia clickstream dataset, ingested from /databricks-datasets."
215
+ * )
216
+ * def clickstream_raw():
217
+ * return (spark.read.format("json").load(json_path))
218
+ * `,
219
+ * }).then(invoke => invoke.result),
220
+ * language: "PYTHON",
221
+ * path: `${me.home}/DLT_Demo`,
222
+ * });
223
+ * const _this = new databricks.Pipeline("this", {
224
+ * name: `DLT Demo Pipeline (${me.alphanumeric})`,
225
+ * storage: "/test/tf-pipeline",
226
+ * configuration: {
227
+ * key1: "value1",
228
+ * key2: "value2",
229
+ * },
230
+ * libraries: [{
231
+ * notebook: {
232
+ * path: dltDemo.id,
233
+ * },
234
+ * }],
235
+ * continuous: false,
236
+ * filters: {
237
+ * includes: ["com.databricks.include"],
238
+ * excludes: ["com.databricks.exclude"],
239
+ * },
240
+ * });
241
+ * const dltUsage = new databricks.Permissions("dlt_usage", {
242
+ * pipelineId: _this.id,
243
+ * accessControls: [
244
+ * {
245
+ * groupName: "users",
246
+ * permissionLevel: "CAN_VIEW",
247
+ * },
248
+ * {
249
+ * groupName: eng.displayName,
250
+ * permissionLevel: "CAN_MANAGE",
251
+ * },
252
+ * ],
253
+ * });
254
+ * ```
255
+ *
256
+ * ## Notebook usage
257
+ *
258
+ * Valid [permission levels](https://docs.databricks.com/security/access-control/workspace-acl.html#notebook-permissions) for databricks.Notebook are: `CAN_READ`, `CAN_RUN`, `CAN_EDIT`, and `CAN_MANAGE`.
259
+ *
260
+ * ```typescript
261
+ * import * as pulumi from "@pulumi/pulumi";
262
+ * import * as databricks from "@pulumi/databricks";
263
+ * import * as std from "@pulumi/std";
264
+ *
265
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
266
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
267
+ * const _this = new databricks.Notebook("this", {
268
+ * contentBase64: std.base64encode({
269
+ * input: "# Welcome to your Python notebook",
270
+ * }).then(invoke => invoke.result),
271
+ * path: "/Production/ETL/Features",
272
+ * language: "PYTHON",
273
+ * });
274
+ * const notebookUsage = new databricks.Permissions("notebook_usage", {
275
+ * notebookPath: _this.path,
276
+ * accessControls: [
277
+ * {
278
+ * groupName: "users",
279
+ * permissionLevel: "CAN_READ",
280
+ * },
281
+ * {
282
+ * groupName: auto.displayName,
283
+ * permissionLevel: "CAN_RUN",
284
+ * },
285
+ * {
286
+ * groupName: eng.displayName,
287
+ * permissionLevel: "CAN_EDIT",
288
+ * },
289
+ * ],
290
+ * });
291
+ * ```
32
292
  *
33
- * permission_level = "CAN_READ"
293
+ * ## Workspace file usage
294
+ *
295
+ * Valid permission levels for databricks.WorkspaceFile are: `CAN_READ`, `CAN_RUN`, `CAN_EDIT`, and `CAN_MANAGE`.
296
+ *
297
+ * ```typescript
298
+ * import * as pulumi from "@pulumi/pulumi";
299
+ * import * as databricks from "@pulumi/databricks";
300
+ * import * as std from "@pulumi/std";
301
+ *
302
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
303
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
304
+ * const _this = new databricks.WorkspaceFile("this", {
305
+ * contentBase64: std.base64encode({
306
+ * input: "print('Hello World')",
307
+ * }).then(invoke => invoke.result),
308
+ * path: "/Production/ETL/Features.py",
309
+ * });
310
+ * const workspaceFileUsage = new databricks.Permissions("workspace_file_usage", {
311
+ * workspaceFilePath: _this.path,
312
+ * accessControls: [
313
+ * {
314
+ * groupName: "users",
315
+ * permissionLevel: "CAN_READ",
316
+ * },
317
+ * {
318
+ * groupName: auto.displayName,
319
+ * permissionLevel: "CAN_RUN",
320
+ * },
321
+ * {
322
+ * groupName: eng.displayName,
323
+ * permissionLevel: "CAN_EDIT",
324
+ * },
325
+ * ],
326
+ * });
327
+ * ```
328
+ *
329
+ * ## Folder usage
330
+ *
331
+ * Valid [permission levels](https://docs.databricks.com/security/access-control/workspace-acl.html#folder-permissions) for folders of databricks.Directory are: `CAN_READ`, `CAN_RUN`, `CAN_EDIT`, and `CAN_MANAGE`. Notebooks and experiments in a folder inherit all permissions settings of that folder. For example, a user (or service principal) that has `CAN_RUN` permission on a folder has `CAN_RUN` permission on the notebooks in that folder.
332
+ *
333
+ * - All users can list items in the folder without any permissions.
334
+ * - All users (or service principals) have `CAN_MANAGE` permission for items in the Workspace > Shared folder. You can grant `CAN_MANAGE` permission to notebooks and folders by moving them to the Shared folder.
335
+ * - All users (or service principals) have `CAN_MANAGE` permission for objects the user creates.
336
+ * - User home directory - The user (or service principal) has `CAN_MANAGE` permission. All other users (or service principals) can list their directories.
337
+ *
338
+ * ```typescript
339
+ * import * as pulumi from "@pulumi/pulumi";
340
+ * import * as databricks from "@pulumi/databricks";
341
+ *
342
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
343
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
344
+ * const _this = new databricks.Directory("this", {path: "/Production/ETL"});
345
+ * const folderUsage = new databricks.Permissions("folder_usage", {
346
+ * directoryPath: _this.path,
347
+ * accessControls: [
348
+ * {
349
+ * groupName: "users",
350
+ * permissionLevel: "CAN_READ",
351
+ * },
352
+ * {
353
+ * groupName: auto.displayName,
354
+ * permissionLevel: "CAN_RUN",
355
+ * },
356
+ * {
357
+ * groupName: eng.displayName,
358
+ * permissionLevel: "CAN_EDIT",
359
+ * },
360
+ * ],
361
+ * }, {
362
+ * dependsOn: [_this],
363
+ * });
364
+ * ```
365
+ *
366
+ * ## Repos usage
367
+ *
368
+ * Valid [permission levels](https://docs.databricks.com/security/access-control/workspace-acl.html) for databricks.Repo are: `CAN_READ`, `CAN_RUN`, `CAN_EDIT`, and `CAN_MANAGE`.
369
+ *
370
+ * ```typescript
371
+ * import * as pulumi from "@pulumi/pulumi";
372
+ * import * as databricks from "@pulumi/databricks";
373
+ *
374
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
375
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
376
+ * const _this = new databricks.Repo("this", {url: "https://github.com/user/demo.git"});
377
+ * const repoUsage = new databricks.Permissions("repo_usage", {
378
+ * repoId: _this.id,
379
+ * accessControls: [
380
+ * {
381
+ * groupName: "users",
382
+ * permissionLevel: "CAN_READ",
383
+ * },
384
+ * {
385
+ * groupName: auto.displayName,
386
+ * permissionLevel: "CAN_RUN",
387
+ * },
388
+ * {
389
+ * groupName: eng.displayName,
390
+ * permissionLevel: "CAN_EDIT",
391
+ * },
392
+ * ],
393
+ * });
394
+ * ```
395
+ *
396
+ * ## MLflow Experiment usage
397
+ *
398
+ * Valid [permission levels](https://docs.databricks.com/security/access-control/workspace-acl.html#mlflow-experiment-permissions-1) for databricks.MlflowExperiment are: `CAN_READ`, `CAN_EDIT`, and `CAN_MANAGE`.
399
+ *
400
+ * ```typescript
401
+ * import * as pulumi from "@pulumi/pulumi";
402
+ * import * as databricks from "@pulumi/databricks";
403
+ *
404
+ * const me = databricks.getCurrentUser({});
405
+ * const _this = new databricks.MlflowExperiment("this", {
406
+ * name: me.then(me => `${me.home}/Sample`),
407
+ * artifactLocation: "dbfs:/tmp/my-experiment",
408
+ * description: "My MLflow experiment description",
409
+ * });
410
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
411
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
412
+ * const experimentUsage = new databricks.Permissions("experiment_usage", {
413
+ * experimentId: _this.id,
414
+ * accessControls: [
415
+ * {
416
+ * groupName: "users",
417
+ * permissionLevel: "CAN_READ",
418
+ * },
419
+ * {
420
+ * groupName: auto.displayName,
421
+ * permissionLevel: "CAN_MANAGE",
422
+ * },
423
+ * {
424
+ * groupName: eng.displayName,
425
+ * permissionLevel: "CAN_EDIT",
426
+ * },
427
+ * ],
428
+ * });
429
+ * ```
430
+ *
431
+ * ## MLflow Model usage
432
+ *
433
+ * Valid [permission levels](https://docs.databricks.com/security/access-control/workspace-acl.html#mlflow-model-permissions-1) for databricks.MlflowModel are: `CAN_READ`, `CAN_EDIT`, `CAN_MANAGE_STAGING_VERSIONS`, `CAN_MANAGE_PRODUCTION_VERSIONS`, and `CAN_MANAGE`. You can also manage permissions for all MLflow models by `registeredModelId = "root"`.
434
+ *
435
+ * ```typescript
436
+ * import * as pulumi from "@pulumi/pulumi";
437
+ * import * as databricks from "@pulumi/databricks";
438
+ *
439
+ * const _this = new databricks.MlflowModel("this", {name: "SomePredictions"});
440
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
441
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
442
+ * const modelUsage = new databricks.Permissions("model_usage", {
443
+ * registeredModelId: _this.registeredModelId,
444
+ * accessControls: [
445
+ * {
446
+ * groupName: "users",
447
+ * permissionLevel: "CAN_READ",
448
+ * },
449
+ * {
450
+ * groupName: auto.displayName,
451
+ * permissionLevel: "CAN_MANAGE_PRODUCTION_VERSIONS",
452
+ * },
453
+ * {
454
+ * groupName: eng.displayName,
455
+ * permissionLevel: "CAN_MANAGE_STAGING_VERSIONS",
456
+ * },
457
+ * ],
458
+ * });
459
+ * ```
34
460
  *
35
- * }
461
+ * ## Model serving usage
36
462
  *
37
- * }
463
+ * Valid permission levels for databricks.ModelServing are: `CAN_VIEW`, `CAN_QUERY`, and `CAN_MANAGE`.
38
464
  *
39
- * Import command:
465
+ * ```typescript
466
+ * import * as pulumi from "@pulumi/pulumi";
467
+ * import * as databricks from "@pulumi/databricks";
468
+ *
469
+ * const _this = new databricks.ModelServing("this", {
470
+ * name: "tf-test",
471
+ * config: {
472
+ * servedModels: [{
473
+ * name: "prod_model",
474
+ * modelName: "test",
475
+ * modelVersion: "1",
476
+ * workloadSize: "Small",
477
+ * scaleToZeroEnabled: true,
478
+ * }],
479
+ * },
480
+ * });
481
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
482
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
483
+ * const mlServingUsage = new databricks.Permissions("ml_serving_usage", {
484
+ * servingEndpointId: _this.servingEndpointId,
485
+ * accessControls: [
486
+ * {
487
+ * groupName: "users",
488
+ * permissionLevel: "CAN_VIEW",
489
+ * },
490
+ * {
491
+ * groupName: auto.displayName,
492
+ * permissionLevel: "CAN_MANAGE",
493
+ * },
494
+ * {
495
+ * groupName: eng.displayName,
496
+ * permissionLevel: "CAN_QUERY",
497
+ * },
498
+ * ],
499
+ * });
500
+ * ```
501
+ *
502
+ * ## Passwords usage
503
+ *
504
+ * By default on AWS deployments, all admin users can sign in to Databricks using either SSO or their username and password, and all API users can authenticate to the Databricks REST APIs using their username and password. As an admin, you [can limit](https://docs.databricks.com/administration-guide/users-groups/single-sign-on/index.html#optional-configure-password-access-control) admin users’ and API users’ ability to authenticate with their username and password by configuring `CAN_USE` permissions using password access control.
505
+ *
506
+ * ```typescript
507
+ * import * as pulumi from "@pulumi/pulumi";
508
+ * import * as databricks from "@pulumi/databricks";
509
+ *
510
+ * const guests = new databricks.Group("guests", {displayName: "Guest Users"});
511
+ * const passwordUsage = new databricks.Permissions("password_usage", {
512
+ * authorization: "passwords",
513
+ * accessControls: [{
514
+ * groupName: guests.displayName,
515
+ * permissionLevel: "CAN_USE",
516
+ * }],
517
+ * });
518
+ * ```
519
+ *
520
+ * ## Token usage
521
+ *
522
+ * It is required to have at least 1 personal access token in the workspace before you can manage tokens permissions.
523
+ *
524
+ * !> **Warning** There can be only one `authorization = "tokens"` permissions resource per workspace, otherwise there'll be a permanent configuration drift. After applying changes, users who previously had either `CAN_USE` or `CAN_MANAGE` permission but no longer have either permission have their access to token-based authentication revoked. Their active tokens are immediately deleted (revoked).
525
+ *
526
+ * Only [possible permission](https://docs.databricks.com/administration-guide/access-control/tokens.html) to assign to non-admin group is `CAN_USE`, where _admins_ `CAN_MANAGE` all tokens:
527
+ *
528
+ * ```typescript
529
+ * import * as pulumi from "@pulumi/pulumi";
530
+ * import * as databricks from "@pulumi/databricks";
531
+ *
532
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
533
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
534
+ * const tokenUsage = new databricks.Permissions("token_usage", {
535
+ * authorization: "tokens",
536
+ * accessControls: [
537
+ * {
538
+ * groupName: auto.displayName,
539
+ * permissionLevel: "CAN_USE",
540
+ * },
541
+ * {
542
+ * groupName: eng.displayName,
543
+ * permissionLevel: "CAN_USE",
544
+ * },
545
+ * ],
546
+ * });
547
+ * ```
548
+ *
549
+ * ## SQL warehouse usage
550
+ *
551
+ * [SQL warehouses](https://docs.databricks.com/sql/user/security/access-control/sql-endpoint-acl.html) have four possible permissions: `CAN_USE`, `CAN_MONITOR`, `CAN_MANAGE` and `IS_OWNER`:
552
+ *
553
+ * ```typescript
554
+ * import * as pulumi from "@pulumi/pulumi";
555
+ * import * as databricks from "@pulumi/databricks";
556
+ *
557
+ * const me = databricks.getCurrentUser({});
558
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
559
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
560
+ * const _this = new databricks.SqlEndpoint("this", {
561
+ * name: me.then(me => `Endpoint of ${me.alphanumeric}`),
562
+ * clusterSize: "Small",
563
+ * maxNumClusters: 1,
564
+ * tags: {
565
+ * customTags: [{
566
+ * key: "City",
567
+ * value: "Amsterdam",
568
+ * }],
569
+ * },
570
+ * });
571
+ * const endpointUsage = new databricks.Permissions("endpoint_usage", {
572
+ * sqlEndpointId: _this.id,
573
+ * accessControls: [
574
+ * {
575
+ * groupName: auto.displayName,
576
+ * permissionLevel: "CAN_USE",
577
+ * },
578
+ * {
579
+ * groupName: eng.displayName,
580
+ * permissionLevel: "CAN_MANAGE",
581
+ * },
582
+ * ],
583
+ * });
584
+ * ```
585
+ *
586
+ * ## Dashboard usage
587
+ *
588
+ * [Dashboards](https://docs.databricks.com/en/dashboards/tutorials/manage-permissions.html) have four possible permissions: `CAN_READ`, `CAN_RUN`, `CAN_EDIT` and `CAN_MANAGE`:
589
+ *
590
+ * ```typescript
591
+ * import * as pulumi from "@pulumi/pulumi";
592
+ * import * as databricks from "@pulumi/databricks";
593
+ *
594
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
595
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
596
+ * const dashboard = new databricks.Dashboard("dashboard", {displayName: "TF New Dashboard"});
597
+ * const dashboardUsage = new databricks.Permissions("dashboard_usage", {
598
+ * dashboardId: dashboard.id,
599
+ * accessControls: [
600
+ * {
601
+ * groupName: auto.displayName,
602
+ * permissionLevel: "CAN_RUN",
603
+ * },
604
+ * {
605
+ * groupName: eng.displayName,
606
+ * permissionLevel: "CAN_MANAGE",
607
+ * },
608
+ * ],
609
+ * });
610
+ * ```
611
+ *
612
+ * ## Legacy SQL Dashboard usage
613
+ *
614
+ * [Legacy SQL dashboards](https://docs.databricks.com/sql/user/security/access-control/dashboard-acl.html) have three possible permissions: `CAN_VIEW`, `CAN_RUN` and `CAN_MANAGE`:
615
+ *
616
+ * ```typescript
617
+ * import * as pulumi from "@pulumi/pulumi";
618
+ * import * as databricks from "@pulumi/databricks";
619
+ *
620
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
621
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
622
+ * const sqlDashboardUsage = new databricks.Permissions("sql_dashboard_usage", {
623
+ * sqlDashboardId: "3244325",
624
+ * accessControls: [
625
+ * {
626
+ * groupName: auto.displayName,
627
+ * permissionLevel: "CAN_RUN",
628
+ * },
629
+ * {
630
+ * groupName: eng.displayName,
631
+ * permissionLevel: "CAN_MANAGE",
632
+ * },
633
+ * ],
634
+ * });
635
+ * ```
636
+ *
637
+ * ## SQL Query usage
638
+ *
639
+ * [SQL queries](https://docs.databricks.com/sql/user/security/access-control/query-acl.html) have three possible permissions: `CAN_VIEW`, `CAN_RUN` and `CAN_MANAGE`:
640
+ *
641
+ * > **Note** If you do not define an `accessControl` block granting `CAN_MANAGE` explicitly for the user calling this provider, Databricks Pulumi Provider will add `CAN_MANAGE` permission for the caller. This is a failsafe to prevent situations where the caller is locked out from making changes to the targeted `databricks.SqlQuery` resource when the backend API does not apply permission inheritance correctly.
642
+ *
643
+ * ```typescript
644
+ * import * as pulumi from "@pulumi/pulumi";
645
+ * import * as databricks from "@pulumi/databricks";
646
+ *
647
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
648
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
649
+ * const queryUsage = new databricks.Permissions("query_usage", {
650
+ * sqlQueryId: "3244325",
651
+ * accessControls: [
652
+ * {
653
+ * groupName: auto.displayName,
654
+ * permissionLevel: "CAN_RUN",
655
+ * },
656
+ * {
657
+ * groupName: eng.displayName,
658
+ * permissionLevel: "CAN_MANAGE",
659
+ * },
660
+ * ],
661
+ * });
662
+ * ```
663
+ *
664
+ * ## SQL Alert usage
665
+ *
666
+ * [SQL alerts](https://docs.databricks.com/sql/user/security/access-control/alert-acl.html) have three possible permissions: `CAN_VIEW`, `CAN_RUN` and `CAN_MANAGE`:
667
+ *
668
+ * ```typescript
669
+ * import * as pulumi from "@pulumi/pulumi";
670
+ * import * as databricks from "@pulumi/databricks";
671
+ *
672
+ * const auto = new databricks.Group("auto", {displayName: "Automation"});
673
+ * const eng = new databricks.Group("eng", {displayName: "Engineering"});
674
+ * const alertUsage = new databricks.Permissions("alert_usage", {
675
+ * sqlAlertId: "3244325",
676
+ * accessControls: [
677
+ * {
678
+ * groupName: auto.displayName,
679
+ * permissionLevel: "CAN_RUN",
680
+ * },
681
+ * {
682
+ * groupName: eng.displayName,
683
+ * permissionLevel: "CAN_MANAGE",
684
+ * },
685
+ * ],
686
+ * });
687
+ * ```
688
+ *
689
+ * ## Instance Profiles
690
+ *
691
+ * Instance Profiles are not managed by General Permissions API and therefore databricks.GroupInstanceProfile and databricks.UserInstanceProfile should be used to allow usage of specific AWS EC2 IAM roles to users or groups.
692
+ *
693
+ * ## Secrets
694
+ *
695
+ * One can control access to databricks.Secret through `initialManagePrincipal` argument on databricks.SecretScope or databricks_secret_acl, so that users (or service principals) can `READ`, `WRITE` or `MANAGE` entries within secret scope.
696
+ *
697
+ * ## Tables, Views and Databases
698
+ *
699
+ * General Permissions API does not apply to access control for tables and they have to be managed separately using the databricks.SqlPermissions resource, though you're encouraged to use Unity Catalog or migrate to it.
700
+ *
701
+ * ## Data Access with Unity Catalog
702
+ *
703
+ * Initially in Unity Catalog all users have no access to data, which has to be later assigned through the databricks.Grants resource.
704
+ *
705
+ * ## Import
40
706
  *
41
- * bash
707
+ * The resource permissions can be imported using the object ID.
42
708
  *
43
709
  * ```sh
44
- * $ pulumi import databricks:index/permissions:Permissions model_usage /registered-models/<registered_model_id>
710
+ * $ pulumi import databricks:index/permissions:Permissions databricks_permissions <object type>/<object id>
45
711
  * ```
46
712
  */
47
713
  class Permissions extends pulumi.CustomResource {