@pulumi/databricks 0.0.1-alpha.1648473134

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (261)
  1. package/LICENSE +202 -0
  2. package/README.md +303 -0
  3. package/awsS3Mount.d.ts +50 -0
  4. package/awsS3Mount.js +63 -0
  5. package/awsS3Mount.js.map +1 -0
  6. package/azureAdlsGen1Mount.d.ts +65 -0
  7. package/azureAdlsGen1Mount.js +85 -0
  8. package/azureAdlsGen1Mount.js.map +1 -0
  9. package/azureAdlsGen2Mount.d.ts +68 -0
  10. package/azureAdlsGen2Mount.js +93 -0
  11. package/azureAdlsGen2Mount.js.map +1 -0
  12. package/azureBlobMount.d.ts +62 -0
  13. package/azureBlobMount.js +83 -0
  14. package/azureBlobMount.js.map +1 -0
  15. package/catalog.d.ts +124 -0
  16. package/catalog.js +88 -0
  17. package/catalog.js.map +1 -0
  18. package/cluster.d.ts +332 -0
  19. package/cluster.js +121 -0
  20. package/cluster.js.map +1 -0
  21. package/clusterPolicy.d.ts +112 -0
  22. package/clusterPolicy.js +97 -0
  23. package/clusterPolicy.js.map +1 -0
  24. package/config/index.d.ts +1 -0
  25. package/config/index.js +21 -0
  26. package/config/index.js.map +1 -0
  27. package/config/vars.d.ts +20 -0
  28. package/config/vars.js +127 -0
  29. package/config/vars.js.map +1 -0
  30. package/dbfsFile.d.ts +91 -0
  31. package/dbfsFile.js +71 -0
  32. package/dbfsFile.js.map +1 -0
  33. package/directory.d.ts +72 -0
  34. package/directory.js +65 -0
  35. package/directory.js.map +1 -0
  36. package/externalLocation.d.ts +114 -0
  37. package/externalLocation.js +80 -0
  38. package/externalLocation.js.map +1 -0
  39. package/getAwsAssumeRolePolicy.d.ts +47 -0
  40. package/getAwsAssumeRolePolicy.js +24 -0
  41. package/getAwsAssumeRolePolicy.js.map +1 -0
  42. package/getAwsBucketPolicy.d.ts +59 -0
  43. package/getAwsBucketPolicy.js +36 -0
  44. package/getAwsBucketPolicy.js.map +1 -0
  45. package/getAwsCrossAccountPolicy.d.ts +59 -0
  46. package/getAwsCrossAccountPolicy.js +47 -0
  47. package/getAwsCrossAccountPolicy.js.map +1 -0
  48. package/getCatalogs.d.ts +54 -0
  49. package/getCatalogs.js +43 -0
  50. package/getCatalogs.js.map +1 -0
  51. package/getClusters.d.ts +69 -0
  52. package/getClusters.js +57 -0
  53. package/getClusters.js.map +1 -0
  54. package/getCurrentUser.d.ts +39 -0
  55. package/getCurrentUser.js +38 -0
  56. package/getCurrentUser.js.map +1 -0
  57. package/getDbfsFile.d.ts +69 -0
  58. package/getDbfsFile.js +44 -0
  59. package/getDbfsFile.js.map +1 -0
  60. package/getDbfsFilePaths.d.ts +68 -0
  61. package/getDbfsFilePaths.js +46 -0
  62. package/getDbfsFilePaths.js.map +1 -0
  63. package/getGroup.d.ts +166 -0
  64. package/getGroup.js +46 -0
  65. package/getGroup.js.map +1 -0
  66. package/getJobs.d.ts +47 -0
  67. package/getJobs.js +30 -0
  68. package/getJobs.js.map +1 -0
  69. package/getNodeType.d.ts +173 -0
  70. package/getNodeType.js +71 -0
  71. package/getNodeType.js.map +1 -0
  72. package/getNotebook.d.ts +93 -0
  73. package/getNotebook.js +39 -0
  74. package/getNotebook.js.map +1 -0
  75. package/getNotebookPaths.d.ts +58 -0
  76. package/getNotebookPaths.js +36 -0
  77. package/getNotebookPaths.js.map +1 -0
  78. package/getSchemas.d.ts +65 -0
  79. package/getSchemas.js +45 -0
  80. package/getSchemas.js.map +1 -0
  81. package/getSparkVersion.d.ts +150 -0
  82. package/getSparkVersion.js +70 -0
  83. package/getSparkVersion.js.map +1 -0
  84. package/getTables.d.ts +75 -0
  85. package/getTables.js +47 -0
  86. package/getTables.js.map +1 -0
  87. package/getUser.d.ts +78 -0
  88. package/getUser.js +39 -0
  89. package/getUser.js.map +1 -0
  90. package/getZones.d.ts +29 -0
  91. package/getZones.js +26 -0
  92. package/getZones.js.map +1 -0
  93. package/gitCredential.d.ts +48 -0
  94. package/gitCredential.js +64 -0
  95. package/gitCredential.js.map +1 -0
  96. package/globalInitScript.d.ts +99 -0
  97. package/globalInitScript.js +68 -0
  98. package/globalInitScript.js.map +1 -0
  99. package/grants.d.ts +58 -0
  100. package/grants.js +64 -0
  101. package/grants.js.map +1 -0
  102. package/group.d.ts +131 -0
  103. package/group.js +83 -0
  104. package/group.js.map +1 -0
  105. package/groupInstanceProfile.d.ts +96 -0
  106. package/groupInstanceProfile.js +93 -0
  107. package/groupInstanceProfile.js.map +1 -0
  108. package/index.d.ts +81 -0
  109. package/index.js +362 -0
  110. package/index.js.map +1 -0
  111. package/instancePool.d.ts +166 -0
  112. package/instancePool.js +93 -0
  113. package/instancePool.js.map +1 -0
  114. package/instanceProfile.d.ts +180 -0
  115. package/instanceProfile.js +161 -0
  116. package/instanceProfile.js.map +1 -0
  117. package/ipAccessList.d.ts +128 -0
  118. package/ipAccessList.js +108 -0
  119. package/ipAccessList.js.map +1 -0
  120. package/job.d.ts +228 -0
  121. package/job.js +102 -0
  122. package/job.js.map +1 -0
  123. package/library.d.ts +183 -0
  124. package/library.js +189 -0
  125. package/library.js.map +1 -0
  126. package/metastore.d.ts +102 -0
  127. package/metastore.js +75 -0
  128. package/metastore.js.map +1 -0
  129. package/metastoreAssignment.d.ts +94 -0
  130. package/metastoreAssignment.js +81 -0
  131. package/metastoreAssignment.js.map +1 -0
  132. package/metastoreDataAccess.d.ts +85 -0
  133. package/metastoreDataAccess.js +73 -0
  134. package/metastoreDataAccess.js.map +1 -0
  135. package/mlflowExperiment.d.ts +122 -0
  136. package/mlflowExperiment.js +99 -0
  137. package/mlflowExperiment.js.map +1 -0
  138. package/mlflowModel.d.ts +131 -0
  139. package/mlflowModel.js +107 -0
  140. package/mlflowModel.js.map +1 -0
  141. package/mlflowWebhook.d.ts +129 -0
  142. package/mlflowWebhook.js +100 -0
  143. package/mlflowWebhook.js.map +1 -0
  144. package/mount.d.ts +89 -0
  145. package/mount.js +76 -0
  146. package/mount.js.map +1 -0
  147. package/mwsCredentials.d.ts +137 -0
  148. package/mwsCredentials.js +115 -0
  149. package/mwsCredentials.js.map +1 -0
  150. package/mwsCustomerManagedKeys.d.ts +257 -0
  151. package/mwsCustomerManagedKeys.js +226 -0
  152. package/mwsCustomerManagedKeys.js.map +1 -0
  153. package/mwsLogDelivery.d.ts +219 -0
  154. package/mwsLogDelivery.js +144 -0
  155. package/mwsLogDelivery.js.map +1 -0
  156. package/mwsNetworks.d.ts +129 -0
  157. package/mwsNetworks.js +89 -0
  158. package/mwsNetworks.js.map +1 -0
  159. package/mwsPrivateAccessSettings.d.ts +137 -0
  160. package/mwsPrivateAccessSettings.js +74 -0
  161. package/mwsPrivateAccessSettings.js.map +1 -0
  162. package/mwsStorageConfigurations.d.ts +122 -0
  163. package/mwsStorageConfigurations.js +106 -0
  164. package/mwsStorageConfigurations.js.map +1 -0
  165. package/mwsVpcEndpoint.d.ts +122 -0
  166. package/mwsVpcEndpoint.js +79 -0
  167. package/mwsVpcEndpoint.js.map +1 -0
  168. package/mwsWorkspaces.d.ts +222 -0
  169. package/mwsWorkspaces.js +106 -0
  170. package/mwsWorkspaces.js.map +1 -0
  171. package/notebook.d.ts +128 -0
  172. package/notebook.js +77 -0
  173. package/notebook.js.map +1 -0
  174. package/oboToken.d.ts +98 -0
  175. package/oboToken.js +82 -0
  176. package/oboToken.js.map +1 -0
  177. package/package.json +28 -0
  178. package/package.json.bak +28 -0
  179. package/package.json.dev +28 -0
  180. package/permissions.d.ts +211 -0
  181. package/permissions.js +97 -0
  182. package/permissions.js.map +1 -0
  183. package/pipeline.d.ts +200 -0
  184. package/pipeline.js +134 -0
  185. package/pipeline.js.map +1 -0
  186. package/provider.d.ts +61 -0
  187. package/provider.js +64 -0
  188. package/provider.js.map +1 -0
  189. package/repo.d.ts +117 -0
  190. package/repo.js +71 -0
  191. package/repo.js.map +1 -0
  192. package/schema.d.ts +149 -0
  193. package/schema.js +106 -0
  194. package/schema.js.map +1 -0
  195. package/scripts/install-pulumi-plugin.js +21 -0
  196. package/secret.d.ts +115 -0
  197. package/secret.js +99 -0
  198. package/secret.js.map +1 -0
  199. package/secretAcl.d.ts +115 -0
  200. package/secretAcl.js +105 -0
  201. package/secretAcl.js.map +1 -0
  202. package/secretScope.d.ts +85 -0
  203. package/secretScope.js +64 -0
  204. package/secretScope.js.map +1 -0
  205. package/servicePrincipal.d.ts +142 -0
  206. package/servicePrincipal.js +83 -0
  207. package/servicePrincipal.js.map +1 -0
  208. package/sqlDashboard.d.ts +90 -0
  209. package/sqlDashboard.js +99 -0
  210. package/sqlDashboard.js.map +1 -0
  211. package/sqlEndpoint.d.ts +249 -0
  212. package/sqlEndpoint.js +128 -0
  213. package/sqlEndpoint.js.map +1 -0
  214. package/sqlGlobalConfig.d.ts +157 -0
  215. package/sqlGlobalConfig.js +115 -0
  216. package/sqlGlobalConfig.js.map +1 -0
  217. package/sqlPermissions.d.ts +191 -0
  218. package/sqlPermissions.js +139 -0
  219. package/sqlPermissions.js.map +1 -0
  220. package/sqlQuery.d.ts +131 -0
  221. package/sqlQuery.js +139 -0
  222. package/sqlQuery.js.map +1 -0
  223. package/sqlVisualization.d.ts +105 -0
  224. package/sqlVisualization.js +119 -0
  225. package/sqlVisualization.js.map +1 -0
  226. package/sqlWidget.d.ts +109 -0
  227. package/sqlWidget.js +114 -0
  228. package/sqlWidget.js.map +1 -0
  229. package/storageCredential.d.ts +122 -0
  230. package/storageCredential.js +118 -0
  231. package/storageCredential.js.map +1 -0
  232. package/table.d.ts +249 -0
  233. package/table.js +157 -0
  234. package/table.js.map +1 -0
  235. package/token.d.ts +102 -0
  236. package/token.js +84 -0
  237. package/token.js.map +1 -0
  238. package/types/index.d.ts +3 -0
  239. package/types/index.js +11 -0
  240. package/types/index.js.map +1 -0
  241. package/types/input.d.ts +1209 -0
  242. package/types/input.js +5 -0
  243. package/types/input.js.map +1 -0
  244. package/types/output.d.ts +1222 -0
  245. package/types/output.js +5 -0
  246. package/types/output.js.map +1 -0
  247. package/user.d.ts +149 -0
  248. package/user.js +91 -0
  249. package/user.js.map +1 -0
  250. package/userInstanceProfile.d.ts +93 -0
  251. package/userInstanceProfile.js +90 -0
  252. package/userInstanceProfile.js.map +1 -0
  253. package/userRole.d.ts +106 -0
  254. package/userRole.js +103 -0
  255. package/userRole.js.map +1 -0
  256. package/utilities.d.ts +4 -0
  257. package/utilities.js +57 -0
  258. package/utilities.js.map +1 -0
  259. package/workspaceConf.d.ts +76 -0
  260. package/workspaceConf.js +71 -0
  261. package/workspaceConf.js.map +1 -0
package/pipeline.d.ts ADDED
@@ -0,0 +1,200 @@
1
+ import * as pulumi from "@pulumi/pulumi";
2
+ import { input as inputs, output as outputs } from "./types";
3
+ /**
4
+ * Use `databricks.Pipeline` to deploy [Delta Live Tables](https://docs.databricks.com/data-engineering/delta-live-tables/index.html).
5
+ *
6
+ * ## Example Usage
7
+ *
8
+ * ```typescript
9
+ * import * as pulumi from "@pulumi/pulumi";
10
+ * import * as databricks from "@pulumi/databricks";
11
+ *
12
+ * const dltDemo = new databricks.Notebook("dltDemo", {});
13
+ * //...
14
+ * const _this = new databricks.Pipeline("this", {
15
+ * storage: "/test/first-pipeline",
16
+ * configuration: {
17
+ * key1: "value1",
18
+ * key2: "value2",
19
+ * },
20
+ * clusters: [
21
+ * {
22
+ * label: "default",
23
+ * numWorkers: 2,
24
+ * customTags: {
25
+ * cluster_type: "default",
26
+ * },
27
+ * },
28
+ * {
29
+ * label: "maintenance",
30
+ * numWorkers: 1,
31
+ * customTags: {
32
+ * cluster_type: "maintenance",
33
+ * },
34
+ * },
35
+ * ],
36
+ * libraries: [{
37
+ * notebook: {
38
+ * path: dltDemo.id,
39
+ * },
40
+ * }],
41
+ * filters: {
42
+ * includes: ["com.databricks.include"],
43
+ * excludes: ["com.databricks.exclude"],
44
+ * },
45
+ * continuous: false,
46
+ * });
47
+ * ```
48
+ * ## Related Resources
49
+ *
50
+ * The following resources are often used in the same context:
51
+ *
52
+ * * End to end workspace management guide.
53
+ * * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
54
+ * * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
55
+ * * databricks.Notebook to manage [Databricks Notebooks](https://docs.databricks.com/notebooks/index.html).
56
+ *
57
+ * ## Import
58
+ *
59
+ * The resource job can be imported using the id of the pipeline bash
60
+ *
61
+ * ```sh
62
+ * $ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
63
+ * ```
64
+ */
65
+ export declare class Pipeline extends pulumi.CustomResource {
66
+ /**
67
+ * Get an existing Pipeline resource's state with the given name, ID, and optional extra
68
+ * properties used to qualify the lookup.
69
+ *
70
+ * @param name The _unique_ name of the resulting resource.
71
+ * @param id The _unique_ provider ID of the resource to lookup.
72
+ * @param state Any extra arguments used during the lookup.
73
+ * @param opts Optional settings to control the behavior of the CustomResource.
74
+ */
75
+ static get(name: string, id: pulumi.Input<pulumi.ID>, state?: PipelineState, opts?: pulumi.CustomResourceOptions): Pipeline;
76
+ /**
77
+ * Returns true if the given object is an instance of Pipeline. This is designed to work even
78
+ * when multiple copies of the Pulumi SDK have been loaded into the same process.
79
+ */
80
+ static isInstance(obj: any): obj is Pipeline;
81
+ readonly allowDuplicateNames: pulumi.Output<boolean | undefined>;
82
+ /**
83
+ * blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline.
84
+ */
85
+ readonly clusters: pulumi.Output<outputs.PipelineCluster[] | undefined>;
86
+ /**
87
+ * An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
88
+ */
89
+ readonly configuration: pulumi.Output<{
90
+ [key: string]: any;
91
+ } | undefined>;
92
+ /**
93
+ * A flag indicating whether to run the pipeline continuously. The default value is `false`.
94
+ */
95
+ readonly continuous: pulumi.Output<boolean | undefined>;
96
+ readonly filters: pulumi.Output<outputs.PipelineFilters>;
97
+ readonly id: pulumi.Output<string>;
98
+ /**
99
+ * blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special `notebook` type of library that should have `path` attribute.
100
+ */
101
+ readonly libraries: pulumi.Output<outputs.PipelineLibrary[] | undefined>;
102
+ /**
103
+ * A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
104
+ */
105
+ readonly name: pulumi.Output<string>;
106
+ /**
107
+ * A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location.
108
+ */
109
+ readonly storage: pulumi.Output<string | undefined>;
110
+ /**
111
+ * The name of a database for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
112
+ */
113
+ readonly target: pulumi.Output<string | undefined>;
114
+ readonly url: pulumi.Output<string>;
115
+ /**
116
+ * Create a Pipeline resource with the given unique name, arguments, and options.
117
+ *
118
+ * @param name The _unique_ name of the resource.
119
+ * @param args The arguments to use to populate this resource's properties.
120
+ * @param opts A bag of options that control this resource's behavior.
121
+ */
122
+ constructor(name: string, args: PipelineArgs, opts?: pulumi.CustomResourceOptions);
123
+ }
124
+ /**
125
+ * Input properties used for looking up and filtering Pipeline resources.
126
+ */
127
export interface PipelineState {
    // NOTE(review): undocumented upstream — presumably permits a pipeline name that
    // duplicates an existing one; confirm against the Databricks DLT API docs.
    allowDuplicateNames?: pulumi.Input<boolean>;
    /**
     * blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline.
     */
    clusters?: pulumi.Input<pulumi.Input<inputs.PipelineCluster>[]>;
    /**
     * An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
     */
    configuration?: pulumi.Input<{
        [key: string]: any;
    }>;
    /**
     * A flag indicating whether to run the pipeline continuously. The default value is `false`.
     */
    continuous?: pulumi.Input<boolean>;
    // Include/exclude filters applied to the pipeline. Optional here (state lookup),
    // but required when constructing a new resource (see `PipelineArgs.filters`).
    filters?: pulumi.Input<inputs.PipelineFilters>;
    // Unique pipeline identifier assigned by Databricks.
    id?: pulumi.Input<string>;
    /**
     * blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special `notebook` type of library that should have `path` attribute.
     */
    libraries?: pulumi.Input<pulumi.Input<inputs.PipelineLibrary>[]>;
    /**
     * A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
     */
    name?: pulumi.Input<string>;
    /**
     * A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location.
     */
    storage?: pulumi.Input<string>;
    /**
     * The name of a database for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
     */
    target?: pulumi.Input<string>;
    // NOTE(review): provider-computed in practice — presumably the pipeline's URL in the
    // workspace UI; confirm.
    url?: pulumi.Input<string>;
}
163
+ /**
164
+ * The set of arguments for constructing a Pipeline resource.
165
+ */
166
export interface PipelineArgs {
    // NOTE(review): undocumented upstream — presumably permits a pipeline name that
    // duplicates an existing one; confirm against the Databricks DLT API docs.
    allowDuplicateNames?: pulumi.Input<boolean>;
    /**
     * blocks - Clusters to run the pipeline. If none is specified, pipelines will automatically select a default cluster configuration for the pipeline.
     */
    clusters?: pulumi.Input<pulumi.Input<inputs.PipelineCluster>[]>;
    /**
     * An optional list of values to apply to the entire pipeline. Elements must be formatted as key:value pairs.
     */
    configuration?: pulumi.Input<{
        [key: string]: any;
    }>;
    /**
     * A flag indicating whether to run the pipeline continuously. The default value is `false`.
     */
    continuous?: pulumi.Input<boolean>;
    // Required: include/exclude filters applied to the pipeline. The resource constructor
    // throws "Missing required property 'filters'" when this is omitted.
    filters: pulumi.Input<inputs.PipelineFilters>;
    // Optional explicit pipeline identifier.
    id?: pulumi.Input<string>;
    /**
     * blocks - Specifies pipeline code and required artifacts. Syntax resembles library configuration block with the addition of a special `notebook` type of library that should have `path` attribute.
     */
    libraries?: pulumi.Input<pulumi.Input<inputs.PipelineLibrary>[]>;
    /**
     * A user-friendly name for this pipeline. The name can be used to identify pipeline jobs in the UI.
     */
    name?: pulumi.Input<string>;
    /**
     * A location on DBFS or cloud storage where output data and metadata required for pipeline execution are stored. By default, tables are stored in a subdirectory of this location.
     */
    storage?: pulumi.Input<string>;
    /**
     * The name of a database for persisting pipeline output data. Configuring the target setting allows you to view and query the pipeline output data from the Databricks UI.
     */
    target?: pulumi.Input<string>;
}
package/pipeline.js ADDED
@@ -0,0 +1,134 @@
1
+ "use strict";
2
+ // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
+ // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
+ Object.defineProperty(exports, "__esModule", { value: true });
5
+ exports.Pipeline = void 0;
6
+ const pulumi = require("@pulumi/pulumi");
7
+ const utilities = require("./utilities");
8
+ /**
9
+ * Use `databricks.Pipeline` to deploy [Delta Live Tables](https://docs.databricks.com/data-engineering/delta-live-tables/index.html).
10
+ *
11
+ * ## Example Usage
12
+ *
13
+ * ```typescript
14
+ * import * as pulumi from "@pulumi/pulumi";
15
+ * import * as databricks from "@pulumi/databricks";
16
+ *
17
+ * const dltDemo = new databricks.Notebook("dltDemo", {});
18
+ * //...
19
+ * const _this = new databricks.Pipeline("this", {
20
+ * storage: "/test/first-pipeline",
21
+ * configuration: {
22
+ * key1: "value1",
23
+ * key2: "value2",
24
+ * },
25
+ * clusters: [
26
+ * {
27
+ * label: "default",
28
+ * numWorkers: 2,
29
+ * customTags: {
30
+ * cluster_type: "default",
31
+ * },
32
+ * },
33
+ * {
34
+ * label: "maintenance",
35
+ * numWorkers: 1,
36
+ * customTags: {
37
+ * cluster_type: "maintenance",
38
+ * },
39
+ * },
40
+ * ],
41
+ * libraries: [{
42
+ * notebook: {
43
+ * path: dltDemo.id,
44
+ * },
45
+ * }],
46
+ * filters: {
47
+ * includes: ["com.databricks.include"],
48
+ * excludes: ["com.databricks.exclude"],
49
+ * },
50
+ * continuous: false,
51
+ * });
52
+ * ```
53
+ * ## Related Resources
54
+ *
55
+ * The following resources are often used in the same context:
56
+ *
57
+ * * End to end workspace management guide.
58
+ * * databricks.Cluster to create [Databricks Clusters](https://docs.databricks.com/clusters/index.html).
59
+ * * databricks.Job to manage [Databricks Jobs](https://docs.databricks.com/jobs.html) to run non-interactive code in a databricks_cluster.
60
+ * * databricks.Notebook to manage [Databricks Notebooks](https://docs.databricks.com/notebooks/index.html).
61
+ *
62
+ * ## Import
63
+ *
64
+ * The resource job can be imported using the id of the pipeline bash
65
+ *
66
+ * ```sh
67
+ * $ pulumi import databricks:index/pipeline:Pipeline this <pipeline-id>
68
+ * ```
69
+ */
70
class Pipeline extends pulumi.CustomResource {
    /**
     * Create a Pipeline resource, or rehydrate an existing one when `opts.id` is set
     * (the code path taken by the static `get()` below).
     *
     * @param name The _unique_ name of the resource.
     * @param argsOrState PipelineArgs when creating; PipelineState when `opts.id` is supplied.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name, argsOrState, opts) {
        let resourceInputs = {};
        opts = opts || {};
        if (opts.id) {
            // Lookup of an existing pipeline: argsOrState carries prior state.
            const state = argsOrState;
            resourceInputs["allowDuplicateNames"] = state ? state.allowDuplicateNames : undefined;
            resourceInputs["clusters"] = state ? state.clusters : undefined;
            resourceInputs["configuration"] = state ? state.configuration : undefined;
            resourceInputs["continuous"] = state ? state.continuous : undefined;
            resourceInputs["filters"] = state ? state.filters : undefined;
            resourceInputs["id"] = state ? state.id : undefined;
            resourceInputs["libraries"] = state ? state.libraries : undefined;
            resourceInputs["name"] = state ? state.name : undefined;
            resourceInputs["storage"] = state ? state.storage : undefined;
            resourceInputs["target"] = state ? state.target : undefined;
            resourceInputs["url"] = state ? state.url : undefined;
        }
        else {
            // Creation: argsOrState carries the construction arguments.
            const args = argsOrState;
            // `filters` is the only required input; the check is skipped when the engine
            // rehydrates the resource from a URN (opts.urn set).
            if ((!args || args.filters === undefined) && !opts.urn) {
                throw new Error("Missing required property 'filters'");
            }
            resourceInputs["allowDuplicateNames"] = args ? args.allowDuplicateNames : undefined;
            resourceInputs["clusters"] = args ? args.clusters : undefined;
            resourceInputs["configuration"] = args ? args.configuration : undefined;
            resourceInputs["continuous"] = args ? args.continuous : undefined;
            resourceInputs["filters"] = args ? args.filters : undefined;
            resourceInputs["id"] = args ? args.id : undefined;
            resourceInputs["libraries"] = args ? args.libraries : undefined;
            resourceInputs["name"] = args ? args.name : undefined;
            resourceInputs["storage"] = args ? args.storage : undefined;
            resourceInputs["target"] = args ? args.target : undefined;
            // `url` is computed by the provider, never supplied by the caller.
            resourceInputs["url"] = undefined /*out*/;
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Pipeline.__pulumiType, name, resourceInputs, opts);
    }
    /**
     * Get an existing Pipeline resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        return new Pipeline(name, state, Object.assign(Object.assign({}, opts), { id: id }));
    }
    /**
     * Returns true if the given object is an instance of Pipeline. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Compare the type token rather than using instanceof, so the check holds
        // across multiple loaded copies of the SDK.
        return obj['__pulumiType'] === Pipeline.__pulumiType;
    }
}
exports.Pipeline = Pipeline;
/** @internal */
Pipeline.__pulumiType = 'databricks:index/pipeline:Pipeline';
134
+ //# sourceMappingURL=pipeline.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"pipeline.js","sourceRoot":"","sources":["../pipeline.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AAEzC,yCAAyC;AAEzC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA6DG;AACH,MAAa,QAAS,SAAQ,MAAM,CAAC,cAAc;IAqE/C,YAAY,IAAY,EAAE,WAA0C,EAAE,IAAmC;QACrG,IAAI,cAAc,GAAkB,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QAClB,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,MAAM,KAAK,GAAG,WAAwC,CAAC;YACvD,cAAc,CAAC,qBAAqB,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,mBAAmB,CAAC,CAAC,CAAC,SAAS,CAAC;YACtF,cAAc,CAAC,UAAU,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC;YAChE,cAAc,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,SAAS,CAAC;YAC1E,cAAc,CAAC,YAAY,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;YACpE,cAAc,CAAC,SAAS,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9D,cAAc,CAAC,IAAI,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;YACpD,cAAc,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,SAAS,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9D,cAAc,CAAC,QAAQ,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC;YAC5D,cAAc,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;SACzD;aAAM;YACH,MAAM,IAAI,GAAG,WAAuC,CAAC;YACrD,IAAI,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,OAAO,KAAK,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACpD,MAAM,IAAI,KAAK,CAAC,qCAAqC,CAAC,CAAC;aAC1D;YACD,cAAc,CAAC,qBAAqB,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,mBAAmB,CAAC,CAAC,CAAC,SAAS,CAAC;YACpF,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9D,cAAc,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,SAAS,CAAC;YACxE,cAAc,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,
CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;YAC5D,cAAc,CAAC,IAAI,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,EAAE,CAAC,CAAC,CAAC,SAAS,CAAC;YAClD,cAAc,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAChE,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,SAAS,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;YAC5D,cAAc,CAAC,QAAQ,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,MAAM,CAAC,CAAC,CAAC,SAAS,CAAC;YAC1D,cAAc,CAAC,KAAK,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC;SAC7C;QACD,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,CAAC,CAAC;QACnE,KAAK,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;IAC7D,CAAC;IAvGD;;;;;;;;OAQG;IACI,MAAM,CAAC,GAAG,CAAC,IAAY,EAAE,EAA2B,EAAE,KAAqB,EAAE,IAAmC;QACnH,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAO,KAAK,kCAAO,IAAI,KAAE,EAAE,EAAE,EAAE,IAAG,CAAC;IAC/D,CAAC;IAKD;;;OAGG;IACI,MAAM,CAAC,UAAU,CAAC,GAAQ;QAC7B,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,EAAE;YACnC,OAAO,KAAK,CAAC;SAChB;QACD,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,QAAQ,CAAC,YAAY,CAAC;IACzD,CAAC;;AA1BL,4BAyGC;AA3FG,gBAAgB;AACO,qBAAY,GAAG,oCAAoC,CAAC"}
package/provider.d.ts ADDED
@@ -0,0 +1,61 @@
1
+ import * as pulumi from "@pulumi/pulumi";
2
+ /**
3
+ * The provider type for the databricks package. By default, resources use package-wide configuration
4
+ * settings, however an explicit `Provider` instance may be created and passed during resource
5
+ * construction to achieve fine-grained programmatic control over provider settings. See the
6
+ * [documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
7
+ */
8
export declare class Provider extends pulumi.ProviderResource {
    /**
     * Returns true if the given object is an instance of Provider. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj: any): obj is Provider;
    // Note: only the string-valued configuration settings are surfaced as outputs here;
    // the boolean/number settings (azureUseMsi, debugHeaders, debugTruncateBytes,
    // httpTimeoutSeconds, rateLimit, skipVerify) appear in ProviderArgs only.
    readonly accountId: pulumi.Output<string | undefined>;
    readonly authType: pulumi.Output<string | undefined>;
    // Azure AD (service principal) authentication settings.
    readonly azureClientId: pulumi.Output<string | undefined>;
    readonly azureClientSecret: pulumi.Output<string | undefined>;
    readonly azureEnvironment: pulumi.Output<string | undefined>;
    readonly azureTenantId: pulumi.Output<string | undefined>;
    readonly azureWorkspaceResourceId: pulumi.Output<string | undefined>;
    readonly configFile: pulumi.Output<string | undefined>;
    readonly googleServiceAccount: pulumi.Output<string | undefined>;
    // Workspace host URL plus credential settings (password/token/username are secrets).
    readonly host: pulumi.Output<string | undefined>;
    readonly password: pulumi.Output<string | undefined>;
    readonly profile: pulumi.Output<string | undefined>;
    readonly token: pulumi.Output<string | undefined>;
    readonly username: pulumi.Output<string | undefined>;
    /**
     * Create a Provider resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args?: ProviderArgs, opts?: pulumi.ResourceOptions);
}
37
+ /**
38
+ * The set of arguments for constructing a Provider resource.
39
+ */
40
export interface ProviderArgs {
    // Databricks account-level identity and auth-method selection.
    accountId?: pulumi.Input<string>;
    authType?: pulumi.Input<string>;
    // Azure AD service-principal / managed-identity authentication settings.
    azureClientId?: pulumi.Input<string>;
    azureClientSecret?: pulumi.Input<string>;
    azureEnvironment?: pulumi.Input<string>;
    azureTenantId?: pulumi.Input<string>;
    azureUseMsi?: pulumi.Input<boolean>;
    azureWorkspaceResourceId?: pulumi.Input<string>;
    // Path to a Databricks CLI config file; used together with `profile`.
    configFile?: pulumi.Input<string>;
    // HTTP debugging knobs for the underlying Databricks client.
    debugHeaders?: pulumi.Input<boolean>;
    debugTruncateBytes?: pulumi.Input<number>;
    // GCP service-account authentication.
    googleServiceAccount?: pulumi.Input<string>;
    // Workspace host URL and credentials. NOTE(review): unset values presumably fall back
    // to the package-wide config / DATABRICKS_* environment variables (see config/vars) —
    // confirm against the provider documentation.
    host?: pulumi.Input<string>;
    httpTimeoutSeconds?: pulumi.Input<number>;
    password?: pulumi.Input<string>;
    profile?: pulumi.Input<string>;
    rateLimit?: pulumi.Input<number>;
    skipVerify?: pulumi.Input<boolean>;
    token?: pulumi.Input<string>;
    username?: pulumi.Input<string>;
}
package/provider.js ADDED
@@ -0,0 +1,64 @@
1
"use strict";
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Provider = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
 * The provider type for the databricks package. By default, resources use package-wide configuration
 * settings, however an explicit `Provider` instance may be created and passed during resource
 * construction to achieve fine-grained programmatic control over provider settings. See the
 * [documentation](https://www.pulumi.com/docs/reference/programming-model/#providers) for more information.
 */
class Provider extends pulumi.ProviderResource {
    /**
     * Create a Provider resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name, args, opts) {
        opts = opts || {};
        const resourceInputs = {};
        // Each entry is [propertyName, jsonEncode]. Entries flagged `true` are
        // non-string config values that the generated code JSON-stringifies
        // before handing them to the engine; the rest pass through unchanged.
        // Order matches the original generated assignments.
        const inputSpec = [
            ["accountId", false],
            ["authType", false],
            ["azureClientId", false],
            ["azureClientSecret", false],
            ["azureEnvironment", false],
            ["azureTenantId", false],
            ["azureUseMsi", true],
            ["azureWorkspaceResourceId", false],
            ["configFile", false],
            ["debugHeaders", true],
            ["debugTruncateBytes", true],
            ["googleServiceAccount", false],
            ["host", false],
            ["httpTimeoutSeconds", true],
            ["password", false],
            ["profile", false],
            ["rateLimit", true],
            ["skipVerify", true],
            ["token", false],
            ["username", false],
        ];
        for (const [key, jsonEncode] of inputSpec) {
            const raw = args ? args[key] : undefined;
            resourceInputs[key] = jsonEncode
                ? pulumi.output(raw).apply(JSON.stringify)
                : raw;
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Provider.__pulumiType, name, resourceInputs, opts);
    }
    /**
     * Returns true if the given object is an instance of Provider. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Compare the type tag rather than using instanceof so the check
        // survives multiple SDK copies in one process.
        return obj['__pulumiType'] === Provider.__pulumiType;
    }
}
exports.Provider = Provider;
/** @internal */
Provider.__pulumiType = 'databricks';
//# sourceMappingURL=provider.js.map
@@ -0,0 +1 @@
1
+ {"version":3,"file":"provider.js","sourceRoot":"","sources":["../provider.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AACzC,yCAAyC;AAEzC;;;;;GAKG;AACH,MAAa,QAAS,SAAQ,MAAM,CAAC,gBAAgB;IA8BjD;;;;;;OAMG;IACH,YAAY,IAAY,EAAE,IAAmB,EAAE,IAA6B;QACxE,IAAI,cAAc,GAAkB,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QAClB;YACI,cAAc,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAChE,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9D,cAAc,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,SAAS,CAAC;YACxE,cAAc,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC;YAChF,cAAc,CAAC,kBAAkB,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,gBAAgB,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9E,cAAc,CAAC,eAAe,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,aAAa,CAAC,CAAC,CAAC,SAAS,CAAC;YACxE,cAAc,CAAC,aAAa,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,WAAW,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YACzG,cAAc,CAAC,0BAA0B,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,wBAAwB,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9F,cAAc,CAAC,YAAY,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,CAAC,cAAc,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,YAAY,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YAC3G,cAAc,CAAC,oBAAoB,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YACvH,cAAc,CAAC,sBAAsB,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,oBAAoB,CAAC,CAAC,CAAC,SAAS,CAAC;YACtF,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,oBAAoB,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,kBAAkB,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YACvH,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC;YAC9D,cAAc,CAAC,SAAS,CAAC,GAAG,IAA
I,CAAC,CAAC,CAAC,IAAI,CAAC,OAAO,CAAC,CAAC,CAAC,SAAS,CAAC;YAC5D,cAAc,CAAC,WAAW,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YACrG,cAAc,CAAC,YAAY,CAAC,GAAG,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,UAAU,CAAC,CAAC,CAAC,SAAS,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC;YACvG,cAAc,CAAC,OAAO,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,UAAU,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,QAAQ,CAAC,CAAC,CAAC,SAAS,CAAC;SACjE;QACD,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,CAAC,CAAC;QACnE,KAAK,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;IAC7D,CAAC;IA5DD;;;OAGG;IACI,MAAM,CAAC,UAAU,CAAC,GAAQ;QAC7B,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,EAAE;YACnC,OAAO,KAAK,CAAC;SAChB;QACD,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,QAAQ,CAAC,YAAY,CAAC;IACzD,CAAC;;AAbL,4BAiEC;AAhEG,gBAAgB;AACO,qBAAY,GAAG,YAAY,CAAC"}
package/repo.d.ts ADDED
@@ -0,0 +1,117 @@
1
import * as pulumi from "@pulumi/pulumi";
/**
 * ## Import
 *
 * The resource Repo can be imported using the Repo ID (obtained via UI or using API):
 *
 * ```sh
 * $ pulumi import databricks:index/repo:Repo this repo_id
 * ```
 */
export declare class Repo extends pulumi.CustomResource {
    /**
     * Get an existing Repo resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name: string, id: pulumi.Input<pulumi.ID>, state?: RepoState, opts?: pulumi.CustomResourceOptions): Repo;
    /**
     * Returns true if the given object is an instance of Repo. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj: any): obj is Repo;
    /**
     * Name of the branch for initial checkout. If not specified, the default branch of the repository will be used. Conflicts with `tag`. If `branch` is removed, and `tag` isn't specified, then the repository will stay at the previously checked out state.
     */
    readonly branch: pulumi.Output<string>;
    /**
     * Hash of the HEAD commit at time of the last executed operation. It won't change if you manually perform a pull operation via UI or API.
     */
    readonly commitHash: pulumi.Output<string>;
    /**
     * Case-insensitive name of the Git provider. The following values are supported right now (may be subject to change; consult the [Repos API documentation](https://docs.databricks.com/dev-tools/api/latest/repos.html)): `gitHub`, `gitHubEnterprise`, `bitbucketCloud`, `bitbucketServer`, `azureDevOpsServices`, `gitLab`, `gitLabEnterpriseEdition`
     */
    readonly gitProvider: pulumi.Output<string>;
    /**
     * Path to put the checked out Repo. If not specified, then the repo will be created in the user's repo directory (`/Repos/<username>/...`). If the value changes, the repo is re-created.
     */
    readonly path: pulumi.Output<string>;
    /**
     * Name of the tag for initial checkout. Conflicts with `branch`.
     */
    readonly tag: pulumi.Output<string | undefined>;
    /**
     * The URL of the Git Repository to clone from. If the value changes, the repo is re-created.
     */
    readonly url: pulumi.Output<string>;
    /**
     * Create a Repo resource with the given unique name, arguments, and options.
     *
     * @param name The _unique_ name of the resource.
     * @param args The arguments to use to populate this resource's properties.
     * @param opts A bag of options that control this resource's behavior.
     */
    constructor(name: string, args: RepoArgs, opts?: pulumi.CustomResourceOptions);
}
/**
 * Input properties used for looking up and filtering Repo resources.
 */
export interface RepoState {
    /**
     * Name of the branch for initial checkout. If not specified, the default branch of the repository will be used. Conflicts with `tag`. If `branch` is removed, and `tag` isn't specified, then the repository will stay at the previously checked out state.
     */
    branch?: pulumi.Input<string>;
    /**
     * Hash of the HEAD commit at time of the last executed operation. It won't change if you manually perform a pull operation via UI or API.
     */
    commitHash?: pulumi.Input<string>;
    /**
     * Case-insensitive name of the Git provider. The following values are supported right now (may be subject to change; consult the [Repos API documentation](https://docs.databricks.com/dev-tools/api/latest/repos.html)): `gitHub`, `gitHubEnterprise`, `bitbucketCloud`, `bitbucketServer`, `azureDevOpsServices`, `gitLab`, `gitLabEnterpriseEdition`
     */
    gitProvider?: pulumi.Input<string>;
    /**
     * Path to put the checked out Repo. If not specified, then the repo will be created in the user's repo directory (`/Repos/<username>/...`). If the value changes, the repo is re-created.
     */
    path?: pulumi.Input<string>;
    /**
     * Name of the tag for initial checkout. Conflicts with `branch`.
     */
    tag?: pulumi.Input<string>;
    /**
     * The URL of the Git Repository to clone from. If the value changes, the repo is re-created.
     */
    url?: pulumi.Input<string>;
}
/**
 * The set of arguments for constructing a Repo resource.
 */
export interface RepoArgs {
    /**
     * Name of the branch for initial checkout. If not specified, the default branch of the repository will be used. Conflicts with `tag`. If `branch` is removed, and `tag` isn't specified, then the repository will stay at the previously checked out state.
     */
    branch?: pulumi.Input<string>;
    /**
     * Hash of the HEAD commit at time of the last executed operation. It won't change if you manually perform a pull operation via UI or API.
     */
    commitHash?: pulumi.Input<string>;
    /**
     * Case-insensitive name of the Git provider. The following values are supported right now (may be subject to change; consult the [Repos API documentation](https://docs.databricks.com/dev-tools/api/latest/repos.html)): `gitHub`, `gitHubEnterprise`, `bitbucketCloud`, `bitbucketServer`, `azureDevOpsServices`, `gitLab`, `gitLabEnterpriseEdition`
     */
    gitProvider?: pulumi.Input<string>;
    /**
     * Path to put the checked out Repo. If not specified, then the repo will be created in the user's repo directory (`/Repos/<username>/...`). If the value changes, the repo is re-created.
     */
    path?: pulumi.Input<string>;
    /**
     * Name of the tag for initial checkout. Conflicts with `branch`.
     */
    tag?: pulumi.Input<string>;
    /**
     * The URL of the Git Repository to clone from. If the value changes, the repo is re-created.
     */
    url: pulumi.Input<string>;
}
package/repo.js ADDED
@@ -0,0 +1,71 @@
1
"use strict";
// *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
// *** Do not edit by hand unless you're certain you know what you are doing! ***
Object.defineProperty(exports, "__esModule", { value: true });
exports.Repo = void 0;
const pulumi = require("@pulumi/pulumi");
const utilities = require("./utilities");
/**
 * ## Import
 *
 * The resource Repo can be imported using the Repo ID (obtained via UI or using API) bash
 *
 * ```sh
 * $ pulumi import databricks:index/repo:Repo this repo_id
 * ```
 */
class Repo extends pulumi.CustomResource {
    constructor(name, argsOrState, opts) {
        opts = opts || {};
        const resourceInputs = {};
        // The same set of properties is copied whether we are reading back
        // existing state (opts.id set) or constructing from fresh args.
        const fields = ["branch", "commitHash", "gitProvider", "path", "tag", "url"];
        if (opts.id) {
            // Lookup path: argsOrState carries RepoState.
            const state = argsOrState;
            for (const field of fields) {
                resourceInputs[field] = state ? state[field] : undefined;
            }
        }
        else {
            // Creation path: argsOrState carries RepoArgs; `url` is required
            // unless we are rehydrating from an existing URN.
            const args = argsOrState;
            if ((!args || args.url === undefined) && !opts.urn) {
                throw new Error("Missing required property 'url'");
            }
            for (const field of fields) {
                resourceInputs[field] = args ? args[field] : undefined;
            }
        }
        opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
        super(Repo.__pulumiType, name, resourceInputs, opts);
    }
    /**
     * Get an existing Repo resource's state with the given name, ID, and optional extra
     * properties used to qualify the lookup.
     *
     * @param name The _unique_ name of the resulting resource.
     * @param id The _unique_ provider ID of the resource to lookup.
     * @param state Any extra arguments used during the lookup.
     * @param opts Optional settings to control the behavior of the CustomResource.
     */
    static get(name, id, state, opts) {
        return new Repo(name, state, Object.assign(Object.assign({}, opts), { id: id }));
    }
    /**
     * Returns true if the given object is an instance of Repo. This is designed to work even
     * when multiple copies of the Pulumi SDK have been loaded into the same process.
     */
    static isInstance(obj) {
        if (obj === undefined || obj === null) {
            return false;
        }
        // Type-tag comparison instead of instanceof: survives multiple SDK
        // copies loaded into one process.
        return obj['__pulumiType'] === Repo.__pulumiType;
    }
}
exports.Repo = Repo;
/** @internal */
Repo.__pulumiType = 'databricks:index/repo:Repo';
//# sourceMappingURL=repo.js.map