@pulumi/mongodbatlas 3.11.0 → 3.11.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (46)
  1. package/alertConfiguration.d.ts +9 -4
  2. package/alertConfiguration.js +3 -1
  3. package/alertConfiguration.js.map +1 -1
  4. package/cloudProviderAccess.d.ts +1 -1
  5. package/cloudProviderAccess.js +1 -1
  6. package/get509AuthenticationDatabaseUser.d.ts +2 -58
  7. package/get509AuthenticationDatabaseUser.js +3 -58
  8. package/get509AuthenticationDatabaseUser.js.map +1 -1
  9. package/getAlertConfiguration.d.ts +9 -0
  10. package/getAlertConfiguration.js.map +1 -1
  11. package/getOnlineArchive.d.ts +19 -2
  12. package/getOnlineArchive.js +18 -2
  13. package/getOnlineArchive.js.map +1 -1
  14. package/getOnlineArchives.d.ts +44 -28
  15. package/getOnlineArchives.js +44 -28
  16. package/getOnlineArchives.js.map +1 -1
  17. package/getX509AuthenticationDatabaseUser.d.ts +64 -0
  18. package/getX509AuthenticationDatabaseUser.js +38 -0
  19. package/getX509AuthenticationDatabaseUser.js.map +1 -0
  20. package/index.d.ts +3 -18
  21. package/index.js +8 -27
  22. package/index.js.map +1 -1
  23. package/onlineArchive.d.ts +33 -12
  24. package/onlineArchive.js +2 -0
  25. package/onlineArchive.js.map +1 -1
  26. package/package.json +1 -1
  27. package/types/input.d.ts +10 -53
  28. package/types/output.d.ts +151 -227
  29. package/dataLake.d.ts +0 -204
  30. package/dataLake.js +0 -107
  31. package/dataLake.js.map +0 -1
  32. package/getDataLake.d.ts +0 -100
  33. package/getDataLake.js +0 -30
  34. package/getDataLake.js.map +0 -1
  35. package/getDataLakes.d.ts +0 -68
  36. package/getDataLakes.js +0 -51
  37. package/getDataLakes.js.map +0 -1
  38. package/getPrivatelinkEndpointServiceAdl.d.ts +0 -66
  39. package/getPrivatelinkEndpointServiceAdl.js +0 -34
  40. package/getPrivatelinkEndpointServiceAdl.js.map +0 -1
  41. package/getPrivatelinkEndpointsServiceAdl.d.ts +0 -89
  42. package/getPrivatelinkEndpointsServiceAdl.js +0 -65
  43. package/getPrivatelinkEndpointsServiceAdl.js.map +0 -1
  44. package/privatelinkEndpointServiceAdl.d.ts +0 -125
  45. package/privatelinkEndpointServiceAdl.js +0 -98
  46. package/privatelinkEndpointServiceAdl.js.map +0 -1
package/dataLake.d.ts DELETED
@@ -1,204 +0,0 @@
1
- import * as pulumi from "@pulumi/pulumi";
2
- import * as inputs from "./types/input";
3
- import * as outputs from "./types/output";
4
- /**
5
- * `mongodbatlas.DataLake` provides a Data Lake resource.
6
- *
7
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
8
- *
9
- * > **IMPORTANT:** All arguments including the password will be stored in the raw state as plain-text. Read more about sensitive data in state.
10
- *
11
- * ## Example Usage
12
- * ### S
13
- *
14
- * ```typescript
15
- * import * as pulumi from "@pulumi/pulumi";
16
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
17
- *
18
- * const testProject = new mongodbatlas.Project("testProject", {orgId: "ORGANIZATION ID"});
19
- * const testCloudProviderAccess = new mongodbatlas.CloudProviderAccess("testCloudProviderAccess", {
20
- * projectId: testProject.id,
21
- * providerName: "AWS",
22
- * iamAssumedRoleArn: "AWS ROLE ID",
23
- * });
24
- * const basicDs = new mongodbatlas.DataLake("basicDs", {
25
- * projectId: testProject.id,
26
- * aws: {
27
- * roleId: testCloudProviderAccess.roleId,
28
- * testS3Bucket: "TEST S3 BUCKET NAME",
29
- * },
30
- * });
31
- * ```
32
- *
33
- * ## Import
34
- *
35
- * Data Lake can be imported using project ID, name of the data lake and name of the AWS s3 bucket, in the format `project_id`--`name`--`aws_test_s3_bucket`, e.g.
36
- *
37
- * ```sh
38
- * $ pulumi import mongodbatlas:index/dataLake:DataLake example 1112222b3bf99403840e8934--test-data-lake--s3-test
39
- * ```
40
- * See [MongoDB Atlas API](https://docs.mongodb.com/datalake/reference/api/dataLakes-create-one-tenant) Documentation for more information.
41
- */
42
- export declare class DataLake extends pulumi.CustomResource {
43
- /**
44
- * Get an existing DataLake resource's state with the given name, ID, and optional extra
45
- * properties used to qualify the lookup.
46
- *
47
- * @param name The _unique_ name of the resulting resource.
48
- * @param id The _unique_ provider ID of the resource to lookup.
49
- * @param state Any extra arguments used during the lookup.
50
- * @param opts Optional settings to control the behavior of the CustomResource.
51
- */
52
- static get(name: string, id: pulumi.Input<pulumi.ID>, state?: DataLakeState, opts?: pulumi.CustomResourceOptions): DataLake;
53
- /**
54
- * Returns true if the given object is an instance of DataLake. This is designed to work even
55
- * when multiple copies of the Pulumi SDK have been loaded into the same process.
56
- */
57
- static isInstance(obj: any): obj is DataLake;
58
- /**
59
- * AWS provider of the cloud service where Data Lake can access the S3 Bucket.
60
- * * `aws.0.role_id` - (Required) Unique identifier of the role that Data Lake can use to access the data stores. If necessary, use the Atlas [UI](https://docs.atlas.mongodb.com/security/manage-iam-roles/) or [API](https://docs.atlas.mongodb.com/reference/api/cloud-provider-access-get-roles/) to retrieve the role ID. You must also specify the `aws.0.test_s3_bucket`.
61
- * * `aws.0.test_s3_bucket` - (Required) Name of the S3 data bucket that the provided role ID is authorized to access. You must also specify the `aws.0.role_id`.
62
- */
63
- readonly aws: pulumi.Output<outputs.DataLakeAws>;
64
- /**
65
- * The cloud provider region to which Atlas Data Lake routes client connections for data processing. Set to `null` to direct Atlas Data Lake to route client connections to the region nearest to the client based on DNS resolution.
66
- * * `data_process_region.0.cloud_provider` - (Required) Name of the cloud service provider. Atlas Data Lake only supports AWS.
67
- * * `data_process_region.0.region` - (Required). Name of the region to which Data Lake routes client connections for data processing. Atlas Data Lake only supports the following regions:
68
- */
69
- readonly dataProcessRegion: pulumi.Output<outputs.DataLakeDataProcessRegion>;
70
- /**
71
- * The list of hostnames assigned to the Atlas Data Lake. Each string in the array is a hostname assigned to the Atlas Data Lake.
72
- */
73
- readonly hostnames: pulumi.Output<string[]>;
74
- /**
75
- * Name of the Atlas Data Lake.
76
- */
77
- readonly name: pulumi.Output<string>;
78
- /**
79
- * The unique ID for the project to create a data lake.
80
- */
81
- readonly projectId: pulumi.Output<string>;
82
- /**
83
- * Current state of the Atlas Data Lake:
84
- */
85
- readonly state: pulumi.Output<string>;
86
- /**
87
- * Configuration details for mapping each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see [databases](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#std-label-datalake-databases-reference). An empty object indicates that the Data Lake has no mapping configuration for any data store.
88
- * * `storage_databases.#.name` - Name of the database to which Data Lake maps the data contained in the data store.
89
- * * `storage_databases.#.collections` - Array of objects where each object represents a collection and data sources that map to a [stores](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#mongodb-datalakeconf-datalakeconf.stores) data store.
90
- * * `storage_databases.#.collections.#.name` - Name of the collection.
91
- * * `storage_databases.#.collections.#.data_sources` - Array of objects where each object represents a stores data store to map with the collection.
92
- * * `storage_databases.#.collections.#.data_sources.#.store_name` - Name of a data store to map to the `<collection>`. Must match the name of an object in the stores array.
93
- * * `storage_databases.#.collections.#.data_sources.#.default_format` - Default format that Data Lake assumes if it encounters a file without an extension while searching the storeName.
94
- * * `storage_databases.#.collections.#.data_sources.#.path` - Controls how Atlas Data Lake searches for and parses files in the storeName before mapping them to the `<collection>`.
95
- * * `storage_databases.#.views` - Array of objects where each object represents an [aggregation pipeline](https://docs.mongodb.com/manual/core/aggregation-pipeline/#id1) on a collection. To learn more about views, see [Views](https://docs.mongodb.com/manual/core/views/).
96
- * * `storage_databases.#.views.#.name` - Name of the view.
97
- * * `storage_databases.#.views.#.source` - Name of the source collection for the view.
98
- * * `storage_databases.#.views.#.pipeline`- Aggregation pipeline stage(s) to apply to the source collection.
99
- */
100
- readonly storageDatabases: pulumi.Output<outputs.DataLakeStorageDatabase[]>;
101
- /**
102
- * Each object in the array represents a data store. Data Lake uses the storage.databases configuration details to map data in each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see [stores](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#std-label-datalake-stores-reference). An empty object indicates that the Data Lake has no configured data stores.
103
- * * `storage_stores.#.name` - Name of the data store.
104
- * * `storage_stores.#.provider` - Defines where the data is stored.
105
- * * `storage_stores.#.region` - Name of the AWS region in which the S3 bucket is hosted.
106
- * * `storage_stores.#.bucket` - Name of the AWS S3 bucket.
107
- * * `storage_stores.#.prefix` - Prefix Data Lake applies when searching for files in the S3 bucket .
108
- * * `storage_stores.#.delimiter` - The delimiter that separates `storage_databases.#.collections.#.data_sources.#.path` segments in the data store.
109
- * * `storage_stores.#.include_tags` - Determines whether or not to use S3 tags on the files in the given path as additional partition attributes.
110
- */
111
- readonly storageStores: pulumi.Output<outputs.DataLakeStorageStore[]>;
112
- /**
113
- * Create a DataLake resource with the given unique name, arguments, and options.
114
- *
115
- * @param name The _unique_ name of the resource.
116
- * @param args The arguments to use to populate this resource's properties.
117
- * @param opts A bag of options that control this resource's behavior.
118
- */
119
- constructor(name: string, args: DataLakeArgs, opts?: pulumi.CustomResourceOptions);
120
- }
121
- /**
122
- * Input properties used for looking up and filtering DataLake resources.
123
- */
124
- export interface DataLakeState {
125
- /**
126
- * AWS provider of the cloud service where Data Lake can access the S3 Bucket.
127
- * * `aws.0.role_id` - (Required) Unique identifier of the role that Data Lake can use to access the data stores. If necessary, use the Atlas [UI](https://docs.atlas.mongodb.com/security/manage-iam-roles/) or [API](https://docs.atlas.mongodb.com/reference/api/cloud-provider-access-get-roles/) to retrieve the role ID. You must also specify the `aws.0.test_s3_bucket`.
128
- * * `aws.0.test_s3_bucket` - (Required) Name of the S3 data bucket that the provided role ID is authorized to access. You must also specify the `aws.0.role_id`.
129
- */
130
- aws?: pulumi.Input<inputs.DataLakeAws>;
131
- /**
132
- * The cloud provider region to which Atlas Data Lake routes client connections for data processing. Set to `null` to direct Atlas Data Lake to route client connections to the region nearest to the client based on DNS resolution.
133
- * * `data_process_region.0.cloud_provider` - (Required) Name of the cloud service provider. Atlas Data Lake only supports AWS.
134
- * * `data_process_region.0.region` - (Required). Name of the region to which Data Lake routes client connections for data processing. Atlas Data Lake only supports the following regions:
135
- */
136
- dataProcessRegion?: pulumi.Input<inputs.DataLakeDataProcessRegion>;
137
- /**
138
- * The list of hostnames assigned to the Atlas Data Lake. Each string in the array is a hostname assigned to the Atlas Data Lake.
139
- */
140
- hostnames?: pulumi.Input<pulumi.Input<string>[]>;
141
- /**
142
- * Name of the Atlas Data Lake.
143
- */
144
- name?: pulumi.Input<string>;
145
- /**
146
- * The unique ID for the project to create a data lake.
147
- */
148
- projectId?: pulumi.Input<string>;
149
- /**
150
- * Current state of the Atlas Data Lake:
151
- */
152
- state?: pulumi.Input<string>;
153
- /**
154
- * Configuration details for mapping each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see [databases](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#std-label-datalake-databases-reference). An empty object indicates that the Data Lake has no mapping configuration for any data store.
155
- * * `storage_databases.#.name` - Name of the database to which Data Lake maps the data contained in the data store.
156
- * * `storage_databases.#.collections` - Array of objects where each object represents a collection and data sources that map to a [stores](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#mongodb-datalakeconf-datalakeconf.stores) data store.
157
- * * `storage_databases.#.collections.#.name` - Name of the collection.
158
- * * `storage_databases.#.collections.#.data_sources` - Array of objects where each object represents a stores data store to map with the collection.
159
- * * `storage_databases.#.collections.#.data_sources.#.store_name` - Name of a data store to map to the `<collection>`. Must match the name of an object in the stores array.
160
- * * `storage_databases.#.collections.#.data_sources.#.default_format` - Default format that Data Lake assumes if it encounters a file without an extension while searching the storeName.
161
- * * `storage_databases.#.collections.#.data_sources.#.path` - Controls how Atlas Data Lake searches for and parses files in the storeName before mapping them to the `<collection>`.
162
- * * `storage_databases.#.views` - Array of objects where each object represents an [aggregation pipeline](https://docs.mongodb.com/manual/core/aggregation-pipeline/#id1) on a collection. To learn more about views, see [Views](https://docs.mongodb.com/manual/core/views/).
163
- * * `storage_databases.#.views.#.name` - Name of the view.
164
- * * `storage_databases.#.views.#.source` - Name of the source collection for the view.
165
- * * `storage_databases.#.views.#.pipeline`- Aggregation pipeline stage(s) to apply to the source collection.
166
- */
167
- storageDatabases?: pulumi.Input<pulumi.Input<inputs.DataLakeStorageDatabase>[]>;
168
- /**
169
- * Each object in the array represents a data store. Data Lake uses the storage.databases configuration details to map data in each data store to queryable databases and collections. For complete documentation on this object and its nested fields, see [stores](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#std-label-datalake-stores-reference). An empty object indicates that the Data Lake has no configured data stores.
170
- * * `storage_stores.#.name` - Name of the data store.
171
- * * `storage_stores.#.provider` - Defines where the data is stored.
172
- * * `storage_stores.#.region` - Name of the AWS region in which the S3 bucket is hosted.
173
- * * `storage_stores.#.bucket` - Name of the AWS S3 bucket.
174
- * * `storage_stores.#.prefix` - Prefix Data Lake applies when searching for files in the S3 bucket .
175
- * * `storage_stores.#.delimiter` - The delimiter that separates `storage_databases.#.collections.#.data_sources.#.path` segments in the data store.
176
- * * `storage_stores.#.include_tags` - Determines whether or not to use S3 tags on the files in the given path as additional partition attributes.
177
- */
178
- storageStores?: pulumi.Input<pulumi.Input<inputs.DataLakeStorageStore>[]>;
179
- }
180
- /**
181
- * The set of arguments for constructing a DataLake resource.
182
- */
183
- export interface DataLakeArgs {
184
- /**
185
- * AWS provider of the cloud service where Data Lake can access the S3 Bucket.
186
- * * `aws.0.role_id` - (Required) Unique identifier of the role that Data Lake can use to access the data stores. If necessary, use the Atlas [UI](https://docs.atlas.mongodb.com/security/manage-iam-roles/) or [API](https://docs.atlas.mongodb.com/reference/api/cloud-provider-access-get-roles/) to retrieve the role ID. You must also specify the `aws.0.test_s3_bucket`.
187
- * * `aws.0.test_s3_bucket` - (Required) Name of the S3 data bucket that the provided role ID is authorized to access. You must also specify the `aws.0.role_id`.
188
- */
189
- aws: pulumi.Input<inputs.DataLakeAws>;
190
- /**
191
- * The cloud provider region to which Atlas Data Lake routes client connections for data processing. Set to `null` to direct Atlas Data Lake to route client connections to the region nearest to the client based on DNS resolution.
192
- * * `data_process_region.0.cloud_provider` - (Required) Name of the cloud service provider. Atlas Data Lake only supports AWS.
193
- * * `data_process_region.0.region` - (Required). Name of the region to which Data Lake routes client connections for data processing. Atlas Data Lake only supports the following regions:
194
- */
195
- dataProcessRegion?: pulumi.Input<inputs.DataLakeDataProcessRegion>;
196
- /**
197
- * Name of the Atlas Data Lake.
198
- */
199
- name?: pulumi.Input<string>;
200
- /**
201
- * The unique ID for the project to create a data lake.
202
- */
203
- projectId: pulumi.Input<string>;
204
- }
package/dataLake.js DELETED
@@ -1,107 +0,0 @@
1
- "use strict";
2
- // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
- // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
- Object.defineProperty(exports, "__esModule", { value: true });
5
- exports.DataLake = void 0;
6
- const pulumi = require("@pulumi/pulumi");
7
- const utilities = require("./utilities");
8
- /**
9
- * `mongodbatlas.DataLake` provides a Data Lake resource.
10
- *
11
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
12
- *
13
- * > **IMPORTANT:** All arguments including the password will be stored in the raw state as plain-text. Read more about sensitive data in state.
14
- *
15
- * ## Example Usage
16
- * ### S
17
- *
18
- * ```typescript
19
- * import * as pulumi from "@pulumi/pulumi";
20
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
21
- *
22
- * const testProject = new mongodbatlas.Project("testProject", {orgId: "ORGANIZATION ID"});
23
- * const testCloudProviderAccess = new mongodbatlas.CloudProviderAccess("testCloudProviderAccess", {
24
- * projectId: testProject.id,
25
- * providerName: "AWS",
26
- * iamAssumedRoleArn: "AWS ROLE ID",
27
- * });
28
- * const basicDs = new mongodbatlas.DataLake("basicDs", {
29
- * projectId: testProject.id,
30
- * aws: {
31
- * roleId: testCloudProviderAccess.roleId,
32
- * testS3Bucket: "TEST S3 BUCKET NAME",
33
- * },
34
- * });
35
- * ```
36
- *
37
- * ## Import
38
- *
39
- * Data Lake can be imported using project ID, name of the data lake and name of the AWS s3 bucket, in the format `project_id`--`name`--`aws_test_s3_bucket`, e.g.
40
- *
41
- * ```sh
42
- * $ pulumi import mongodbatlas:index/dataLake:DataLake example 1112222b3bf99403840e8934--test-data-lake--s3-test
43
- * ```
44
- * See [MongoDB Atlas API](https://docs.mongodb.com/datalake/reference/api/dataLakes-create-one-tenant) Documentation for more information.
45
- */
46
- class DataLake extends pulumi.CustomResource {
47
- /**
48
- * Get an existing DataLake resource's state with the given name, ID, and optional extra
49
- * properties used to qualify the lookup.
50
- *
51
- * @param name The _unique_ name of the resulting resource.
52
- * @param id The _unique_ provider ID of the resource to lookup.
53
- * @param state Any extra arguments used during the lookup.
54
- * @param opts Optional settings to control the behavior of the CustomResource.
55
- */
56
- static get(name, id, state, opts) {
57
- return new DataLake(name, state, Object.assign(Object.assign({}, opts), { id: id }));
58
- }
59
- /**
60
- * Returns true if the given object is an instance of DataLake. This is designed to work even
61
- * when multiple copies of the Pulumi SDK have been loaded into the same process.
62
- */
63
- static isInstance(obj) {
64
- if (obj === undefined || obj === null) {
65
- return false;
66
- }
67
- return obj['__pulumiType'] === DataLake.__pulumiType;
68
- }
69
- constructor(name, argsOrState, opts) {
70
- let resourceInputs = {};
71
- opts = opts || {};
72
- if (opts.id) {
73
- const state = argsOrState;
74
- resourceInputs["aws"] = state ? state.aws : undefined;
75
- resourceInputs["dataProcessRegion"] = state ? state.dataProcessRegion : undefined;
76
- resourceInputs["hostnames"] = state ? state.hostnames : undefined;
77
- resourceInputs["name"] = state ? state.name : undefined;
78
- resourceInputs["projectId"] = state ? state.projectId : undefined;
79
- resourceInputs["state"] = state ? state.state : undefined;
80
- resourceInputs["storageDatabases"] = state ? state.storageDatabases : undefined;
81
- resourceInputs["storageStores"] = state ? state.storageStores : undefined;
82
- }
83
- else {
84
- const args = argsOrState;
85
- if ((!args || args.aws === undefined) && !opts.urn) {
86
- throw new Error("Missing required property 'aws'");
87
- }
88
- if ((!args || args.projectId === undefined) && !opts.urn) {
89
- throw new Error("Missing required property 'projectId'");
90
- }
91
- resourceInputs["aws"] = args ? args.aws : undefined;
92
- resourceInputs["dataProcessRegion"] = args ? args.dataProcessRegion : undefined;
93
- resourceInputs["name"] = args ? args.name : undefined;
94
- resourceInputs["projectId"] = args ? args.projectId : undefined;
95
- resourceInputs["hostnames"] = undefined /*out*/;
96
- resourceInputs["state"] = undefined /*out*/;
97
- resourceInputs["storageDatabases"] = undefined /*out*/;
98
- resourceInputs["storageStores"] = undefined /*out*/;
99
- }
100
- opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts);
101
- super(DataLake.__pulumiType, name, resourceInputs, opts);
102
- }
103
- }
104
- exports.DataLake = DataLake;
105
- /** @internal */
106
- DataLake.__pulumiType = 'mongodbatlas:index/dataLake:DataLake';
107
- //# sourceMappingURL=dataLake.js.map
package/dataLake.js.map DELETED
@@ -1 +0,0 @@
1
- {"version":3,"file":"dataLake.js","sourceRoot":"","sources":["../dataLake.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AAGzC,yCAAyC;AAEzC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GAqCG;AACH,MAAa,QAAS,SAAQ,MAAM,CAAC,cAAc;IAC/C;;;;;;;;OAQG;IACI,MAAM,CAAC,GAAG,CAAC,IAAY,EAAE,EAA2B,EAAE,KAAqB,EAAE,IAAmC;QACnH,OAAO,IAAI,QAAQ,CAAC,IAAI,EAAO,KAAK,kCAAO,IAAI,KAAE,EAAE,EAAE,EAAE,IAAG,CAAC;IAC/D,CAAC;IAKD;;;OAGG;IACI,MAAM,CAAC,UAAU,CAAC,GAAQ;QAC7B,IAAI,GAAG,KAAK,SAAS,IAAI,GAAG,KAAK,IAAI,EAAE;YACnC,OAAO,KAAK,CAAC;SAChB;QACD,OAAO,GAAG,CAAC,cAAc,CAAC,KAAK,QAAQ,CAAC,YAAY,CAAC;IACzD,CAAC;IAiED,YAAY,IAAY,EAAE,WAA0C,EAAE,IAAmC;QACrG,IAAI,cAAc,GAAkB,EAAE,CAAC;QACvC,IAAI,GAAG,IAAI,IAAI,EAAE,CAAC;QAClB,IAAI,IAAI,CAAC,EAAE,EAAE;YACT,MAAM,KAAK,GAAG,WAAwC,CAAC;YACvD,cAAc,CAAC,KAAK,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,mBAAmB,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,iBAAiB,CAAC,CAAC,CAAC,SAAS,CAAC;YAClF,cAAc,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,CAAC,MAAM,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACxD,cAAc,CAAC,WAAW,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAClE,cAAc,CAAC,OAAO,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,KAAK,CAAC,CAAC,CAAC,SAAS,CAAC;YAC1D,cAAc,CAAC,kBAAkB,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,gBAAgB,CAAC,CAAC,CAAC,SAAS,CAAC;YAChF,cAAc,CAAC,eAAe,CAAC,GAAG,KAAK,CAAC,CAAC,CAAC,KAAK,CAAC,aAAa,CAAC,CAAC,CAAC,SAAS,CAAC;SAC7E;aAAM;YACH,MAAM,IAAI,GAAG,WAAuC,CAAC;YACrD,IAAI,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,GAAG,KAAK,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBAChD,MAAM,IAAI,KAAK,CAAC,iCAAiC,CAAC,CAAC;aACtD;YACD,IAAI,CAAC,CAAC,IAAI,IAAI,IAAI,CAAC,SAAS,KAAK,SAAS,CAAC,IAAI,CAAC,IAAI,CAAC,GAAG,EAAE;gBACtD,MAAM,IAAI,KAAK,CAAC,uCAAuC,CAAC,CAAC;aAC5D;YACD,cAAc,CAAC,KAAK,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,GAAG,CAAC,CAAC,CAAC,SAAS,CAAC;YACpD,cAAc,CAAC,mBAAmB,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,iBAAiB,CAAC,CAAC,CA
AC,SAAS,CAAC;YAChF,cAAc,CAAC,MAAM,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,IAAI,CAAC,CAAC,CAAC,SAAS,CAAC;YACtD,cAAc,CAAC,WAAW,CAAC,GAAG,IAAI,CAAC,CAAC,CAAC,IAAI,CAAC,SAAS,CAAC,CAAC,CAAC,SAAS,CAAC;YAChE,cAAc,CAAC,WAAW,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC;YAChD,cAAc,CAAC,OAAO,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC;YAC5C,cAAc,CAAC,kBAAkB,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC;YACvD,cAAc,CAAC,eAAe,CAAC,GAAG,SAAS,CAAC,OAAO,CAAC;SACvD;QACD,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,CAAC,CAAC;QACnE,KAAK,CAAC,QAAQ,CAAC,YAAY,EAAE,IAAI,EAAE,cAAc,EAAE,IAAI,CAAC,CAAC;IAC7D,CAAC;;AA3HL,4BA4HC;AA9GG,gBAAgB;AACO,qBAAY,GAAG,sCAAsC,CAAC"}
package/getDataLake.d.ts DELETED
@@ -1,100 +0,0 @@
1
- import * as pulumi from "@pulumi/pulumi";
2
- import * as outputs from "./types/output";
3
- /**
4
- * `mongodbatlas.DataLake` describe a Data Lake.
5
- *
6
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
7
- */
8
- export declare function getDataLake(args: GetDataLakeArgs, opts?: pulumi.InvokeOptions): Promise<GetDataLakeResult>;
9
- /**
10
- * A collection of arguments for invoking getDataLake.
11
- */
12
- export interface GetDataLakeArgs {
13
- /**
14
- * Name of the data lake.
15
- */
16
- name: string;
17
- /**
18
- * The unique ID for the project to create a data lake.
19
- */
20
- projectId: string;
21
- }
22
- /**
23
- * A collection of values returned by getDataLake.
24
- */
25
- export interface GetDataLakeResult {
26
- /**
27
- * AWS provider of the cloud service where Data Lake can access the S3 Bucket.
28
- * * `aws.0.role_id` - Unique identifier of the role that Data Lake can use to access the data stores.
29
- * * `aws.0.test_s3_bucket` - Name of the S3 data bucket that the provided role ID is authorized to access.
30
- * * `aws.0.role_id` - Unique identifier of the role that Data Lake can use to access the data stores.
31
- * * `aws.0.test_s3_bucket` - Name of the S3 data bucket that the provided role ID is authorized to access.
32
- * * `aws.0.iam_assumed_role_arn` - Amazon Resource Name (ARN) of the IAM Role that Data Lake assumes when accessing S3 Bucket data stores.
33
- */
34
- readonly aws: outputs.GetDataLakeAw[];
35
- /**
36
- * The cloud provider region to which Atlas Data Lake routes client connections for data processing.
37
- * * `data_process_region.0.cloud_provider` - Name of the cloud service provider.
38
- * * `data_process_region.0.region` -Name of the region to which Data Lake routes client connections for data processing.
39
- */
40
- readonly dataProcessRegions: outputs.GetDataLakeDataProcessRegion[];
41
- /**
42
- * The list of hostnames assigned to the Atlas Data Lake. Each string in the array is a hostname assigned to the Atlas Data Lake.
43
- */
44
- readonly hostnames: string[];
45
- /**
46
- * The provider-assigned unique ID for this managed resource.
47
- */
48
- readonly id: string;
49
- readonly name: string;
50
- readonly projectId: string;
51
- /**
52
- * Current state of the Atlas Data Lake:
53
- */
54
- readonly state: string;
55
- /**
56
- * Configuration details for mapping each data store to queryable databases and collections.
57
- * * `storage_databases.#.name` - Name of the database to which Data Lake maps the data contained in the data store.
58
- * * `storage_databases.#.collections` - Array of objects where each object represents a collection and data sources that map to a [stores](https://docs.mongodb.com/datalake/reference/format/data-lake-configuration#mongodb-datalakeconf-datalakeconf.stores) data store.
59
- * * `storage_databases.#.collections.#.name` - Name of the collection.
60
- * * `storage_databases.#.collections.#.data_sources` - Array of objects where each object represents a stores data store to map with the collection.
61
- * * `storage_databases.#.collections.#.data_sources.#.store_name` - Name of a data store to map to the `<collection>`.
62
- * * `storage_databases.#.collections.#.data_sources.#.default_format` - Default format that Data Lake assumes if it encounters a file without an extension while searching the storeName.
63
- * * `storage_databases.#.collections.#.data_sources.#.path` - Controls how Atlas Data Lake searches for and parses files in the storeName before mapping them to the `<collection>`.
64
- * * `storage_databases.#.views` - Array of objects where each object represents an [aggregation pipeline](https://docs.mongodb.com/manual/core/aggregation-pipeline/#id1) on a collection.
65
- * * `storage_databases.#.views.#.name` - Name of the view.
66
- * * `storage_databases.#.views.#.source` - Name of the source collection for the view.
67
- * * `storage_databases.#.views.#.pipeline`- Aggregation pipeline stage(s) to apply to the source collection.
68
- */
69
- readonly storageDatabases: outputs.GetDataLakeStorageDatabase[];
70
- /**
71
- * Each object in the array represents a data store. Data Lake uses the storage.databases configuration details to map data in each data store to queryable databases and collections.
72
- * * `storage_stores.#.name` - Name of the data store.
73
- * * `storage_stores.#.provider` - Defines where the data is stored.
74
- * * `storage_stores.#.region` - Name of the AWS region in which the S3 bucket is hosted.
75
- * * `storage_stores.#.bucket` - Name of the AWS S3 bucket.
76
- * * `storage_stores.#.prefix` - Prefix Data Lake applies when searching for files in the S3 bucket .
77
- * * `storage_stores.#.delimiter` - The delimiter that separates `storage_databases.#.collections.#.data_sources.#.path` segments in the data store.
78
- * * `storage_stores.#.include_tags` - Determines whether or not to use S3 tags on the files in the given path as additional partition attributes.
79
- */
80
- readonly storageStores: outputs.GetDataLakeStorageStore[];
81
- }
82
- /**
83
- * `mongodbatlas.DataLake` describe a Data Lake.
84
- *
85
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
86
- */
87
- export declare function getDataLakeOutput(args: GetDataLakeOutputArgs, opts?: pulumi.InvokeOptions): pulumi.Output<GetDataLakeResult>;
88
- /**
89
- * A collection of arguments for invoking getDataLake.
90
- */
91
- export interface GetDataLakeOutputArgs {
92
- /**
93
- * Name of the data lake.
94
- */
95
- name: pulumi.Input<string>;
96
- /**
97
- * The unique ID for the project to create a data lake.
98
- */
99
- projectId: pulumi.Input<string>;
100
- }
package/getDataLake.js DELETED
@@ -1,30 +0,0 @@
1
- "use strict";
2
- // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
- // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
- Object.defineProperty(exports, "__esModule", { value: true });
5
- exports.getDataLakeOutput = exports.getDataLake = void 0;
6
- const pulumi = require("@pulumi/pulumi");
7
- const utilities = require("./utilities");
8
- /**
9
- * `mongodbatlas.DataLake` describe a Data Lake.
10
- *
11
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
12
- */
13
- function getDataLake(args, opts) {
14
- opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts || {});
15
- return pulumi.runtime.invoke("mongodbatlas:index/getDataLake:getDataLake", {
16
- "name": args.name,
17
- "projectId": args.projectId,
18
- }, opts);
19
- }
20
- exports.getDataLake = getDataLake;
21
- /**
22
- * `mongodbatlas.DataLake` describe a Data Lake.
23
- *
24
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
25
- */
26
- function getDataLakeOutput(args, opts) {
27
- return pulumi.output(args).apply((a) => getDataLake(a, opts));
28
- }
29
- exports.getDataLakeOutput = getDataLakeOutput;
30
- //# sourceMappingURL=getDataLake.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"getDataLake.js","sourceRoot":"","sources":["../getDataLake.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AAGzC,yCAAyC;AAEzC;;;;GAIG;AACH,SAAgB,WAAW,CAAC,IAAqB,EAAE,IAA2B;IAE1E,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,4CAA4C,EAAE;QACvE,MAAM,EAAE,IAAI,CAAC,IAAI;QACjB,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAPD,kCAOC;AA4ED;;;;GAIG;AACH,SAAgB,iBAAiB,CAAC,IAA2B,EAAE,IAA2B;IACtF,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,WAAW,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAA;AACtE,CAAC;AAFD,8CAEC"}
package/getDataLakes.d.ts DELETED
@@ -1,68 +0,0 @@
1
- import * as pulumi from "@pulumi/pulumi";
2
- import * as outputs from "./types/output";
3
- /**
4
- * `mongodbatlas.getDataLakes` describe all Data Lakes.
5
- *
6
- * > **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
7
- *
8
- * ## Example Usage
9
- *
10
- * ```typescript
11
- * import * as pulumi from "@pulumi/pulumi";
12
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
13
- *
14
- * const test = mongodbatlas.getDataLakes({
15
- * projectId: "PROJECT ID",
16
- * });
17
- * ```
18
- */
19
- export declare function getDataLakes(args: GetDataLakesArgs, opts?: pulumi.InvokeOptions): Promise<GetDataLakesResult>;
20
- /**
21
- * A collection of arguments for invoking getDataLakes.
22
- */
23
- export interface GetDataLakesArgs {
24
- /**
25
- * The unique ID for the project to get all data lakes.
26
- */
27
- projectId: string;
28
- }
29
- /**
30
- * A collection of values returned by getDataLakes.
31
- */
32
- export interface GetDataLakesResult {
33
- /**
34
- * The provider-assigned unique ID for this managed resource.
35
- */
36
- readonly id: string;
37
- readonly projectId: string;
38
- /**
39
- * A list where each represents a Data lake.
40
- */
41
- readonly results: outputs.GetDataLakesResult[];
42
- }
43
- /**
44
- * `mongodbatlas.getDataLakes` describe all Data Lakes.
45
- *
46
- * > **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
47
- *
48
- * ## Example Usage
49
- *
50
- * ```typescript
51
- * import * as pulumi from "@pulumi/pulumi";
52
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
53
- *
54
- * const test = mongodbatlas.getDataLakes({
55
- * projectId: "PROJECT ID",
56
- * });
57
- * ```
58
- */
59
- export declare function getDataLakesOutput(args: GetDataLakesOutputArgs, opts?: pulumi.InvokeOptions): pulumi.Output<GetDataLakesResult>;
60
- /**
61
- * A collection of arguments for invoking getDataLakes.
62
- */
63
- export interface GetDataLakesOutputArgs {
64
- /**
65
- * The unique ID for the project to get all data lakes.
66
- */
67
- projectId: pulumi.Input<string>;
68
- }
package/getDataLakes.js DELETED
@@ -1,51 +0,0 @@
1
- "use strict";
2
- // *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
- // *** Do not edit by hand unless you're certain you know what you are doing! ***
4
- Object.defineProperty(exports, "__esModule", { value: true });
5
- exports.getDataLakesOutput = exports.getDataLakes = void 0;
6
- const pulumi = require("@pulumi/pulumi");
7
- const utilities = require("./utilities");
8
- /**
9
- * `mongodbatlas.getDataLakes` describe all Data Lakes.
10
- *
11
- * > **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
12
- *
13
- * ## Example Usage
14
- *
15
- * ```typescript
16
- * import * as pulumi from "@pulumi/pulumi";
17
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
18
- *
19
- * const test = mongodbatlas.getDataLakes({
20
- * projectId: "PROJECT ID",
21
- * });
22
- * ```
23
- */
24
- function getDataLakes(args, opts) {
25
- opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts || {});
26
- return pulumi.runtime.invoke("mongodbatlas:index/getDataLakes:getDataLakes", {
27
- "projectId": args.projectId,
28
- }, opts);
29
- }
30
- exports.getDataLakes = getDataLakes;
31
- /**
32
- * `mongodbatlas.getDataLakes` describe all Data Lakes.
33
- *
34
- * > **NOTE:** Groups and projects are synonymous terms. You may find `groupId` in the official documentation.
35
- *
36
- * ## Example Usage
37
- *
38
- * ```typescript
39
- * import * as pulumi from "@pulumi/pulumi";
40
- * import * as mongodbatlas from "@pulumi/mongodbatlas";
41
- *
42
- * const test = mongodbatlas.getDataLakes({
43
- * projectId: "PROJECT ID",
44
- * });
45
- * ```
46
- */
47
- function getDataLakesOutput(args, opts) {
48
- return pulumi.output(args).apply((a) => getDataLakes(a, opts));
49
- }
50
- exports.getDataLakesOutput = getDataLakesOutput;
51
- //# sourceMappingURL=getDataLakes.js.map
@@ -1 +0,0 @@
1
- {"version":3,"file":"getDataLakes.js","sourceRoot":"","sources":["../getDataLakes.ts"],"names":[],"mappings":";AAAA,wFAAwF;AACxF,iFAAiF;;;AAEjF,yCAAyC;AAGzC,yCAAyC;AAEzC;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,YAAY,CAAC,IAAsB,EAAE,IAA2B;IAE5E,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,8CAA8C,EAAE;QACzE,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAND,oCAMC;AA0BD;;;;;;;;;;;;;;;GAeG;AACH,SAAgB,kBAAkB,CAAC,IAA4B,EAAE,IAA2B;IACxF,OAAO,MAAM,CAAC,MAAM,CAAC,IAAI,CAAC,CAAC,KAAK,CAAC,CAAC,CAAM,EAAE,EAAE,CAAC,YAAY,CAAC,CAAC,EAAE,IAAI,CAAC,CAAC,CAAA;AACvE,CAAC;AAFD,gDAEC"}
@@ -1,66 +0,0 @@
1
- import * as pulumi from "@pulumi/pulumi";
2
- /**
3
- * `privatelinkEndpointServiceAdl` Provides an Atlas Data Lake (ADL) and Online Archive PrivateLink endpoint resource.
4
- *
5
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
6
- *
7
- * ## Example Usage
8
- */
9
- export declare function getPrivatelinkEndpointServiceAdl(args: GetPrivatelinkEndpointServiceAdlArgs, opts?: pulumi.InvokeOptions): Promise<GetPrivatelinkEndpointServiceAdlResult>;
10
- /**
11
- * A collection of arguments for invoking getPrivatelinkEndpointServiceAdl.
12
- */
13
- export interface GetPrivatelinkEndpointServiceAdlArgs {
14
- /**
15
- * Unique 22-character alphanumeric string that identifies the private endpoint. Atlas supports AWS private endpoints using the [|aws| PrivateLink](https://aws.amazon.com/privatelink/) feature.
16
- */
17
- endpointId: string;
18
- /**
19
- * Unique 24-digit hexadecimal string that identifies the project.
20
- */
21
- projectId: string;
22
- }
23
- /**
24
- * A collection of values returned by getPrivatelinkEndpointServiceAdl.
25
- */
26
- export interface GetPrivatelinkEndpointServiceAdlResult {
27
- /**
28
- * Human-readable string to associate with this private endpoint.
29
- */
30
- readonly comment: string;
31
- readonly endpointId: string;
32
- /**
33
- * The provider-assigned unique ID for this managed resource.
34
- */
35
- readonly id: string;
36
- readonly projectId: string;
37
- /**
38
- * Human-readable label that identifies the cloud provider for this endpoint.
39
- */
40
- readonly providerName: string;
41
- /**
42
- * Human-readable label that identifies the type of resource to associate with this private endpoint.
43
- */
44
- readonly type: string;
45
- }
46
- /**
47
- * `privatelinkEndpointServiceAdl` Provides an Atlas Data Lake (ADL) and Online Archive PrivateLink endpoint resource.
48
- *
49
- * > **NOTE:** Groups and projects are synonymous terms. You may find groupId in the official documentation.
50
- *
51
- * ## Example Usage
52
- */
53
- export declare function getPrivatelinkEndpointServiceAdlOutput(args: GetPrivatelinkEndpointServiceAdlOutputArgs, opts?: pulumi.InvokeOptions): pulumi.Output<GetPrivatelinkEndpointServiceAdlResult>;
54
- /**
55
- * A collection of arguments for invoking getPrivatelinkEndpointServiceAdl.
56
- */
57
- export interface GetPrivatelinkEndpointServiceAdlOutputArgs {
58
- /**
59
- * Unique 22-character alphanumeric string that identifies the private endpoint. Atlas supports AWS private endpoints using the [|aws| PrivateLink](https://aws.amazon.com/privatelink/) feature.
60
- */
61
- endpointId: pulumi.Input<string>;
62
- /**
63
- * Unique 24-digit hexadecimal string that identifies the project.
64
- */
65
- projectId: pulumi.Input<string>;
66
- }