@pulumi/mongodbatlas 3.37.0-alpha.1762842753 → 3.37.0-alpha.1763188204

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (80)
  1. package/dataLakePipeline.d.ts +54 -0
  2. package/dataLakePipeline.js +54 -0
  3. package/dataLakePipeline.js.map +1 -1
  4. package/eventTrigger.d.ts +2 -2
  5. package/eventTrigger.js +2 -2
  6. package/flexCluster.d.ts +26 -0
  7. package/flexCluster.js +26 -0
  8. package/flexCluster.js.map +1 -1
  9. package/get509AuthenticationDatabaseUser.d.ts +58 -2
  10. package/get509AuthenticationDatabaseUser.js +58 -2
  11. package/get509AuthenticationDatabaseUser.js.map +1 -1
  12. package/getAccessListApiKey.d.ts +30 -0
  13. package/getAccessListApiKey.js +30 -0
  14. package/getAccessListApiKey.js.map +1 -1
  15. package/getAccessListApiKeys.d.ts +30 -0
  16. package/getAccessListApiKeys.js +30 -0
  17. package/getAccessListApiKeys.js.map +1 -1
  18. package/getCloudBackupSchedule.d.ts +108 -0
  19. package/getCloudBackupSchedule.js +108 -0
  20. package/getCloudBackupSchedule.js.map +1 -1
  21. package/getCloudBackupSnapshotRestoreJob.d.ts +54 -0
  22. package/getCloudBackupSnapshotRestoreJob.js +54 -0
  23. package/getCloudBackupSnapshotRestoreJob.js.map +1 -1
  24. package/getCustomDbRoles.d.ts +50 -0
  25. package/getCustomDbRoles.js +50 -0
  26. package/getCustomDbRoles.js.map +1 -1
  27. package/getDataLakePipeline.d.ts +116 -0
  28. package/getDataLakePipeline.js +116 -0
  29. package/getDataLakePipeline.js.map +1 -1
  30. package/getDataLakePipelineRuns.d.ts +74 -0
  31. package/getDataLakePipelineRuns.js +74 -0
  32. package/getDataLakePipelineRuns.js.map +1 -1
  33. package/getFederatedSettingsOrgRoleMappings.d.ts +68 -0
  34. package/getFederatedSettingsOrgRoleMappings.js +68 -0
  35. package/getFederatedSettingsOrgRoleMappings.js.map +1 -1
  36. package/getFlexCluster.d.ts +50 -0
  37. package/getFlexCluster.js +50 -0
  38. package/getFlexCluster.js.map +1 -1
  39. package/getFlexClusters.d.ts +50 -0
  40. package/getFlexClusters.js +50 -0
  41. package/getFlexClusters.js.map +1 -1
  42. package/getSearchDeployment.d.ts +78 -0
  43. package/getSearchDeployment.js +78 -0
  44. package/getSearchDeployment.js.map +1 -1
  45. package/getStreamPrivatelinkEndpoint.d.ts +140 -0
  46. package/getStreamPrivatelinkEndpoint.js +140 -0
  47. package/getStreamPrivatelinkEndpoint.js.map +1 -1
  48. package/getStreamPrivatelinkEndpoints.d.ts +140 -0
  49. package/getStreamPrivatelinkEndpoints.js +140 -0
  50. package/getStreamPrivatelinkEndpoints.js.map +1 -1
  51. package/getStreamProcessor.d.ts +264 -0
  52. package/getStreamProcessor.js +264 -0
  53. package/getStreamProcessor.js.map +1 -1
  54. package/getStreamProcessors.d.ts +264 -0
  55. package/getStreamProcessors.js +264 -0
  56. package/getStreamProcessors.js.map +1 -1
  57. package/getX509AuthenticationDatabaseUser.d.ts +58 -2
  58. package/getX509AuthenticationDatabaseUser.js +58 -2
  59. package/getX509AuthenticationDatabaseUser.js.map +1 -1
  60. package/package.json +2 -2
  61. package/searchDeployment.d.ts +40 -0
  62. package/searchDeployment.js +40 -0
  63. package/searchDeployment.js.map +1 -1
  64. package/searchIndex.d.ts +39 -39
  65. package/searchIndex.js +39 -39
  66. package/streamConnection.d.ts +40 -0
  67. package/streamConnection.js +40 -0
  68. package/streamConnection.js.map +1 -1
  69. package/streamInstance.d.ts +14 -0
  70. package/streamInstance.js +14 -0
  71. package/streamInstance.js.map +1 -1
  72. package/streamPrivatelinkEndpoint.d.ts +70 -0
  73. package/streamPrivatelinkEndpoint.js +70 -0
  74. package/streamPrivatelinkEndpoint.js.map +1 -1
  75. package/streamProcessor.d.ts +133 -0
  76. package/streamProcessor.js +133 -0
  77. package/streamProcessor.js.map +1 -1
  78. package/x509authenticationDatabaseUser.d.ts +63 -0
  79. package/x509authenticationDatabaseUser.js +63 -0
  80. package/x509authenticationDatabaseUser.js.map +1 -1
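The hunks below are all documentation: the same JSDoc usage example is added to `getStreamProcessor`, `getStreamProcessorOutput`, `getStreamProcessors`, and `getStreamProcessorsOutput`, with a matching sourcemap update. As a minimal standalone sketch of consuming the newly documented data source (hypothetical names throughout; the project ID is assumed to come from stack config):

```typescript
import * as pulumi from "@pulumi/pulumi";
import * as mongodbatlas from "@pulumi/mongodbatlas";

// Assumed input: the ID of an existing Atlas project, read from stack config.
const config = new pulumi.Config();
const projectId = config.require("projectId");

// A stream instance to query; the instance name is a placeholder.
const instance = new mongodbatlas.StreamInstance("example", {
    projectId: projectId,
    instanceName: "ExampleInstance",
    dataProcessRegion: {
        region: "VIRGINIA_USA",
        cloudProvider: "AWS",
    },
});

// List every processor on the instance via the data source documented below.
const processors = mongodbatlas.getStreamProcessorsOutput({
    projectId: projectId,
    instanceName: instance.instanceName,
});

// Mirrors the export in the added JSDoc example.
export const streamProcessorsResults = processors.apply(p => p.results);
```

The `*Output` form of the invoke accepts `Input` values, so the instance's output name can be passed directly, without the `.apply` wrapping used in the generated JSDoc example.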
@@ -13,6 +13,138 @@ const utilities = require("./utilities");
  * ## Example Usage
  *
  * ### S
+ * ```typescript
+ * import * as pulumi from "@pulumi/pulumi";
+ * import * as mongodbatlas from "@pulumi/mongodbatlas";
+ *
+ * const example = new mongodbatlas.StreamInstance("example", {
+ *     projectId: projectId,
+ *     instanceName: "InstanceName",
+ *     dataProcessRegion: {
+ *         region: "VIRGINIA_USA",
+ *         cloudProvider: "AWS",
+ *     },
+ * });
+ * const example_sample = new mongodbatlas.StreamConnection("example-sample", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "sample_stream_solar",
+ *     type: "Sample",
+ * });
+ * const example_cluster = new mongodbatlas.StreamConnection("example-cluster", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "ClusterConnection",
+ *     type: "Cluster",
+ *     clusterName: clusterName,
+ *     dbRoleToExecute: {
+ *         role: "atlasAdmin",
+ *         type: "BUILT_IN",
+ *     },
+ * });
+ * const example_kafka = new mongodbatlas.StreamConnection("example-kafka", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "KafkaPlaintextConnection",
+ *     type: "Kafka",
+ *     authentication: {
+ *         mechanism: "PLAIN",
+ *         username: kafkaUsername,
+ *         password: kafkaPassword,
+ *     },
+ *     bootstrapServers: "localhost:9092,localhost:9092",
+ *     config: {
+ *         "auto.offset.reset": "earliest",
+ *     },
+ *     security: {
+ *         protocol: "SASL_PLAINTEXT",
+ *     },
+ * });
+ * const stream_processor_sample_example = new mongodbatlas.StreamProcessor("stream-processor-sample-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "sampleProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-sample"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "sample",
+ *                 coll: "solar",
+ *                 timeseries: {
+ *                     timeField: "_ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "STARTED",
+ * });
+ * const stream_processor_cluster_to_kafka_example = new mongodbatlas.StreamProcessor("stream-processor-cluster-to-kafka-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "clusterProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_from_cluster",
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ * });
+ * const stream_processor_kafka_to_cluster_example = new mongodbatlas.StreamProcessor("stream-processor-kafka-to-cluster-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "kafkaProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_source",
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "kafka",
+ *                 coll: "topic_source",
+ *                 timeseries: {
+ *                     timeField: "ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ *     options: {
+ *         dlq: {
+ *             coll: "exampleColumn",
+ *             connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             db: "exampleDb",
+ *         },
+ *     },
+ * });
+ * const example_stream_processors = example.instanceName.apply(instanceName => mongodbatlas.getStreamProcessorsOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ * }));
+ * const example_stream_processor = pulumi.all([example.instanceName, stream_processor_sample_example.processorName]).apply(([instanceName, processorName]) => mongodbatlas.getStreamProcessorOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ *     processorName: processorName,
+ * }));
+ * export const streamProcessorsState = example_stream_processor.apply(example_stream_processor => example_stream_processor.state);
+ * export const streamProcessorsResults = example_stream_processors.apply(example_stream_processors => example_stream_processors.results);
+ * ```
  */
  function getStreamProcessor(args, opts) {
      opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts || {});
@@ -31,6 +163,138 @@ exports.getStreamProcessor = getStreamProcessor;
  * ## Example Usage
  *
  * ### S
+ * ```typescript
+ * import * as pulumi from "@pulumi/pulumi";
+ * import * as mongodbatlas from "@pulumi/mongodbatlas";
+ *
+ * const example = new mongodbatlas.StreamInstance("example", {
+ *     projectId: projectId,
+ *     instanceName: "InstanceName",
+ *     dataProcessRegion: {
+ *         region: "VIRGINIA_USA",
+ *         cloudProvider: "AWS",
+ *     },
+ * });
+ * const example_sample = new mongodbatlas.StreamConnection("example-sample", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "sample_stream_solar",
+ *     type: "Sample",
+ * });
+ * const example_cluster = new mongodbatlas.StreamConnection("example-cluster", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "ClusterConnection",
+ *     type: "Cluster",
+ *     clusterName: clusterName,
+ *     dbRoleToExecute: {
+ *         role: "atlasAdmin",
+ *         type: "BUILT_IN",
+ *     },
+ * });
+ * const example_kafka = new mongodbatlas.StreamConnection("example-kafka", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "KafkaPlaintextConnection",
+ *     type: "Kafka",
+ *     authentication: {
+ *         mechanism: "PLAIN",
+ *         username: kafkaUsername,
+ *         password: kafkaPassword,
+ *     },
+ *     bootstrapServers: "localhost:9092,localhost:9092",
+ *     config: {
+ *         "auto.offset.reset": "earliest",
+ *     },
+ *     security: {
+ *         protocol: "SASL_PLAINTEXT",
+ *     },
+ * });
+ * const stream_processor_sample_example = new mongodbatlas.StreamProcessor("stream-processor-sample-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "sampleProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-sample"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "sample",
+ *                 coll: "solar",
+ *                 timeseries: {
+ *                     timeField: "_ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "STARTED",
+ * });
+ * const stream_processor_cluster_to_kafka_example = new mongodbatlas.StreamProcessor("stream-processor-cluster-to-kafka-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "clusterProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_from_cluster",
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ * });
+ * const stream_processor_kafka_to_cluster_example = new mongodbatlas.StreamProcessor("stream-processor-kafka-to-cluster-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "kafkaProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_source",
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "kafka",
+ *                 coll: "topic_source",
+ *                 timeseries: {
+ *                     timeField: "ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ *     options: {
+ *         dlq: {
+ *             coll: "exampleColumn",
+ *             connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             db: "exampleDb",
+ *         },
+ *     },
+ * });
+ * const example_stream_processors = example.instanceName.apply(instanceName => mongodbatlas.getStreamProcessorsOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ * }));
+ * const example_stream_processor = pulumi.all([example.instanceName, stream_processor_sample_example.processorName]).apply(([instanceName, processorName]) => mongodbatlas.getStreamProcessorOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ *     processorName: processorName,
+ * }));
+ * export const streamProcessorsState = example_stream_processor.apply(example_stream_processor => example_stream_processor.state);
+ * export const streamProcessorsResults = example_stream_processors.apply(example_stream_processors => example_stream_processors.results);
+ * ```
  */
  function getStreamProcessorOutput(args, opts) {
      opts = pulumi.mergeOptions(utilities.resourceOptsDefaults(), opts || {});
@@ -1 +1 @@
- {"version":3,"file":"getStreamProcessor.js","sourceRoot":"","sources":["../getStreamProcessor.ts"],"names":[],"mappings":";AAAA,sEAAsE;AACtE,iFAAiF;;;AAEjF,yCAAyC;AAGzC,yCAAyC;AAEzC;;;;;;;;GAQG;AACH,SAAgB,kBAAkB,CAAC,IAA4B,EAAE,IAA2B;IACxF,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,0DAA0D,EAAE;QACrF,cAAc,EAAE,IAAI,CAAC,YAAY;QACjC,eAAe,EAAE,IAAI,CAAC,aAAa;QACnC,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAPD,gDAOC;AA0CD;;;;;;;;GAQG;AACH,SAAgB,wBAAwB,CAAC,IAAkC,EAAE,IAAiC;IAC1G,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,0DAA0D,EAAE;QAC3F,cAAc,EAAE,IAAI,CAAC,YAAY;QACjC,eAAe,EAAE,IAAI,CAAC,aAAa;QACnC,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAPD,4DAOC"}
+ {"version":3,"file":"getStreamProcessor.js","sourceRoot":"","sources":["../getStreamProcessor.ts"],"names":[],"mappings":";AAAA,sEAAsE;AACtE,iFAAiF;;;AAEjF,yCAAyC;AAGzC,yCAAyC;AAEzC;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4IG;AACH,SAAgB,kBAAkB,CAAC,IAA4B,EAAE,IAA2B;IACxF,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,MAAM,CAAC,0DAA0D,EAAE;QACrF,cAAc,EAAE,IAAI,CAAC,YAAY;QACjC,eAAe,EAAE,IAAI,CAAC,aAAa;QACnC,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAPD,gDAOC;AA0CD;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;GA4IG;AACH,SAAgB,wBAAwB,CAAC,IAAkC,EAAE,IAAiC;IAC1G,IAAI,GAAG,MAAM,CAAC,YAAY,CAAC,SAAS,CAAC,oBAAoB,EAAE,EAAE,IAAI,IAAI,EAAE,CAAC,CAAC;IACzE,OAAO,MAAM,CAAC,OAAO,CAAC,YAAY,CAAC,0DAA0D,EAAE;QAC3F,cAAc,EAAE,IAAI,CAAC,YAAY;QACjC,eAAe,EAAE,IAAI,CAAC,aAAa;QACnC,WAAW,EAAE,IAAI,CAAC,SAAS;KAC9B,EAAE,IAAI,CAAC,CAAC;AACb,CAAC;AAPD,4DAOC"}
@@ -8,6 +8,138 @@ import * as outputs from "./types/output";
  * ## Example Usage
  *
  * ### S
+ * ```typescript
+ * import * as pulumi from "@pulumi/pulumi";
+ * import * as mongodbatlas from "@pulumi/mongodbatlas";
+ *
+ * const example = new mongodbatlas.StreamInstance("example", {
+ *     projectId: projectId,
+ *     instanceName: "InstanceName",
+ *     dataProcessRegion: {
+ *         region: "VIRGINIA_USA",
+ *         cloudProvider: "AWS",
+ *     },
+ * });
+ * const example_sample = new mongodbatlas.StreamConnection("example-sample", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "sample_stream_solar",
+ *     type: "Sample",
+ * });
+ * const example_cluster = new mongodbatlas.StreamConnection("example-cluster", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "ClusterConnection",
+ *     type: "Cluster",
+ *     clusterName: clusterName,
+ *     dbRoleToExecute: {
+ *         role: "atlasAdmin",
+ *         type: "BUILT_IN",
+ *     },
+ * });
+ * const example_kafka = new mongodbatlas.StreamConnection("example-kafka", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "KafkaPlaintextConnection",
+ *     type: "Kafka",
+ *     authentication: {
+ *         mechanism: "PLAIN",
+ *         username: kafkaUsername,
+ *         password: kafkaPassword,
+ *     },
+ *     bootstrapServers: "localhost:9092,localhost:9092",
+ *     config: {
+ *         "auto.offset.reset": "earliest",
+ *     },
+ *     security: {
+ *         protocol: "SASL_PLAINTEXT",
+ *     },
+ * });
+ * const stream_processor_sample_example = new mongodbatlas.StreamProcessor("stream-processor-sample-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "sampleProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-sample"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "sample",
+ *                 coll: "solar",
+ *                 timeseries: {
+ *                     timeField: "_ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "STARTED",
+ * });
+ * const stream_processor_cluster_to_kafka_example = new mongodbatlas.StreamProcessor("stream-processor-cluster-to-kafka-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "clusterProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_from_cluster",
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ * });
+ * const stream_processor_kafka_to_cluster_example = new mongodbatlas.StreamProcessor("stream-processor-kafka-to-cluster-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "kafkaProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_source",
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "kafka",
+ *                 coll: "topic_source",
+ *                 timeseries: {
+ *                     timeField: "ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ *     options: {
+ *         dlq: {
+ *             coll: "exampleColumn",
+ *             connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             db: "exampleDb",
+ *         },
+ *     },
+ * });
+ * const example_stream_processors = example.instanceName.apply(instanceName => mongodbatlas.getStreamProcessorsOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ * }));
+ * const example_stream_processor = pulumi.all([example.instanceName, stream_processor_sample_example.processorName]).apply(([instanceName, processorName]) => mongodbatlas.getStreamProcessorOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ *     processorName: processorName,
+ * }));
+ * export const streamProcessorsState = example_stream_processor.apply(example_stream_processor => example_stream_processor.state);
+ * export const streamProcessorsResults = example_stream_processors.apply(example_stream_processors => example_stream_processors.results);
+ * ```
  */
  export declare function getStreamProcessors(args: GetStreamProcessorsArgs, opts?: pulumi.InvokeOptions): Promise<GetStreamProcessorsResult>;
  /**
@@ -49,6 +181,138 @@ export interface GetStreamProcessorsResult {
  * ## Example Usage
  *
  * ### S
+ * ```typescript
+ * import * as pulumi from "@pulumi/pulumi";
+ * import * as mongodbatlas from "@pulumi/mongodbatlas";
+ *
+ * const example = new mongodbatlas.StreamInstance("example", {
+ *     projectId: projectId,
+ *     instanceName: "InstanceName",
+ *     dataProcessRegion: {
+ *         region: "VIRGINIA_USA",
+ *         cloudProvider: "AWS",
+ *     },
+ * });
+ * const example_sample = new mongodbatlas.StreamConnection("example-sample", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "sample_stream_solar",
+ *     type: "Sample",
+ * });
+ * const example_cluster = new mongodbatlas.StreamConnection("example-cluster", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "ClusterConnection",
+ *     type: "Cluster",
+ *     clusterName: clusterName,
+ *     dbRoleToExecute: {
+ *         role: "atlasAdmin",
+ *         type: "BUILT_IN",
+ *     },
+ * });
+ * const example_kafka = new mongodbatlas.StreamConnection("example-kafka", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     connectionName: "KafkaPlaintextConnection",
+ *     type: "Kafka",
+ *     authentication: {
+ *         mechanism: "PLAIN",
+ *         username: kafkaUsername,
+ *         password: kafkaPassword,
+ *     },
+ *     bootstrapServers: "localhost:9092,localhost:9092",
+ *     config: {
+ *         "auto.offset.reset": "earliest",
+ *     },
+ *     security: {
+ *         protocol: "SASL_PLAINTEXT",
+ *     },
+ * });
+ * const stream_processor_sample_example = new mongodbatlas.StreamProcessor("stream-processor-sample-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "sampleProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-sample"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "sample",
+ *                 coll: "solar",
+ *                 timeseries: {
+ *                     timeField: "_ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "STARTED",
+ * });
+ * const stream_processor_cluster_to_kafka_example = new mongodbatlas.StreamProcessor("stream-processor-cluster-to-kafka-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "clusterProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_from_cluster",
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ * });
+ * const stream_processor_kafka_to_cluster_example = new mongodbatlas.StreamProcessor("stream-processor-kafka-to-cluster-example", {
+ *     projectId: projectId,
+ *     instanceName: example.instanceName,
+ *     processorName: "kafkaProcessorName",
+ *     pipeline: JSON.stringify([
+ *         {
+ *             $source: {
+ *                 connectionName: mongodbatlasStreamConnection["example-kafka"].connectionName,
+ *                 topic: "topic_source",
+ *             },
+ *         },
+ *         {
+ *             $emit: {
+ *                 connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *                 db: "kafka",
+ *                 coll: "topic_source",
+ *                 timeseries: {
+ *                     timeField: "ts",
+ *                 },
+ *             },
+ *         },
+ *     ]),
+ *     state: "CREATED",
+ *     options: {
+ *         dlq: {
+ *             coll: "exampleColumn",
+ *             connectionName: mongodbatlasStreamConnection["example-cluster"].connectionName,
+ *             db: "exampleDb",
+ *         },
+ *     },
+ * });
+ * const example_stream_processors = example.instanceName.apply(instanceName => mongodbatlas.getStreamProcessorsOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ * }));
+ * const example_stream_processor = pulumi.all([example.instanceName, stream_processor_sample_example.processorName]).apply(([instanceName, processorName]) => mongodbatlas.getStreamProcessorOutput({
+ *     projectId: projectId,
+ *     instanceName: instanceName,
+ *     processorName: processorName,
+ * }));
+ * export const streamProcessorsState = example_stream_processor.apply(example_stream_processor => example_stream_processor.state);
+ * export const streamProcessorsResults = example_stream_processors.apply(example_stream_processors => example_stream_processors.results);
+ * ```
  */
  export declare function getStreamProcessorsOutput(args: GetStreamProcessorsOutputArgs, opts?: pulumi.InvokeOutputOptions): pulumi.Output<GetStreamProcessorsResult>;
  /**