pulumi-alicloud 3.62.0a1725945881__py3-none-any.whl → 3.62.1__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.

Potentially problematic release.


This version of pulumi-alicloud might be problematic; see the registry's advisory page for more details.

Files changed (80)
  1. pulumi_alicloud/__init__.py +80 -0
  2. pulumi_alicloud/actiontrail/trail.py +2 -2
  3. pulumi_alicloud/adb/cluster.py +34 -0
  4. pulumi_alicloud/adb/db_cluster.py +47 -0
  5. pulumi_alicloud/amqp/binding.py +30 -36
  6. pulumi_alicloud/amqp/static_account.py +12 -12
  7. pulumi_alicloud/cen/_inputs.py +180 -14
  8. pulumi_alicloud/cen/outputs.py +173 -12
  9. pulumi_alicloud/cen/traffic_marking_policy.py +116 -53
  10. pulumi_alicloud/cen/transit_router_peer_attachment.py +38 -20
  11. pulumi_alicloud/cen/transit_router_vpc_attachment.py +338 -119
  12. pulumi_alicloud/cen/transit_router_vpn_attachment.py +2 -2
  13. pulumi_alicloud/cr/chart_namespace.py +14 -6
  14. pulumi_alicloud/cr/endpoint_acl_policy.py +10 -2
  15. pulumi_alicloud/cr/namespace.py +10 -2
  16. pulumi_alicloud/cs/registry_enterprise_repo.py +4 -4
  17. pulumi_alicloud/cs/registry_enterprise_sync_rule.py +10 -10
  18. pulumi_alicloud/ddos/_inputs.py +24 -0
  19. pulumi_alicloud/ddos/bgp_ip.py +1 -1
  20. pulumi_alicloud/ddos/outputs.py +37 -0
  21. pulumi_alicloud/ddos/port.py +110 -40
  22. pulumi_alicloud/ecs/_inputs.py +28 -0
  23. pulumi_alicloud/ecs/outputs.py +20 -0
  24. pulumi_alicloud/emrv2/_inputs.py +20 -20
  25. pulumi_alicloud/emrv2/get_clusters.py +25 -4
  26. pulumi_alicloud/emrv2/outputs.py +24 -24
  27. pulumi_alicloud/ens/_inputs.py +137 -2
  28. pulumi_alicloud/ens/instance.py +428 -184
  29. pulumi_alicloud/ens/load_balancer.py +74 -25
  30. pulumi_alicloud/ens/outputs.py +141 -2
  31. pulumi_alicloud/expressconnect/physical_connection.py +321 -185
  32. pulumi_alicloud/fc/__init__.py +3 -0
  33. pulumi_alicloud/fc/_inputs.py +320 -8
  34. pulumi_alicloud/fc/outputs.py +321 -8
  35. pulumi_alicloud/fc/trigger.py +22 -24
  36. pulumi_alicloud/fc/v3_layer_version.py +511 -0
  37. pulumi_alicloud/fc/v3_provision_config.py +676 -0
  38. pulumi_alicloud/fc/v3_vpc_binding.py +283 -0
  39. pulumi_alicloud/ga/endpoint_group.py +68 -14
  40. pulumi_alicloud/ga/get_endpoint_group_ip_address_cidr_blocks.py +18 -3
  41. pulumi_alicloud/gpdb/__init__.py +2 -0
  42. pulumi_alicloud/gpdb/account.py +172 -83
  43. pulumi_alicloud/gpdb/db_resource_group.py +54 -9
  44. pulumi_alicloud/gpdb/hadoop_data_source.py +1135 -0
  45. pulumi_alicloud/gpdb/jdbc_data_source.py +643 -0
  46. pulumi_alicloud/hbr/_inputs.py +14 -14
  47. pulumi_alicloud/hbr/outputs.py +14 -14
  48. pulumi_alicloud/hbr/policy.py +18 -18
  49. pulumi_alicloud/hbr/policy_binding.py +203 -62
  50. pulumi_alicloud/mongodb/instance.py +94 -0
  51. pulumi_alicloud/nlb/_inputs.py +120 -64
  52. pulumi_alicloud/nlb/get_listeners.py +32 -2
  53. pulumi_alicloud/nlb/get_server_group_server_attachments.py +8 -2
  54. pulumi_alicloud/nlb/listener.py +315 -245
  55. pulumi_alicloud/nlb/listener_additional_certificate_attachment.py +25 -25
  56. pulumi_alicloud/nlb/load_balancer.py +181 -212
  57. pulumi_alicloud/nlb/load_balancer_security_group_attachment.py +29 -39
  58. pulumi_alicloud/nlb/loadbalancer_common_bandwidth_package_attachment.py +22 -18
  59. pulumi_alicloud/nlb/outputs.py +122 -66
  60. pulumi_alicloud/nlb/security_policy.py +53 -25
  61. pulumi_alicloud/nlb/server_group.py +196 -133
  62. pulumi_alicloud/ocean/base_instance.py +498 -163
  63. pulumi_alicloud/pulumi-plugin.json +1 -1
  64. pulumi_alicloud/quotas/__init__.py +1 -0
  65. pulumi_alicloud/quotas/template_service.py +165 -0
  66. pulumi_alicloud/rds/rds_db_proxy.py +61 -0
  67. pulumi_alicloud/servicecatalog/__init__.py +4 -0
  68. pulumi_alicloud/servicecatalog/portfolio.py +31 -31
  69. pulumi_alicloud/servicecatalog/principal_portfolio_association.py +354 -0
  70. pulumi_alicloud/servicecatalog/product.py +383 -0
  71. pulumi_alicloud/servicecatalog/product_portfolio_association.py +222 -0
  72. pulumi_alicloud/servicecatalog/product_version.py +539 -0
  73. pulumi_alicloud/servicemesh/extension_provider.py +2 -2
  74. pulumi_alicloud/vpc/common_bandwith_package_attachment.py +2 -2
  75. pulumi_alicloud/vpc/get_route_tables.py +28 -5
  76. pulumi_alicloud/vpc/outputs.py +2 -2
  77. {pulumi_alicloud-3.62.0a1725945881.dist-info → pulumi_alicloud-3.62.1.dist-info}/METADATA +1 -1
  78. {pulumi_alicloud-3.62.0a1725945881.dist-info → pulumi_alicloud-3.62.1.dist-info}/RECORD +80 -70
  79. {pulumi_alicloud-3.62.0a1725945881.dist-info → pulumi_alicloud-3.62.1.dist-info}/WHEEL +1 -1
  80. {pulumi_alicloud-3.62.0a1725945881.dist-info → pulumi_alicloud-3.62.1.dist-info}/top_level.txt +0 -0
@@ -0,0 +1,1135 @@
1
+ # coding=utf-8
2
+ # *** WARNING: this file was generated by the Pulumi Terraform Bridge (tfgen) Tool. ***
3
+ # *** Do not edit by hand unless you're certain you know what you are doing! ***
4
+
5
+ import copy
6
+ import warnings
7
+ import pulumi
8
+ import pulumi.runtime
9
+ from typing import Any, Mapping, Optional, Sequence, Union, overload
10
+ from .. import _utilities
11
+
12
+ __all__ = ['HadoopDataSourceArgs', 'HadoopDataSource']
13
+
14
@pulumi.input_type
class HadoopDataSourceArgs:
    def __init__(__self__, *,
                 db_instance_id: pulumi.Input[str],
                 data_source_description: Optional[pulumi.Input[str]] = None,
                 data_source_name: Optional[pulumi.Input[str]] = None,
                 data_source_type: Optional[pulumi.Input[str]] = None,
                 emr_instance_id: Optional[pulumi.Input[str]] = None,
                 hadoop_core_conf: Optional[pulumi.Input[str]] = None,
                 hadoop_create_type: Optional[pulumi.Input[str]] = None,
                 hadoop_hosts_address: Optional[pulumi.Input[str]] = None,
                 hdfs_conf: Optional[pulumi.Input[str]] = None,
                 hive_conf: Optional[pulumi.Input[str]] = None,
                 map_reduce_conf: Optional[pulumi.Input[str]] = None,
                 yarn_conf: Optional[pulumi.Input[str]] = None):
        """
        The set of arguments for constructing a HadoopDataSource resource.

        :param pulumi.Input[str] db_instance_id: The instance ID.
        :param pulumi.Input[str] data_source_description: Data Source Description
        :param pulumi.Input[str] data_source_name: Data Source Name
        :param pulumi.Input[str] data_source_type: The type of the data source.
               Valid values: mysql, postgresql, hdfs, hive.
        :param pulumi.Input[str] emr_instance_id: The ID of the Emr instance.
        :param pulumi.Input[str] hadoop_core_conf: The string that specifies the content of the Hadoop core-site.xml file.
        :param pulumi.Input[str] hadoop_create_type: The type of the external service. Valid values:
               - emr: E-MapReduce (EMR) Hadoop cluster.
               - selfCreate: self-managed Hadoop cluster.
        :param pulumi.Input[str] hadoop_hosts_address: The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
        :param pulumi.Input[str] hdfs_conf: The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        :param pulumi.Input[str] hive_conf: The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
        :param pulumi.Input[str] map_reduce_conf: The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        :param pulumi.Input[str] yarn_conf: The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        # The target AnalyticDB instance is the only required argument.
        pulumi.set(__self__, "db_instance_id", db_instance_id)
        # Forward only those optional arguments the caller actually supplied;
        # unset arguments are omitted entirely rather than stored as None.
        optional_args = {
            "data_source_description": data_source_description,
            "data_source_name": data_source_name,
            "data_source_type": data_source_type,
            "emr_instance_id": emr_instance_id,
            "hadoop_core_conf": hadoop_core_conf,
            "hadoop_create_type": hadoop_create_type,
            "hadoop_hosts_address": hadoop_hosts_address,
            "hdfs_conf": hdfs_conf,
            "hive_conf": hive_conf,
            "map_reduce_conf": map_reduce_conf,
            "yarn_conf": yarn_conf,
        }
        for arg_name, arg_value in optional_args.items():
            if arg_value is not None:
                pulumi.set(__self__, arg_name, arg_value)

    @property
    @pulumi.getter(name="dbInstanceId")
    def db_instance_id(self) -> pulumi.Input[str]:
        """
        The instance ID.
        """
        return pulumi.get(self, "db_instance_id")

    @db_instance_id.setter
    def db_instance_id(self, value: pulumi.Input[str]):
        pulumi.set(self, "db_instance_id", value)

    @property
    @pulumi.getter(name="dataSourceDescription")
    def data_source_description(self) -> Optional[pulumi.Input[str]]:
        """
        Data Source Description
        """
        return pulumi.get(self, "data_source_description")

    @data_source_description.setter
    def data_source_description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_description", value)

    @property
    @pulumi.getter(name="dataSourceName")
    def data_source_name(self) -> Optional[pulumi.Input[str]]:
        """
        Data Source Name
        """
        return pulumi.get(self, "data_source_name")

    @data_source_name.setter
    def data_source_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_name", value)

    @property
    @pulumi.getter(name="dataSourceType")
    def data_source_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the data source. Valid values: mysql, postgresql, hdfs, hive.
        """
        return pulumi.get(self, "data_source_type")

    @data_source_type.setter
    def data_source_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_type", value)

    @property
    @pulumi.getter(name="emrInstanceId")
    def emr_instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Emr instance.
        """
        return pulumi.get(self, "emr_instance_id")

    @emr_instance_id.setter
    def emr_instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "emr_instance_id", value)

    @property
    @pulumi.getter(name="hadoopCoreConf")
    def hadoop_core_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop core-site.xml file.
        """
        return pulumi.get(self, "hadoop_core_conf")

    @hadoop_core_conf.setter
    def hadoop_core_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_core_conf", value)

    @property
    @pulumi.getter(name="hadoopCreateType")
    def hadoop_create_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the external service. Valid values:
        - emr: E-MapReduce (EMR) Hadoop cluster.
        - selfCreate: self-managed Hadoop cluster.
        """
        return pulumi.get(self, "hadoop_create_type")

    @hadoop_create_type.setter
    def hadoop_create_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_create_type", value)

    @property
    @pulumi.getter(name="hadoopHostsAddress")
    def hadoop_hosts_address(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
        """
        return pulumi.get(self, "hadoop_hosts_address")

    @hadoop_hosts_address.setter
    def hadoop_hosts_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_hosts_address", value)

    @property
    @pulumi.getter(name="hdfsConf")
    def hdfs_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "hdfs_conf")

    @hdfs_conf.setter
    def hdfs_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hdfs_conf", value)

    @property
    @pulumi.getter(name="hiveConf")
    def hive_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
        """
        return pulumi.get(self, "hive_conf")

    @hive_conf.setter
    def hive_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hive_conf", value)

    @property
    @pulumi.getter(name="mapReduceConf")
    def map_reduce_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "map_reduce_conf")

    @map_reduce_conf.setter
    def map_reduce_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "map_reduce_conf", value)

    @property
    @pulumi.getter(name="yarnConf")
    def yarn_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "yarn_conf")

    @yarn_conf.setter
    def yarn_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "yarn_conf", value)
227
+
228
+
229
@pulumi.input_type
class _HadoopDataSourceState:
    # State class for HadoopDataSource: unlike HadoopDataSourceArgs, every
    # field (including db_instance_id) is optional, and provider-computed
    # outputs (create_time, data_source_id, status) are included so existing
    # resources can be looked up and filtered.
    def __init__(__self__, *,
                 create_time: Optional[pulumi.Input[str]] = None,
                 data_source_description: Optional[pulumi.Input[str]] = None,
                 data_source_id: Optional[pulumi.Input[int]] = None,
                 data_source_name: Optional[pulumi.Input[str]] = None,
                 data_source_type: Optional[pulumi.Input[str]] = None,
                 db_instance_id: Optional[pulumi.Input[str]] = None,
                 emr_instance_id: Optional[pulumi.Input[str]] = None,
                 hadoop_core_conf: Optional[pulumi.Input[str]] = None,
                 hadoop_create_type: Optional[pulumi.Input[str]] = None,
                 hadoop_hosts_address: Optional[pulumi.Input[str]] = None,
                 hdfs_conf: Optional[pulumi.Input[str]] = None,
                 hive_conf: Optional[pulumi.Input[str]] = None,
                 map_reduce_conf: Optional[pulumi.Input[str]] = None,
                 status: Optional[pulumi.Input[str]] = None,
                 yarn_conf: Optional[pulumi.Input[str]] = None):
        """
        Input properties used for looking up and filtering HadoopDataSource resources.
        :param pulumi.Input[str] create_time: Creation time
        :param pulumi.Input[str] data_source_description: Data Source Description
        :param pulumi.Input[int] data_source_id: The data source ID.
        :param pulumi.Input[str] data_source_name: Data Source Name
        :param pulumi.Input[str] data_source_type: The type of the data source. Valid values:

               * mysql
               - postgresql

               * hdfs
               - hive
        :param pulumi.Input[str] db_instance_id: The instance ID.
        :param pulumi.Input[str] emr_instance_id: The ID of the Emr instance.
        :param pulumi.Input[str] hadoop_core_conf: The string that specifies the content of the Hadoop core-site.xml file.
        :param pulumi.Input[str] hadoop_create_type: The type of the external service. Valid values:
               - emr: E-MapReduce (EMR) Hadoop cluster.
               - selfCreate: self-managed Hadoop cluster.
        :param pulumi.Input[str] hadoop_hosts_address: The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
        :param pulumi.Input[str] hdfs_conf: The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        :param pulumi.Input[str] hive_conf: The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
        :param pulumi.Input[str] map_reduce_conf: The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        :param pulumi.Input[str] status: Data Source Status
        :param pulumi.Input[str] yarn_conf: The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        # Only record fields that were actually supplied; unset fields are
        # omitted from the state rather than stored as None.
        if create_time is not None:
            pulumi.set(__self__, "create_time", create_time)
        if data_source_description is not None:
            pulumi.set(__self__, "data_source_description", data_source_description)
        if data_source_id is not None:
            pulumi.set(__self__, "data_source_id", data_source_id)
        if data_source_name is not None:
            pulumi.set(__self__, "data_source_name", data_source_name)
        if data_source_type is not None:
            pulumi.set(__self__, "data_source_type", data_source_type)
        if db_instance_id is not None:
            pulumi.set(__self__, "db_instance_id", db_instance_id)
        if emr_instance_id is not None:
            pulumi.set(__self__, "emr_instance_id", emr_instance_id)
        if hadoop_core_conf is not None:
            pulumi.set(__self__, "hadoop_core_conf", hadoop_core_conf)
        if hadoop_create_type is not None:
            pulumi.set(__self__, "hadoop_create_type", hadoop_create_type)
        if hadoop_hosts_address is not None:
            pulumi.set(__self__, "hadoop_hosts_address", hadoop_hosts_address)
        if hdfs_conf is not None:
            pulumi.set(__self__, "hdfs_conf", hdfs_conf)
        if hive_conf is not None:
            pulumi.set(__self__, "hive_conf", hive_conf)
        if map_reduce_conf is not None:
            pulumi.set(__self__, "map_reduce_conf", map_reduce_conf)
        if status is not None:
            pulumi.set(__self__, "status", status)
        if yarn_conf is not None:
            pulumi.set(__self__, "yarn_conf", yarn_conf)

    @property
    @pulumi.getter(name="createTime")
    def create_time(self) -> Optional[pulumi.Input[str]]:
        """
        Creation time
        """
        return pulumi.get(self, "create_time")

    @create_time.setter
    def create_time(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "create_time", value)

    @property
    @pulumi.getter(name="dataSourceDescription")
    def data_source_description(self) -> Optional[pulumi.Input[str]]:
        """
        Data Source Description
        """
        return pulumi.get(self, "data_source_description")

    @data_source_description.setter
    def data_source_description(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_description", value)

    @property
    @pulumi.getter(name="dataSourceId")
    def data_source_id(self) -> Optional[pulumi.Input[int]]:
        """
        The data source ID.
        """
        return pulumi.get(self, "data_source_id")

    @data_source_id.setter
    def data_source_id(self, value: Optional[pulumi.Input[int]]):
        pulumi.set(self, "data_source_id", value)

    @property
    @pulumi.getter(name="dataSourceName")
    def data_source_name(self) -> Optional[pulumi.Input[str]]:
        """
        Data Source Name
        """
        return pulumi.get(self, "data_source_name")

    @data_source_name.setter
    def data_source_name(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_name", value)

    @property
    @pulumi.getter(name="dataSourceType")
    def data_source_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the data source. Valid values:

        * mysql
        - postgresql

        * hdfs
        - hive
        """
        return pulumi.get(self, "data_source_type")

    @data_source_type.setter
    def data_source_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "data_source_type", value)

    @property
    @pulumi.getter(name="dbInstanceId")
    def db_instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The instance ID.
        """
        return pulumi.get(self, "db_instance_id")

    @db_instance_id.setter
    def db_instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "db_instance_id", value)

    @property
    @pulumi.getter(name="emrInstanceId")
    def emr_instance_id(self) -> Optional[pulumi.Input[str]]:
        """
        The ID of the Emr instance.
        """
        return pulumi.get(self, "emr_instance_id")

    @emr_instance_id.setter
    def emr_instance_id(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "emr_instance_id", value)

    @property
    @pulumi.getter(name="hadoopCoreConf")
    def hadoop_core_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop core-site.xml file.
        """
        return pulumi.get(self, "hadoop_core_conf")

    @hadoop_core_conf.setter
    def hadoop_core_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_core_conf", value)

    @property
    @pulumi.getter(name="hadoopCreateType")
    def hadoop_create_type(self) -> Optional[pulumi.Input[str]]:
        """
        The type of the external service. Valid values:
        - emr: E-MapReduce (EMR) Hadoop cluster.
        - selfCreate: self-managed Hadoop cluster.
        """
        return pulumi.get(self, "hadoop_create_type")

    @hadoop_create_type.setter
    def hadoop_create_type(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_create_type", value)

    @property
    @pulumi.getter(name="hadoopHostsAddress")
    def hadoop_hosts_address(self) -> Optional[pulumi.Input[str]]:
        """
        The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
        """
        return pulumi.get(self, "hadoop_hosts_address")

    @hadoop_hosts_address.setter
    def hadoop_hosts_address(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hadoop_hosts_address", value)

    @property
    @pulumi.getter(name="hdfsConf")
    def hdfs_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "hdfs_conf")

    @hdfs_conf.setter
    def hdfs_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hdfs_conf", value)

    @property
    @pulumi.getter(name="hiveConf")
    def hive_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
        """
        return pulumi.get(self, "hive_conf")

    @hive_conf.setter
    def hive_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "hive_conf", value)

    @property
    @pulumi.getter(name="mapReduceConf")
    def map_reduce_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "map_reduce_conf")

    @map_reduce_conf.setter
    def map_reduce_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "map_reduce_conf", value)

    @property
    @pulumi.getter
    def status(self) -> Optional[pulumi.Input[str]]:
        """
        Data Source Status
        """
        return pulumi.get(self, "status")

    @status.setter
    def status(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "status", value)

    @property
    @pulumi.getter(name="yarnConf")
    def yarn_conf(self) -> Optional[pulumi.Input[str]]:
        """
        The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
        """
        return pulumi.get(self, "yarn_conf")

    @yarn_conf.setter
    def yarn_conf(self, value: Optional[pulumi.Input[str]]):
        pulumi.set(self, "yarn_conf", value)
491
+
492
+
493
+ class HadoopDataSource(pulumi.CustomResource):
494
+ @overload
495
+ def __init__(__self__,
496
+ resource_name: str,
497
+ opts: Optional[pulumi.ResourceOptions] = None,
498
+ data_source_description: Optional[pulumi.Input[str]] = None,
499
+ data_source_name: Optional[pulumi.Input[str]] = None,
500
+ data_source_type: Optional[pulumi.Input[str]] = None,
501
+ db_instance_id: Optional[pulumi.Input[str]] = None,
502
+ emr_instance_id: Optional[pulumi.Input[str]] = None,
503
+ hadoop_core_conf: Optional[pulumi.Input[str]] = None,
504
+ hadoop_create_type: Optional[pulumi.Input[str]] = None,
505
+ hadoop_hosts_address: Optional[pulumi.Input[str]] = None,
506
+ hdfs_conf: Optional[pulumi.Input[str]] = None,
507
+ hive_conf: Optional[pulumi.Input[str]] = None,
508
+ map_reduce_conf: Optional[pulumi.Input[str]] = None,
509
+ yarn_conf: Optional[pulumi.Input[str]] = None,
510
+ __props__=None):
511
+ """
512
+ Provides a GPDB Hadoop Data Source resource.
513
+
514
+ Hadoop DataSource Config.
515
+
516
+ For information about GPDB Hadoop Data Source and how to use it, see [What is Hadoop Data Source](https://www.alibabacloud.com/help/en/).
517
+
518
+ > **NOTE:** Available since v1.230.0.
519
+
520
+ ## Example Usage
521
+
522
+ Basic Usage
523
+
524
+ ```python
525
+ import pulumi
526
+ import pulumi_alicloud as alicloud
527
+
528
+ config = pulumi.Config()
529
+ name = config.get("name")
530
+ if name is None:
531
+ name = "terraform-example"
532
+ default = alicloud.get_zones(available_resource_creation="VSwitch")
533
+ default_get_networks = alicloud.vpc.get_networks(name_regex="^default-NODELETING$")
534
+ default_get_switches = alicloud.vpc.get_switches(vpc_id=default_get_networks.ids[0],
535
+ zone_id="cn-beijing-h")
536
+ default_ecs_key_pair = alicloud.ecs.EcsKeyPair("default", key_pair_name=name)
537
+ default_security_group = alicloud.ecs.SecurityGroup("default",
538
+ name=name,
539
+ vpc_id=default_get_networks.ids[0])
540
+ default_role = alicloud.ram.Role("default",
541
+ name=name,
542
+ document=\"\"\" {
543
+ "Statement": [
544
+ {
545
+ "Action": "sts:AssumeRole",
546
+ "Effect": "Allow",
547
+ "Principal": {
548
+ "Service": [
549
+ "emr.aliyuncs.com",
550
+ "ecs.aliyuncs.com"
551
+ ]
552
+ }
553
+ }
554
+ ],
555
+ "Version": "1"
556
+ }
557
+ \"\"\",
558
+ description="this is a role example.",
559
+ force=True)
560
+ default_get_resource_groups = alicloud.resourcemanager.get_resource_groups(status="OK")
561
+ default_get_keys = alicloud.kms.get_keys(status="Enabled")
562
+ default_cluster = alicloud.emrv2.Cluster("default",
563
+ node_groups=[
564
+ {
565
+ "vswitch_ids": [default_get_switches.ids[0]],
566
+ "instance_types": ["ecs.g6.xlarge"],
567
+ "node_count": 1,
568
+ "spot_instance_remedy": False,
569
+ "data_disks": [{
570
+ "count": 3,
571
+ "category": "cloud_essd",
572
+ "size": 80,
573
+ "performance_level": "PL0",
574
+ }],
575
+ "node_group_name": "emr-master",
576
+ "payment_type": "PayAsYouGo",
577
+ "with_public_ip": False,
578
+ "graceful_shutdown": False,
579
+ "system_disk": {
580
+ "category": "cloud_essd",
581
+ "size": 80,
582
+ "performance_level": "PL0",
583
+ "count": 1,
584
+ },
585
+ "node_group_type": "MASTER",
586
+ },
587
+ {
588
+ "spot_instance_remedy": False,
589
+ "node_group_type": "CORE",
590
+ "vswitch_ids": [default_get_switches.ids[0]],
591
+ "node_count": 2,
592
+ "graceful_shutdown": False,
593
+ "system_disk": {
594
+ "performance_level": "PL0",
595
+ "count": 1,
596
+ "category": "cloud_essd",
597
+ "size": 80,
598
+ },
599
+ "data_disks": [{
600
+ "count": 3,
601
+ "performance_level": "PL0",
602
+ "category": "cloud_essd",
603
+ "size": 80,
604
+ }],
605
+ "node_group_name": "emr-core",
606
+ "payment_type": "PayAsYouGo",
607
+ "instance_types": ["ecs.g6.xlarge"],
608
+ "with_public_ip": False,
609
+ },
610
+ ],
611
+ deploy_mode="NORMAL",
612
+ tags={
613
+ "Created": "TF",
614
+ "For": "example",
615
+ },
616
+ release_version="EMR-5.10.0",
617
+ applications=[
618
+ "HADOOP-COMMON",
619
+ "HDFS",
620
+ "YARN",
621
+ ],
622
+ node_attributes=[{
623
+ "zone_id": "cn-beijing-h",
624
+ "key_pair_name": default_ecs_key_pair.id,
625
+ "data_disk_encrypted": True,
626
+ "data_disk_kms_key_id": default_get_keys.ids[0],
627
+ "vpc_id": default_get_networks.ids[0],
628
+ "ram_role": default_role.name,
629
+ "security_group_id": default_security_group.id,
630
+ }],
631
+ resource_group_id=default_get_resource_groups.ids[0],
632
+ cluster_name=name,
633
+ payment_type="PayAsYouGo",
634
+ cluster_type="DATAFLOW")
635
+ default_zoepvx = alicloud.gpdb.Instance("defaultZoepvx",
636
+ instance_spec="2C8G",
637
+ description=name,
638
+ seg_node_num=2,
639
+ seg_storage_type="cloud_essd",
640
+ instance_network_type="VPC",
641
+ payment_type="PayAsYouGo",
642
+ ssl_enabled=0,
643
+ engine_version="6.0",
644
+ zone_id="cn-beijing-h",
645
+ vswitch_id=default_get_switches.ids[0],
646
+ storage_size=50,
647
+ master_cu=4,
648
+ vpc_id=default_get_networks.ids[0],
649
+ db_instance_mode="StorageElastic",
650
+ engine="gpdb",
651
+ db_instance_category="Basic")
652
+ defaulty_oxz1_k = alicloud.gpdb.ExternalDataService("defaultyOxz1K",
653
+ service_name=name,
654
+ db_instance_id=default_zoepvx.id,
655
+ service_description=name,
656
+ service_spec="8")
657
+ default_hadoop_data_source = alicloud.gpdb.HadoopDataSource("default",
658
+ hdfs_conf="aaa",
659
+ data_source_name=defaulty_oxz1_k.service_name,
660
+ yarn_conf="aaa",
661
+ hive_conf="aaa",
662
+ hadoop_create_type="emr",
663
+ data_source_description=name,
664
+ map_reduce_conf="aaa",
665
+ data_source_type="hive",
666
+ hadoop_core_conf="aaa",
667
+ emr_instance_id=default_cluster.id,
668
+ db_instance_id=default_zoepvx.id,
669
+ hadoop_hosts_address="aaa")
670
+ ```
671
+
672
+ ## Import
673
+
674
+ GPDB Hadoop Data Source can be imported using the id, e.g.
675
+
676
+ ```sh
677
+ $ pulumi import alicloud:gpdb/hadoopDataSource:HadoopDataSource example <db_instance_id>:<data_source_id>
678
+ ```
679
+
680
+ :param str resource_name: The name of the resource.
681
+ :param pulumi.ResourceOptions opts: Options for the resource.
682
+ :param pulumi.Input[str] data_source_description: Data Source Description
683
+ :param pulumi.Input[str] data_source_name: Data Source Name
684
+ :param pulumi.Input[str] data_source_type: The type of the data source. Valid values:
685
+
686
+ * mysql
687
+ - postgresql
688
+
689
+ * hdfs
690
+ - hive
691
+ :param pulumi.Input[str] db_instance_id: The instance ID.
692
+ :param pulumi.Input[str] emr_instance_id: The ID of the Emr instance.
693
+ :param pulumi.Input[str] hadoop_core_conf: The string that specifies the content of the Hadoop core-site.xml file.
694
+ :param pulumi.Input[str] hadoop_create_type: The type of the external service. Valid values:
695
+ - emr: E-MapReduce (EMR) Hadoop cluster.
696
+ - selfCreate: self-managed Hadoop cluster.
697
+ :param pulumi.Input[str] hadoop_hosts_address: The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
698
+ :param pulumi.Input[str] hdfs_conf: The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
699
+ :param pulumi.Input[str] hive_conf: The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
700
+ :param pulumi.Input[str] map_reduce_conf: The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
701
+ :param pulumi.Input[str] yarn_conf: The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
702
+ """
703
+ ...
704
    @overload
    def __init__(__self__,
                 resource_name: str,
                 args: HadoopDataSourceArgs,
                 opts: Optional[pulumi.ResourceOptions] = None):
        """
        Provides a GPDB Hadoop Data Source resource.

        Hadoop DataSource Config.

        For information about GPDB Hadoop Data Source and how to use it, see [What is Hadoop Data Source](https://www.alibabacloud.com/help/en/).

        > **NOTE:** Available since v1.230.0.

        ## Example Usage

        Basic Usage

        ```python
        import pulumi
        import pulumi_alicloud as alicloud

        config = pulumi.Config()
        name = config.get("name")
        if name is None:
            name = "terraform-example"
        default = alicloud.get_zones(available_resource_creation="VSwitch")
        default_get_networks = alicloud.vpc.get_networks(name_regex="^default-NODELETING$")
        default_get_switches = alicloud.vpc.get_switches(vpc_id=default_get_networks.ids[0],
            zone_id="cn-beijing-h")
        default_ecs_key_pair = alicloud.ecs.EcsKeyPair("default", key_pair_name=name)
        default_security_group = alicloud.ecs.SecurityGroup("default",
            name=name,
            vpc_id=default_get_networks.ids[0])
        default_role = alicloud.ram.Role("default",
            name=name,
            document=\"\"\" {
              "Statement": [
                {
                  "Action": "sts:AssumeRole",
                  "Effect": "Allow",
                  "Principal": {
                    "Service": [
                      "emr.aliyuncs.com",
                      "ecs.aliyuncs.com"
                    ]
                  }
                }
              ],
              "Version": "1"
            }
        \"\"\",
            description="this is a role example.",
            force=True)
        default_get_resource_groups = alicloud.resourcemanager.get_resource_groups(status="OK")
        default_get_keys = alicloud.kms.get_keys(status="Enabled")
        default_cluster = alicloud.emrv2.Cluster("default",
            node_groups=[
                {
                    "vswitch_ids": [default_get_switches.ids[0]],
                    "instance_types": ["ecs.g6.xlarge"],
                    "node_count": 1,
                    "spot_instance_remedy": False,
                    "data_disks": [{
                        "count": 3,
                        "category": "cloud_essd",
                        "size": 80,
                        "performance_level": "PL0",
                    }],
                    "node_group_name": "emr-master",
                    "payment_type": "PayAsYouGo",
                    "with_public_ip": False,
                    "graceful_shutdown": False,
                    "system_disk": {
                        "category": "cloud_essd",
                        "size": 80,
                        "performance_level": "PL0",
                        "count": 1,
                    },
                    "node_group_type": "MASTER",
                },
                {
                    "spot_instance_remedy": False,
                    "node_group_type": "CORE",
                    "vswitch_ids": [default_get_switches.ids[0]],
                    "node_count": 2,
                    "graceful_shutdown": False,
                    "system_disk": {
                        "performance_level": "PL0",
                        "count": 1,
                        "category": "cloud_essd",
                        "size": 80,
                    },
                    "data_disks": [{
                        "count": 3,
                        "performance_level": "PL0",
                        "category": "cloud_essd",
                        "size": 80,
                    }],
                    "node_group_name": "emr-core",
                    "payment_type": "PayAsYouGo",
                    "instance_types": ["ecs.g6.xlarge"],
                    "with_public_ip": False,
                },
            ],
            deploy_mode="NORMAL",
            tags={
                "Created": "TF",
                "For": "example",
            },
            release_version="EMR-5.10.0",
            applications=[
                "HADOOP-COMMON",
                "HDFS",
                "YARN",
            ],
            node_attributes=[{
                "zone_id": "cn-beijing-h",
                "key_pair_name": default_ecs_key_pair.id,
                "data_disk_encrypted": True,
                "data_disk_kms_key_id": default_get_keys.ids[0],
                "vpc_id": default_get_networks.ids[0],
                "ram_role": default_role.name,
                "security_group_id": default_security_group.id,
            }],
            resource_group_id=default_get_resource_groups.ids[0],
            cluster_name=name,
            payment_type="PayAsYouGo",
            cluster_type="DATAFLOW")
        default_zoepvx = alicloud.gpdb.Instance("defaultZoepvx",
            instance_spec="2C8G",
            description=name,
            seg_node_num=2,
            seg_storage_type="cloud_essd",
            instance_network_type="VPC",
            payment_type="PayAsYouGo",
            ssl_enabled=0,
            engine_version="6.0",
            zone_id="cn-beijing-h",
            vswitch_id=default_get_switches.ids[0],
            storage_size=50,
            master_cu=4,
            vpc_id=default_get_networks.ids[0],
            db_instance_mode="StorageElastic",
            engine="gpdb",
            db_instance_category="Basic")
        defaulty_oxz1_k = alicloud.gpdb.ExternalDataService("defaultyOxz1K",
            service_name=name,
            db_instance_id=default_zoepvx.id,
            service_description=name,
            service_spec="8")
        default_hadoop_data_source = alicloud.gpdb.HadoopDataSource("default",
            hdfs_conf="aaa",
            data_source_name=defaulty_oxz1_k.service_name,
            yarn_conf="aaa",
            hive_conf="aaa",
            hadoop_create_type="emr",
            data_source_description=name,
            map_reduce_conf="aaa",
            data_source_type="hive",
            hadoop_core_conf="aaa",
            emr_instance_id=default_cluster.id,
            db_instance_id=default_zoepvx.id,
            hadoop_hosts_address="aaa")
        ```

        ## Import

        GPDB Hadoop Data Source can be imported using the id, e.g.

        ```sh
        $ pulumi import alicloud:gpdb/hadoopDataSource:HadoopDataSource example <db_instance_id>:<data_source_id>
        ```

        :param str resource_name: The name of the resource.
        :param HadoopDataSourceArgs args: The arguments to use to populate this resource's properties.
        :param pulumi.ResourceOptions opts: Options for the resource.
        """
        ...
883
+ def __init__(__self__, resource_name: str, *args, **kwargs):
884
+ resource_args, opts = _utilities.get_resource_args_opts(HadoopDataSourceArgs, pulumi.ResourceOptions, *args, **kwargs)
885
+ if resource_args is not None:
886
+ __self__._internal_init(resource_name, opts, **resource_args.__dict__)
887
+ else:
888
+ __self__._internal_init(resource_name, *args, **kwargs)
889
+
890
    def _internal_init(__self__,
                       resource_name: str,
                       opts: Optional[pulumi.ResourceOptions] = None,
                       data_source_description: Optional[pulumi.Input[str]] = None,
                       data_source_name: Optional[pulumi.Input[str]] = None,
                       data_source_type: Optional[pulumi.Input[str]] = None,
                       db_instance_id: Optional[pulumi.Input[str]] = None,
                       emr_instance_id: Optional[pulumi.Input[str]] = None,
                       hadoop_core_conf: Optional[pulumi.Input[str]] = None,
                       hadoop_create_type: Optional[pulumi.Input[str]] = None,
                       hadoop_hosts_address: Optional[pulumi.Input[str]] = None,
                       hdfs_conf: Optional[pulumi.Input[str]] = None,
                       hive_conf: Optional[pulumi.Input[str]] = None,
                       map_reduce_conf: Optional[pulumi.Input[str]] = None,
                       yarn_conf: Optional[pulumi.Input[str]] = None,
                       __props__=None):
        """Shared initializer behind both ``__init__`` overloads.

        Validates options, assembles the input property bag, and registers the
        resource with the Pulumi engine.
        """
        # Layer caller-supplied options on top of provider-wide defaults.
        opts = pulumi.ResourceOptions.merge(_utilities.get_resource_opts_defaults(), opts)
        if not isinstance(opts, pulumi.ResourceOptions):
            raise TypeError('Expected resource options to be a ResourceOptions instance')
        # opts.id set means we are rehydrating an existing resource (via `get`);
        # in that case the engine supplies __props__ and inputs are not rebuilt.
        if opts.id is None:
            if __props__ is not None:
                raise TypeError('__props__ is only valid when passed in combination with a valid opts.id to get an existing resource')
            __props__ = HadoopDataSourceArgs.__new__(HadoopDataSourceArgs)

            __props__.__dict__["data_source_description"] = data_source_description
            __props__.__dict__["data_source_name"] = data_source_name
            __props__.__dict__["data_source_type"] = data_source_type
            # db_instance_id is the only required input (unless aliasing an
            # existing URN, where the engine already knows the inputs).
            if db_instance_id is None and not opts.urn:
                raise TypeError("Missing required property 'db_instance_id'")
            __props__.__dict__["db_instance_id"] = db_instance_id
            __props__.__dict__["emr_instance_id"] = emr_instance_id
            __props__.__dict__["hadoop_core_conf"] = hadoop_core_conf
            __props__.__dict__["hadoop_create_type"] = hadoop_create_type
            __props__.__dict__["hadoop_hosts_address"] = hadoop_hosts_address
            __props__.__dict__["hdfs_conf"] = hdfs_conf
            __props__.__dict__["hive_conf"] = hive_conf
            __props__.__dict__["map_reduce_conf"] = map_reduce_conf
            __props__.__dict__["yarn_conf"] = yarn_conf
            # Output-only properties start as None placeholders; the provider
            # fills them in after creation.
            __props__.__dict__["create_time"] = None
            __props__.__dict__["data_source_id"] = None
            __props__.__dict__["status"] = None
        # Register the resource under its Pulumi type token.
        super(HadoopDataSource, __self__).__init__(
            'alicloud:gpdb/hadoopDataSource:HadoopDataSource',
            resource_name,
            __props__,
            opts)
936
+
937
+ @staticmethod
938
+ def get(resource_name: str,
939
+ id: pulumi.Input[str],
940
+ opts: Optional[pulumi.ResourceOptions] = None,
941
+ create_time: Optional[pulumi.Input[str]] = None,
942
+ data_source_description: Optional[pulumi.Input[str]] = None,
943
+ data_source_id: Optional[pulumi.Input[int]] = None,
944
+ data_source_name: Optional[pulumi.Input[str]] = None,
945
+ data_source_type: Optional[pulumi.Input[str]] = None,
946
+ db_instance_id: Optional[pulumi.Input[str]] = None,
947
+ emr_instance_id: Optional[pulumi.Input[str]] = None,
948
+ hadoop_core_conf: Optional[pulumi.Input[str]] = None,
949
+ hadoop_create_type: Optional[pulumi.Input[str]] = None,
950
+ hadoop_hosts_address: Optional[pulumi.Input[str]] = None,
951
+ hdfs_conf: Optional[pulumi.Input[str]] = None,
952
+ hive_conf: Optional[pulumi.Input[str]] = None,
953
+ map_reduce_conf: Optional[pulumi.Input[str]] = None,
954
+ status: Optional[pulumi.Input[str]] = None,
955
+ yarn_conf: Optional[pulumi.Input[str]] = None) -> 'HadoopDataSource':
956
+ """
957
+ Get an existing HadoopDataSource resource's state with the given name, id, and optional extra
958
+ properties used to qualify the lookup.
959
+
960
+ :param str resource_name: The unique name of the resulting resource.
961
+ :param pulumi.Input[str] id: The unique provider ID of the resource to lookup.
962
+ :param pulumi.ResourceOptions opts: Options for the resource.
963
+ :param pulumi.Input[str] create_time: Creation time
964
+ :param pulumi.Input[str] data_source_description: Data Source Description
965
+ :param pulumi.Input[int] data_source_id: The data source ID.
966
+ :param pulumi.Input[str] data_source_name: Data Source Name
967
+ :param pulumi.Input[str] data_source_type: The type of the data source. Valid values:
968
+
969
+ * mysql
970
+ - postgresql
971
+
972
+ * hdfs
973
+ - hive
974
+ :param pulumi.Input[str] db_instance_id: The instance ID.
975
+ :param pulumi.Input[str] emr_instance_id: The ID of the Emr instance.
976
+ :param pulumi.Input[str] hadoop_core_conf: The string that specifies the content of the Hadoop core-site.xml file.
977
+ :param pulumi.Input[str] hadoop_create_type: The type of the external service. Valid values:
978
+ - emr: E-MapReduce (EMR) Hadoop cluster.
979
+ - selfCreate: self-managed Hadoop cluster.
980
+ :param pulumi.Input[str] hadoop_hosts_address: The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
981
+ :param pulumi.Input[str] hdfs_conf: The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
982
+ :param pulumi.Input[str] hive_conf: The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
983
+ :param pulumi.Input[str] map_reduce_conf: The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
984
+ :param pulumi.Input[str] status: Data Source Status
985
+ :param pulumi.Input[str] yarn_conf: The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
986
+ """
987
+ opts = pulumi.ResourceOptions.merge(opts, pulumi.ResourceOptions(id=id))
988
+
989
+ __props__ = _HadoopDataSourceState.__new__(_HadoopDataSourceState)
990
+
991
+ __props__.__dict__["create_time"] = create_time
992
+ __props__.__dict__["data_source_description"] = data_source_description
993
+ __props__.__dict__["data_source_id"] = data_source_id
994
+ __props__.__dict__["data_source_name"] = data_source_name
995
+ __props__.__dict__["data_source_type"] = data_source_type
996
+ __props__.__dict__["db_instance_id"] = db_instance_id
997
+ __props__.__dict__["emr_instance_id"] = emr_instance_id
998
+ __props__.__dict__["hadoop_core_conf"] = hadoop_core_conf
999
+ __props__.__dict__["hadoop_create_type"] = hadoop_create_type
1000
+ __props__.__dict__["hadoop_hosts_address"] = hadoop_hosts_address
1001
+ __props__.__dict__["hdfs_conf"] = hdfs_conf
1002
+ __props__.__dict__["hive_conf"] = hive_conf
1003
+ __props__.__dict__["map_reduce_conf"] = map_reduce_conf
1004
+ __props__.__dict__["status"] = status
1005
+ __props__.__dict__["yarn_conf"] = yarn_conf
1006
+ return HadoopDataSource(resource_name, opts=opts, __props__=__props__)
1007
+
1008
+ @property
1009
+ @pulumi.getter(name="createTime")
1010
+ def create_time(self) -> pulumi.Output[str]:
1011
+ """
1012
+ Creation time
1013
+ """
1014
+ return pulumi.get(self, "create_time")
1015
+
1016
+ @property
1017
+ @pulumi.getter(name="dataSourceDescription")
1018
+ def data_source_description(self) -> pulumi.Output[Optional[str]]:
1019
+ """
1020
+ Data Source Description
1021
+ """
1022
+ return pulumi.get(self, "data_source_description")
1023
+
1024
+ @property
1025
+ @pulumi.getter(name="dataSourceId")
1026
+ def data_source_id(self) -> pulumi.Output[int]:
1027
+ """
1028
+ The data source ID.
1029
+ """
1030
+ return pulumi.get(self, "data_source_id")
1031
+
1032
+ @property
1033
+ @pulumi.getter(name="dataSourceName")
1034
+ def data_source_name(self) -> pulumi.Output[Optional[str]]:
1035
+ """
1036
+ Data Source Name
1037
+ """
1038
+ return pulumi.get(self, "data_source_name")
1039
+
1040
    @property
    @pulumi.getter(name="dataSourceType")
    def data_source_type(self) -> pulumi.Output[Optional[str]]:
        """
        The type of the data source. Valid values:
        - mysql
        - postgresql
        - hdfs
        - hive
        """
        return pulumi.get(self, "data_source_type")
1053
+
1054
+ @property
1055
+ @pulumi.getter(name="dbInstanceId")
1056
+ def db_instance_id(self) -> pulumi.Output[str]:
1057
+ """
1058
+ The instance ID.
1059
+ """
1060
+ return pulumi.get(self, "db_instance_id")
1061
+
1062
+ @property
1063
+ @pulumi.getter(name="emrInstanceId")
1064
+ def emr_instance_id(self) -> pulumi.Output[Optional[str]]:
1065
+ """
1066
+ The ID of the Emr instance.
1067
+ """
1068
+ return pulumi.get(self, "emr_instance_id")
1069
+
1070
+ @property
1071
+ @pulumi.getter(name="hadoopCoreConf")
1072
+ def hadoop_core_conf(self) -> pulumi.Output[Optional[str]]:
1073
+ """
1074
+ The string that specifies the content of the Hadoop core-site.xml file.
1075
+ """
1076
+ return pulumi.get(self, "hadoop_core_conf")
1077
+
1078
+ @property
1079
+ @pulumi.getter(name="hadoopCreateType")
1080
+ def hadoop_create_type(self) -> pulumi.Output[Optional[str]]:
1081
+ """
1082
+ The type of the external service. Valid values:
1083
+ - emr: E-MapReduce (EMR) Hadoop cluster.
1084
+ - selfCreate: self-managed Hadoop cluster.
1085
+ """
1086
+ return pulumi.get(self, "hadoop_create_type")
1087
+
1088
+ @property
1089
+ @pulumi.getter(name="hadoopHostsAddress")
1090
+ def hadoop_hosts_address(self) -> pulumi.Output[Optional[str]]:
1091
+ """
1092
+ The IP address and hostname of the Hadoop cluster (data source) in the /etc/hosts file.
1093
+ """
1094
+ return pulumi.get(self, "hadoop_hosts_address")
1095
+
1096
+ @property
1097
+ @pulumi.getter(name="hdfsConf")
1098
+ def hdfs_conf(self) -> pulumi.Output[Optional[str]]:
1099
+ """
1100
+ The string that specifies the content of the Hadoop hdfs-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
1101
+ """
1102
+ return pulumi.get(self, "hdfs_conf")
1103
+
1104
+ @property
1105
+ @pulumi.getter(name="hiveConf")
1106
+ def hive_conf(self) -> pulumi.Output[Optional[str]]:
1107
+ """
1108
+ The string that specifies the content of the Hadoop hive-site.xml file. This parameter must be specified when DataSourceType is set to Hive.
1109
+ """
1110
+ return pulumi.get(self, "hive_conf")
1111
+
1112
+ @property
1113
+ @pulumi.getter(name="mapReduceConf")
1114
+ def map_reduce_conf(self) -> pulumi.Output[Optional[str]]:
1115
+ """
1116
+ The content of the Hadoop mapred-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
1117
+ """
1118
+ return pulumi.get(self, "map_reduce_conf")
1119
+
1120
+ @property
1121
+ @pulumi.getter
1122
+ def status(self) -> pulumi.Output[str]:
1123
+ """
1124
+ Data Source Status
1125
+ """
1126
+ return pulumi.get(self, "status")
1127
+
1128
+ @property
1129
+ @pulumi.getter(name="yarnConf")
1130
+ def yarn_conf(self) -> pulumi.Output[Optional[str]]:
1131
+ """
1132
+ The string that specifies the content of the Hadoop yarn-site.xml file. This parameter must be specified when DataSourceType is set to HDFS.
1133
+ """
1134
+ return pulumi.get(self, "yarn_conf")
1135
+