cloudbox-server 0.0.1

Files changed (147)
  1. data/.yardopts +3 -0
  2. data/README.md +17 -0
  3. data/bin/cb-cluster-allocate +5 -0
  4. data/bin/cb-cluster-create +5 -0
  5. data/bin/cb-cluster-delete +5 -0
  6. data/bin/cb-cluster-desallocate +5 -0
  7. data/bin/cb-cluster-info +5 -0
  8. data/bin/cb-cluster-ls +5 -0
  9. data/bin/cb-node-destroy +5 -0
  10. data/bin/cb-node-detach +6 -0
  11. data/bin/cb-node-info +5 -0
  12. data/bin/cb-node-install +5 -0
  13. data/bin/cb-node-ls +5 -0
  14. data/bin/cb-service-add +5 -0
  15. data/bin/cb-service-component-add +5 -0
  16. data/bin/cb-service-component-delete +5 -0
  17. data/bin/cb-service-component-detach +5 -0
  18. data/bin/cb-service-conf-apply +5 -0
  19. data/bin/cb-service-conf-create +5 -0
  20. data/bin/cb-service-conf-get +5 -0
  21. data/bin/cb-service-conf-ls +5 -0
  22. data/bin/cb-service-follow-request +5 -0
  23. data/bin/cb-service-info +5 -0
  24. data/bin/cb-service-install +5 -0
  25. data/bin/cb-service-ls +5 -0
  26. data/bin/cb-service-start +5 -0
  27. data/bin/cb-service-stop +5 -0
  28. data/lib/command.rb +61 -0
  29. data/lib/command/ambari_add_host.rb +19 -0
  30. data/lib/command/ambari_cluster_command.rb +26 -0
  31. data/lib/command/ambari_cluster_component_command.rb +26 -0
  32. data/lib/command/ambari_cluster_component_host_command.rb +26 -0
  33. data/lib/command/ambari_cluster_create.rb +19 -0
  34. data/lib/command/ambari_cluster_host_command.rb +27 -0
  35. data/lib/command/ambari_cluster_list.rb +20 -0
  36. data/lib/command/ambari_cluster_service_command.rb +26 -0
  37. data/lib/command/ambari_cluster_service_host_command.rb +26 -0
  38. data/lib/command/ambari_command.rb +21 -0
  39. data/lib/command/ambari_create_cluster_configuration.rb +39 -0
  40. data/lib/command/ambari_get_cluster_configuration.rb +29 -0
  41. data/lib/command/ambari_host_list.rb +19 -0
  42. data/lib/command/ambari_install_cluster.rb +19 -0
  43. data/lib/command/ambari_install_component.rb +19 -0
  44. data/lib/command/ambari_install_host.rb +19 -0
  45. data/lib/command/ambari_install_host_component.rb +19 -0
  46. data/lib/command/ambari_install_host_components.rb +19 -0
  47. data/lib/command/ambari_install_service_components.rb +19 -0
  48. data/lib/command/ambari_service_add.rb +27 -0
  49. data/lib/command/ambari_service_component_add.rb +21 -0
  50. data/lib/command/ambari_service_component_delete.rb +21 -0
  51. data/lib/command/ambari_service_component_detach.rb +21 -0
  52. data/lib/command/ambari_service_component_info.rb +29 -0
  53. data/lib/command/ambari_service_conf_apply.rb +31 -0
  54. data/lib/command/ambari_service_conf_ls.rb +30 -0
  55. data/lib/command/ambari_service_delete.rb +21 -0
  56. data/lib/command/ambari_service_info.rb +20 -0
  57. data/lib/command/ambari_service_list.rb +21 -0
  58. data/lib/command/ambari_start_cluster.rb +19 -0
  59. data/lib/command/ambari_start_component.rb +19 -0
  60. data/lib/command/ambari_start_host.rb +19 -0
  61. data/lib/command/ambari_start_host_component.rb +19 -0
  62. data/lib/command/ambari_start_host_components.rb +19 -0
  63. data/lib/command/ambari_start_service_components.rb +19 -0
  64. data/lib/command/ambari_stop_cluster.rb +19 -0
  65. data/lib/command/ambari_stop_component.rb +19 -0
  66. data/lib/command/ambari_stop_host.rb +19 -0
  67. data/lib/command/ambari_stop_host_component.rb +19 -0
  68. data/lib/command/ambari_stop_host_components.rb +19 -0
  69. data/lib/command/ambari_stop_service_components.rb +19 -0
  70. data/lib/command/basic_command.rb +44 -0
  71. data/lib/command/cluster_create.rb +50 -0
  72. data/lib/command/cluster_delete.rb +30 -0
  73. data/lib/command/cluster_info.rb +30 -0
  74. data/lib/command/cluster_ls.rb +30 -0
  75. data/lib/command/crowbar_cluster_allocate.rb +42 -0
  76. data/lib/command/crowbar_command.rb +30 -0
  77. data/lib/command/crowbar_get_barclamps_list.rb +30 -0
  78. data/lib/command/crowbar_node_command.rb +21 -0
  79. data/lib/command/crowbar_node_list.rb +40 -0
  80. data/lib/command/crowbar_vcluster_command.rb +20 -0
  81. data/lib/command/filter.rb +15 -0
  82. data/lib/command/node_destroy.rb +58 -0
  83. data/lib/command/node_detach.rb +44 -0
  84. data/lib/command/node_info.rb +27 -0
  85. data/lib/command/node_install.rb +46 -0
  86. data/lib/command/node_reinstall.rb +44 -0
  87. data/lib/command/state_filter.rb +16 -0
  88. data/lib/common.rb +11 -0
  89. data/lib/common/cb-lib-node-erase-hard-disk.sh +29 -0
  90. data/lib/common/cloudbox_exceptions.rb +631 -0
  91. data/lib/common/cloudbox_logger.rb +81 -0
  92. data/lib/common/cloudbox_logger_mock.rb +43 -0
  93. data/lib/common/color.rb +267 -0
  94. data/lib/common/config_properties.rb +2027 -0
  95. data/lib/common/services_description.rb +192 -0
  96. data/lib/exec.rb +35 -0
  97. data/lib/exec/check_parameter.rb +208 -0
  98. data/lib/exec/cluster_allocate.rb +163 -0
  99. data/lib/exec/cluster_create.rb +99 -0
  100. data/lib/exec/cluster_delete.rb +38 -0
  101. data/lib/exec/cluster_desallocate.rb +37 -0
  102. data/lib/exec/cluster_info.rb +177 -0
  103. data/lib/exec/cluster_ls.rb +150 -0
  104. data/lib/exec/command_option.rb +222 -0
  105. data/lib/exec/executable_command.rb +194 -0
  106. data/lib/exec/node_destroy.rb +101 -0
  107. data/lib/exec/node_detach.rb +98 -0
  108. data/lib/exec/node_info.rb +280 -0
  109. data/lib/exec/node_install.rb +234 -0
  110. data/lib/exec/node_ls.rb +160 -0
  111. data/lib/exec/service_add.rb +224 -0
  112. data/lib/exec/service_component_add.rb +39 -0
  113. data/lib/exec/service_component_delete.rb +37 -0
  114. data/lib/exec/service_component_detach.rb +37 -0
  115. data/lib/exec/service_conf_apply.rb +57 -0
  116. data/lib/exec/service_conf_create.rb +80 -0
  117. data/lib/exec/service_conf_get.rb +165 -0
  118. data/lib/exec/service_conf_ls.rb +103 -0
  119. data/lib/exec/service_follow_request.rb +49 -0
  120. data/lib/exec/service_info.rb +346 -0
  121. data/lib/exec/service_install.rb +87 -0
  122. data/lib/exec/service_ls.rb +124 -0
  123. data/lib/exec/service_start.rb +110 -0
  124. data/lib/exec/service_stop.rb +112 -0
  125. data/lib/receiver.rb +14 -0
  126. data/lib/receiver/ambari_receiver.rb +812 -0
  127. data/lib/receiver/ambari_rest_api_connector.rb +599 -0
  128. data/lib/receiver/basic_receiver.rb +28 -0
  129. data/lib/receiver/connector.rb +28 -0
  130. data/lib/receiver/crowbar_receiver.rb +588 -0
  131. data/lib/receiver/crowbar_rest_api_connector.rb +518 -0
  132. data/lib/receiver/crowbar_shell_api.rb +643 -0
  133. data/lib/receiver/rest_api_connector.rb +275 -0
  134. data/resources/ambari-configurations/HBASE/default_hbase-site +33 -0
  135. data/resources/ambari-configurations/HDFS/default_hdfs-site +49 -0
  136. data/resources/ambari-configurations/HIVE/default_hive-site +33 -0
  137. data/resources/ambari-configurations/MAPREDUCE/default_mapred-site +72 -0
  138. data/resources/ambari-configurations/OOZIE/default_oozie-site +28 -0
  139. data/resources/ambari-configurations/WEBHCAT/default_webhcat-site +18 -0
  140. data/resources/ambari-configurations/ZOOKEEPER/default_zookeeper-site +8 -0
  141. data/resources/ambari-configurations/default_core-site +22 -0
  142. data/resources/ambari-configurations/default_global +141 -0
  143. data/resources/cloudbox-server.conf +10 -0
  144. data/spec/common/services_description.rb +130 -0
  145. data/spec/exec/check_parameter.rb +152 -0
  146. data/spec/exec/command_option_spec.rb +97 -0
  147. metadata +328 -0
data/lib/receiver/rest_api_connector.rb
@@ -0,0 +1,275 @@
+ # This file is part of the cloudbox-server project
+ # @author tnoguer (INGENSI)
+
+ require 'uri'
+ require 'json'
+ require 'xmlsimple'
+ require 'net/http'
+ require 'net/http/digest_auth'
+ require 'common/cloudbox_exceptions'
+ require 'receiver/connector'
+
+ module Receiver
+   # Connects to a REST API and executes REST commands (GET/POST/PUT/DELETE).
+   # @author mbretaud
+   class RestApiConnector < Connector
+
+     public
+     # The destination IP.
+     attr_reader :dest_ip
+     # The destination port.
+     attr_reader :dest_port
+     # The timeout.
+     attr_reader :timeout
+     # The type of authentication.
+     attr_reader :auth_type
+
+     private
+     # The user, used for authentication.
+     @user
+     # The password, used for authentication.
+     @password
+     # The header, used for HTTP requests.
+     @header
+     # The HTTP object.
+     @http
+
+     public
+     # Default constructor.
+     # @param dest_ip [String] The destination IP.
+     # @param dest_port [Integer] The destination port.
+     # @param auth_type [String] The type of authentication (BASIC, DIGEST).
+     # @param user [String] The username.
+     # @param password [String] The password.
+     # @raise [ArgumentError] If any argument is nil or invalid.
+     # @author tnoguer
+     def initialize(dest_ip, dest_port, auth_type, user, password, timeout = 10000, logger = nil)
+       super(logger)
+       @logger.info("Receiver::RestApiConnector initializing the parameters...")
+       if dest_ip == nil || dest_port == nil || user == nil || password == nil
+         raise ArgumentError.new("Arguments can't be nil!")
+       elsif auth_type != "BASIC" && auth_type != "DIGEST"
+         raise ArgumentError.new("Authentication type invalid: " + auth_type)
+       end
+
+       @dest_ip = dest_ip
+       @dest_port = dest_port
+       @auth_type = auth_type
+       @user = user
+       @password = password
+       @timeout = timeout
+       @header = {
+         "Accept" => "application/json",
+         "Content-Type" => "application/json"
+       }
+       @body = ''
+       @http = Net::HTTP.new(@dest_ip, @dest_port)
+
+       @logger.info("Receiver::RestApiConnector parameters initialized.")
+     end
+
+     public
+     # Launch an HTTP request against the REST API.
+     # @param request_type The type of request (GET, POST, PUT, DELETE).
+     # @param address The URL path of the request (ex: /clusters).
+     # @param body The body of the request (ex: "{\"Clusters\": {\"version\": \"HDP-1.3.0\"}}").
+     # @raise [ArgumentError] If any argument is nil or invalid.
+     # @raise [HTTPError] If the request failed.
+     # @author tnoguer
+     def launch_request(request_type, address, body = '')
+       @logger.info("Receiver::RestApiConnector launch a #{request_type} request on '#{address}' with body '#{body}'...")
+
+       if request_type == nil || address == nil || body == nil
+         raise ArgumentError.new("Arguments can't be nil!")
+       end
+
+       uri = URI.parse("http://#{@dest_ip}:#{@dest_port}#{address.strip}")
+       uri.user = @user.strip
+       uri.password = @password.strip
+       response = nil
+       error = false
+
+       if @auth_type == "BASIC"
+         case request_type
+         when "GET"
+           request = Net::HTTP::Get.new(address)
+         when "POST"
+           request = Net::HTTP::Post.new(address)
+         when "PUT"
+           request = Net::HTTP::Put.new(address)
+         when "DELETE"
+           request = Net::HTTP::Delete.new(address)
+         else
+           raise ArgumentError.new("Invalid type: " + request_type)
+         end
+
+         request.basic_auth(@user, @password)
+         request.body = body
+         response = @http.request(request)
+
+         case request_type
+         when "GET", "DELETE"
+           error = true if response.code != '200'
+         when "POST"
+           error = true if response.code != '201'
+         when "PUT"
+           error = true if response.code != '200' && response.code != '202'
+         end
+       elsif @auth_type == "DIGEST"
+         Net::HTTP.start(uri.host, uri.port) { |http|
+           http.read_timeout = @timeout
+           case request_type
+           when "GET"
+             request = Net::HTTP::Get.new(uri.request_uri, @header)
+           when "POST"
+             request = Net::HTTP::Post.new(uri.request_uri)
+           when "PUT"
+             request = Net::HTTP::Put.new(uri.request_uri, @header)
+           when "DELETE"
+             request = Net::HTTP::Delete.new(uri.request_uri, @header)
+           else
+             raise ArgumentError.new("Invalid type: " + request_type)
+           end
+
+           request.body = body
+           response = http.request request
+
+           # The first request may come back with a challenge: answer it and retry.
+           if response['www-authenticate']
+             begin
+               auth = Net::HTTP::DigestAuth.new.auth_header(uri, response['www-authenticate'], request_type)
+               request.add_field 'Authorization', auth
+               response = http.request request
+             rescue => e
+               message = get_message_html(response.body)
+               raise Common::HTTPError.getConsistentError(message, response.code, response.body)
+             end
+           end
+
+           case request_type
+           when "GET", "DELETE"
+             error = true if response.code != '200'
+           when "POST"
+             error = true if response.code != '302'
+           when "PUT"
+             error = true if response.code != '200'
+           else
+             raise ArgumentError.new("Invalid type: " + request_type)
+           end
+         }
+       end
+
+       if error
+         begin
+           m = JSON.parse(response.body)
+           message = m['message']
+         rescue
+           message = get_message_html(response.body)
+         end
+         raise Common::HTTPError.getConsistentError(message, response.code, response.body)
+       end
+       return response
+     end
+
+     public
+     # Launch an HTTP request against the REST API (mock version, DIGEST only).
+     # @param request_type The type of request (GET, POST, PUT, DELETE).
+     # @param address The URL path of the request (ex: /clusters).
+     # @param body The body of the request (ex: "{\"Clusters\": {\"version\": \"HDP-1.3.0\"}}").
+     # @raise [ArgumentError] If any argument is nil or invalid.
+     # @raise [HTTPError] If the request failed.
+     # @author tnoguer
+     def launch_request_mock(request_type, address, body = '')
+       if request_type == nil || address == nil || body == nil
+         raise ArgumentError.new("Arguments can't be nil!")
+       end
+
+       uri = URI.parse("http://#{@dest_ip}:#{@dest_port}#{address.strip}")
+       uri.user = @user.strip
+       uri.password = @password.strip
+       response = nil
+
+       if @auth_type == "DIGEST"
+         Net::HTTP.start(uri.host, uri.port) { |http|
+           http.read_timeout = @timeout
+           case request_type
+           when "GET"
+             request = Net::HTTP::Get.new(uri.request_uri, @header)
+           when "POST"
+             request = Net::HTTP::Post.new(uri.request_uri, @header)
+           when "PUT"
+             request = Net::HTTP::Put.new(uri.request_uri, @header)
+           when "DELETE"
+             request = Net::HTTP::Delete.new(uri.request_uri, @header)
+           else
+             raise ArgumentError.new("Invalid type: " + request_type)
+           end
+
+           request.body = body
+           response = http.request request
+
+           if response['www-authenticate']
+             begin
+               auth = Net::HTTP::DigestAuth.new.auth_header(uri, response['www-authenticate'], request_type)
+               request.add_field 'Authorization', auth
+               response = http.request request
+
+               return response
+             rescue => e
+               message = get_message_html(response.body)
+             end
+             raise Common::HTTPError.getConsistentError(message, response.code, response.body)
+           end
+         }
+       end
+
+       return response
+     end
+
+     private
+     # Extract the error message from an HTML error body.
+     def get_message_html(html)
+       @logger.info("Receiver::RestApiConnector get the HTML message '#{html}' of the request...")
+
+       begin
+         parsed_html = XmlSimple.xml_in(html)
+         return parsed_html['body'][0]['p'][0]['content']
+       rescue
+         return html
+       end
+     end
+
+   end
+ end
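
As a quick orientation, here is a minimal usage sketch of Receiver::RestApiConnector. The host 192.168.1.10, port 8080 and admin/admin credentials are placeholders; /api/v1/clusters is Ambari's own endpoint rather than something this gem defines; and the sketch assumes that Connector supplies a default logger when none is passed and that getConsistentError builds a Common::HTTPError (or a subclass of it).

    require 'json'
    require 'receiver/rest_api_connector'

    # Hypothetical Ambari coordinates and credentials; adjust to your deployment.
    connector = Receiver::RestApiConnector.new('192.168.1.10', 8080, 'BASIC', 'admin', 'admin')

    begin
      # List the clusters known to Ambari (the path belongs to Ambari's REST API).
      response = connector.launch_request('GET', '/api/v1/clusters')
      puts JSON.parse(response.body)
    rescue Common::HTTPError => e
      # launch_request raises when the response code is not the expected success code.
      warn "Request failed: #{e.message}"
    rescue ArgumentError => e
      warn "Bad call: #{e.message}"
    end

With auth_type set to "DIGEST", the same launch_request call goes through the challenge/retry path shown in the class above instead of basic_auth.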
data/resources/ambari-configurations/HBASE/default_hbase-site
@@ -0,0 +1,33 @@
+ {
+   "hbase.hregion.memstore.flush.size": "134217728",
+   "hbase.hstore.compactionThreshold": "3",
+   "hbase.security.authorization": "false",
+   "zookeeper.znode.parent": "/hbase-unsecure",
+   "hbase.regionserver.global.memstore.upperLimit": "0.4",
+   "hbase.security.authentication": "simple",
+   "hbase.rpc.engine": "org.apache.hadoop.hbase.ipc.WritableRpcEngine",
+   "dfs.support.append": "true",
+   "hbase.hregion.majorcompaction": "86400000",
+   "hbase.client.keyvalue.maxsize": "10485760",
+   "hbase.client.scanner.caching": "100",
+   "hbase.cluster.distributed": "true",
+   "hbase.rootdir": "hdfs://__MASTERNODE__:8020/apps/hbase/data",
+   "hbase.regionserver.kerberos.principal": "rs/_HOST@EXAMPLE.COM",
+   "hbase.regionserver.global.memstore.lowerLimit": "0.35",
+   "hbase.regionserver.handler.count": "30",
+   "hbase.tmp.dir": "/var/log/hbase",
+   "hbase.hregion.memstore.mslab.enabled": "true",
+   "hbase.regionserver.optionalcacheflushinterval": "10000",
+   "hbase.hstore.blockingStoreFiles": "7",
+   "hbase.zookeeper.useMulti": "true",
+   "hfile.block.cache.size": "0.25",
+   "hbase.hregion.max.filesize": "1073741824",
+   "hbase.master.lease.thread.wakefrequency": "3000",
+   "hbase.hregion.memstore.block.multiplier": "2",
+   "hbase.master.kerberos.principal": "hm/_HOST@EXAMPLE.COM",
+   "zookeeper.session.timeout": "60000",
+   "hbase.zookeeper.property.clientPort": "2181",
+   "hbase.master.keytab.file": "/etc/security/keytabs/hm.service.keytab",
+   "dfs.client.read.shortcircuit": "true",
+   "hbase.zookeeper.quorum": "__HOST_ZOOKEEPER__"
+ }
data/resources/ambari-configurations/HDFS/default_hdfs-site
@@ -0,0 +1,49 @@
+ {
+   "dfs.https.address": "__MASTERNODE__:50470",
+   "dfs.http.address": "__MASTERNODE__:50070",
+   "dfs.datanode.max.xcievers": "4096",
+   "dfs.safemode.threshold.pct": "1.0f",
+   "dfs.datanode.http.address": "0.0.0.0:50075",
+   "dfs.datanode.address": "0.0.0.0:50010",
+   "dfs.datanode.failed.volumes.tolerated": "0",
+   "dfs.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab",
+   "ipc.server.read.threadpool.size": "5",
+   "dfs.datanode.data.dir.perm": "750",
+   "dfs.datanode.du.pct": "0.85f",
+   "ipc.server.max.response.size": "5242880",
+   "dfs.datanode.du.reserved": "1",
+   "dfs.datanode.socket.write.timeout": "0",
+   "dfs.namenode.handler.count": "100",
+   "dfs.web.ugi": "gopher,gopher",
+   "dfs.datanode.keytab.file": "/etc/security/keytabs/dn.service.keytab",
+   "dfs.name.dir": "/hadoop/hdfs/namenode",
+   "dfs.heartbeat.interval": "3",
+   "dfs.balance.bandwidthPerSec": "6250000",
+   "dfs.permissions.supergroup": "hdfs",
+   "dfs.cluster.administrators": " hdfs",
+   "dfs.secondary.namenode.keytab.file": "/etc/security/keytabs/nn.service.keytab",
+   "dfs.hosts.exclude": "/etc/hadoop/dfs.exclude",
+   "dfs.permissions": "true",
+   "dfs.namenode.kerberos.https.principal": "host/_HOST@EXAMPLE.COM",
+   "dfs.datanode.kerberos.principal": "dn/_HOST@EXAMPLE.COM",
+   "dfs.web.authentication.kerberos.keytab": "/etc/security/keytabs/spnego.service.keytab",
+   "dfs.webhdfs.enabled": "false",
+   "dfs.secondary.namenode.kerberos.https.principal": "host/_HOST@EXAMPLE.COM",
+   "dfs.secondary.https.port": "50490",
+   "dfs.block.size": "134217728",
+   "dfs.datanode.ipc.address": "0.0.0.0:8010",
+   "dfs.data.dir": "/hadoop/hdfs/data",
+   "dfs.namenode.check.stale.datanode": "true",
+   "dfs.access.time.precision": "0",
+   "dfs.blockreport.initialDelay": "120",
+   "dfs.https.port": "50470",
+   "dfs.block.access.token.enable": "true",
+   "dfs.umaskmode": "077",
+   "dfs.replication": "3",
+   "dfs.secondary.http.address": "__MASTERNODE__:50090",
+   "dfs.secondary.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM",
+   "dfs.replication.max": "50",
+   "dfs.hosts": "/etc/hadoop/dfs.include",
+   "dfs.namenode.kerberos.principal": "nn/_HOST@EXAMPLE.COM",
+   "dfs.block.local-path-access.user": "hbase"
+ }
data/resources/ambari-configurations/HIVE/default_hive-site
@@ -0,0 +1,33 @@
+ {
+   "hive.mapred.reduce.tasks.speculative.execution": "false",
+   "fs.file.impl.disable.cache": "true",
+   "hive.enforce.sorting": "true",
+   "javax.jdo.option.ConnectionPassword": "admin",
+   "hive.metastore.uris": "thrift://__MASTERNODE__:9083",
+   "hive.optimize.bucketmapjoin.sortedmerge": "true",
+   "hive.optimize.reducededuplication.min.reducer": "1",
+   "hive.metastore.local": "false",
+   "javax.jdo.option.ConnectionURL": "jdbc:mysql://__MASTERNODE__/hive?createDatabaseIfNotExist=true",
+   "hive.semantic.analyzer.factory.impl": "org.apache.hivealog.cli.HCatSemanticAnalyzerFactory",
+   "hive.metastore.client.socket.timeout": "60",
+   "hive.metastore.warehouse.dir": "/apps/hive/warehouse",
+   "hive.server2.enable.doAs": "true",
+   "hive.auto.convert.join.noconditionaltask.size": "1000000000",
+   "hive.auto.convert.sortmerge.join": "true",
+   "javax.jdo.option.ConnectionDriverName": "com.mysql.jdbc.Driver",
+   "hive.map.aggr": "true",
+   "hive.enforce.bucketing": "true",
+   "hadoop.clientside.fs.operations": "true",
+   "hive.metastore.cache.pinobjtypes": "Table,Database,Type,FieldSchema,Order",
+   "hive.auto.convert.sortmerge.join.noconditionaltask": "true",
+   "hive.mapjoin.bucket.cache.size": "10000",
+   "hive.metastore.execute.setugi": "true",
+   "fs.hdfs.impl.disable.cache": "true",
+   "hive.optimize.bucketmapjoin": "true",
+   "hive.security.authorization.enabled": "true",
+   "hive.auto.convert.join.noconditionaltask": "true",
+   "javax.jdo.option.ConnectionUserName": "hive",
+   "hive.auto.convert.join": "true",
+   "hive.security.authorization.manager": "org.apache.hcatalog.security.HdfsAuthorizationProvider",
+   "hive.optimize.mapjoin.mapreduce": "true"
+ }
data/resources/ambari-configurations/MAPREDUCE/default_mapred-site
@@ -0,0 +1,72 @@
+ {
+   "mapred.reduce.tasks.speculative.execution": "false",
+   "mapred.tasktracker.map.tasks.maximum": "4",
+   "mapred.hosts.exclude": "/etc/hadoop/mapred.exclude",
+   "mapreduce.tasktracker.group": "hadoop",
+   "mapred.job.reduce.input.buffer.percent": "0.0",
+   "mapreduce.reduce.input.limit": "10737418240",
+   "mapred.map.tasks.speculative.execution": "false",
+   "mapreduce.fileoutputcommitter.marksuccessfuljobs": "false",
+   "mapreduce.jobtracker.kerberos.principal": "jt/_HOST@EXAMPLE.COM",
+   "mapred.output.compression.type": "BLOCK",
+   "mapred.userlog.retain.hours": "24",
+   "mapred.job.reuse.jvm.num.tasks": "1",
+   "mapred.system.dir": "/mapred/system",
+   "mapreduce.tasktracker.keytab.file": "/etc/security/keytabs/tt.service.keytab",
+   "mapred.task.tracker.task-controller": "org.apache.hadoop.mapred.DefaultTaskController",
+   "io.sort.factor": "100",
+   "mapreduce.history.server.http.address": "__MASTERNODE__:51111",
+   "mapred.jobtracker.maxtasks.per.job": "-1",
+   "mapred.cluster.reduce.memory.mb": "-1",
+   "io.sort.spill.percent": "0.9",
+   "mapred.reduce.parallel.copies": "30",
+   "tasktracker.http.threads": "50",
+   "mapred.healthChecker.script.path": "file:////mapred/jobstatus",
+   "mapreduce.cluster.administrators": " hadoop",
+   "jetty.connector": "org.mortbay.jetty.nio.SelectChannelConnector",
+   "mapred.inmem.merge.threshold": "1000",
+   "mapred.job.reduce.memory.mb": "-1",
+   "mapred.job.map.memory.mb": "-1",
+   "mapreduce.jobhistory.kerberos.principal": "jt/_HOST@EXAMPLE.COM",
+   "mapred.cluster.map.memory.mb": "-1",
+   "mapred.jobtracker.retirejob.interval": "21600000",
+   "mapred.job.tracker.persist.jobstatus.hours": "1",
+   "mapred.cluster.max.map.memory.mb": "-1",
+   "mapred.reduce.slowstart.completed.maps": "0.05",
+   "hadoop.job.history.user.location": "none",
+   "mapred.job.tracker.handler.count": "50",
+   "mapred.healthChecker.interval": "135000",
+   "mapred.jobtracker.blacklist.fault-bucket-width": "15",
+   "mapred.task.timeout": "600000",
+   "mapred.jobtracker.taskScheduler": "org.apache.hadoop.mapred.CapacityTaskScheduler",
+   "mapred.max.tracker.blacklists": "16",
+   "mapreduce.jobhistory.keytab.file": "/etc/security/keytabs/jt.service.keytab",
+   "mapred.map.output.compression.codec": "org.apache.hadoop.io.compress.SnappyCodec",
+   "mapred.jobtracker.retirejob.check": "10000",
+   "mapred.tasktracker.tasks.sleeptime-before-sigkill": "250",
+   "mapreduce.jobtracker.staging.root.dir": "/user",
+   "mapred.job.shuffle.input.buffer.percent": "0.7",
+   "mapred.jobtracker.completeuserjobs.maximum": "5",
+   "mapred.job.tracker.persist.jobstatus.active": "false",
+   "mapred.tasktracker.reduce.tasks.maximum": "2",
+   "mapreduce.history.server.embedded": "false",
+   "mapred.job.tracker.http.address": "__MASTERNODE__:50030",
+   "mapred.queue.names": "default",
+   "mapred.job.tracker.history.completed.location": "/mapred/history/done",
+   "mapred.child.java.opts": "-server -Xmx768m -Djava.net.preferIPv4Stack=true",
+   "mapred.jobtracker.blacklist.fault-timeout-window": "180",
+   "mapreduce.jobtracker.split.metainfo.maxsize": "50000000",
+   "mapred.healthChecker.script.timeout": "60000",
+   "mapred.jobtracker.restart.recover": "false",
+   "mapreduce.jobtracker.keytab.file": "/etc/security/keytabs/jt.service.keytab",
+   "mapred.hosts": "/etc/hadoop/mapred.include",
+   "mapred.local.dir": "/hadoop/mapred",
+   "mapreduce.tasktracker.kerberos.principal": "tt/_HOST@EXAMPLE.COM",
+   "mapred.job.tracker.persist.jobstatus.dir": "/etc/hadoop/health_check",
+   "mapred.job.tracker": "__MASTERNODE__:50300",
+   "io.sort.record.percent": ".2",
+   "mapred.cluster.max.reduce.memory.mb": "-1",
+   "io.sort.mb": "200",
+   "mapred.job.shuffle.merge.percent": "0.66",
+   "mapred.child.root.logger": "INFO,TLA"
+ }
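
The default_* templates above are not usable verbatim: the __MASTERNODE__ and __HOST_ZOOKEEPER__ markers have to be replaced with real host names before the properties can be applied. Wherever the gem performs that substitution, the following is only an illustrative sketch of the placeholder convention; the helper name, file path and host names are hypothetical.

    require 'json'

    # Hypothetical helper, not part of the gem: load a default_*-site template
    # and fill in the cluster-specific hosts before the properties are used.
    def render_site_template(path, masternode, zookeeper_hosts)
      raw = File.read(path)
      raw = raw.gsub('__MASTERNODE__', masternode)
      raw = raw.gsub('__HOST_ZOOKEEPER__', zookeeper_hosts.join(','))
      JSON.parse(raw) # => { "property.name" => "value", ... }
    end

    props = render_site_template(
      'data/resources/ambari-configurations/HBASE/default_hbase-site',
      'node01.example.com',
      ['node01.example.com', 'node02.example.com']
    )
    puts props['hbase.rootdir'] # => "hdfs://node01.example.com:8020/apps/hbase/data"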