elastic-mapreduce 0.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- data/CHANGELOG +51 -0
- data/Gemfile +13 -0
- data/Gemfile.lock +16 -0
- data/LICENSE.txt +393 -0
- data/NOTICE.txt +26 -0
- data/README +1007 -0
- data/Rakefile +35 -0
- data/VERSION +1 -0
- data/bin/elastic-mapreduce +27 -0
- data/cacert.pem +280 -0
- data/elastic-mapreduce.gemspec +104 -0
- data/lib/amazon/aws/exceptions.rb +211 -0
- data/lib/amazon/coral/awsquery.rb +128 -0
- data/lib/amazon/coral/awsquerychainhelper.rb +92 -0
- data/lib/amazon/coral/awsqueryhandler.rb +170 -0
- data/lib/amazon/coral/awsqueryurihandler.rb +34 -0
- data/lib/amazon/coral/call.rb +68 -0
- data/lib/amazon/coral/dispatcher.rb +33 -0
- data/lib/amazon/coral/ec2client.rb +91 -0
- data/lib/amazon/coral/elasticmapreduceclient.rb +198 -0
- data/lib/amazon/coral/handler.rb +20 -0
- data/lib/amazon/coral/httpdelegationhelper.rb +27 -0
- data/lib/amazon/coral/httpdestinationhandler.rb +36 -0
- data/lib/amazon/coral/httphandler.rb +124 -0
- data/lib/amazon/coral/identityhandler.rb +32 -0
- data/lib/amazon/coral/job.rb +25 -0
- data/lib/amazon/coral/logfactory.rb +35 -0
- data/lib/amazon/coral/option.rb +70 -0
- data/lib/amazon/coral/orchestrator.rb +49 -0
- data/lib/amazon/coral/querystringmap.rb +93 -0
- data/lib/amazon/coral/service.rb +130 -0
- data/lib/amazon/coral/simplelog.rb +98 -0
- data/lib/amazon/coral/urlencoding.rb +19 -0
- data/lib/amazon/coral/v0signaturehandler.rb +33 -0
- data/lib/amazon/coral/v0signaturehelper.rb +83 -0
- data/lib/amazon/coral/v1signaturehandler.rb +32 -0
- data/lib/amazon/coral/v1signaturehelper.rb +58 -0
- data/lib/amazon/coral/v2signaturehandler.rb +46 -0
- data/lib/amazon/coral/v2signaturehelper.rb +76 -0
- data/lib/amazon/retry_delegator.rb +66 -0
- data/lib/amazon/stderr_logger.rb +23 -0
- data/lib/client.rb +117 -0
- data/lib/commands.rb +1690 -0
- data/lib/credentials.rb +86 -0
- data/lib/ec2_client_wrapper.rb +73 -0
- data/lib/json/lexer.rb +294 -0
- data/lib/json/objects.rb +200 -0
- data/lib/json.rb +58 -0
- data/lib/simple_executor.rb +11 -0
- data/lib/simple_logger.rb +38 -0
- data/lib/uuidtools/version.rb +32 -0
- data/lib/uuidtools.rb +655 -0
- data/run_tests.rb +8 -0
- data/samples/freebase/code/freebase_jobflow.json +44 -0
- data/samples/similarity/lastfm_jobflow.json +78 -0
- data/samples/wordSplitter.py +18 -0
- data/tests/commands_test.rb +587 -0
- data/tests/credentials.json +7 -0
- data/tests/example.json +14 -0
- metadata +154 -0
|
@@ -0,0 +1,587 @@
|
|
|
1
|
+
#
|
|
2
|
+
# Copyright 2008-2010 Amazon.com, Inc. or its affiliates. All Rights Reserved.
|
|
3
|
+
|
|
4
|
+
require 'commands'
|
|
5
|
+
require 'test/unit'
|
|
6
|
+
|
|
7
|
+
module Commands
|
|
8
|
+
|
|
9
|
+
# Test double for the shell-command executor: swallows every command so
# tests never spawn real subprocesses (e.g. ssh/scp invocations).
class MockExecutor
  # Accepts any command line and does nothing; returns nil like the
  # real executor's fire-and-forget path.
  def exec(cmd)
  end
end
|
|
13
|
+
|
|
14
|
+
# Test double for the Elastic MapReduce service client. It answers the
# handful of API calls the command layer makes with canned responses so
# tests run without AWS credentials or network access.
class MockEMRClient
  # Job flow state reported by DescribeJobFlows; tests may override it
  # (e.g. to "TERMINATED") to exercise state-dependent behavior.
  attr_accessor :state

  # config is retained but unused; mirrors the real client's signature.
  def initialize(config)
    @config = config
    @state  = "RUNNING"
    # Canned per-jobflow step lists keyed by jobflow id: each entry is a
    # completed "Setup Hive" step, differing only in the hive-version args.
    @step_map = {
      "j-hive-installed"     => hive_setup_step([]),
      "j-hive-0.5-installed" => hive_setup_step(["--hive-versions", "0.5"]),
      "j-hive-0.7-installed" => hive_setup_step(["--hive-versions", "0.7"]),
    }
  end

  # Factory hook used by the command layer in place of the real
  # AWS-query client constructor.
  def self.new_aws_query(config)
    MockEMRClient.new(config)
  end

  # Returns a single canned job flow. If a known jobflow id is requested,
  # its canned hive-setup step is attached; otherwise Steps is empty.
  def DescribeJobFlows(args)
    requested = args["JobFlowIds"]
    steps =
      if requested && @step_map.has_key?(requested.first)
        [@step_map[requested.first]]
      else
        []
      end
    {
      "JobFlows" => [
        {
          "LogUri" => "s3n://testing/",
          "Name" => "Development Job Flow (requires manual termination)",
          "BootstrapActions" => [],
          "ExecutionStatusDetail" => {
            "EndDateTime" => 1286584312.0,
            "CreationDateTime" => 1286584224.0,
            "LastStateChangeReason" => "Terminated by user request",
            "State" => @state,
            "StartDateTime" => nil,
            "ReadyDateTime" => nil
          },
          "Steps" => steps,
          "JobFlowId" => "j-2HWO50OUKNMHG",
          "Instances" => {
            "Ec2KeyName" => "richcole-test",
            "InstanceCount" => 5,
            "NormalizedInstanceHours" => nil,
            "Placement" => { "AvailabilityZone" => "us-east-1d" },
            "KeepJobFlowAliveWhenNoSteps" => true,
            "SlaveInstanceType" => "m2.xlarge",
            "MasterInstanceType" => "m2.xlarge",
            "MasterPublicDnsName" => nil,
            "MasterInstanceId" => nil,
            "InstanceGroups" => [
              instance_group(
                "Name" => "Task Instance Group",
                "InstanceRole" => "TASK",
                "LastStateChangeReason" => "",
                "InstanceGroupId" => "ig-D2NC23WFSOOU",
                "State" => "RUNNING",
                "InstanceType" => "c1.medium",
                "StartDateTime" => 1286862907.0,
                "InstanceRunningCount" => 2,
                "ReadyDateTime" => 1286862907.0,
                "InstanceRequestCount" => 2
              ),
              instance_group(
                "Name" => "Master Instance Group",
                "InstanceRole" => "MASTER",
                "LastStateChangeReason" => "",
                "InstanceGroupId" => "ig-1BFN7TCX7YE5Y",
                "State" => "RUNNING",
                "InstanceType" => "m1.small",
                "StartDateTime" => 1286862866.0,
                "InstanceRunningCount" => 1,
                "ReadyDateTime" => 1286862906.0,
                "InstanceRequestCount" => 1
              ),
              instance_group(
                "Name" => "Core Instance Group",
                "InstanceRole" => "CORE",
                "LastStateChangeReason" => "Expanding cluster",
                "InstanceGroupId" => "ig-2EUIGTIPDLTXW",
                "State" => "RESIZING",
                "InstanceType" => "m1.large",
                "StartDateTime" => 1286862907.0,
                "InstanceRunningCount" => 1,
                "ReadyDateTime" => 1286862907.0,
                "InstanceRequestCount" => 3
              )
            ]
          },
          "HadoopVersion" => "0.20"
        }
      ]
    }
  end

  # Always pretends a new job flow was started.
  def RunJobFlow(opts)
    { "JobFlowId" => "j-ABABABABA" }
  end

  # The remaining mutating calls are accepted and ignored (return nil),
  # matching the real client's lack of a meaningful response body.
  def AddJobFlowSteps(opts)
    nil
  end

  def TerminateJobFlows(opts)
    nil
  end

  def ModifyInstanceGroups(opts)
    nil
  end

  def AddInstanceGroups(opts)
    nil
  end

  private

  # Builds the canned completed "Setup Hive" step; extra_args is appended
  # to the hive-script invocation (used for --hive-versions variants).
  def hive_setup_step(extra_args)
    {
      "ExecutionStatusDetail" => {
        "StartDateTime" => 1291074747.0,
        "EndDateTime" => 1291074776.0,
        "LastStateChangeReason" => nil,
        "CreationDateTime" => 1291074521.0,
        "State" => "COMPLETED"
      },
      "StepConfig" => {
        "Name" => "Setup Hive",
        "HadoopJarStep" => {
          "Jar" => "s3://us-east-1.elasticmapreduce/libs/script-runner/script-runner.jar",
          "Args" => [
            "s3://us-east-1.elasticmapreduce/libs/hive/hive-script",
            "--base-path", "s3://us-east-1.elasticmapreduce/libs/hive/",
            "--install-hive"
          ] + extra_args,
          "Properties" => [],
          "MainClass" => nil
        },
        "ActionOnFailure" => "CONTINUE"
      }
    }
  end

  # Merges the fields common to every canned instance group with the
  # group-specific overrides.
  def instance_group(overrides)
    {
      "SpotPrice" => nil,
      "EndDateTime" => nil,
      "CreationDateTime" => 1286862675.0,
      "LaunchGroup" => nil,
      "Market" => "ON_DEMAND"
    }.merge(overrides)
  end
end
|
|
184
|
+
|
|
185
|
+
# Test double for the logger: discards both normal output and trace
# output so test runs stay silent.
class MockLogger
  # Drops an ordinary log line.
  def puts(msg)
  end

  # Drops a trace/debug line.
  def trace(msg)
  end
end
|
|
192
|
+
|
|
193
|
+
# End-to-end tests for the command-line parsing layer: each test feeds a
# synthetic argv string through Commands.create_and_execute_commands with
# mock collaborators and inspects the command objects that come back.
class CommandTest < Test::Unit::TestCase

  def setup
    @client_class = MockEMRClient #FIXME: make this return always the same object
    @logger       = MockLogger.new
    @executor     = MockExecutor.new
  end

  # Parses the given argv string (whitespace-split) with mock client,
  # logger and executor; execution is requested with exec=false.
  def create_and_execute_commands(args)
    ::Commands.create_and_execute_commands(args.split(/\s+/), @client_class, @logger, @executor, false)
  end

  def test_modify_instance_group_command
    args = "-c tests/credentials.json --modify-instance-group core --instance-count 10 --jobflow j-ABABABA"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.size)
    command = @commands.last
    assert(command.is_a?(ModifyInstanceGroupCommand))
    assert_equal(10, command.instance_count)
    assert_not_nil(command.instance_group_id)
    assert_nil(command.instance_type)
    assert_equal("CORE", command.instance_role)
  end

  def test_one
    args = "-c tests/credentials.json --create --alive --num-instances 10 " +
      "--slave-instance-type m1.small --master-instance-type m1.large"
    @commands = create_and_execute_commands(args)
  end

  def test_two
    args = "-c tests/credentials.json --create --alive --num-instances 10 " +
      "--slave-instance-type m1.small --master-instance-type m1.large " +
      "--instance-group TASK --instance-type m1.small --instance-count 10 " +
      "--bootstrap-action s3://elasticmapreduce/scripts/configure-hadoop " +
      "--arg s3://mybucket/config/custom-site-config.xml "
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
  end

  def test_three
    args = "-c tests/credentials.json --create --alive --num-instances 10 " +
      "--slave-instance-type m1.small --master-instance-type m1.large " +
      "--instance-group TASK --instance-type m1.small --instance-count 10 " +
      "--bootstrap-action s3://elasticmapreduce/scripts/configure-hadoop " +
      "--arg s3://mybucket/config/custom-site-config.xml " +
      "--pig-script s3://elasticmapreduce/samples/sample.pig " +
      "--pig-interactive"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    create_command = @commands.commands.first
    assert_equal(2, create_command.step_commands.size)
    assert_equal(PigInteractiveCommand, create_command.step_commands[0].class)
  end

  def test_four
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive " +
      "--hive-script s3://maps.google.com --enable-debugging " +
      "--log-uri s3://somewhere.com/logs/"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    assert_equal(3, @commands.commands[0].step_commands.size)
    steps = @commands.commands[0].step_commands
    assert_equal(EnableDebuggingCommand, steps[0].class)
    assert_equal(HiveInteractiveCommand, steps[1].class)
    assert_equal(HiveScriptCommand, steps[2].class)
  end

  # Convenience accessor for the Args list of a command's first step.
  def hadoop_jar_step_args(command)
    command.steps[0]["HadoopJarStep"]["Args"]
  end

  # Prefix of the Args list emitted for every pig-script step.
  def pig_script_command_default_args
    [
      "s3://us-east-1.elasticmapreduce/libs/pig/pig-script",
      "--base-path",
      "s3://us-east-1.elasticmapreduce/libs/pig/",
      "--run-pig-script",
      "--args",
      "-f"
    ]
  end

  # Prefix of the Args list emitted for every hive-script step.
  def hive_script_command_default_args
    [
      "s3://us-east-1.elasticmapreduce/libs/hive/hive-script",
      "--base-path",
      "s3://us-east-1.elasticmapreduce/libs/hive/",
      "--run-hive-script",
      "--args",
      "-f"
    ]
  end

  def test_pig_arg
    # Script passed via --args vs. directly after --pig-script must
    # produce the same final HadoopJarStep Args.
    args1 = "-a ACCESS_ID -p SECRET_KEY --create --alive " +
      "--pig-script --args s3://maps.google.com --args -p,INPUT=s3://maps.google.com/test.pig"
    args2 = "-a ACCESS_ID -p SECRET_KEY --create --alive " +
      "--pig-script s3://maps.google.com --args -p,INPUT=s3://maps.google.com/test.pig"
    @commands = create_and_execute_commands(args1)
    steps = @commands.commands.first.step_commands
    pig_command = steps[1]
    assert_nil(pig_command.arg)
    assert_equal(["s3://maps.google.com", "-p", "INPUT=s3://maps.google.com/test.pig"], pig_command.args)
    assert_equal(1, pig_command.steps.size)
    args = pig_script_command_default_args
    args << "s3://maps.google.com" << "-p" << "INPUT=s3://maps.google.com/test.pig"
    assert_equal(args, hadoop_jar_step_args(pig_command))

    @commands = create_and_execute_commands(args2)
    steps = @commands.commands.first.step_commands
    pig_command = steps[1]
    assert_equal("s3://maps.google.com", pig_command.arg)
    assert_equal(["-p", "INPUT=s3://maps.google.com/test.pig"], pig_command.args)
    assert_equal(1, pig_command.steps.size)
    assert_equal(args, hadoop_jar_step_args(pig_command))
  end

  def test_hive
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive " +
      "--hive-script --args s3://maps.google.com "
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[1]

    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal(["s3://maps.google.com"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args << "s3://maps.google.com"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  def test_install_hive_version
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive " +
      "--hive-script --args s3://maps.google.com --hive-versions 0.5"
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[1]
    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal(["s3://maps.google.com"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args.insert(3, "--hive-versions")
    args.insert(4, "0.5")
    args << "s3://maps.google.com"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  def test_run_hive_script_same_version
    # Hive 0.5 is already installed on this mock jobflow, so no install
    # step should be added ahead of the script step.
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-hive-0.5-installed " +
      "--hive-script --args s3://maps.google.com --hive-versions 0.5"
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[0]

    assert_equal(1, @commands.commands.size)
    assert_equal(1, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal(["s3://maps.google.com"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args.insert(3, "--hive-versions")
    args.insert(4, "0.5")
    args << "s3://maps.google.com"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  def test_run_hive_script_different_version
    # Requesting hive 0.7 on a jobflow with only 0.5 installed must add
    # an install step before the script step.
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-hive-0.5-installed " +
      "--hive-script --args s3://maps.google.com --hive-versions 0.7"
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[1]

    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal(["s3://maps.google.com"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args.insert(3, "--hive-versions")
    args.insert(4, "0.7")
    args << "s3://maps.google.com"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  # NOTE(review): method name keeps the original's "propogation" typo so
  # the public test name (and any references to it) stays stable.
  def test_hive_script_step_action_propogation
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-hive-0.5-installed " +
      "--hive-script --args s3://maps.google.com --hive-versions 0.7 " +
      "--step-action CONTINUE"
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[0]
    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveInteractiveCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal([], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    assert_equal("CONTINUE", hive_command.steps.first["ActionOnFailure"])
  end

  def test_hive_no_create
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-ABABABABA " +
      "--hive-script --args s3://maps.google.com "
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[1]

    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_nil(hive_command.arg)
    assert_equal(["s3://maps.google.com"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args << "s3://maps.google.com"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  def test_hive_no_create2
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-ABABABABA " +
      "--hive-script s3://maps.google.com --args -d,options "
    @commands = create_and_execute_commands(args)
    create_command = @commands.commands.first
    steps = create_command.step_commands
    install_hive_command = steps[0]
    hive_command = steps[1]

    assert_equal(1, @commands.commands.size)
    assert_equal(2, steps.size)
    assert_equal(HiveScriptCommand, hive_command.class)
    assert_equal("s3://maps.google.com", hive_command.arg)
    assert_equal(["-d", "options"], hive_command.args)
    assert_equal(1, hive_command.steps.size)
    args = hive_script_command_default_args
    args << "s3://maps.google.com" << "-d" << "options"
    assert_equal(args, hadoop_jar_step_args(hive_command))
  end

  def test_five
    args = "-a ACCESS_ID -p SECRET_KEY -j j-ABABABAABA --hive-script " +
      "s3://maps.google.com --enable-debugging --log-uri s3://somewhere.com/logs/"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    assert_equal(3, @commands.commands[0].step_commands.size)
    steps = @commands.commands[0].step_commands
    assert_equal(EnableDebuggingCommand, steps[0].class)
    assert_equal(HiveInteractiveCommand, steps[1].class)
    assert_equal(HiveScriptCommand, steps[2].class)
  end

  def test_six
    args = "-a ACCESS_ID -p SECRET_KEY --list --active"
    @commands = create_and_execute_commands(args)
  end

  def test_seven
    args = "-a ACCESS_ID -p SECRET_KEY --list --active --terminate"
    @commands = create_and_execute_commands(args)
  end

  def test_eight
    args = "-a ACCESS_ID -p SECRET_KEY --terminate -j j-ABABABABA"
    @commands = create_and_execute_commands(args)
  end

  def test_create_one
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive --name TestFlow"
    @commands = create_and_execute_commands(args)
  end

  def test_ssh_no_jobflow
    args = "-a ACCESS_ID -p SECRET_KEY --ssh"
    assert_raise RuntimeError do
      @commands = create_and_execute_commands(args)
    end
  end

  def test_ssh_too_many_jobflows
    args = "-a ACCESS_ID -p SECRET_KEY -j j-ABABABA j-ABABABA --ssh"
    assert_raise RuntimeError do
      @commands = create_and_execute_commands(args)
    end
  end

  def test_jar_with_mainclass
    # Multi-line string is fine: the parser splits argv on /\s+/.
    args = "-a ACCESS_ID -p SECRET_KEY -j j-3TRNB9E4GU2NI
           --jar s3://my-example-bucket/wordcount.jar
           --main-class org.myorg.WordCount
           --arg s3://elasticmapreduce/samples/wordcount/input/
           --arg hdfs:///wordcount/output/1
           "

    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    jar_step = @commands.commands.first.step_commands.first
    assert_equal([{"HadoopJarStep"=>{"Jar"=>"s3://my-example-bucket/wordcount.jar",
      "Args"=>["s3://elasticmapreduce/samples/wordcount/input/", "hdfs:///wordcount/output/1"],
      "MainClass"=>"org.myorg.WordCount"}, "ActionOnFailure"=>"CANCEL_AND_WAIT", "Name"=>"Example Jar Step"}],
      jar_step.steps)
  end

  def test_ssh
    args = "-a ACCESS_ID -p SECRET_KEY --key-pair-file test.pem -j j-ABABABA --ssh"
    @commands = create_and_execute_commands(args)
  end

  def test_unarrest
    args = "-a ACCESS_ID -p SECRET_KEY --unarrest-instance-group core -j j-ABABABA"
    @commands = create_and_execute_commands(args)
  end

  def test_late_name
    # --name may appear after other options and must still take effect.
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive --enable-debugging --hive-interactive --name MyHiveJobFlow --log-uri=s3://haijun-test/logs"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    assert_equal("MyHiveJobFlow", @commands.commands.first.jobflow_name)
  end

  def test_ic_it
    args = "-a ACCESS_ID -p SECRET_KEY --create --alive --enable-debugging --hive-interactive --instance-count 5 --instance-type m1.small --name MyHiveJobFlow --log-uri=s3://haijun-test/logs"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    create_command = @commands.commands.first
    assert_equal("MyHiveJobFlow", create_command.jobflow_name)
    assert_equal(5, create_command.instance_count)
    assert_equal("m1.small", create_command.instance_type)
  end

  def test_json
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-ABABABABA --json tests/example.json --param <bucket>=mybucket --param mybucket=yourbucket"
    @commands = create_and_execute_commands(args)
    assert_equal(1, @commands.commands.size)
    json_command = @commands.commands.first.step_commands.first
    assert_equal("tests/example.json", json_command.arg)
    # Params are applied in order, so <bucket> -> mybucket -> yourbucket.
    assert_equal({:key => "<bucket>", :value => "mybucket"}, json_command.variables[0])
    assert_equal({:key => "mybucket", :value => "yourbucket"}, json_command.variables[1])

    step = json_command.steps.first

    expected_step = {
      "HadoopJarStep" => {
        "Jar" => "/home/hadoop/contrib/streaming/hadoop-0.18-streaming.jar",
        "Args" => ["-input", "s3n://elasticmapreduce/samples/wordcount/input",
          "-output", "s3n://yourbucket/result", "-mapper", "s3://yourbucket/lib/mapper"]
      },
      "ActionOnFailure" => "CONTINUE",
      "Name" => "Example Step"
    }

    assert_equal(expected_step, step)
  end

  def test_ic_it2
    args = "-a ACCESS_ID -p SECRET_KEY --jobflow j-ABABABAB --resize-jobflow --add-instance-group task --instance-type m1.large --instance-count 5"
    @commands = create_and_execute_commands(args)
    assert_equal(2, @commands.commands.size)
    add_group_command = @commands.commands[1]
    assert_equal(5, add_group_command.instance_count)
    assert_equal("m1.large", add_group_command.instance_type)
    assert_equal("TASK", add_group_command.instance_role)
  end

  def test_command_option_mismatch
    args = "-c tests/credentials.json --instance-group core --instance-count 10"
    assert_raise RuntimeError do
      @commands = create_and_execute_commands(args)
    end
  end

end
|
|
587
|
+
end
|
data/tests/example.json
ADDED
|
@@ -0,0 +1,14 @@
|
|
|
1
|
+
[
|
|
2
|
+
{
|
|
3
|
+
"Name": "Example Step",
|
|
4
|
+
"ActionOnFailure": "CONTINUE",
|
|
5
|
+
"HadoopJarStep": {
|
|
6
|
+
"Jar": "/home/hadoop/contrib/streaming/hadoop-0.18-streaming.jar",
|
|
7
|
+
"Args": [
|
|
8
|
+
"-input", "s3n://elasticmapreduce/samples/wordcount/input",
|
|
9
|
+
"-output", "s3n://<bucket>/result",
|
|
10
|
+
"-mapper", "s3://<bucket>/lib/mapper"
|
|
11
|
+
]
|
|
12
|
+
}
|
|
13
|
+
}
|
|
14
|
+
]
|