hadupils 0.5.0 → 0.6.0

data/CHANGELOG.md CHANGED
@@ -53,6 +53,19 @@
  * Introduced Hadupils::Extensions::Dfs::TmpFile
  * Introduced Hadupils::Hacks module for String Refinements (self.randcase)
    for Ruby 2+ and Monkey Patching for the String class for Ruby < 2.0
+ * Introduced $HADUPILS_BASE_TMP_PATH and $HADUPILS_TMPDIR_PATH for use with
+   commands: mktemp, withtmpdir and rm
  * Some refactoring and fixed a bug with the specs for Mac OS X
  * Tweaked old unit tests and added new ones for the new features
  * Updated the README with examples
+
+ ### 0.6.0
+ * Renamed $HADUPILS_BASE_TMP_PATH to $HADUPILS_TMP_PATH (less typing)
+ * Introduced $HADUPILS_TMP_TTL for use with command: cleanup
+ * Introduced Hadupils::Commands::Cleanup to identify and remove old hadupils tmp DFS
+   directories/files where all files within any hadupils-tmpdir* in $HADUPILS_TMP_PATH
+   are older than $HADUPILS_TMP_TTL, the TTL (Time.now.utc - $HADUPILS_TMP_TTL)
+ * The Hadupils::Runners::Base#execute! method now uses Open3.popen3 or Kernel.system
+ * Fixed 1.8.7 compatibility bug with the Kernel.system call in
+   Hadupils::Extensions::Hive::AuxJarsPath.build_archive
+ * Some refactoring
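The cleanup rule described in the 0.6.0 notes reduces to a single cutoff comparison. A minimal standalone sketch of that rule, with made-up file times and the default TTL (none of these values come from the gem itself):

```ruby
require 'time'

ttl    = 86400                # seconds; the $HADUPILS_TMP_TTL default
cutoff = Time.now.utc - ttl   # anything modified before this is "expired"

# A hadupils-tmp* directory qualifies for removal only when *all* files
# inside it are older than the cutoff. Example modification times:
file_mtimes = [Time.parse('2013-10-24 16:23Z'), Time.parse('2013-10-25 09:00Z')]
puts file_mtimes.all? { |mtime| mtime < cutoff }
```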
data/README.md CHANGED
@@ -4,20 +4,31 @@ hadupils
 Operating environment oriented utilities for hadoop (Hadoop + Utils => hadupils)
 
 ## Shell Environment Variables
-- $HADUPILS_BASE_TMP_PATH
+- $HADUPILS_TMP_PATH
   * This is the base path for DFS temporary file/directory creation
   * Defaults to '/tmp' on the DFS (only set this if you need another base directory)
+  * Command 'cleanup' will use this ENV var for the base tmp_path to look for /hadupils-tmp*/
+    tmpdirs if the tmp_path isn't set through the command line
+  * Other commands that use this are: mktemp, withtmpdir
+
 - $HADUPILS_TMPDIR_PATH
   * Set when the subcommand is executed in a subshell via the hadupils 'withtmpdir' command
   * The value comes from the tmp directory that hadupils created for the subcommand
   * It will clean up (remove) the directory if the subcommand returns an exitstatus of zero
+- $HADUPILS_TMP_TTL
+  * This is the Time-To-Live for hadupils DFS temporary files/directories (hadupils-tmp*)
+  * Defaults to '86400' (24 hours)
+  * Command 'cleanup' will use this ENV var to remove any /hadupils-tmp*/ tmpdirs within
+    $HADUPILS_TMP_PATH where all files within are older than the TTL (Time.now.utc - $HADUPILS_TMP_TTL),
+    if the ttl isn't set through the command line
 
 ## Hadupils' Commands
 - hive __command__ _options_
 - hadoop __command__ _options_
 - mktemp [-d]
 - withtmpdir __subshell_command__
-- rm [-r] __full_path_to_file_or_directory__
+- rm [-rR] __full_path_to_file_or_directory__
+- cleanup [-n] __full_path_to_tmp_dir__ __ttl__
 
 ### Example Usages
 ``` shell
@@ -26,4 +37,5 @@ hadupils hadoop fs -ls /tmp
 hadupils mktemp -d
 hadupils withtmpdir 'echo $HADUPILS_TMPDIR_PATH'
 hadupils rm -r /tmp/hadupils-tmp-e341afe01721013128c122000af92329
+hadupils cleanup -n
 ```
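Taken together with the `TmpFile` accessors later in this diff, the resolution order documented above is: command-line argument first, then environment variable, then built-in default. A hedged sketch of that defaulting, mirroring `Hadupils::Extensions::Dfs::TmpFile.tmp_path` and `.tmp_ttl`:

```ruby
# Sketch only; the gem performs this lookup inside
# Hadupils::Extensions::Dfs::TmpFile.tmp_path and .tmp_ttl.
tmp_path = ENV['HADUPILS_TMP_PATH'] || '/tmp'         # base DFS path
tmp_ttl  = (ENV['HADUPILS_TMP_TTL'] || '86400').to_i  # seconds (24 hours)

puts "cleanup would scan #{tmp_path} for hadupils-tmp* older than #{tmp_ttl}s"
```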
data/bin/hadupils CHANGED
@@ -4,4 +4,4 @@
 
 require 'hadupils'
 
-exit Hadupils::Commands.run ARGV[0], ARGV[1..-1]
+exit Hadupils::Commands.run(ARGV[0], ARGV[1..-1])[1]
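This one-line change in the executable reflects the new convention used throughout this diff: command `run` methods now return a `[stdout, exitstatus]` pair instead of a bare status, so the binary indexes with `[1]` to hand the numeric status to `exit`. A sketch of the calling pattern (the command and path are placeholders):

```ruby
require 'hadupils'

# Commands now return a [stdout, exitstatus] pair rather than a bare status.
stdout, exitstatus = Hadupils::Commands.run('rm', ['-r', '/tmp/hadupils-tmp-example'])
exit exitstatus
```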
data/lib/hadupils/commands/options.rb ADDED
@@ -0,0 +1,20 @@
+module Hadupils::Commands
+  module Options
+    # NOTE: Only a single option per command (known limitation for now)
+    module Directory
+      def perform_directory?
+        %w(-d --directory).include? params[0]
+      end
+    end
+    module DryRun
+      def perform_dry_run?
+        %w(-n --dry-run).include? params[0]
+      end
+    end
+    module Recursive
+      def perform_recursive?
+        %w(-r -R --recursive).include? params[0]
+      end
+    end
+  end
+end
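Each mixin assumes the including command exposes `params` (which `SimpleCommand` now provides, per the next section) and, per the NOTE above, inspects only `params[0]`, so the flag must be the first argument. A usage sketch with a hypothetical command class that is not part of the gem:

```ruby
require 'hadupils'

# Hypothetical example class, for illustration only.
class ExampleCommand < Hadupils::Commands::SimpleCommand
  include Hadupils::Commands::Options::Recursive

  def run
    # Only params[0] is consulted, so '-R' must come first.
    fs_cmd = perform_recursive? ? '-rmr' : '-rm'
    puts "would invoke: hadoop fs #{fs_cmd} #{params[1..-1].join(' ')}"
    [nil, 0]
  end
end

ExampleCommand.run(['-R', '/tmp/hadupils-tmp-example'])
```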
data/lib/hadupils/commands.rb CHANGED
@@ -1,3 +1,5 @@
+require 'hadupils/commands/options'
+
 module Hadupils::Commands
   def self.run(command, params=[])
     handler = handler_for command
@@ -18,8 +20,14 @@ module Hadupils::Commands
   end
 
   class SimpleCommand
+    attr_reader :params
+
+    def initialize(params=[])
+      @params = params
+    end
+
     def self.run(params=[])
-      self.new.run params
+      self.new(params).run
     end
 
     def successful?(exitstatus)
@@ -50,9 +58,9 @@ module Hadupils::Commands
     include UserConf
 
     def assemble_parameters(parameters)
-      @hadoop_ext = Hadupils::Extensions::Static.new(Hadupils::Search.hadoop_assets)
+      @hadoop_ext = Hadupils::Extensions::Static.new(Hadupils::Search.hadoop_assets)
       hadoop_cmd = parameters[0...1]
-      hadoop_cmd_opts = parameters[1..-1] || []
+      hadoop_cmd_opts = parameters[1..-1] || []
 
       if %w(fs dfs).include? parameters[0]
         hadoop_cmd + user_config.hadoop_confs + hadoop_ext.hadoop_confs + hadoop_cmd_opts
@@ -62,8 +70,8 @@ module Hadupils::Commands
       end
     end
 
-    def run(parameters)
-      Hadupils::Runners::Hadoop.run assemble_parameters(parameters)
+    def run
+      Hadupils::Runners::Hadoop.run assemble_parameters(params)
     end
   end
 
@@ -78,61 +86,71 @@ module Hadupils::Commands
       user_config.hivercs + hadoop_ext.hivercs + hive_ext.hivercs + parameters
     end
 
-    def run(parameters)
-      Hadupils::Runners::Hive.run assemble_parameters(parameters), hive_ext.hive_aux_jars_path
+    def run
+      Hadupils::Runners::Hive.run assemble_parameters(params), hive_ext.hive_aux_jars_path
     end
   end
 
   register_handler :hive, Hive
 
   class MkTmpFile < SimpleCommand
-    def run(parameters)
-      # Creates a new tmpdir and puts the full tmpdir_path to STDOUT
+    include Options::Directory
+
+    attr_reader :tmpdir_path
+
+    def initialize(params)
+      super(params)
       Hadupils::Extensions::Dfs::TmpFile.reset_tmpfile!
-      tmpdir_path = Hadupils::Extensions::Dfs::TmpFile.tmpfile_path
+      @tmpdir_path = Hadupils::Extensions::Dfs::TmpFile.tmpfile_path
+    end
 
+    def run
       # Similar to shell mktemp, but for Hadoop DFS!
+      # Creates a new tmpdir and puts the full tmpdir_path to STDOUT
       # Makes a tmp file by default; a tmp directory with '-d' flag
-      fs_cmd = parameters[0] == '-d' ? '-mkdir' : '-touchz'
-      exitstatus = Hadupils::Commands::Hadoop.run ['fs', fs_cmd, tmpdir_path]
+      fs_cmd = perform_directory? ? '-mkdir' : '-touchz'
+      stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', fs_cmd, tmpdir_path]
       if successful? exitstatus
-        exitstatus = Hadupils::Commands::Hadoop.run ['fs', '-chmod', '700', tmpdir_path]
+        stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', '-chmod', '700', tmpdir_path]
        if successful? exitstatus
          puts tmpdir_path
        else
-          $stderr.puts "Failed to chmod 700 dfs tmpdir: #{tmpdir_path}"
+          $stderr.puts "Failed to dfs -chmod 700 dfs tmpdir: #{tmpdir_path}"
        end
      else
        $stderr.puts "Failed creating dfs tmpdir: #{tmpdir_path}"
      end
-      exitstatus
+      [nil, exitstatus]
    end
  end
 
  register_handler :mktemp, MkTmpFile
 
  class RmFile < SimpleCommand
-    def run(parameters)
+    include Hadupils::Helpers::TextHelper
+    include Options::Recursive
+
+    def assemble_parameters(parameters)
+      perform_recursive? ? ['-rmr', parameters[1..-1]] : ['-rm', parameters[0..-1]]
+    end
+
+    def run
      # Similar to shell rm, but for Hadoop DFS!
      # Removes files by default; removes directories recursively with '-r' flag
-      fs_cmd, tmp_dirs =
-        if parameters[0] == '-r'
-          ['-rmr', parameters[1..-1]]
-        else
-          ['-rm', parameters[0..-1]]
-        end
+      fs_cmd, tmp_dirs = assemble_parameters(params)
 
      if tmp_dirs.empty?
        $stderr.puts 'Failed to remove unspecified tmpdir(s), please specify tmpdir_path'
-        255
+        [nil, 255]
      else
-        exitstatus = Hadupils::Commands::Hadoop.run ['fs', fs_cmd, tmp_dirs].flatten
-        if successful? exitstatus
-          Hadupils::Extensions::Dfs::TmpFile.reset_tmpfile!
-        else
-          $stderr.puts "Failed to remove dfs tmpdir: #{tmp_dirs.join(' ')}"
+        stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', fs_cmd, tmp_dirs].flatten
+        unless successful? exitstatus
+          $stderr.puts "Failed to remove #{pluralize(tmp_dirs.length, 'tmpdir', 'tmpdirs')}"
+          tmp_dirs.each do |tmp_dir|
+            $stderr.puts tmp_dir
+          end
        end
-        exitstatus
+        [nil, exitstatus]
      end
    end
  end
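`assemble_parameters` preserves the old flag semantics while widening the accepted spellings: a leading `-r`, `-R`, or `--recursive` switches from `hadoop fs -rm` to `-rmr` and drops the flag from the path list. A standalone sketch of the mapping (paths are placeholders):

```ruby
# ['-r', '/a', '/b'] => ['-rmr', ['/a', '/b']]
# ['/a', '/b']       => ['-rm',  ['/a', '/b']]
params = ['-r', '/tmp/hadupils-tmp-a', '/tmp/hadupils-tmp-b']
recursive = %w(-r -R --recursive).include?(params[0])
fs_cmd, tmp_dirs = recursive ? ['-rmr', params[1..-1]] : ['-rm', params[0..-1]]
p ['fs', fs_cmd, tmp_dirs].flatten
# => ["fs", "-rmr", "/tmp/hadupils-tmp-a", "/tmp/hadupils-tmp-b"]
```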
@@ -140,32 +158,121 @@ module Hadupils::Commands
   register_handler :rm, RmFile
 
   class WithTmpDir < SimpleCommand
-    def run(parameters)
+    def run
       # Runs provided subcommand with tmpdir and cleans up tmpdir on an exitstatus of zero
-      if parameters.empty?
+      if params.empty?
         $stderr.puts 'Yeeaaahhh... sooo... you failed to provide a subcommand...'
-        255
+        [nil, 255]
       else
         # Let's create the tmpdir
-        exitstatus = Hadupils::Commands::MkTmpFile.run ['-d']
+        stdout, exitstatus = Hadupils::Commands::MkTmpFile.run ['-d']
         if successful? exitstatus
           tmpdir_path = Hadupils::Extensions::Dfs::TmpFile.tmpfile_path
-          parameters.unshift({'HADUPILS_TMPDIR_PATH' => tmpdir_path})
+          params.unshift({'HADUPILS_TMPDIR_PATH' => tmpdir_path})
 
           # Let's run the shell subcommand!
-          exitstatus = Hadupils::Runners::Subcommand.run parameters
+          stdout, exitstatus = Hadupils::Runners::Subcommand.run params
 
           if successful? exitstatus
             # Let's attempt to cleanup tmpdir_path
-            exitstatus = Hadupils::Commands::RmFile.run ['-r', tmpdir_path]
+            stdout, exitstatus = Hadupils::Commands::RmFile.run ['-r', tmpdir_path]
           else
-            $stderr.puts "Failed to run shell subcommand: #{parameters}"
+            $stderr.puts "Failed to run shell subcommand: #{params}"
           end
         end
-        exitstatus
+        Hadupils::Extensions::Dfs::TmpFile.reset_tmpfile!
+        [nil, exitstatus]
       end
     end
   end
 
   register_handler :withtmpdir, WithTmpDir
+
+  class Cleanup < SimpleCommand
+    include Hadupils::Extensions::Dfs
+    include Hadupils::Extensions::Runners
+    include Hadupils::Helpers::Dfs
+    include Hadupils::Helpers::TextHelper
+    include Options::DryRun
+
+    attr_accessor :expired_exitstatuses
+    attr_accessor :rm_exitstatuses
+    attr_reader :tmp_path
+    attr_reader :tmp_ttl
+
+    def initialize(params)
+      super(params)
+      @expired_exitstatuses = []
+      @rm_exitstatuses = []
+      @tmp_path = (perform_dry_run? ? params[1] : params[0]) || TmpFile.tmp_path
+      @tmp_ttl = ((perform_dry_run? ? params[2] : params[1]) || TmpFile.tmp_ttl).to_i
+    end
+
+    def run
+      # Removes old hadupils tmp files/dirs where all files within a tmpdir are also older than the TTL
+      # User configurable by setting the ENV variable $HADUPILS_TMP_TTL, defaults to 86400 (last 24 hours)
+      # User may also perform a dry-run via a -n or a --dry-run flag
+
+      # Silence the Runner's shell STDOUT noise
+      Shell.silence_stdout = true
+
+      # Get candidate directories
+      stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', '-ls', tmp_path]
+      if successful? exitstatus
+        rm_array = []
+        dir_candidates(hadupils_tmpfiles(parse_ls(stdout)), tmp_ttl).each do |dir_candidate|
+          next unless has_expired? dir_candidate, tmp_ttl
+          rm_array << dir_candidate
+        end
+
+        exitstatus = expired_exitstatuses.all? {|expired_exitstatus| expired_exitstatus == 0} ? 0 : 255
+        if successful? exitstatus
+          puts "Found #{pluralize(rm_array.length, 'item', 'items')} to be removed recursively"
+          rm_array.each {|rm_item| puts rm_item }
+
+          unless perform_dry_run?
+            # Now want the user to see the Runner's shell STDOUT
+            Shell.silence_stdout = false
+
+            puts 'Removing...'
+            rm_array.each do |dir|
+              rm_stdout, rm_exitstatus = Hadupils::Commands::RmFile.run ['-r', dir]
+              rm_exitstatuses << rm_exitstatus
+              $stderr.puts "Failed to recursively remove: #{dir}" unless successful? rm_exitstatus
+            end
+          end
+          exitstatus = rm_exitstatuses.all? {|rm_exitstatus| rm_exitstatus == 0} ? 0 : 255
+        end
+      end
+      [nil, exitstatus]
+    end
+
+    def has_expired?(dir_candidate, ttl)
+      stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', '-count', dir_candidate]
+      expired_exitstatuses << exitstatus
+      if successful? exitstatus
+        parsed_count = parse_count(stdout)
+        if parsed_count.empty?
+          $stderr.puts "Failed to parse dfs -count for stdout: #{stdout}"
+          expired_exitstatuses << 255
+        elsif dir_empty? parsed_count[:file_count]
+          true
+        else
+          stdout, exitstatus = Hadupils::Commands::Hadoop.run ['fs', '-ls', File.join(dir_candidate, '**', '*')]
+          expired_exitstatuses << exitstatus
+          if successful? exitstatus
+            all_expired? parse_ls(stdout), ttl
+          else
+            $stderr.puts "Failed to perform dfs -ls on path: #{File.join(dir_candidate, '**', '*')}"
+            false
+          end
+        end
+      else
+        $stderr.puts "Failed to perform dfs -count on path: #{dir_candidate}"
+        false
+      end
+    end
+  end
+
+  register_handler :cleanup, Cleanup
 end
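Because `Options::DryRun` only looks at `params[0]`, Cleanup's optional `tmp_path` and `ttl` positionals shift by one slot when `-n` is present. A sketch of the two argument layouts (the values are hypothetical):

```ruby
# hadupils cleanup /scratch 3600    -> params = ['/scratch', '3600']
# hadupils cleanup -n /scratch 3600 -> params = ['-n', '/scratch', '3600']
params   = ['-n', '/scratch', '3600']
dry_run  = %w(-n --dry-run).include?(params[0])
tmp_path = (dry_run ? params[1] : params[0]) || '/tmp'
tmp_ttl  = ((dry_run ? params[2] : params[1]) || '86400').to_i
puts "dry_run=#{dry_run} tmp_path=#{tmp_path} ttl=#{tmp_ttl}"
```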
data/lib/hadupils/extensions/hive.rb CHANGED
@@ -223,7 +223,12 @@ module Hadupils::Extensions
       end
 
       ::Dir.chdir(workdir) do |p|
-        Kernel.system 'tar', 'cz', *basenames, :out => io
+        Open3.popen3('tar', 'cz', *basenames) do |i, o, e|
+          stderr = e.read
+          stdout = o.read
+          $stderr.puts stderr unless stderr.empty?
+          io << stdout
+        end
       end
     end
     true
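The old `Kernel.system 'tar', 'cz', *basenames, :out => io` form relies on the Hash-options `system` signature that Ruby 1.8.7 lacks, which is the compatibility bug the changelog mentions; `Open3.popen3` works on both sides of that divide. A standalone sketch of the same capture pattern, using `echo` instead of `tar` so it runs anywhere:

```ruby
require 'open3'
require 'stringio'

io = StringIO.new
Open3.popen3('echo', 'hello') do |stdin, stdout, stderr|
  err = stderr.read
  out = stdout.read
  $stderr.puts err unless err.empty?
  io << out
end
puts io.string  # => "hello\n"
```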
data/lib/hadupils/extensions.rb CHANGED
@@ -1,7 +1,83 @@
 require 'uuid'
+require 'open3'
 require 'tempfile'
 
 module Hadupils::Extensions
+  # Tools for managing shell commands/output performed by the runners
+  module Runners
+    module Shell
+      def self.command(*command_list)
+        opts = {}
+        stdout = nil
+        stderr = nil
+        status = nil
+
+        begin
+          if RUBY_VERSION < '1.9'
+            Open3.popen3(*command_list) do |i, o, e|
+              stdout = o.read
+              stderr = e.read
+            end
+            status = $?
+            $stdout.puts stdout unless stdout.nil? || stdout.empty? || Shell.silence_stdout?
+            $stderr.puts stderr unless stderr.nil? || stderr.empty?
+            stdout = nil unless capture_stdout?
+            stderr = nil unless capture_stderr?
+          else
+            stdout_rd, stdout_wr = IO.pipe if capture_stdout?
+            stderr_rd, stderr_wr = IO.pipe if capture_stderr?
+            opts[:out] = stdout_wr if capture_stdout?
+            opts[:err] = stderr_wr if capture_stderr?
+
+            # NOTE: eval prevents Ruby 1.8.7 from throwing a syntax error on Ruby 1.9+ syntax
+            result = eval 'Kernel.system(*command_list, opts)'
+            status = result ? $? : nil
+            if capture_stdout?
+              stdout_wr.close
+              stdout = stdout_rd.read
+              stdout_rd.close
+              $stdout.puts stdout unless stdout.nil? || stdout.empty? || Shell.silence_stdout?
+            end
+            if capture_stderr?
+              stderr_wr.close
+              stderr = stderr_rd.read
+              stderr_rd.close
+              $stderr.puts stderr unless stderr.nil? || stderr.empty?
+            end
+          end
+          [stdout, stderr, status]
+        rescue Errno::ENOENT => e
+          $stderr.puts e
+          [stdout, stderr, nil]
+        end
+      end
+
+      def self.capture_stderr?
+        @capture_stderr
+      end
+
+      def self.capture_stderr=(value)
+        @capture_stderr = value
+      end
+
+      def self.capture_stdout?
+        @capture_stdout || Shell.silence_stdout?
+      end
+
+      def self.capture_stdout=(value)
+        @capture_stdout = value
+      end
+
+      def self.silence_stdout?
+        @silence_stdout
+      end
+
+      def self.silence_stdout=(value)
+        @silence_stdout = value
+      end
+    end
+  end
+
   # Tools for managing tmp files in the hadoop dfs
   module Dfs
     module TmpFile
@@ -9,12 +85,16 @@ module Hadupils::Extensions
        @uuid ||= UUID.new
      end
 
+     def self.tmp_ttl
+       @tmp_ttl ||= (ENV['HADUPILS_TMP_TTL'] || '86400').to_i
+     end
+
      def self.tmp_path
-       @tmp_path ||= (ENV['HADUPILS_BASE_TMP_PATH'] || '/tmp')
+       @tmp_path ||= (ENV['HADUPILS_TMP_PATH'] || '/tmp')
      end
 
      def self.tmpfile_path
-       @tmpdir_path ||= ::File.join(tmp_path, "hadupils-tmp-#{uuid.generate(:compact)}")
+       @tmpfile_path ||= ::File.join(tmp_path, "hadupils-tmp-#{uuid.generate(:compact)}")
      end
 
      def self.reset_tmpfile!
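With both capture flags off, `Shell.command` behaves like `Kernel.system` plus a `[stdout, stderr, status]` return; enabling `capture_stdout` routes the child's output through a pipe so callers can inspect it. A hedged usage sketch (assumes the gem is loadable; `echo` stands in for a real hadoop invocation):

```ruby
require 'hadupils'

shell = Hadupils::Extensions::Runners::Shell
shell.capture_stdout = true

stdout, stderr, status = shell.command('echo', 'hello')
# stdout => "hello\n" (also echoed to $stdout unless silence_stdout is set)
# status => a Process::Status, or nil when the binary is missing (Errno::ENOENT)
puts status.exitstatus if status
```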
data/lib/hadupils/helpers.rb ADDED
@@ -0,0 +1,81 @@
+require 'time'
+
+module Hadupils::Helpers
+  module TextHelper
+    def pluralize(count, singular, plural=nil)
+      if count == 1
+        "1 #{singular}"
+      elsif plural
+        "#{count} #{plural}"
+      else
+        "#{count} #{singular}s"
+      end
+    end
+  end
+
+  module Dfs
+    def parse_count(stdout)
+      parsed_count = {}
+      if stdout
+        result = stdout.squeeze(' ').split
+        parsed_count =
+          begin
+            { :dir_count => result[0],
+              :file_count => result[1],
+              :content_size => result[2],
+              :file_name => result[3] }
+          end if result.length == 4 # Check for proper # of dfs -count columns
+      end
+      parsed_count
+    end
+
+    def parse_ls(stdout)
+      parsed_ls = []
+      if stdout
+        result = stdout.split(/\n/)
+        parsed_ls =
+          result[1..-1].map do |line|
+            l = line.squeeze(' ').split
+            begin
+              l = l[-3..-1]
+              [Time.parse("#{l[0]} #{l[1]}Z"), l[2]]
+            rescue ArgumentError
+              nil
+            end if l.length == 8 # Check for proper # of dfs -ls columns
+          end.compact unless result.empty?
+      end
+      parsed_ls
+    end
+
+    def hadupils_tmpfile?(parsed_line)
+      parsed_line.match(/hadupils-tmp/)
+    end
+
+    def dir_candidates(parsed_ls, ttl)
+      parsed_ls.inject([]) do |dir_candidates, (file_time, file_path)|
+        if file_time < (Time.now.utc - ttl)
+          dir_candidates << file_path
+        end
+        dir_candidates
+      end
+    end
+
+    def dir_empty?(count)
+      count.to_i == 0
+    end
+
+    def all_expired?(parsed_ls, ttl)
+      parsed_ls.all? {|file_time, file_path| file_time < (Time.now.utc - ttl)}
+    end
+
+    def hadupils_tmpfiles(parsed_ls)
+      parsed_ls.map do |time, file_path|
+        if hadupils_tmpfile? file_path
+          [time, file_path]
+        else
+          nil
+        end
+      end.compact
+    end
+  end
+end
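Fed the same fixture strings the unit tests later in this diff use, these helpers turn raw `hadoop fs` output into `[mtime, path]` pairs and column hashes. A sketch using a throwaway object extended with the module:

```ruby
require 'hadupils'

helper = Object.new.extend(Hadupils::Helpers::Dfs)

ls_stdout = "Found 1 items\n" +
  "drwx------ - willdrew supergroup 0 2013-10-24 16:23 /tmp/hadupils-tmp-064708701f180131f7ef3c0754617b34\n"
p helper.parse_ls(ls_stdout)
# => [[2013-10-24 16:23:00 UTC, "/tmp/hadupils-tmp-064708701f180131f7ef3c0754617b34"]]

count_stdout = " 1 0 0 hdfs://localhost:9000/tmp/hadupils-tmp-064708701f180131f7ef3c0754617b34\n"
p helper.parse_count(count_stdout)
# => {:dir_count=>"1", :file_count=>"0", :content_size=>"0", :file_name=>"hdfs://..."}
```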
data/lib/hadupils/runners.rb CHANGED
@@ -1,6 +1,7 @@
 module Hadupils::Runners
   class Base
-    attr_reader :params, :last_result, :last_status
+    include Hadupils::Extensions::Runners
+    attr_reader :params, :last_stdout, :last_stderr, :last_status
 
     def initialize(params)
       @params = params
@@ -14,6 +15,7 @@ module Hadupils::Runners
 
     def execute!
       command_list = command
+
       if RUBY_VERSION < '1.9' and command_list[0].kind_of? Hash
         deletes = []
         overrides = {}
@@ -26,24 +28,23 @@ module Hadupils::Runners
           end
           ::ENV[key] = val
         end
-        Kernel.system(*command_list[1..-1])
+        Shell.command(*command_list[1..-1])
       ensure
         overrides.each {|key, val| ::ENV[key] = val }
         deletes.each {|key| ::ENV.delete key }
       end
     else
-      Kernel.system(*command_list)
+      Shell.command(*command_list)
     end
   end
 
   def wait!
-    @last_result = execute!
-    @last_status = $?
-    last_exitstatus
+    @last_stdout, @last_stderr, @last_status = execute!
+    [@last_stdout, last_exitstatus]
   end
 
   def last_exitstatus
-    if @last_result.nil?
+    if @last_status.nil?
       255
     else
       @last_status.exitstatus
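Runner callers get the same pair convention as commands: `wait!` now returns `[stdout, exitstatus]`, and a `nil` status (for example, a binary that could not be spawned) maps to 255 via `last_exitstatus`. A hedged sketch using the generic subcommand runner (assuming the gem is loadable and `true` exists on the host):

```ruby
require 'hadupils'

stdout, exitstatus = Hadupils::Runners::Subcommand.run(['true'])
puts exitstatus  # => 0; a command that cannot be spawned yields 255
```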
data/lib/hadupils.rb CHANGED
@@ -3,9 +3,11 @@ module Hadupils
 end
 
 require 'hadupils/assets'
-require 'hadupils/commands'
+require 'hadupils/helpers'
 require 'hadupils/extensions'
 require 'hadupils/runners'
 require 'hadupils/search'
 require 'hadupils/util'
 require 'hadupils/hacks'
+
+require 'hadupils/commands'
@@ -39,8 +39,9 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  end
 
  should 'have a #run singleton method that dispatches to an instance #run' do
- @klass.expects(:new).with.returns(instance = mock())
- instance.expects(:run).with(params = mock()).returns(result = mock())
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
  assert_equal result, @klass.run(params)
  end
 
@@ -67,9 +68,8 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
 
  context '#run' do
  setup do
- @command = @klass.new
- @command.stubs(:user_config).with.returns(@user_config = mock())
- @command.stubs(:hadoop_ext).with.returns(@hadoop_ext = mock())
+ @klass.any_instance.stubs(:user_config).with.returns(@user_config = mock())
+ @klass.any_instance.stubs(:hadoop_ext).with.returns(@hadoop_ext = mock())
  @runner_class = Hadupils::Runners::Hadoop
  end
 
@@ -82,7 +82,7 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  should 'apply hadoop_conf options to hadoop runner call' do
  @runner_class.expects(:run).with(@user_config_hadoop_confs +
  @hadoop_ext_hadoop_confs).returns(result = mock())
- assert_equal result, @command.run([])
+ assert_equal result, @klass.new([]).run
  end
 
  should 'insert hadoop_conf options into position 1 of given params array to hadoop runner call' do
@@ -91,7 +91,7 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  @user_config_hadoop_confs +
  @hadoop_ext_hadoop_confs +
  params[1..-1]).returns(result = mock())
- assert_equal result, @command.run(params)
+ assert_equal result, @klass.new(params).run
  end
  end
 
@@ -103,12 +103,12 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
 
  should 'pass params unchanged through to hadoop runner call' do
  @runner_class.expects(:run).with(params = [mock(), mock()]).returns(result = mock())
- assert_equal result, @command.run(params)
+ assert_equal result, @klass.new(params).run
  end
 
  should 'handle empty params' do
  @runner_class.expects(:run).with([]).returns(result = mock())
- assert_equal result, @command.run([])
+ assert_equal result, @klass.new([]).run
  end
  end
  end
@@ -125,8 +125,9 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  end
 
  should 'have a #run singleton method that dispatches to an instance #run' do
- @klass.expects(:new).with.returns(instance = mock())
- instance.expects(:run).with(params = mock()).returns(result = mock())
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
  assert_equal result, @klass.run(params)
  end
 
@@ -160,10 +161,9 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
 
  context '#run' do
  setup do
- @command = @klass.new
- @command.stubs(:user_config).with.returns(@user_config = mock())
- @command.stubs(:hadoop_ext).with.returns(@hadoop_ext = mock())
- @command.stubs(:hive_ext).with.returns(@hive_ext = mock)
+ @klass.any_instance.stubs(:user_config).with.returns(@user_config = mock())
+ @klass.any_instance.stubs(:hadoop_ext).with.returns(@hadoop_ext = mock())
+ @klass.any_instance.stubs(:hive_ext).with.returns(@hive_ext = mock)
  @runner_class = Hadupils::Runners::Hive
  end
 
@@ -180,7 +180,7 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  @hadoop_ext_hivercs +
  @hive_ext_hivercs,
  @hive_aux_jars_path).returns(result = mock())
- assert_equal result, @command.run([])
+ assert_equal result, @klass.new([]).run
  end
 
  should 'prepend hiverc options before given params to hive runner call' do
@@ -190,7 +190,7 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  @hive_ext_hivercs +
  params,
  @hive_aux_jars_path).returns(result = mock())
- assert_equal result, @command.run(params)
+ assert_equal result, @klass.new(params).run
  end
  end
 
@@ -204,12 +204,12 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
 
  should 'pass params unchanged through to hive runner call along with aux jars path' do
  @runner_class.expects(:run).with(params = [mock(), mock()], '').returns(result = mock())
- assert_equal result, @command.run(params)
+ assert_equal result, @klass.new(params).run
  end
 
  should 'handle empty params' do
  @runner_class.expects(:run).with([], '').returns(result = mock())
- assert_equal result, @command.run([])
+ assert_equal result, @klass.new([]).run
  end
  end
  end
@@ -317,140 +317,203 @@ class Hadupils::CommandsTest < Test::Unit::TestCase
  ::Dir.chdir @pwd
  end
  end
- context 'MkTempFile' do
- setup do
- @klass = Hadupils::Commands::MkTmpFile
- end
  end
+
+ context 'MkTempFile' do
+ setup do
+ @klass = Hadupils::Commands::MkTmpFile
+ end
+
+ should 'register with :mktemp name' do
+ handlers = [:mktemp]
+ run_handler_assertions_for handlers
+ end
 
- should 'register with :mktemp name' do
- handlers = [:mktemp]
- run_handler_assertions_for handlers
+ should 'have a #run singleton method that dispatches to an instance #run' do
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
+ assert_equal result, @klass.run(params)
+ end
+
+ context '#run' do
+ should 'provide invocation for bare mktemp if given empty parameters' do
+ tmpdir_path = mock().to_s
+ Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-touchz', tmpdir_path]).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-chmod', '700', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new([]).run
  end
 
- should 'have a #run singleton method that dispatches to an instance #run' do
- @klass.expects(:new).with.returns(instance = mock())
- instance.expects(:run).with(params = mock()).returns(result = mock())
- assert_equal result, @klass.run(params)
+ should 'provide invocation for mktemp if given with -d flag parameter' do
+ tmpdir_path = mock().to_s
+ Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-mkdir', tmpdir_path]).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-chmod', '700', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new(['-d']).run
  end
+ end
+ end
 
- context '#run' do
- setup do
- @command = @klass.new
- Hadupils::Runners::Hadoop.stubs(:base_runner).returns(@hadoop_path = mock().to_s + '-hadoop')
- end
+ context 'RmFile' do
+ setup do
+ @klass = Hadupils::Commands::RmFile
+ end
 
- should 'provide invocation for bare mktemp if given empty parameters' do
- tmpdir_path = mock().to_s
- Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-touchz', tmpdir_path).returns(0)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-chmod', '700', tmpdir_path).returns(0)
- assert_equal 0, @command.run([])
- end
+ should 'register with :rm name' do
+ handlers = [:rm]
+ run_handler_assertions_for handlers
+ end
 
- should 'provide invocation for mktemp if given with -d flag parameter' do
- tmpdir_path = mock().to_s
- Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-mkdir', tmpdir_path).returns(0)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-chmod', '700', tmpdir_path).returns(0)
- assert_equal 0, @command.run(['-d'])
- end
- end
+ should 'have a #run singleton method that dispatches to an instance #run' do
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
+ assert_equal result, @klass.run(params)
  end
 
- context 'RmFile' do
- setup do
- @klass = Hadupils::Commands::RmFile
+ context '#run' do
+ should 'provide invocation for bare rm if given empty parameters' do
+ assert_equal [nil, 255], @klass.new([]).run
  end
 
- should 'register with :rm name' do
- handlers = [:rm]
- run_handler_assertions_for handlers
+ should 'provide invocation for rm if just tmpdir_path parameter' do
+ tmpdir_path = mock().to_s
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rm', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new([tmpdir_path]).run
  end
 
- should 'have a #run singleton method that dispatches to an instance #run' do
- @klass.expects(:new).with.returns(instance = mock())
- instance.expects(:run).with(params = mock()).returns(result = mock())
- assert_equal result, @klass.run(params)
+ should 'provide invocation for hadoop if just tmpdir_path with -r flag parameter' do
+ tmpdir_path = mock().to_s
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rmr', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new(['-r', tmpdir_path]).run
  end
+ end
+ end
 
- context '#run' do
- setup do
- @command = @klass.new
- Hadupils::Runners::Hadoop.stubs(:base_runner).returns(@hadoop_path = mock().to_s + '-hadoop')
- end
+ context 'WithTempDir' do
+ setup do
+ @klass = Hadupils::Commands::WithTmpDir
+ end
 
- should 'provide invocation for bare rm if given empty parameters' do
- assert_equal 255, @klass.run([])
- end
+ should 'register with :withtmpdir name' do
+ handlers = [:withtmpdir]
+ run_handler_assertions_for handlers
+ end
 
- should 'provide invocation for rm if just tmpdir_path parameter' do
- tmpdir_path = mock().to_s
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-rm', tmpdir_path).returns(0)
- assert_equal 0, @klass.run([tmpdir_path])
- end
+ should 'have a #run singleton method that dispatches to an instance #run' do
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
+ assert_equal result, @klass.run(params)
+ end
 
- should 'provide invocation for hadoop if just tmpdir_path with -r flag parameter' do
- tmpdir_path = mock().to_s
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-rmr', tmpdir_path).returns(0)
- assert_equal 0, @klass.run(['-r', tmpdir_path])
- end
+ context '#run' do
+ should 'provide invocation for withtmpdir if given parameters for shell subcommand' do
+ tmpdir_path = mock().to_s
+ run_common_subcommand_assertions_with(tmpdir_path)
+ subcommand_params = [{'HADUPILS_TMPDIR_PATH' => tmpdir_path}, '/path/to/my_wonderful_script.sh']
+ Hadupils::Runners::Subcommand.expects(:run).with(subcommand_params).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rmr', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new(['/path/to/my_wonderful_script.sh']).run
  end
 
- context 'WithTempDir' do
- setup do
- @klass = Hadupils::Commands::WithTmpDir
- end
-
- should 'register with :withtmpdir name' do
- handlers = [:withtmpdir]
- run_handler_assertions_for handlers
- end
+ should 'provide invocation for withtmpdir if given parameters for shell subcommand (another hadupils command)' do
+ tmpdir_path = mock().to_s
+ run_common_subcommand_assertions_with(tmpdir_path)
+ subcommand_params = [{'HADUPILS_TMPDIR_PATH' => tmpdir_path}, 'hadupils hadoop ls /tmp']
+ Hadupils::Runners::Subcommand.expects(:run).with(subcommand_params).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rmr', tmpdir_path]).returns(['', 0])
+ assert_equal [nil, 0], @klass.new(['hadupils hadoop ls /tmp']).run
+ end
 
- should 'have a #run singleton method that dispatches to an instance #run' do
- @klass.expects(:new).with.returns(instance = mock())
- instance.expects(:run).with(params = mock()).returns(result = mock())
- assert_equal result, @klass.run(params)
- end
+ should 'provide invocation for withtmpdir if given parameters for shell subcommand with nil result' do
+ tmpdir_path = mock().to_s
+ subcommand_params = [{'HADUPILS_TMPDIR_PATH' => tmpdir_path}, '/path/to/my_wonderful_script.sh']
+ run_common_subcommand_assertions_with(tmpdir_path)
+ Hadupils::Runners::Subcommand.expects(:run).with(subcommand_params).returns(['', 255])
+ assert_equal [nil, 255], @klass.new(['/path/to/my_wonderful_script.sh']).run
+ end
+ end
+ end
+ end
 
- context '#run' do
- setup do
- @command = @klass.new
- Hadupils::Runners::Hadoop.stubs(:base_runner).returns(@hadoop_path = mock().to_s + '-hadoop')
- end
+ context 'Cleanup' do
+ setup do
+ @klass = Hadupils::Commands::Cleanup
+ end
 
- should 'provide invocation for withtmpdir if given parameters for shell subcommand' do
- tmpdir_path = mock().to_s
- run_common_subcommand_assertions_with tmpdir_path
- Kernel.expects(:system).with({'HADUPILS_TMPDIR_PATH' => tmpdir_path}, '/path/to/my_wonderful_script.sh').returns(0)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-rmr', tmpdir_path).returns(0)
- assert_equal 0, @klass.run(['/path/to/my_wonderful_script.sh'])
- end
+ should 'register with :cleanup name' do
+ handlers = [:cleanup]
+ run_handler_assertions_for handlers
+ end
 
- should 'provide invocation for withtmpdir if given parameters for shell subcommand (another hadupils command)' do
- tmpdir_path = mock().to_s
- run_common_subcommand_assertions_with tmpdir_path
- Kernel.expects(:system).with({'HADUPILS_TMPDIR_PATH' => tmpdir_path}, 'hadupils hadoop ls /tmp').returns(0)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-rmr', tmpdir_path).returns('')
- assert_equal 0, @klass.run(['hadupils hadoop ls /tmp'])
- end
+ should 'have a #run singleton method that dispatches to an instance #run' do
+ params = mock()
+ @klass.expects(:new).with(params).returns(instance = mock())
+ instance.expects(:run).with.returns(result = mock())
+ assert_equal result, @klass.run(params)
+ end
 
- should 'provide invocation for withtmpdir if given parameters for shell subcommand with nil result' do
- tmpdir_path = mock().to_s
- run_common_subcommand_assertions_with tmpdir_path
- Kernel.expects(:system).with({'HADUPILS_TMPDIR_PATH' => tmpdir_path}, '/path/to/my_wonderful_script.sh').returns(nil)
- assert_equal 255, @klass.run(['/path/to/my_wonderful_script.sh'])
- end
- end
- end
- end
+ context '#run' do
+ should 'provide invocation for bare cleanup if given empty parameters' do
+ tmp_path = '/tmp'
+ tmpdir1 = File.join(tmp_path, 'hadupils-tmp-064708701f180131f7ef3c0754617b34')
+ tmpdir2 = File.join(tmp_path, 'hadupils-tmp-0e5175901f180131f7f03c0754617b34')
+
+ run_common_cleanup_assertions_with(tmp_path, tmpdir1, tmpdir2)
+ instance = @klass.new([])
+ assert_equal [nil, 0], instance.run
+ assert_equal 86400, instance.tmp_ttl
+ assert_equal '/tmp', instance.tmp_path
+ end
+
+ should 'provide invocation for cleanup if just tmp_path parameter' do
+ tmp_path = mock().to_s
+ tmpdir1 = File.join(tmp_path, 'hadupils-tmp-064708701f180131f7ef3c0754617b34')
+ tmpdir2 = File.join(tmp_path, 'hadupils-tmp-0e5175901f180131f7f03c0754617b34')
+
+ run_common_cleanup_assertions_with(tmp_path, tmpdir1, tmpdir2)
+ instance = @klass.new([tmp_path])
+ assert_equal [nil, 0], instance.run
+ assert_equal 86400, instance.tmp_ttl
+ assert_equal tmp_path, instance.tmp_path
+ end
+
+ should 'provide invocation for cleanup with tmp_path and ttl parameter' do
+ tmp_path = mock().to_s
+ tmpdir1 = File.join(tmp_path, 'hadupils-tmp-064708701f180131f7ef3c0754617b34')
+ tmpdir2 = File.join(tmp_path, 'hadupils-tmp-0e5175901f180131f7f03c0754617b34')
+
+ run_common_cleanup_assertions_with(tmp_path, tmpdir1, tmpdir2)
+ instance = @klass.new([tmp_path, '0'])
+ assert_equal [nil, 0], instance.run
+ assert_equal 0, instance.tmp_ttl
+ assert_equal tmp_path, instance.tmp_path
+ end
+ end
  end
 
  def run_common_subcommand_assertions_with(tmpdir_path)
  Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
  Hadupils::Extensions::Dfs::TmpFile.expects(:tmpfile_path).returns(tmpdir_path)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-mkdir', tmpdir_path).returns(0)
- Kernel.expects(:system).with(@hadoop_path, 'fs', '-chmod', '700', tmpdir_path).returns(0)
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-mkdir', tmpdir_path]).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-chmod', '700', tmpdir_path]).returns(['', 0])
+ end
+
+ def run_common_cleanup_assertions_with(tmp_path, tmpdir1, tmpdir2)
+ ls_stdout =
+ "Found 2 items\n" +
+ "drwx------ - willdrew supergroup 0 2013-10-24 16:23 #{tmpdir1}\n" +
+ "drwx------ - willdrew supergroup 0 2013-10-24 16:23 #{tmpdir2}\n"
+ count_stdout1 = " 1 0 0 hdfs://localhost:9000#{tmpdir1}\n"
+ count_stdout2 = " 1 1 0 hdfs://localhost:9000#{tmpdir2}\n"
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-ls', tmp_path]).returns([ls_stdout, 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-count', tmpdir1]).returns([count_stdout1, 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-count', tmpdir2]).returns([count_stdout2, 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-ls', File.join(tmpdir2, '**', '*')]).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rmr', tmpdir1]).returns(['', 0])
+ Hadupils::Runners::Hadoop.expects(:run).with(['fs', '-rmr', tmpdir2]).returns(['', 0])
  end
 
  def run_handler_assertions_for(handlers)
@@ -1,4 +1,6 @@
  class Hadupils::RunnersTest < Test::Unit::TestCase
+ include Hadupils::Extensions::Runners
+
  context Hadupils::Runners::Base do
  setup do
  @runner = Hadupils::Runners::Base.new(@params = mock())
@@ -21,20 +23,22 @@ class Hadupils::RunnersTest < Test::Unit::TestCase
  end
 
  should 'assemble system call via command method' do
- Kernel.expects(:system).with(*@command).returns(true)
  $?.stubs(:exitstatus).with.returns(mock())
+ last_status = $?
+ Shell.stubs(:command).with(*@command).returns([nil, nil, last_status])
  @runner.wait!
  end
 
  should 'return 255 when system returns nil' do
- Kernel.stubs(:system).returns(nil)
- assert_equal 255, @runner.wait!
+ Shell.stubs(:command).returns([nil, nil, nil])
+ assert_equal [nil, 255], @runner.wait!
  end
 
  should 'return Process::Status#exitstatus when non-nil system result' do
- Kernel.stubs(:system).returns(true)
  $?.stubs(:exitstatus).with.returns(status = mock())
- assert_equal status, @runner.wait!
+ last_status = $?
+ Shell.stubs(:command).returns([nil, nil, last_status])
+ assert_equal [nil, status], @runner.wait!
  end
  end
 
@@ -50,7 +54,7 @@ class Hadupils::RunnersTest < Test::Unit::TestCase
 
  should 'handle command without env hash normally' do
  @runner.expects(:command).with.returns(@command)
- Kernel.expects(:system).with(*@command).returns(true)
+ Open3.expects(:popen3).with(*@command)
  $?.stubs(:exitstatus).with.returns(mock)
  @runner.wait!
  end
@@ -66,7 +70,8 @@ class Hadupils::RunnersTest < Test::Unit::TestCase
  $?.stubs(:exitstatus).with.returns(mock)
  begin
  # Environment variable is overridden during system call
- matcher = Kernel.expects(:system).with do |*args|
+ last_status = $?
+ matcher = Shell.stubs(:command).returns([nil, nil, last_status]).with do |*args|
  args == @command and ::ENV[var] == replacement and ::ENV[to_be_removed] == removal_val
  end
 
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: hadupils
 version: !ruby/object:Gem::Version
-  version: 0.5.0
+  version: 0.6.0
 prerelease:
 platform: ruby
 authors:
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2013-10-11 00:00:00.000000000 Z
+date: 2013-10-28 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: uuid
@@ -102,6 +102,8 @@ files:
 - lib/hadupils/commands.rb
 - lib/hadupils/runners.rb
 - lib/hadupils/extensions/hive.rb
+- lib/hadupils/helpers.rb
+- lib/hadupils/commands/options.rb
 - lib/hadupils/extensions.rb
 - lib/hadupils/hacks.rb
 - lib/hadupils/assets.rb