webhdfs-with-krb5 0.6.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
data/AUTHORS ADDED
@@ -0,0 +1,3 @@
1
+ Kazuki Ohta <kazuki.ohta@gmail.com>
2
+ TAGOMORI Satoshi <tagomoris@gmail.com>
3
+ Zixian.shen <zixian.shen@gmail.com>
data/COPYING ADDED
@@ -0,0 +1,13 @@
1
+ Copyright (C) 2012 Fluentd Project
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
data/Gemfile ADDED
@@ -0,0 +1,6 @@
1
+ source "http://rubygems.org"
2
+
3
+ gemspec
4
+
5
+ gem "simplecov", :require => false
6
+
data/README.md ADDED
@@ -0,0 +1,84 @@
1
+ now revising...
2
+
3
+ # webhdfs-client - A Ruby client library for Hadoop WebHDFS and HttpFs
4
+
5
+ The webhdfs-client gem provides access to Hadoop WebHDFS (and, experimentally, HttpFs). WebHDFS::Client is the client class, and WebHDFS::FileUtils is a utility module modeled on Ruby's 'fileutils'.
6
+
7
+ ## Installation
8
+
9
+
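+ Assuming the gem is published to rubygems.org (the source used in this package's Gemfile), install it with:
+ 
+     gem install webhdfs-with-krb5
+ 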
10
+ ## Usage
11
+
12
+ ### WebHDFS::Client
13
+
14
+ For client object interface:
15
+
16
+ require 'webhdfs'
17
+ client = WebHDFS::Client.new(hostname, port)
18
+ # or with pseudo username authentication
19
+ client = WebHDFS::Client.new(hostname, port, username)
20
+
21
+ To create/append/read files:
22
+
23
+ client.create('/path/to/file', data)
24
+ client.create('/path/to/file', data, :overwrite => false, :blocksize => 268435456, :replication => 5, :permission => 0666)
25
+
26
+ client.append('/path/to/existing/file', data)
27
+
28
+ client.read('/path/to/target') #=> data
29
+ client.read('/path/to/target', :offset => 2048, :length => 1024) #=> data
30
+
31
+ To mkdir/rename/delete directories or files:
32
+
33
+ client.mkdir('/hdfs/dirname')
34
+ client.mkdir('/hdfs/dirname', :permission => 0777)
35
+
36
+ client.rename(original_path, dst_path)
37
+
38
+ client.delete(path)
39
+ client.delete(dir_path, :recursive => true)
40
+
41
+ To get status or list of files and directories:
42
+
43
+ client.stat(file_path) #=> key-value pairs for file status
44
+ client.list(dir_path) #=> list of key-value pairs for files in dir_path
45
+
46
+ The 'content_summary', 'checksum', 'homedir', 'chmod', 'chown', 'replication' and 'touch' methods are also available; a short sketch of these calls is shown below.
47
+
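+ A short sketch of those calls (illustrative only; the paths, mode, owner/group and replication factor are placeholder values):
+ 
+     client.content_summary('/hdfs/dirname')
+     client.checksum('/path/to/file')
+     client.homedir
+     client.chmod('/path/to/file', '644')
+     client.chown('/path/to/file', :owner => 'webuser', :group => 'supergroup')
+     client.replication('/path/to/file', 3)
+     client.touch('/path/to/file', :modificationtime => Time.now.to_i)
+ 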
48
+ ### WebHDFS::FileUtils
49
+
50
+ require 'webhdfs/fileutils'
51
+ WebHDFS::FileUtils.set_server(host, port)
52
+ # or
53
+ WebHDFS::FileUtils.set_server(host, port, username, doas)
54
+
55
+ WebHDFS::FileUtils.copy_from_local(localpath, hdfspath)
56
+ WebHDFS::FileUtils.copy_to_local(hdfspath, localpath)
57
+
58
+ WebHDFS::FileUtils.append(path, data)
59
+
60
+ ### For HttpFs
61
+
62
+ For HttpFs instead of WebHDFS:
63
+
64
+ client = WebHDFS::Client.new('hostname', 14000)
65
+ client.httpfs_mode = true
66
+
67
+ client.read(path) #=> data
68
+
69
+ # or with webhdfs/fileutils
70
+ WebHDFS::FileUtils.set_server('hostname', 14000)
71
+ WebHDFS::FileUtils.set_httpfs_mode
72
+ WebHDFS::FileUtils.copy_to_local(remote_path, local_path)
73
+
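+ ### For Kerberos authentication
+ 
+ WebHDFS::Client also has 'auth_type', 'keytab' and 'pass' accessors for Kerberos
+ authentication (sent as GSS-Negotiate through the curb adapter). The sketch below is
+ illustrative; the principal and keytab path are placeholders:
+ 
+     client = WebHDFS::Client.new('hostname', 14000, 'hdfs/hostname@EXAMPLE.COM')
+     client.auth_type = :kerberos
+     client.keytab = '/etc/hadoop/conf/hdfs.keytab'
+     # or, with a password instead of a keytab:
+     # client.pass = 'password'
+ 
+     client.homedir #=> home directory path
+ 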
74
+ ## AUTHORS
75
+
76
+ * Kazuki Ohta <kazuki.ohta@gmail.com>
77
+ * TAGOMORI Satoshi <tagomoris@gmail.com>
78
+ * ZiXian Shen <zixian.shen@gmail.com>
79
+ * Takamatsu Tomoaki <takamatsutomoaki@gmail.com>
80
+
81
+ ## LICENSE
82
+
83
+ * Copyright: (c) 2012- Fluentd Project
84
+ * License: Apache License, Version 2.0
data/Rakefile ADDED
@@ -0,0 +1,21 @@
1
+ require 'bundler'
2
+ Bundler::GemHelper.install_tasks
3
+
4
+ require 'rake/testtask'
5
+
6
+ Rake::TestTask.new(:test) do |test|
7
+ test.libs << 'lib' << 'test'
8
+ test.test_files = FileList['test/webhdfs/*.rb']
9
+ test.verbose = true
10
+ end
11
+
12
+ task :doc do |t|
13
+ `bundle exec rdoc --markup=tomdoc --visibility=public --include=lib --exclude=test`
14
+ end
15
+
16
+ task :coverage do |t|
17
+ ENV['SIMPLE_COV'] = '1'
18
+ Rake::Task["test"].invoke
19
+ end
20
+
21
+ task :default => [:build]
data/VERSION ADDED
@@ -0,0 +1 @@
1
+ 0.6.1
@@ -0,0 +1,6 @@
1
+ require_relative 'client_v1'
2
+
3
+ module WebHDFS
4
+ class Client < ClientV1
5
+ end
6
+ end
@@ -0,0 +1,349 @@
1
+ #require 'net/http'
2
+ require 'httpi'
3
+ require 'krb5_auth'
4
+ require 'uri'
5
+ require 'json'
6
+
7
+ require_relative 'exceptions'
8
+
9
+ module WebHDFS
10
+ class ClientV1
11
+
12
+ # This hash table holds command options.
13
+ OPT_TABLE = {} # internal use only
14
+
15
+ attr_accessor :host, :port, :username, :doas
16
+ attr_accessor :open_timeout # default 30s (in ruby net/http)
17
+ attr_accessor :read_timeout # default 60s (in ruby net/http)
18
+ attr_accessor :httpfs_mode
19
+ attr_accessor :auth_type # pseudo, kerberos
20
+ attr_accessor :keytab
21
+ attr_accessor :pass
22
+
23
+
24
+ def initialize(host='localhost', port=14000, username=nil, doas=nil)
25
+ @host = host
26
+ @port = port
27
+ @username = username
28
+ @doas = doas
29
+
30
+ @httpfs_mode = false
31
+
32
+ @auth_type = :pseudo
33
+ @keytab = nil
34
+ @pass = nil
35
+ end
36
+
37
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATESYMLINK&destination=<PATH>
38
+ # [&createParent=<true|false>]"
39
+ def create_symlink(path, dest, options={})
40
+ check_options(options, OPT_TABLE['CREATESYMLINK'])
41
+ unless dest.start_with?('/')
42
+ dest = '/' + dest
43
+ end
44
+ res = operate_requests(:put, path, 'CREATESYMLINK', options.merge({'destination' => dest}))
45
+ check_success_json(res, 'boolean')
46
+ end
47
+ OPT_TABLE['CREATESYMLINK'] = ['createParent']
48
+
49
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=CREATE
50
+ # [&overwrite=<true|false>][&blocksize=<LONG>][&replication=<SHORT>]
51
+ # [&permission=<OCTAL>][&buffersize=<INT>]"
52
+ def create(path, body, options={})
53
+ if @httpfs_mode
54
+ options = options.merge({'data' => 'true'})
55
+ end
56
+ check_options(options, OPT_TABLE['CREATE'])
57
+ res = operate_requests(:put, path, 'CREATE', options, body)
58
+ res.code == 201
59
+ end
60
+ OPT_TABLE['CREATE'] = ['overwrite', 'blocksize', 'replication', 'permission', 'buffersize', 'data']
61
+
62
+ # curl -i -X POST "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=APPEND[&buffersize=<INT>]"
63
+ def append(path, body, options={})
64
+ if @httpfs_mode
65
+ options = options.merge({'data' => 'true'})
66
+ end
67
+ check_options(options, OPT_TABLE['APPEND'])
68
+ res = operate_requests(:post, path, 'APPEND', options, body)
69
+ res.code == 200
70
+ end
71
+ OPT_TABLE['APPEND'] = ['buffersize', 'data']
72
+
73
+ # curl -i -L "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=OPEN
74
+ # [&offset=<LONG>][&length=<LONG>][&buffersize=<INT>]"
75
+ def read(path, options={})
76
+ check_options(options, OPT_TABLE['OPEN'])
77
+ res = operate_requests(:get, path, 'OPEN', options)
78
+ res.body
79
+ end
80
+ OPT_TABLE['OPEN'] = ['offset', 'length', 'buffersize']
81
+ alias :open :read
82
+
83
+ # curl -i -X PUT "http://<HOST>:<PORT>/<PATH>?op=MKDIRS[&permission=<OCTAL>]"
84
+ def mkdir(path, options={})
85
+ check_options(options, OPT_TABLE['MKDIRS'])
86
+ res = operate_requests(:put, path, 'MKDIRS', options)
87
+ check_success_json(res, 'boolean')
88
+ end
89
+ OPT_TABLE['MKDIRS'] = ['permission']
90
+ alias :mkdirs :mkdir
91
+
92
+ # curl -i -X PUT "<HOST>:<PORT>/webhdfs/v1/<PATH>?op=RENAME&destination=<PATH>"
93
+ def rename(path, dest, options={})
94
+ check_options(options, OPT_TABLE['RENAME'])
95
+ unless dest.start_with?('/')
96
+ dest = '/' + dest
97
+ end
98
+ res = operate_requests(:put, path, 'RENAME', options.merge({'destination' => dest}))
99
+ check_success_json(res, 'boolean')
100
+ end
101
+
102
+ # curl -i -X DELETE "http://<host>:<port>/webhdfs/v1/<path>?op=DELETE
103
+ # [&recursive=<true|false>]"
104
+ def delete(path, options={})
105
+ check_options(options, OPT_TABLE['DELETE'])
106
+ res = operate_requests(:delete, path, 'DELETE', options)
107
+ check_success_json(res, 'boolean')
108
+ end
109
+ OPT_TABLE['DELETE'] = ['recursive']
110
+
111
+ # curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=GETFILESTATUS"
112
+ def stat(path, options={})
113
+ check_options(options, OPT_TABLE['GETFILESTATUS'])
114
+ res = operate_requests(:get, path, 'GETFILESTATUS', options)
115
+ check_success_json(res, 'FileStatus')
116
+ end
117
+ alias :getfilestatus :stat
118
+
119
+ # curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=LISTSTATUS"
120
+ def list(path, options={})
121
+ check_options(options, OPT_TABLE['LISTSTATUS'])
122
+ res = operate_requests(:get, path, 'LISTSTATUS', options)
123
+ check_success_json(res, 'FileStatuses')['FileStatus']
124
+ end
125
+ alias :liststatus :list
126
+
127
+ # curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=GETCONTENTSUMMARY"
128
+ def content_summary(path, options={})
129
+ check_options(options, OPT_TABLE['GETCONTENTSUMMARY'])
130
+ res = operate_requests(:get, path, 'GETCONTENTSUMMARY', options)
131
+ check_success_json(res, 'ContentSummary')
132
+ end
133
+ alias :getcontentsummary :content_summary
134
+
135
+ # curl -i "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=GETFILECHECKSUM"
136
+ def checksum(path, options={})
137
+ check_options(options, OPT_TABLE['GETFILECHECKSUM'])
138
+ res = operate_requests(:get, path, 'GETFILECHECKSUM', options)
139
+ check_success_json(res, 'FileChecksum')
140
+ end
141
+ alias :getfilechecksum :checksum
142
+
143
+ # curl -i "http://<HOST>:<PORT>/webhdfs/v1/?op=GETHOMEDIRECTORY"
144
+ def homedir(options={})
145
+ check_options(options, OPT_TABLE['GETHOMEDIRECTORY'])
146
+ res = operate_requests(:get, '/', 'GETHOMEDIRECTORY', options)
147
+ check_success_json(res, 'Path')
148
+ end
149
+ alias :gethomedirectory :homedir
150
+
151
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=SETPERMISSION
152
+ # [&permission=<OCTAL>]"
153
+ def chmod(path, mode, options={})
154
+ check_options(options, OPT_TABLE['SETPERMISSION'])
155
+ res = operate_requests(:put, path, 'SETPERMISSION', options.merge({'permission' => mode}))
156
+ res.code == 200
157
+ end
158
+ alias :setpermission :chmod
159
+
160
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=SETOWNER
161
+ # [&owner=<USER>][&group=<GROUP>]"
162
+ def chown(path, options={})
163
+ check_options(options, OPT_TABLE['SETOWNER'])
164
+ unless options.has_key?('owner') or options.has_key?('group') or
165
+ options.has_key?(:owner) or options.has_key?(:group)
166
+ raise ArgumentError, "'chown' needs at least one of owner or group"
167
+ end
168
+ res = operate_requests(:put, path, 'SETOWNER', options)
169
+ res.code == 200
170
+ end
171
+ OPT_TABLE['SETOWNER'] = ['owner', 'group']
172
+ alias :setowner :chown
173
+
174
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=SETREPLICATION
175
+ # [&replication=<SHORT>]"
176
+ def replication(path, replnum, options={})
177
+ check_options(options, OPT_TABLE['SETREPLICATION'])
178
+ res = operate_requests(:put, path, 'SETREPLICATION', options.merge({'replication' => replnum.to_s}))
179
+ check_success_json(res, 'boolean')
180
+ end
181
+ alias :setreplication :replication
182
+
183
+ # curl -i -X PUT "http://<HOST>:<PORT>/webhdfs/v1/<PATH>?op=SETTIMES
184
+ # [&modificationtime=<TIME>][&accesstime=<TIME>]"
185
+ # modificationtime: radix-10 long integer
186
+ # accesstime: radix-10 long integer
187
+ def touch(path, options={})
188
+ check_options(options, OPT_TABLE['SETTIMES'])
189
+ unless options.has_key?('modificationtime') or options.has_key?('accesstime') or
190
+ options.has_key?(:modificationtime) or options.has_key?(:accesstime)
191
+ raise ArgumentError, "'chown' needs at least one of modificationtime or accesstime"
192
+ end
193
+ res = operate_requests(:put, path, 'SETTIMES', options)
194
+ res.code == 200
195
+ end
196
+ OPT_TABLE['SETTIMES'] = ['modificationtime', 'accesstime']
197
+ alias :settimes :touch
198
+
199
+ # def delegation_token(user, options={}) # GETDELEGATIONTOKEN
200
+ # raise NotImplementedError
201
+ # end
202
+ # def renew_delegation_token(token, options={}) # RENEWDELEGATIONTOKEN
203
+ # raise NotImplementedError
204
+ # end
205
+ # def cancel_delegation_token(token, options={}) # CANCELDELEGATIONTOKEN
206
+ # raise NotImplementedError
207
+ # end
208
+
209
+ def check_options(options, optdecl=[])
210
+ ex = options.keys.map(&:to_s) - (optdecl || [])
211
+ raise ArgumentError, "no such option: #{ex.join(' ')}" unless ex.empty?
212
+ end
213
+
214
+ def check_success_json(res, attr=nil)
215
+ res.code == 200 and res.headers['Content-Type'].include?('application/json') and (attr.nil? or JSON.parse(res.body)[attr])
216
+ end
217
+
218
+ def api_path(path)
219
+ if path.start_with?('/')
220
+ '/webhdfs/v1' + path
221
+ else
222
+ '/webhdfs/v1/' + path
223
+ end
224
+ end
225
+
226
+ def build_path(path, op, params)
227
+ opts = if @username and @doas
228
+ {'op' => op, 'user.name' => @username, 'doas' => @doas}
229
+ elsif @username
230
+ {'op' => op, 'user.name' => @username}
231
+ elsif @doas
232
+ {'op' => op, 'doas' => @doas}
233
+ else
234
+ {'op' => op}
235
+ end
236
+ query = URI.encode_www_form(params.merge(opts))
237
+ api_path(path) + '?' + query
238
+ end
239
+
240
+ REDIRECTED_OPERATIONS = ['APPEND', 'CREATE', 'OPEN', 'GETFILECHECKSUM']
241
+ REDIRECTED_CODE = (300..399)
242
+
243
+ def operate_requests(method, path, op, params={}, payload=nil)
244
+ if not @httpfs_mode and REDIRECTED_OPERATIONS.include?(op)
245
+ res = request(@host, @port, method, path, op, params, nil)
246
+ unless REDIRECTED_CODE.include?(res.code) and res.headers['Location']
247
+ msg = "NameNode returns non-redirection (or without location header), code:#{res.code}, body:#{res.body}."
248
+ raise WebHDFS::RequestFailedError, msg
249
+ end
250
+ uri = URI.parse(res.headers['Location'])
251
+ rpath = if uri.query
252
+ uri.path + '?' + uri.query
253
+ else
254
+ uri.path
255
+ end
256
+ request(uri.host, uri.port, method, rpath, nil, {}, payload, {'Content-Type' => 'application/octet-stream'})
257
+ else
258
+ if @httpfs_mode and not payload.nil?
259
+ request(@host, @port, method, path, op, params, payload, {'Content-Type' => 'application/octet-stream'})
260
+ else
261
+ request(@host, @port, method, path, op, params, payload)
262
+ end
263
+ end
264
+ end
265
+
266
+ # IllegalArgumentException 400 Bad Request
267
+ # UnsupportedOperationException 400 Bad Request
268
+ # SecurityException 401 Unauthorized
269
+ # IOException 403 Forbidden
270
+ # FileNotFoundException 404 Not Found
271
+ # RuntimeException 500 Internal Server Error
272
+ # @param [String] host
273
+ # @param [Integer] port
274
+ # @param [Symbol] method HTTP method (:get, :put, :post or :delete)
275
+ # @param [String] path
276
+ # @param [String, nil] op WebHDFS operation name (e.g. 'OPEN'), nil for redirected requests
277
+ # @param [Hash] params query parameters
278
+ # @param [String, nil] payload request body
279
+ # @param [Hash, nil] header extra request headers
280
+ def request(host, port, method, path, op=nil, params={}, payload=nil, header=nil)
281
+ req = HTTPI::Request.new
282
+ krb5 = nil
283
+
284
+ HTTPI.log = true # enable HTTPI logging
285
+ HTTPI.log_level = :debug
286
+ HTTPI.adapter = :net_http # one of [:httpclient, :curb, :net_http]
287
+
288
+ if @auth_type == :kerberos
289
+ if @username
290
+ krb5 = Krb5Auth::Krb5.new
291
+ inited = false
292
+ begin
293
+ if @keytab
294
+ inited = krb5.get_init_creds_keytab(@username, @keytab)
295
+ elsif @pass
296
+ inited = krb5.get_init_creds_password(@username, @pass)
297
+ else
298
+ raise ArgumentError, "kerberos authentication requires keytab or password"
299
+ end
300
+ rescue
301
+ raise WebHDFS::SecurityError, "kerberos initialization failed"
302
+ end
303
+ if inited
304
+ krb5.cache
305
+ HTTPI.adapter = :curb
306
+ req.auth.gssnegotiate
307
+ end
308
+ end
309
+ end
310
+ req.open_timeout = @open_timeout if @open_timeout
311
+ req.read_timeout = @read_timeout if @read_timeout
312
+ request_path = if op
313
+ build_path(path, op, params)
314
+ else
315
+ path
316
+ end
317
+ req.url = URI::HTTP.build({:host => host, :port => port}) + request_path
318
+ req.headers = header.nil? ? {} : header # must be assigned {} when nil
319
+ req.body = payload.nil? ? {} : payload # must be assigned {} when nil
320
+
321
+ res = HTTPI.request( method, req )
322
+ if HTTPI::Response::SuccessfulResponseCodes.include?(res.code)
323
+ res
324
+ elsif REDIRECTED_CODE.include?(res.code)
325
+ res
326
+ else
327
+ message = if res.body and not res.body.empty?
328
+ res.body.gsub(/\n/, '')
329
+ else
330
+ 'Response body is empty...'
331
+ end
332
+ case res.code
333
+ when 400
334
+ raise WebHDFS::ClientError, message
335
+ when 401
336
+ raise WebHDFS::SecurityError, message
337
+ when 403
338
+ raise WebHDFS::IOError, message
339
+ when 404
340
+ raise WebHDFS::FileNotFoundError, message
341
+ when 500
342
+ raise WebHDFS::ServerError, message
343
+ else
344
+ raise WebHDFS::RequestFailedError, "response code:#{res.code}, message:#{message}"
345
+ end
346
+ end
347
+ end
348
+ end
349
+ end
@@ -0,0 +1,12 @@
1
+ module WebHDFS; end
2
+ class WebHDFS::Error < StandardError; end
3
+
4
+ class WebHDFS::FileNotFoundError < WebHDFS::Error; end
5
+
6
+ class WebHDFS::IOError < WebHDFS::Error; end
7
+ class WebHDFS::SecurityError < WebHDFS::Error; end
8
+
9
+ class WebHDFS::ClientError < WebHDFS::Error; end
10
+ class WebHDFS::ServerError < WebHDFS::Error; end
11
+
12
+ class WebHDFS::RequestFailedError < WebHDFS::Error; end
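+ 
+ # Illustrative usage (not part of the original file): the client maps HTTP error
+ # codes onto these subclasses, so callers can rescue them selectively, e.g.
+ #
+ #   begin
+ #     client.stat('/no/such/path')
+ #   rescue WebHDFS::FileNotFoundError => e   # raised on a 404 response
+ #     warn e.message
+ #   end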
@@ -0,0 +1,350 @@
1
+ require_relative 'client'
2
+
3
+ module WebHDFS
4
+ module FileUtils
5
+ # These variables hold the NameNode location and authentication settings
6
+ @fu_host = 'localhost'
7
+ @fu_port = 50070
8
+ @fu_user = nil
9
+ @fu_doas = nil
10
+ @fu_httpfs_mode = false
11
+ @fu_auth_type = :pseudo
12
+ @fu_keytab = nil
13
+ @fu_pass = nil
14
+
15
+ # Public: Set hostname and port number of WebHDFS
16
+ #
17
+ # host - hostname
18
+ # port - port
19
+ # user - username
20
+ # doas - proxy user name
21
+ #
22
+ # Examples
23
+ #
24
+ # FileUtils.set_server 'localhost', 50070
25
+ #
26
+ def set_server(host, port, user=nil, doas=nil)
27
+ @fu_host = host
28
+ @fu_port = port
29
+ @fu_user = user
30
+ @fu_doas = doas
31
+ end
32
+ module_function :set_server
33
+
34
+ # Public: Set httpfs mode enable/disable
35
+ #
36
+ # mode - boolean (default true)
37
+ #
38
+ # Examples
39
+ #
40
+ # FileUtils.set_httpfs_mode
41
+ #
42
+ def set_httpfs_mode(mode=true)
43
+ @fu_httpfs_mode = mode
44
+ end
45
+ module_function :set_httpfs_mode
46
+
47
+ # Public: Copy local file into HDFS
48
+ #
49
+ # file - local file path
50
+ # path - HDFS file path
51
+ # options - :overwrite, :blocksize, :replication, :mode, :buffersize, :verbose
52
+ #
53
+ # Examples
54
+ #
55
+ # FileUtils.copy_from_local 'local_file', 'remote_file'
56
+ #
57
+ def copy_from_local(file, path, options={})
59
+ opts = options.dup
60
+ fu_log "copy_from_local local=#{file} hdfs=#{path}" if opts.delete(:verbose)
61
+ if mode = opts.delete(:mode)
62
+ mode = ('%03o' % mode) if mode.is_a? Integer
63
+ else
64
+ mode = '644'
65
+ end
66
+ opts[:permission] = mode
67
+ opts[:overwrite] = true unless opts.key?(:overwrite)
68
+
69
+ client.create(path, File.new(file, 'rb').read(File.size(file)), opts)
70
+ end
71
+ module_function :copy_from_local
72
+
73
+ # Public: Copy remote HDFS file into local
74
+ #
75
+ # path - HDFS file path
76
+ # file - local file path
77
+ # options - :offset, :length, :buffersize, :verbose
78
+ #
79
+ # Examples
80
+ #
81
+ # FileUtils.copy_to_local 'remote_file', 'local_file'
82
+ #
83
+ def copy_to_local(path, file, options={})
84
+ opts = options.dup
85
+ fu_log "copy_to_local hdfs=#{path} local=#{file}" if opts.delete(:verbose)
86
+ File.open(file, "wb") do |f|
87
+ f.write client.read(path, opts)
88
+ end
89
+ end
90
+ module_function :copy_to_local
91
+
92
+ # Public: Append to HDFS file
93
+ #
94
+ # path - HDFS file path
95
+ # body - contents
96
+ # options - :buffersize, :verbose
97
+ #
98
+ # Examples
99
+ #
100
+ # FileUtils.append 'remote_path', 'contents'
101
+ #
102
+ def append(path, body, options={})
103
+ opts = options.dup
104
+ fu_log "append #{body.bytesize} bytes to #{path}" if opts.delete(:verbose)
105
+ client.append(path, body, opts)
106
+ end
107
+ module_function :append
108
+
109
+ # Public: Create one or more directories.
110
+ #
111
+ # list - directory name, or list of them
112
+ # options - :mode, :verbose
113
+ #
114
+ # Examples
115
+ #
116
+ # FileUtils.mkdir 'test'
117
+ # FileUtils.mkdir %w( tmp data )
118
+ # FileUtils.mkdir 'tmp', :mode => 0700
119
+ #
120
+ def mkdir(list, options={})
121
+ opts = options.dup
122
+ list = [list].flatten
123
+ fu_log "mkdir #{options[:mode] ? ('-m %03o ' % options[:mode]) : ''}#{list.join ' '}" if opts.delete(:verbose)
124
+ if mode = opts[:mode]
125
+ mode = ('0%03o' % mode) if mode.is_a? Integer
126
+ else
127
+ mode = '0755'
128
+ end
129
+ c = client
130
+ list.each { |dir|
131
+ c.mkdir(dir, {:permission => mode})
132
+ }
133
+ end
134
+ module_function :mkdir
135
+
136
+ # Public: Create one or more directories recursively.
137
+ #
138
+ # list - directory name, or list of them
139
+ # options - :mode, :verbose
140
+ #
141
+ # Examples
142
+ #
143
+ # FileUtils.mkdir_p 'dir/subdir'
144
+ # FileUtils.mkdir_p %w( tmp data )
145
+ # FileUtils.mkdir_p 'dir/subdir', :mode => 0700
146
+ #
147
+ alias mkdir_p mkdir
148
+ module_function :mkdir_p
149
+
150
+ # Public: Remove one or more directories or files.
151
+ #
152
+ # list - directory name, or list of them
153
+ # options - :recursive, :verbose
154
+ #
155
+ # Examples
156
+ #
157
+ # FileUtils.rm 'dir'
158
+ # FileUtils.rm %w( tmp data )
159
+ # FileUtils.rm 'dir', :recursive => true
160
+ #
161
+ def rm(list, options={})
162
+ opts = options.dup
163
+ list = [list].flatten
164
+ fu_log "rm #{list.join ' '}" if opts.delete(:verbose)
165
+ c = client
166
+ list.each { |dir|
167
+ c.delete(dir, {:recursive => opts[:recursive] || false})
168
+ }
169
+ end
170
+ module_function :rm
171
+
172
+ # Public: Remove one or more directories/files recursively.
173
+ #
174
+ # list - directory name, or list of them
175
+ # options - :verbose
176
+ #
177
+ # Examples
178
+ #
179
+ # FileUtils.rmr 'dir'
180
+ # FileUtils.rmr %w( tmp data )
181
+ # FileUtils.rmr 'dir'
182
+ #
183
+ def rmr(list, options={})
184
+ self.rm(list, options.merge({:recursive => true}))
185
+ end
186
+ module_function :rmr
187
+
188
+ # Public: Rename a file or directory.
189
+ #
190
+ # src - from
191
+ # dst - to
192
+ # options - :verbose
193
+ #
194
+ # Examples
195
+ #
196
+ # FileUtils.rename 'from', 'to'
197
+ #
198
+ def rename(src, dst, options={})
199
+ opts = options.dup
200
+ fu_log "rename #{src} #{dst}" if opts.delete(:verbose)
201
+ client.rename(src, dst, opts)
202
+ end
203
+ module_function :rename
204
+
205
+ # Public: Change permission of one or more directories/files.
206
+ #
207
+ # mode - permission
208
+ # list - file/directory name or list of them.
209
+ # options - :verbose
210
+ #
211
+ # Examples
212
+ #
213
+ # FileUtils.chmod 0755, 'dir'
214
+ # FileUtils.chmod 0644, 'file'
215
+ #
216
+ def chmod(mode, list, options={})
217
+ opts = options.dup
218
+ list = [list].flatten
219
+ fu_log sprintf('chmod %o %s', mode, list.join(' ')) if opts.delete(:verbose)
220
+ mode = ('%03o' % mode) if mode.is_a? Integer
221
+ c = client
222
+ list.each { |entry|
223
+ c.chmod(entry, mode, opts)
224
+ }
225
+ end
226
+ module_function :chmod
227
+
228
+ # Public: Change an ownership of one or more directories/files.
229
+ #
230
+ # user - username
231
+ # group - groupname
232
+ # list - file/directory name or list of them
233
+ # options - :verbose
234
+ #
235
+ # Examples
236
+ #
237
+ # FileUtils.chown 'webuser', 'supergroup', 'dir'
238
+ # FileUtils.chown 'webuser', 'supergroup', 'file'
239
+ #
240
+ def chown(user, group, list, options={})
241
+ opts = options.dup
242
+ list = [list].flatten
243
+ fu_log sprintf('chown %s%s',
244
+ [user,group].compact.join(':') + ' ',
245
+ list.join(' ')) if opts.delete(:verbose)
246
+ c = client
247
+ list.each { |entry|
248
+ c.chown(entry, {:owner => user, :group => group})
249
+ }
250
+ end
251
+ module_function :chown
252
+
253
+ # Public: Set a replication factor of files
254
+ #
255
+ # list - file/directory name or list of them
256
+ # num - replication factor
257
+ # options - :verbose
258
+ #
259
+ # Examples
260
+ #
261
+ # FileUtils.set_repl_factor 'file', 3
262
+ #
263
+ def set_repl_factor(list, num, options={})
264
+ opts = options.dup
265
+ list = [list].flatten
266
+ fu_log sprintf('set_repl_factor %s %d',
267
+ list.join(' '), num) if opts.delete(:verbose)
268
+ c = client
269
+ list.each { |entry|
270
+ c.replication(entry, num, opts)
271
+ }
272
+ end
273
+ module_function :set_repl_factor
274
+
275
+ # Public: Set an access time of files
276
+ #
277
+ # list - file/directory name or list of them
278
+ # time - new access time
279
+ # options - :verbose
280
+ #
281
+ # Examples
282
+ #
283
+ # FileUtils.set_atime 'file', Time.now
284
+ #
285
+ def set_atime(list, time, options={})
286
+ opts = options.dup
287
+ list = [list].flatten
288
+ time = time.to_i
289
+ fu_log sprintf('set_atime %s %d', list.join(' '), time) if opts.delete(:verbose)
290
+ c = client
291
+ list.each { |entry|
292
+ c.touch(entry, {:accesstime => time})
293
+ }
294
+ end
295
+ module_function :set_atime
296
+
297
+ # Public: Set a modification time of files
298
+ #
299
+ # list - file/directory name or list of them
300
+ # time - new modification time
301
+ # options - :verbose
302
+ #
303
+ # Examples
304
+ #
305
+ # FileUtils.set_mtime 'file', Time.now
306
+ #
307
+ def set_mtime(list, time, options={})
308
+ opts = options.dup
309
+ list = [list].flatten
310
+ time = time.to_i
311
+ fu_log sprintf('set_mtime %s %d', list.join(' '), time) if opts.delete(:verbose)
312
+ c = client
313
+ list.each { |entry|
314
+ c.touch(entry, {:modificationtime => time})
315
+ }
316
+ end
317
+ module_function :set_mtime
318
+
319
+ # Internal: make a function private
320
+ def self.private_module_function(name)
321
+ module_function name
322
+ private_class_method name
323
+ end
324
+
325
+ @fileutils_output = $stderr
326
+ @fileutils_label = '[webhdfs]: '
327
+ # Internal: Logging
328
+ def fu_log(msg)
329
+ @fileutils_output ||= $stderr
330
+ @fileutils_label ||= ''
331
+ @fileutils_output.puts @fileutils_label + msg
332
+ end
333
+ private_module_function :fu_log
334
+
335
+ # Internal
336
+ def client
337
+ client = WebHDFS::Client.new(@fu_host, @fu_port, @fu_user, @fu_doas)
338
+ if @fu_httpfs_mode
339
+ client.httpfs_mode = true
340
+ end
341
+ if @fu_auth_type == :kerberos
342
+ client.auth_type = @fu_auth_type
343
+ client.keytab = @fu_keytab
344
+ client.pass = @fu_pass
345
+ end
346
+ client
347
+ end
348
+ private_module_function :client
349
+ end
350
+ end
data/lib/webhdfs.rb ADDED
@@ -0,0 +1 @@
1
+ require File.join(File.dirname(__FILE__), 'webhdfs', 'client.rb')
@@ -0,0 +1,19 @@
1
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
2
+ $LOAD_PATH.unshift(File.dirname(__FILE__))
3
+
4
+ require 'rr'
5
+ require 'test/unit'
6
+ class Test::Unit::TestCase
7
+ include RR::Adapters::TestUnit
8
+ end
9
+
10
+ if ENV['SIMPLE_COV']
11
+ require 'simplecov'
12
+ SimpleCov.start do
13
+ add_filter 'test/'
14
+ add_filter 'pkg/'
15
+ add_filter 'vendor/'
16
+ end
17
+ end
18
+
19
+ require 'test/unit'
@@ -0,0 +1,21 @@
1
+ require 'httpi'
2
+ require 'krb5_auth'
3
+
4
+ krb5 = Krb5Auth::Krb5.new
5
+ #krb5.get_init_creds_password('zen', 'abc123')
6
+ krb5.get_init_creds_keytab("hdfs/api.0.efoxconn.com", "/etc/hadoop/conf/hdfs.keytab")
7
+ krb5.cache
8
+
9
+
10
+ HTTPI.adapter = :curb # one of [:httpclient, :curb, :net_http]
11
+
12
+ request = HTTPI::Request.new
13
+ request.url = "http://api.0.efoxconn.com:14000/webhdfs/v1?op=gethomedirectory"
14
+ request.auth.gssnegotiate
15
+ response = HTTPI.request :get,request
16
+ puts response.code
17
+ puts response.headers
18
+ puts response.body
19
+
20
+
21
+ krb5.destroy
@@ -0,0 +1,44 @@
1
+ require 'rubygems'
2
+ require 'krb5_auth'
+ require 'date' # DateTime is used below to format credential start/end times
3
+ include Krb5Auth
4
+
5
+ krb5 = Krb5.new
6
+
7
+ # get the default realm
8
+ default_realm = krb5.get_default_realm
9
+ puts "Default realm is: " + default_realm
10
+
11
+ # try to cache non-existant data (this should fail and throw an exception)
12
+ begin
13
+ krb5.cache
14
+ rescue Krb5Auth::Krb5::Exception
15
+ puts "Failed caching credentials before obtaining them. Continuing..."
16
+ end
17
+
18
+ # Get initial credentials for the default principal and default keytab
19
+ krb5.get_init_creds_password("zen", "abc123")
20
+
21
+ # cache those credentials in the default cache location
22
+ krb5.cache
23
+
24
+ puts "Principal: " + krb5.get_default_principal
25
+
26
+ # List all of the credentials in the cache, and expiration times, etc.
27
+ krb5.list_cache.each do |cred|
28
+ starttime = DateTime.strptime(cred.starttime.to_s, "%s")
29
+ endtime = DateTime.strptime(cred.endtime.to_s, "%s")
30
+ puts "Client: " + cred.client + " Server: " + cred.server + " starttime: " + starttime.strftime("%D %T") + " endtime: " + endtime.strftime("%D %T")
31
+ end
32
+
33
+ # destroy those same credentials from the default cache location
34
+ krb5.destroy
35
+
36
+ # close the object (releases all memory)
37
+ krb5.close
38
+
39
+ # now try to use the object again; this should fail and throw an exception
40
+ begin
41
+ krb5.cache
42
+ rescue Krb5Auth::Krb5::Exception
43
+ puts "Tried to reuse closed object; continuing..."
44
+ end
@@ -0,0 +1,54 @@
1
+ $LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..', 'lib'))
2
+ require "webhdfs"
3
+
4
+ #puts (300..399).include?(307)
5
+
6
+ #require "webhdfs/fileutils"
7
+
8
+ client = WebHDFS::Client.new
9
+ client.host="api.0.efoxconn.com"
10
+ client.port=14000
11
+ client.auth_type=:kerberos
12
+ client.username="zen"
13
+ client.pass_keytab="abc123"
14
+ client.username="hdfs/api.0.efoxconn.com"
15
+ client.pass_keytab="/etc/hadoop/conf/hdfs.keytab"
16
+
17
+ client.delete('/user/fU3931aa69444d4558aba6b26981994e6e', {"recursive"=>"true"})
18
+ #client.mkdir('/user/zen')
19
+ #client.content_summary("/")
20
+ myhome = client.homedir
21
+ p myhome
22
+ info={"user"=>"u740a1d36-2e6d-4aa9-b2e7-028750a8c918", "pass"=>"pfa6f202f-b8da-421e-956b-a5383899192b"}
23
+ client.username=info["user"]
24
+ client.pass_keytab=info["pass"]
25
+ myhome = client.homedir
26
+ p myhome
27
+ client.setowner(myhome, {:owner=>info["user"],:group=>info["user"]} )
28
+ #client.create_symlink("/user/gangang/lznew", "/user/zen/newfolder")
29
+ # puts client.mkdir(myhome+"/newfolder/nf1/nf2")
30
+ #puts client.delete(myhome+"/newfolder")
31
+ #client.getfilechecksum("/user/update-java.deb")
32
+
33
+ #File.open("/tmp/update-java.deb", "wb") do |f|
34
+ # f.write client.read("/user/update-java.deb")
35
+ #end
36
+
37
+ #for x in 20..30
38
+ #
39
+ #client.delete('/user/U-abcdef-'+ x.to_s)
40
+ ## client.mkdir('/user/U-abcdef-'+ x.to_s)
41
+ #end
42
+
43
+ #client.setowner('/user/zen', {"owner"=> "zen", "group"=> "zen"} )
44
+ file = "test_helper.rb"
45
+ #for x in 0..10
46
+ # client.append("/user/VERSION"+x.to_s,
47
+ # File.new(file, 'rb').read(File.size(file)).to_s )
48
+ #end
49
+
50
+ #### PUT Curl::Err::SendFailedRewind
51
+ #for x in 20..30
52
+ # client.create("/user/VERSION"+x.to_s,
53
+ # File.new(file, 'rb').read(File.size(file)) )
54
+ #end
@@ -0,0 +1,65 @@
1
+ require 'test_helper'
2
+
3
+ class FileUtilsTest < Test::Unit::TestCase
4
+ def setup
5
+ require 'webhdfs'
6
+ WebHDFS::FileUtils.set_server("api.0.efoxconn.com", 14000, "zen")
10
+ end
11
+
12
+ def test_copy_from_local
13
+ WebHDFS::FileUtils.copy_from_local('VERSION', 'VERSION', :verbose => true)
14
+ WebHDFS::FileUtils.copy_to_local('VERSION', 'VERSION2', :verbose => true)
15
+ WebHDFS::FileUtils.append('VERSION', 'foo-bar-buzz', :verbose => true)
16
+ WebHDFS::FileUtils.rm('VERSION', :verbose => true)
17
+ end
18
+
19
+ def test_rm
20
+ WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
21
+ WebHDFS::FileUtils.rm('foo', :verbose => true)
22
+ end
23
+
24
+ def test_rmr
25
+ WebHDFS::FileUtils.mkdir_p('foo/bar/buzz', :mode => 0777, :verbose => true)
26
+ WebHDFS::FileUtils.rmr('foo', :verbose => true)
27
+ end
28
+
29
+ def test_rename
30
+ #WebHDFS::FileUtils.mkdir_p('foo', :mode => 0777, :verbose => true)
31
+ #WebHDFS::FileUtils.rename('foo', 'foo2', :verbose => true)
32
+ #WebHDFS::FileUtils.rmr('foo2', :verbose => true)
33
+ end
34
+
35
+ def test_chmod
36
+ WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
37
+ WebHDFS::FileUtils.chmod(0755, 'foo', :verbose => true)
38
+ WebHDFS::FileUtils.chmod(0777, 'foo', :verbose => true)
39
+ WebHDFS::FileUtils.rm('foo', :verbose => true)
40
+ end
41
+
42
+ def test_chown
43
+ #WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
44
+ #WebHDFS::FileUtils.chown('webuser', 'supergroup', 'foo', :verbose => true)
45
+ #WebHDFS::FileUtils.rm('foo', :verbose => true)
46
+ end
47
+
48
+ def test_set_repl_factor
49
+ WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
50
+ WebHDFS::FileUtils.set_repl_factor('foo', 5)
51
+ WebHDFS::FileUtils.rm('foo', :verbose => true)
52
+ end
53
+
54
+ def test_set_atime
55
+ #WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
56
+ #WebHDFS::FileUtils.set_atime('foo', Time.now)
57
+ #WebHDFS::FileUtils.rm('foo', :verbose => true)
58
+ end
59
+
60
+ def test_set_mtime
61
+ #WebHDFS::FileUtils.mkdir('foo', :mode => 0777, :verbose => true)
62
+ #WebHDFS::FileUtils.set_mtime('foo', Time.now)
63
+ #WebHDFS::FileUtils.rm('foo', :verbose => true)
64
+ end
65
+ end
@@ -0,0 +1,26 @@
1
+ # encoding: utf-8
2
+ $:.push File.expand_path('../lib', __FILE__)
3
+
4
+ Gem::Specification.new do |gem|
5
+ gem.name = "webhdfs-with-krb5"
6
+ gem.description = "Ruby WebHDFS/HttpFs client with krb5 authentication"
7
+ gem.homepage = ""
8
+ gem.summary = gem.description
9
+ gem.version = File.read("VERSION").strip
10
+ gem.authors = ["Tomoaki Takamatsu"]
11
+ gem.email = "takamatsutomoaki@gmail.com"
12
+ gem.has_rdoc = false
13
+ gem.platform = Gem::Platform::RUBY
14
+ gem.files = `git ls-files`.split("\n")
15
+ #gem.test_files = `git ls-files -- {test,spec,features}/*`.split("\n")
16
+ #gem.executables = `git ls-files -- bin/*`.split("\n").map{ |f| File.basename(f) }
17
+ gem.require_paths = ['lib']
18
+
19
+ gem.add_development_dependency "rake", ">= 0.9.2"
20
+ gem.add_development_dependency "rdoc", ">= 3.12"
21
+ gem.add_development_dependency "simplecov", ">= 0.5.4"
22
+ gem.add_development_dependency "rr", ">= 1.0.0"
23
+ gem.add_development_dependency "timfel-krb5-auth"
24
+ gem.add_development_dependency "httpi"
25
+ gem.add_development_dependency "curb"
26
+ end
metadata ADDED
@@ -0,0 +1,177 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: webhdfs-with-krb5
3
+ version: !ruby/object:Gem::Version
4
+ prerelease: false
5
+ segments:
6
+ - 0
7
+ - 6
8
+ - 1
9
+ version: 0.6.1
10
+ platform: ruby
11
+ authors:
12
+ - Tomoaki Takamatsu
13
+ autorequire:
14
+ bindir: bin
15
+ cert_chain: []
16
+
17
+ date: 2013-05-20 00:00:00 +09:00
18
+ default_executable:
19
+ dependencies:
20
+ - !ruby/object:Gem::Dependency
21
+ name: rake
22
+ prerelease: false
23
+ requirement: &id001 !ruby/object:Gem::Requirement
24
+ none: false
25
+ requirements:
26
+ - - ">="
27
+ - !ruby/object:Gem::Version
28
+ segments:
29
+ - 0
30
+ - 9
31
+ - 2
32
+ version: 0.9.2
33
+ type: :development
34
+ version_requirements: *id001
35
+ - !ruby/object:Gem::Dependency
36
+ name: rdoc
37
+ prerelease: false
38
+ requirement: &id002 !ruby/object:Gem::Requirement
39
+ none: false
40
+ requirements:
41
+ - - ">="
42
+ - !ruby/object:Gem::Version
43
+ segments:
44
+ - 3
45
+ - 12
46
+ version: "3.12"
47
+ type: :development
48
+ version_requirements: *id002
49
+ - !ruby/object:Gem::Dependency
50
+ name: simplecov
51
+ prerelease: false
52
+ requirement: &id003 !ruby/object:Gem::Requirement
53
+ none: false
54
+ requirements:
55
+ - - ">="
56
+ - !ruby/object:Gem::Version
57
+ segments:
58
+ - 0
59
+ - 5
60
+ - 4
61
+ version: 0.5.4
62
+ type: :development
63
+ version_requirements: *id003
64
+ - !ruby/object:Gem::Dependency
65
+ name: rr
66
+ prerelease: false
67
+ requirement: &id004 !ruby/object:Gem::Requirement
68
+ none: false
69
+ requirements:
70
+ - - ">="
71
+ - !ruby/object:Gem::Version
72
+ segments:
73
+ - 1
74
+ - 0
75
+ - 0
76
+ version: 1.0.0
77
+ type: :development
78
+ version_requirements: *id004
79
+ - !ruby/object:Gem::Dependency
80
+ name: timfel-krb5-auth
81
+ prerelease: false
82
+ requirement: &id005 !ruby/object:Gem::Requirement
83
+ none: false
84
+ requirements:
85
+ - - ">="
86
+ - !ruby/object:Gem::Version
87
+ segments:
88
+ - 0
89
+ version: "0"
90
+ type: :development
91
+ version_requirements: *id005
92
+ - !ruby/object:Gem::Dependency
93
+ name: httpi
94
+ prerelease: false
95
+ requirement: &id006 !ruby/object:Gem::Requirement
96
+ none: false
97
+ requirements:
98
+ - - ">="
99
+ - !ruby/object:Gem::Version
100
+ segments:
101
+ - 0
102
+ version: "0"
103
+ type: :development
104
+ version_requirements: *id006
105
+ - !ruby/object:Gem::Dependency
106
+ name: curb
107
+ prerelease: false
108
+ requirement: &id007 !ruby/object:Gem::Requirement
109
+ none: false
110
+ requirements:
111
+ - - ">="
112
+ - !ruby/object:Gem::Version
113
+ segments:
114
+ - 0
115
+ version: "0"
116
+ type: :development
117
+ version_requirements: *id007
118
+ description: Ruby WebHDFS/HttpFs client with krb5 authentication
119
+ email: takamatsutomoaki@gmail.com
120
+ executables: []
121
+
122
+ extensions: []
123
+
124
+ extra_rdoc_files: []
125
+
126
+ files:
127
+ - AUTHORS
128
+ - COPYING
129
+ - Gemfile
130
+ - README.md
131
+ - Rakefile
132
+ - VERSION
133
+ - lib/webhdfs.rb
134
+ - lib/webhdfs/client.rb
135
+ - lib/webhdfs/client_v1.rb
136
+ - lib/webhdfs/exceptions.rb
137
+ - lib/webhdfs/fileutils.rb
138
+ - test/test_helper.rb
139
+ - test/test_httpi.rb
140
+ - test/test_krb5-auth.rb
141
+ - test/test_webhdfs.rb
142
+ - test/webhdfs/fileutils.rb
143
+ - webhdfs-with-krb5.gemspec
144
+ has_rdoc: true
145
+ homepage: ""
146
+ licenses: []
147
+
148
+ post_install_message:
149
+ rdoc_options: []
150
+
151
+ require_paths:
152
+ - lib
153
+ required_ruby_version: !ruby/object:Gem::Requirement
154
+ none: false
155
+ requirements:
156
+ - - ">="
157
+ - !ruby/object:Gem::Version
158
+ segments:
159
+ - 0
160
+ version: "0"
161
+ required_rubygems_version: !ruby/object:Gem::Requirement
162
+ none: false
163
+ requirements:
164
+ - - ">="
165
+ - !ruby/object:Gem::Version
166
+ segments:
167
+ - 0
168
+ version: "0"
169
+ requirements: []
170
+
171
+ rubyforge_project:
172
+ rubygems_version: 1.3.7
173
+ signing_key:
174
+ specification_version: 3
175
+ summary: Ruby WebHDFS/HttpFs client with krb5 authentication
176
+ test_files: []
177
+