hdfs_jruby 0.0.7 → 0.0.8

checksums.yaml CHANGED
@@ -1,7 +1,15 @@
  ---
- SHA1:
- metadata.gz: 9ba67ce8212278a98c6a60120d657d4b1f0ddd15
- data.tar.gz: a471124e46c89d0ccb854c0789fb0745f461dd87
+ !binary "U0hBMQ==":
+ metadata.gz: !binary |-
+ ZTUwNDJjYTYyMDY1MTI1NGE5MjVmYjI0MmNjMzk1NmIxMDE4YjUzNA==
+ data.tar.gz: !binary |-
+ YjBlNmNkOTc1YThhZmNmNTdjMzZmYjJiM2E2NjQzZDA3YjVlNDg3Ng==
  SHA512:
- metadata.gz: a7d9c78123be942b16406ed1e8d3fd41798f49d5ce908ccac6b3760596f77944527e2cb276235001e1f95cdcc195f74d07f68af9d6114c2a2f39095b2c0fffb3
- data.tar.gz: d99a9a45f7117acbdea8f85fd5a3111cebb6f486bd1eb48218aea7672700d4030eb3bf5f131f7a4177f2330ace0614bcc482b090af61eb9964e1d8bc72fbcf60
+ metadata.gz: !binary |-
+ ZmUxOTMwMTIzYTcwZjdlZjc5NTFkYmEwMWJiYjI3OWI0MzFlNTgxYmY2MDhk
+ OTU1MjNmOGFkY2YyY2JiZmU0NDk0MDk5ODk3MzRhY2Y4NDA3YzczMjJjMDc1
+ YTA5MmYwZmNjZTNiNjAxMTQwNzJmYTM0MDVhY2YwYTA1ZDY2Yjc=
+ data.tar.gz: !binary |-
+ ZTQ1NjU5YTdkZTEzNDRlMDZiNTQzYTNhMDMwOGNiNDI1M2U2ODhmYWE0YmY1
+ YTNhZGVmNDYwODM4YmEyMjQ4NGRjYTY5NjRmZWRkNjI5N2IwNmE1NzI2N2Ew
+ NDExNmE0ZmI3ZGUwMWY1MzZhOTY1ODM2NDZjMmJhYmJkMzA0NTk=
data/README.md CHANGED
@@ -17,6 +17,9 @@ Or install it yourself as:
  $ gem install hdfs_jruby

  ## Usage
+ http://rubydoc.info/gems/hdfs_jruby/0.0.7/frames
+
+ Example

  require 'hdfs_jruby'

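For orientation, a minimal usage sketch of the API this release documents. It assumes JRuby with `HADOOP_HOME` set (lib/hdfs_jruby.rb raises otherwise, as a later hunk shows); `/user/foo` is a placeholder path:

    require 'hdfs_jruby'

    # List a directory; each entry is a Hash built by Hdfs._conv (see the last hunk)
    Hdfs.ls("/user/foo").each do |stat|
      puts stat['path']
    end

    # Read a file line by line, per the @example added in this release
    Hdfs::File.open("/user/foo/hoge.txt", "r").each do |line|
      puts line
    end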
data/lib/hdfs_jruby/file.rb CHANGED
@@ -9,7 +9,7 @@ module Hdfs
  class File < Delegator

  # @param [String] path
- # @param [String] mode 'r' or 'w' or 'a'
+ # @param [String] mode 'r': read, 'w': write, 'a': append
  def initialize(path, mode = "r")
  @conf = Hdfs::Configuration.new()
  @fs = Hdfs::FileSystem.get(@conf)
@@ -32,8 +32,17 @@ module Hdfs
  end
  end

+ # @example
+ #   Hdfs::File.open("hoge.txt", "r") do | io |
+ #     ...
+ #   end
+ #
+ #   Hdfs::File.open("hoge.txt", "r").each do | line |
+ #     puts line
+ #   end
+ #
  # @param [String] path
- # @param [String] mode 'r' or 'w' or 'a'
+ # @param [String] mode 'r': read, 'w': write, 'a': append
  def self.open(path, mode = "r")
  if block_given?
  io = File.new(path, mode).to_io
@@ -62,6 +71,7 @@ module Hdfs
  outbuf << java.lang.String.new(buf, 0, n).to_s
  end

+ # @private
  def seek(offset, whence = IO::SEEK_SET)
  @stream.seek(offset)
  0
@@ -71,11 +81,13 @@ module Hdfs
  @stream.close
  @fs.close
  end
-
+
+ # @private
  def __getobj__
  @stream
  end
-
+
+ # @private
  def __setobj__(obj)
  @stream = obj
  end
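`self.open` above hands the IO-like stream to the block when one is given. A short read/write sketch under that assumption; "hoge.txt" is the placeholder name from the doc comments, and the standard IO `#write`/`#read` methods are assumed to be available on the wrapped stream:

    require 'hdfs_jruby'

    # Write a file on HDFS (block form; the stream is yielded as io)
    Hdfs::File.open("hoge.txt", "w") do |io|
      io.write("hello from jruby\n")
    end

    # Read it back in one call
    Hdfs::File.open("hoge.txt", "r") do |io|
      puts io.read
    end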
data/lib/hdfs_jruby/version.rb CHANGED
@@ -1,3 +1,3 @@
  module Hdfs
- VERSION = "0.0.7"
+ VERSION = "0.0.8"
  end
data/lib/hdfs_jruby.rb CHANGED
@@ -31,15 +31,19 @@ module Hdfs
  raise "HADOOP_HOME is not set!"
  end

+ # @private
  class FileSystem < org.apache.hadoop.fs.FileSystem
  end

+ # @private
  class Configuration < org.apache.hadoop.conf.Configuration
  end
-
+
+ # @private
  class Path < org.apache.hadoop.fs.Path
  end
-
+
+ # @private
  class FsPermission < org.apache.hadoop.fs.permission.FsPermission
  end

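These wrappers are plain JRuby subclasses of the underlying Hadoop Java classes, now tagged `@private` so they drop out of the generated docs; `File#initialize` above shows the intended use. A sketch of calling through them directly (`/tmp/example` is a placeholder path; `exists` is the stock `org.apache.hadoop.fs.FileSystem` method):

    require 'hdfs_jruby'

    conf = Hdfs::Configuration.new
    fs   = Hdfs::FileSystem.get(conf)

    # The Hadoop Java API is callable straight off the wrapper
    puts fs.exists(Hdfs::Path.new("/tmp/example"))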
@@ -54,8 +58,21 @@ module Hdfs
  end

  # ls
+ # @example
+ #   Hdfs.ls("hoge/").each do | stat |
+ #     p stat
+ #   end
  # @param [String] path
- # @return [Array] match path list
+ # @return [Array] file status array
+ #
+ # @note file status:
+ #   path
+ #   length
+ #   modificationTime
+ #   owner
+ #   group
+ #   permission
+ #   type
  def ls(path)
  p = _path(path)
  list = @fs.globStatus(p)
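Reading the new `@note` together with `_conv` (final hunk of this file), each element `ls` returns is a plain Ruby Hash keyed by those field names as strings. A sketch, with "hoge/" as the placeholder glob from the doc comment and the assumption that 'length' arrives as an integer:

    Hdfs.ls("hoge/").each do |stat|
      printf("%-10s %12d %s\n", stat['owner'], stat['length'], stat['path'])
    end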
@@ -96,6 +113,7 @@ module Hdfs
  else
  list = @fs.listStatus(p)
  end
+ return [] if list.nil?

  if ! block_given?
  ret_list = []
@@ -164,24 +182,29 @@ module Hdfs
  @fs.copyToLocalFile(Path.new(remote), Path.new(local))
  end

+ # get home directory
  def get_home_directory()
  @fs.getHomeDirectory()
  end

+ # get working directory
  def get_working_directory()
  @fs.getWorkingDirectory()
  end
-
+
+ # set working directory
  def set_working_directory(path)
  @fs.setWorkingDirectory(_path())
  end

+ # set permission
  # @param [String] path
  # @param [Integer] perm permission
  def set_permission(path, perm)
  @fs.setPermission(_path(path), org.apache.hadoop.fs.permission.FsPermission.new(perm))
  end

+ # set owner & group
  # @param [String] path
  # @param [String] owner
  # @param [String] group
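`set_permission` takes an Integer, and Hadoop's `FsPermission` constructor reads it as raw permission bits, so an octal literal is the natural spelling. A hedged sketch; the owner/group setter's definition is cut off by this hunk, so the name `set_owner` is an assumption based on the comment, and `/user/foo/data` is a placeholder path:

    # 0755 => rwxr-xr-x
    Hdfs.set_permission("/user/foo/data", 0755)

    # assumed method name, matching the "set owner & group" comment above
    Hdfs.set_owner("/user/foo/data", "foo", "hadoop")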
@@ -208,6 +231,8 @@ module Hdfs
  module_function :connectAsUser

  private
+
+ # @private
  def _path(path)
  if path.nil?
  raise "path is nil"
@@ -215,6 +240,7 @@ module Hdfs
  Path.new(path)
  end

+ # @private
  def _conv(stat)
  file_info = {}
  file_info['path'] = stat.getPath.to_s
metadata CHANGED
@@ -1,43 +1,43 @@
  --- !ruby/object:Gem::Specification
  name: hdfs_jruby
  version: !ruby/object:Gem::Version
- version: 0.0.7
+ version: 0.0.8
  platform: ruby
  authors:
  - shinji ikeda
- autorequire:
+ autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-09-12 00:00:00.000000000 Z
+ date: 2014-10-31 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
  version: '1.6'
- requirement: !ruby/object:Gem::Requirement
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
  version: '1.6'
- prerelease: false
- type: :development
  - !ruby/object:Gem::Dependency
  name: rake
- version_requirements: !ruby/object:Gem::Requirement
+ requirement: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
- requirement: !ruby/object:Gem::Requirement
+ type: :development
+ prerelease: false
+ version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
- prerelease: false
- type: :development
  description: ''
  email:
  - gm.ikeda@gmail.com
@@ -74,24 +74,24 @@ homepage: https://github.com/shinjiikeda/hdfs_jruby
  licenses:
  - MIT
  metadata: {}
- post_install_message:
+ post_install_message:
  rdoc_options: []
  require_paths:
  - lib
  required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - '>='
+ - - ! '>='
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubyforge_project:
+ rubyforge_project:
  rubygems_version: 2.2.2
- signing_key:
+ signing_key:
  specification_version: 4
  summary: Hadoop hdfs api for JRuby
  test_files:
@@ -111,4 +111,3 @@ test_files:
  - test/test_read2.rb
  - test/test_utils.rb
  - test/test_write.rb
- has_rdoc: