hdfs_jruby 0.0.4 → 0.0.7

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 69687639027bf661cd93890f5c8fb6fd9e120213
-  data.tar.gz: 25deefd6f6d5c6a2ef817794207442889ac98ee6
+  metadata.gz: 9ba67ce8212278a98c6a60120d657d4b1f0ddd15
+  data.tar.gz: a471124e46c89d0ccb854c0789fb0745f461dd87
 SHA512:
-  metadata.gz: a6ef1a85d42d0c492c1584a975786476ef0c1bc5338bc61460e0f9ff349f46a189123d885cfdf11ef9139edb59a7fd0c80cf5dc6e9bbb3ae76d692feefc6a93c
-  data.tar.gz: df0f2243afaaaa31fa5211860ce1c65fe0c40b549813412ee347c6de7a51a365b16895260f288e52f0861551fdb081e2bbdcaa956587c0571005017ac2defb65
+  metadata.gz: a7d9c78123be942b16406ed1e8d3fd41798f49d5ce908ccac6b3760596f77944527e2cb276235001e1f95cdcc195f74d07f68af9d6114c2a2f39095b2c0fffb3
+  data.tar.gz: d99a9a45f7117acbdea8f85fd5a3111cebb6f486bd1eb48218aea7672700d4030eb3bf5f131f7a4177f2330ace0614bcc482b090af61eb9964e1d8bc72fbcf60
data/hdfs_jruby.gemspec CHANGED
@@ -8,7 +8,7 @@ Gem::Specification.new do |spec|
   spec.version = Hdfs::VERSION
   spec.authors = ["shinji ikeda"]
   spec.email = ["gm.ikeda@gmail.com"]
-  spec.summary = %q{ jruby hdfs api}
+  spec.summary = %q{ Hadoop hdfs api for JRuby}
   spec.description = %q{}
   spec.homepage = "https://github.com/shinjiikeda/hdfs_jruby"
   spec.license = "MIT"
data/lib/hdfs_jruby/file.rb CHANGED
@@ -7,6 +7,9 @@ module Hdfs
   import java.lang.String
 
   class File < Delegator
+
+    # @param [String] path
+    # @param [String] mode 'r' or 'w' or 'a'
     def initialize(path, mode = "r")
       @conf = Hdfs::Configuration.new()
       @fs = Hdfs::FileSystem.get(@conf)
@@ -29,6 +32,8 @@ module Hdfs
       end
     end
 
+    # @param [String] path
+    # @param [String] mode 'r' or 'w' or 'a'
     def self.open(path, mode = "r")
       if block_given?
         io = File.new(path, mode).to_io
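The YARD comments added here document the accepted open modes. A minimal usage sketch, assuming a JRuby process with the gem installed, a reachable HDFS, and that the block form of Hdfs::File.open yields the IO built above; the path and the explicit require of hdfs_jruby/file are illustrative:

  require 'hdfs_jruby'
  require 'hdfs_jruby/file'   # explicit, in case the main file does not pull it in

  # 'w' creates/truncates, 'r' reads, 'a' appends (where the cluster allows append).
  Hdfs::File.open("/tmp/hdfs_jruby_example.txt", "w") do |io|
    io.write("hello from jruby\n")
  end

  Hdfs::File.open("/tmp/hdfs_jruby_example.txt", "r") do |io|
    puts io.read
  end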
data/lib/hdfs_jruby/version.rb CHANGED
@@ -1,3 +1,3 @@
 module Hdfs
-  VERSION = "0.0.4"
+  VERSION = "0.0.7"
 end
data/lib/hdfs_jruby.rb CHANGED
@@ -11,9 +11,19 @@ module Hdfs
     warn "only for use with JRuby"
   end
 
+  if ! ENV["HADOOP_HOME"] && File.exists?("/usr/lib/hadoop")
+    ENV["HADOOP_HOME"] = "/usr/lib/hadoop"
+  end
+
   if ENV["HADOOP_HOME"]
     HADOOP_HOME=ENV["HADOOP_HOME"]
-    Dir["#{HADOOP_HOME}/#{JAR_PATTERN_0_20}","#{HADOOP_HOME}/lib/*.jar", "#{HADOOP_HOME}/share/hadoop/common/*.jar", "#{HADOOP_HOME}/share/hadoop/common/lib/*.jar", "#{HADOOP_HOME}/share/hadoop/hdfs/*.jar", "#{HADOOP_HOME}/share/hadoop/hdfs/lib/*.jar"].each do |jar|
+    Dir["#{HADOOP_HOME}/#{JAR_PATTERN_0_20}",
+        "#{HADOOP_HOME}/lib/*.jar",
+        "#{HADOOP_HOME}/share/hadoop/common/*.jar",
+        "#{HADOOP_HOME}/share/hadoop/common/lib/*.jar",
+        "#{HADOOP_HOME}/share/hadoop/hdfs/*.jar",
+        "#{HADOOP_HOME}/share/hadoop/hdfs/lib/*.jar"
+    ].each do |jar|
       require jar
     end
     $CLASSPATH << "#{HADOOP_HOME}/conf"
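This hunk adds a fallback to /usr/lib/hadoop and reflows the jar glob; jar discovery is still driven by HADOOP_HOME at require time. A hedged sketch of pointing the gem at a non-default Hadoop install (the path is illustrative):

  # JRuby only. Set the variable before the require: the jars are globbed
  # and loaded when hdfs_jruby.rb is first required.
  ENV["HADOOP_HOME"] ||= "/opt/hadoop-2.4.0"   # illustrative location
  require 'hdfs_jruby'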
@@ -23,25 +33,29 @@ module Hdfs
 
   class FileSystem < org.apache.hadoop.fs.FileSystem
   end
-
+
   class Configuration < org.apache.hadoop.conf.Configuration
   end
 
   class Path < org.apache.hadoop.fs.Path
   end
-
+
   class FsPermission < org.apache.hadoop.fs.permission.FsPermission
   end
 
   @conf = Hdfs::Configuration.new
   @fs = Hdfs::FileSystem.get(@conf)
 
+  # @private
   def connectAsUser(user)
     uri = Hdfs::FileSystem.getDefaultUri(@conf)
     @fs.close if ! @fs.nil?
     @fs = Hdfs::FileSystem.get(uri, @conf, user)
   end
 
+  # ls
+  # @param [String] path
+  # @return [Array] match path list
   def ls(path)
     p = _path(path)
     list = @fs.globStatus(p)
@@ -70,7 +84,8 @@ module Hdfs
     end
     ret_list if ! block_given?
   end
-
+
+  # @private
   def list(path, opts={})
     use_glob = opts[:glob] ? true : false
     p = _path(path)
@@ -95,38 +110,56 @@ module Hdfs
     end
   end
 
+  # @param [String] path
   def exists?(path)
     @fs.exists(_path(path))
   end
-
+
+  # @param [String] src hdfs source path
+  # @param [String] dst hdfs destination path
   def move(src, dst)
     @fs.rename(Path.new(src), Path.new(dst))
   end
-
+
+  # delete
+  #
+  # @param [String] path
+  # @param [Boolean] r recursive false or true (default: false)
   def delete(path, r=false)
     @fs.delete(_path(path), r)
   end
-
+
+  # @return [Boolean] true: file, false: directory
  def file?(path)
     @fs.isFile(_path(path))
   end
 
+  # @return [Boolean] true: directory, false: file
   def directory?(path)
     @fs.isDirectory(_path(path))
   end
-
+
+  # @return [Integer] file size
   def size(path)
     @fs.getFileStatus(_path(path)).getLen()
   end
 
+  # make directory
+  # @param [String] path
   def mkdir(path)
     @fs.mkdirs(_path(path))
   end
-
+
+  # put file or directory to hdfs
+  # @param [String] local surouce (local path)
+  # @param [String] remote destination (hdfs path)
   def put(local, remote)
     @fs.copyFromLocalFile(Path.new(local), Path.new(remote))
   end
 
+  # get file or directory from hdfs
+  # @param [String] remote surouce (hdfs path)
+  # @param [String] local destination (local path)
   def get(remote, local)
     @fs.copyToLocalFile(Path.new(remote), Path.new(local))
   end
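The newly documented helpers are thin wrappers over org.apache.hadoop.fs.FileSystem. A usage sketch, assuming the methods are called as module methods the way Hdfs.put is exercised in the gem's spec further below; paths are illustrative:

  require 'hdfs_jruby'

  Hdfs.mkdir("/tmp/hdfs_jruby_demo")
  Hdfs.put("./local_data", "/tmp/hdfs_jruby_demo")            # local -> HDFS
  puts Hdfs.size("/tmp/hdfs_jruby_demo/local_data")           # size in bytes
  Hdfs.ls("/tmp/hdfs_jruby_demo/*") { |entry| p entry }       # glob listing, block form
  Hdfs.get("/tmp/hdfs_jruby_demo/local_data", "./local_copy") # HDFS -> local
  Hdfs.delete("/tmp/hdfs_jruby_demo", true)                   # recursive delete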
@@ -142,11 +175,16 @@ module Hdfs
   def set_working_directory(path)
     @fs.setWorkingDirectory(_path())
   end
-
+
+  # @param [String] path
+  # @param [Integer] perm permission
   def set_permission(path, perm)
     @fs.setPermission(_path(path), org.apache.hadoop.fs.permission.FsPermission.new(perm))
   end
 
+  # @param [String] path
+  # @param [String] owner
+  # @param [String] group
   def set_owner(path, owner, group)
     @fs.setOwner(_path(path), owner, group)
   end
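set_permission feeds its numeric argument to FsPermission, and set_owner passes the owner and group strings straight through to the underlying FileSystem call. A brief sketch (values are illustrative and normally require owner or HDFS superuser rights):

  Hdfs.set_permission("/tmp/hdfs_jruby_demo", 0755)          # 0755 -> rwxr-xr-x
  Hdfs.set_owner("/tmp/hdfs_jruby_demo", "hdfs", "hadoop")   # owner, group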
data/rspec/01.rb → data/spec/01.rb CHANGED
@@ -16,7 +16,7 @@ describe "test1" do
   end
 
   it "put test_dir" do
-    Hdfs.put("./rspec/test_data", HDFS_TMP_DIR)
+    Hdfs.put("./spec/test_data", HDFS_TMP_DIR)
   end
 
   it "ls #{HDFS_TMP_DIR}/test_data use block" do
data/rspec/test_data/* → data/spec/test_data/* (files without changes)
metadata CHANGED
@@ -1,43 +1,43 @@
 --- !ruby/object:Gem::Specification
 name: hdfs_jruby
 version: !ruby/object:Gem::Version
-  version: 0.0.4
+  version: 0.0.7
 platform: ruby
 authors:
 - shinji ikeda
-autorequire: 
+autorequire:
 bindir: bin
 cert_chain: []
 date: 2014-09-12 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
-  requirement: !ruby/object:Gem::Requirement
+  version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ~>
       - !ruby/object:Gem::Version
         version: '1.6'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
+  requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - ~>
       - !ruby/object:Gem::Version
         version: '1.6'
+  prerelease: false
+  type: :development
 - !ruby/object:Gem::Dependency
   name: rake
-  requirement: !ruby/object:Gem::Requirement
+  version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
-  type: :development
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
+  requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - ">="
+    - - '>='
       - !ruby/object:Gem::Version
         version: '0'
+  prerelease: false
+  type: :development
 description: ''
 email:
 - gm.ikeda@gmail.com
@@ -45,7 +45,7 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- ".gitignore"
+- .gitignore
 - Gemfile
 - LICENSE.txt
 - README.md
@@ -54,12 +54,12 @@ files:
 - lib/hdfs_jruby.rb
 - lib/hdfs_jruby/file.rb
 - lib/hdfs_jruby/version.rb
-- rspec/01.rb
-- rspec/test_data/a/a/test.txt
-- rspec/test_data/a/b/test.txt
-- rspec/test_data/a/c/test.txt
-- rspec/test_data/b/test.txt
-- rspec/test_data/c/test.txt
+- spec/01.rb
+- spec/test_data/a/a/test.txt
+- spec/test_data/a/b/test.txt
+- spec/test_data/a/c/test.txt
+- spec/test_data/b/test.txt
+- spec/test_data/c/test.txt
 - test/test_append.rb
 - test/test_get.rb
 - test/test_ls.rb
@@ -74,27 +74,33 @@ homepage: https://github.com/shinjiikeda/hdfs_jruby
 licenses:
 - MIT
 metadata: {}
-post_install_message: 
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - '>='
     - !ruby/object:Gem::Version
      version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">="
+  - - '>='
     - !ruby/object:Gem::Version
      version: '0'
 requirements: []
-rubyforge_project: 
+rubyforge_project:
 rubygems_version: 2.2.2
-signing_key: 
+signing_key:
 specification_version: 4
-summary: jruby hdfs api
+summary: Hadoop hdfs api for JRuby
 test_files:
+- spec/01.rb
+- spec/test_data/a/a/test.txt
+- spec/test_data/a/b/test.txt
+- spec/test_data/a/c/test.txt
+- spec/test_data/b/test.txt
+- spec/test_data/c/test.txt
 - test/test_append.rb
 - test/test_get.rb
 - test/test_ls.rb
@@ -105,3 +111,4 @@ test_files:
 - test/test_read2.rb
 - test/test_utils.rb
 - test/test_write.rb
+has_rdoc: