hdfs_jruby 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     NzY1ZTJlNjhlMTc5NWUxZDIzNDY2ZDlhOWYzZjFiMWMwNTkxYzMwOA==
+   data.tar.gz: !binary |-
+     YmZlYjFjODUzYjM3YTBiOTc2MzFmZjkyMWE2ZmViMTdlZDYwOTMyYQ==
+ SHA512:
+   metadata.gz: !binary |-
+     MjlkNGIwYTRhNTUyZWYyZWQzMmZmNzg3NGFlYTVmNmY0NWNhZDI0MDQyMjAx
+     ZWIzYTNhMTQ2ODg3MzQzYmI5YWFjYTdiZGI0MWMyOWQ4Yjk2MjlkY2M0MWEw
+     MzE5Y2EzZTEzOTc4N2VmZjk1MGI3NjFhYTc1OGVmMmU4OTU2ZTc=
+   data.tar.gz: !binary |-
+     Yjc2ZDdhZjZlYmZhY2VhNTc0ODE4OWI2OGZkODU1Yjk5MjVmMGY1MGMxODcw
+     ODg0MWQzNjlmOTNhNTU0MDUxYTRiNjNmMzAwZDg4MGU3ZjEzYzliYWY2ZWYw
+     NmI1OWYzNmU3NTRjODJkZTYwZDhkYzlhOTAyNGJkM2IwMGRjMGQ=
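These entries follow the standard RubyGems checksums.yaml layout: hex SHA1 and SHA512 digests of the metadata.gz and data.tar.gz archives packed inside the .gem, stored base64-encoded under YAML !binary tags. A minimal verification sketch, assuming the two archives have been extracted from the .gem into the current directory:

    require 'digest'

    # Compare these hex digests against the decoded values in checksums.yaml.
    puts Digest::SHA1.hexdigest(File.binread("metadata.gz"))
    puts Digest::SHA512.hexdigest(File.binread("data.tar.gz"))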
data/.gitignore ADDED
@@ -0,0 +1,22 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
+ *.bundle
+ *.so
+ *.o
+ *.a
+ mkmf.log
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'https://rubygems.org'
+
+ # Specify your gem's dependencies in hdfs_jruby.gemspec
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2014 game
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,29 @@
+ # Hdfs Jruby
+
+ Jruby HDFS API
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+     gem 'hdfs_jruby'
+
+ And then execute:
+
+     $ bundle
+
+ Or install it yourself as:
+
+     $ gem install hdfs_jruby
+
+ ## Usage
+
+ TODO: Write usage instructions here
+
+ ## Contributing
+
+ 1. Fork it ( https://github.com/[my-github-username]/hdfs_jruby/fork )
+ 2. Create your feature branch (`git checkout -b my-new-feature`)
+ 3. Commit your changes (`git commit -am 'Add some feature'`)
+ 4. Push to the branch (`git push origin my-new-feature`)
+ 5. Create a new Pull Request
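The Usage section above is left as a TODO; a rough sketch of basic usage, drawn from the API in lib/hdfs_jruby.rb later in this diff, looks like the following under JRuby (the HDFS paths are made up for illustration, and HADOOP_HOME must point at a Hadoop installation):

    require 'hdfs_jruby'

    # List a directory; each entry is a Hash with path, length, owner, etc.
    Hdfs.list("/user/example") do |stat|
      puts "#{stat['path']}  #{stat['length']} bytes"
    end

    # Copy a local file into HDFS and confirm it arrived.
    Hdfs.put("local.txt", "/user/example/local.txt")
    p Hdfs.exists?("/user/example/local.txt")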
data/Rakefile ADDED
@@ -0,0 +1,2 @@
+ require "bundler/gem_tasks"
+
data/hdfs_jruby.gemspec ADDED
@@ -0,0 +1,23 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'hdfs_jruby/version'
+
+ Gem::Specification.new do |spec|
+   spec.name = "hdfs_jruby"
+   spec.version = Hdfs::VERSION
+   spec.authors = ["shinji ikeda"]
+   spec.email = ["gm.ikeda@gmail.com"]
+   spec.summary = %q{ jruby hdfs api}
+   spec.description = %q{}
+   spec.homepage = ""
+   spec.license = "MIT"
+
+   spec.files = `git ls-files -z`.split("\x0")
+   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_development_dependency "bundler", "~> 1.6"
+   spec.add_development_dependency "rake"
+ end
data/lib/hdfs_jruby/file.rb ADDED
@@ -0,0 +1,66 @@
+
+ require 'hdfs_jruby'
+
+ module Hdfs
+
+   require "delegate"
+   import java.lang.String
+
+   class File < Delegator
+     def initialize(path, mode = "r")
+       @conf = Hdfs::Configuration.new()
+       @fs = Hdfs::FileSystem.get(@conf)
+
+       @mode = mode
+       if mode == "w"
+         @stream = @fs.create(Hdfs::Path.new(path), false)
+       elsif mode == "r"
+         @stream = @fs.open(Hdfs::Path.new(path))
+         @buf = java.nio.ByteBuffer.allocate(65536)
+       elsif mode == "a"
+         p = Hdfs::Path.new(path)
+         if !@fs.exists(p)
+           @stream = @fs.create(Hdfs::Path.new(path), false)
+         else
+           if ! @fs.isFile(p)
+             raise "path: #{path} is not file"
+           end
+           @stream = @fs.append(Hdfs::Path.new(path))
+         end
+       end
+     end
+
+     def self.open(path, mode = "r")
+       return File.new(path, mode).to_io
+     end
+
+     def syswrite(str)
+       n = @stream.write(str.to_java_bytes)
+       return n.to_i
+     end
+
+     def sysread(length, outbuf = "")
+       buf = Java::byte[length].new
+
+       n = @stream.read(buf)
+       if n < 0
+         return nil
+       end
+       outbuf << java.lang.String.new(buf, 0, n).to_s
+     end
+
+     def close
+       @stream.close
+       @fs.close
+     end
+
+
+     def __getobj__
+       @stream
+     end
+
+     def __setobj__(obj)
+       @stream = obj
+     end
+   end
+ end
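Hdfs::File.open returns the underlying Hadoop stream converted with to_io, so ordinary Ruby IO methods work on the result. A minimal sketch mirroring the tests later in this diff (the file name is an assumption):

    require 'hdfs_jruby/file'

    # Write a file in HDFS, then stream it back line by line.
    out = Hdfs::File.open("example.txt", "w")
    out.print "hello from jruby\n"
    out.close

    Hdfs::File.open("example.txt", "r").each do |line|
      print line
    end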
data/lib/hdfs_jruby/version.rb ADDED
@@ -0,0 +1,3 @@
+ module Hdfs
+   VERSION = "0.0.1"
+ end
data/lib/hdfs_jruby.rb ADDED
@@ -0,0 +1,139 @@
+
+ require "hdfs_jruby/version"
+
+ module Hdfs
+
+   JAR_PATTERN_0_20="hadoop-core-*.jar"
+
+   if RUBY_PLATFORM =~ /java/
+     include Java
+   else
+     warn "only for use with JRuby"
+   end
+
+   if ENV["HADOOP_HOME"]
+     HADOOP_HOME=ENV["HADOOP_HOME"]
+     Dir["#{HADOOP_HOME}/#{JAR_PATTERN_0_20}","#{HADOOP_HOME}/lib/*.jar", "#{HADOOP_HOME}/share/hadoop/common/*.jar", "#{HADOOP_HOME}/share/hadoop/common/lib/*.jar", "#{HADOOP_HOME}/share/hadoop/hdfs/*.jar", "#{HADOOP_HOME}/share/hadoop/hdfs/lib/*.jar"].each do |jar|
+       require jar
+     end
+     $CLASSPATH << "#{HADOOP_HOME}/conf"
+   else
+     raise "HADOOP_HOME is not set!"
+   end
+
+   class FileSystem < org.apache.hadoop.fs.FileSystem
+   end
+
+   class Configuration < org.apache.hadoop.conf.Configuration
+   end
+
+   class Path < org.apache.hadoop.fs.Path
+   end
+
+   class FsPermission < org.apache.hadoop.fs.permission.FsPermission
+   end
+
+   @conf = Hdfs::Configuration.new()
+   @fs = Hdfs::FileSystem.get(@conf)
+
+   def list(path, use_glob=true)
+     p = _path(path)
+     if ! block_given?
+       raise "error"
+     else
+       list = nil
+       if use_glob
+         list = @fs.globStatus(p)
+       else
+         list = @fs.listStatus(p)
+       end
+       list.each do | stat |
+         file_info = {}
+         file_info['path'] = stat.getPath.to_s
+         file_info['length'] = stat.getLen.to_i
+         file_info['modificationTime'] = stat.getModificationTime.to_i
+         file_info['owner'] = stat.getOwner.to_s
+         file_info['group'] = stat.getGroup.to_s
+         file_info['permission'] = stat.getPermission.toShort.to_i
+         file_info['type'] = !stat.isDir ? 'FILE': 'DIRECTORY'
+         yield file_info
+       end
+     end
+   end
+
+   def exists?(path)
+     @fs.exists(_path(path))
+   end
+
+   def move(src, dst)
+     @fs.rename(Path.new(src), Path.new(dst))
+   end
+
+   def delete(path, r=false)
+     @fs.delete(_path(path), r)
+   end
+
+   def file?(path)
+     @fs.isFile(_path(path))
+   end
+
+   def directory?(path)
+     @fs.isDirectory(_path(path))
+   end
+
+   def size(path)
+     @fs.getFileStatus(_path(path)).getLen()
+   end
+
+   def mkdir(path)
+     @fs.mkdirs(_path(path))
+   end
+
+   def put(local, remote)
+     @fs.copyFromLocalFile(Path.new(local), Path.new(remote))
+   end
+
+   def get(remote, local)
+     @fs.copyToLocalFile(Path.new(remote), Path.new(local))
+   end
+
+   def get_home_directory()
+     @fs.getHomeDirectory()
+   end
+
+   def get_working_directory()
+     @fs.getWorkingDirectory()
+   end
+
+   def set_working_directory(path)
+     @fs.setWorkingDirectory(_path(path))
+   end
+
+   def set_permission(path, perm)
+     @fs.setPermission(_path(path), org.apache.hadoop.fs.permission.FsPermission.new(perm))
+   end
+
+   module_function :exists?
+   module_function :move
+   module_function :delete
+   module_function :file?
+   module_function :directory?
+   module_function :size
+   module_function :put
+   module_function :get
+   module_function :get_home_directory
+   module_function :get_working_directory
+   module_function :set_working_directory
+   module_function :set_permission
+   module_function :list
+
+   private
+   def _path(path)
+     if path.nil?
+       raise "path is nil"
+     end
+     Path.new(path)
+   end
+
+   module_function :_path
+ end
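In addition to the listing and copy helpers sketched after the README above, the module exposes existence, type, size, permission, and delete helpers as module functions. A short sketch under the same assumptions (made-up paths, HADOOP_HOME configured):

    require 'hdfs_jruby'

    p Hdfs.exists?("/user/example/local.txt")        # => true or false
    p Hdfs.file?("/user/example/local.txt")
    p Hdfs.size("/user/example/local.txt")           # length in bytes
    Hdfs.set_permission("/user/example/local.txt", 0644)
    Hdfs.delete("/user/example/local.txt")           # pass true as the 2nd arg for a recursive delete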
data/test/test_append.rb ADDED
@@ -0,0 +1,11 @@
+
+ require 'hdfs_jruby/file'
+
+ begin
+   f = Hdfs::File.open("test_a.txt", "a")
+   p f.print "test..\ntest\n"
+
+   f.close()
+ rescue org.apache.hadoop.security.AccessControlException => e
+   STDERR.print "permission denied\n"
+ end
data/test/test_get.rb ADDED
@@ -0,0 +1,7 @@
+
+ require 'hdfs_jruby'
+
+ p Hdfs.get(ARGV[0], ARGV[1])
+
+
+
data/test/test_ls.rb ADDED
@@ -0,0 +1,16 @@
+
+ require 'hdfs_jruby'
+
+ Hdfs.list(ARGV[0], true) do | stat |
+   p stat
+   path = stat['path']
+   length = stat['length']
+   type = stat['type']
+   owner = stat['owner']
+   group = stat['group']
+   mtime = stat['modificationTime']
+
+   print "#{type == "DIRECTORY" ? "d" : "f"} #{path} #{length} #{owner}:#{group} #{mtime}\n"
+ end
+
+
data/test/test_move.rb ADDED
@@ -0,0 +1,7 @@
+
+ require 'hdfs_jruby'
+
+ p Hdfs.move(ARGV[0], ARGV[1])
+
+
+
data/test/test_perm.rb ADDED
@@ -0,0 +1,4 @@
+
+ require 'hdfs_jruby/file'
+
+ p Hdfs.set_permission("test_a.txt", 0444)
data/test/test_put.rb ADDED
@@ -0,0 +1,7 @@
+
+ require 'hdfs_jruby'
+
+ p Hdfs.put(ARGV[0], ARGV[1])
+
+
+
data/test/test_read.rb ADDED
@@ -0,0 +1,11 @@
+
+ require 'hdfs_jruby/file'
+
+ #f = Hdfs::File.new("test.txt", "r").to_io
+ #print f.read
+ #f.close
+
+
+ Hdfs::File.open("test.txt", "r").each do | line |
+   print line
+ end
data/test/test_utils.rb ADDED
@@ -0,0 +1,10 @@
+
+ require 'hdfs_jruby'
+
+
+ p Hdfs.exists?(ARGV[0])
+ p Hdfs.file?(ARGV[0])
+ p Hdfs.directory?(ARGV[0])
+ p Hdfs.size(ARGV[0])
+
+
data/test/test_write.rb ADDED
@@ -0,0 +1,12 @@
+
+ require 'hdfs_jruby/file'
+
+ #f = Hdfs::File.new("test.txt", "r").to_io
+ #print f.read
+ #f.close
+
+ f = Hdfs::File.open("test_w.txt", "w")
+ p f.print "test..\ntest\n"
+ p f.print "test..\ntest\n"
+
+ f.close()
metadata ADDED
@@ -0,0 +1,99 @@
+ --- !ruby/object:Gem::Specification
+ name: hdfs_jruby
+ version: !ruby/object:Gem::Version
+   version: 0.0.1
+ platform: ruby
+ authors:
+ - shinji ikeda
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-07-16 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.6'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.6'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: ''
+ email:
+ - gm.ikeda@gmail.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - hdfs_jruby.gemspec
+ - lib/hdfs_jruby.rb
+ - lib/hdfs_jruby/file.rb
+ - lib/hdfs_jruby/version.rb
+ - test/test_append.rb
+ - test/test_get.rb
+ - test/test_ls.rb
+ - test/test_move.rb
+ - test/test_perm.rb
+ - test/test_put.rb
+ - test/test_read.rb
+ - test/test_utils.rb
+ - test/test_write.rb
+ homepage: ''
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.2.2
+ signing_key:
+ specification_version: 4
+ summary: jruby hdfs api
+ test_files:
+ - test/test_append.rb
+ - test/test_get.rb
+ - test/test_ls.rb
+ - test/test_move.rb
+ - test/test_perm.rb
+ - test/test_put.rb
+ - test/test_read.rb
+ - test/test_utils.rb
+ - test/test_write.rb