s3_utils 0.0.1

checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA1:
3
+ metadata.gz: 85e08887f108e1d5a9f9ebd68b8e5040c3c8050f
4
+ data.tar.gz: 7fdfc58af701270d945d3e4db02cbbc34cf0b2ce
5
+ SHA512:
6
+ metadata.gz: 502e4edf9005ebed5f0e1f07943c49a4dfe9d388e275f8fcdfa3b7d1a824c7219bd594a056ecf1ad06a17e0d516f39cf3c28d19e605dc195be8b9212e394373f
7
+ data.tar.gz: 0c8d94cc30df96972dd128250eb3a98da6e102d5a7398d8b990b2f86f05a12db71669490619ea1ba176e9fd86ee36fbbecde91e362aaaf2a209864d0d18acac2
data/.gitignore ADDED
@@ -0,0 +1,14 @@
1
+ /.bundle/
2
+ /.yardoc
3
+ /Gemfile.lock
4
+ /_yardoc/
5
+ /coverage/
6
+ /doc/
7
+ /pkg/
8
+ /spec/reports/
9
+ /tmp/
10
+ *.bundle
11
+ *.so
12
+ *.o
13
+ *.a
14
+ mkmf.log
data/.rspec ADDED
@@ -0,0 +1,3 @@
1
+ --color
2
+ --format documentation
3
+ --require spec_helper
data/.travis.yml ADDED
@@ -0,0 +1,11 @@
1
+ language: ruby
2
+ rvm:
3
+ - 2.0.0
4
+ - 2.1.1
5
+ branches:
6
+ only:
7
+ - master
8
+ env:
9
+ global:
10
+ - AWS_ACCESS_KEY_ID=xxxx
11
+ - AWS_SECRET_ACCESS_KEY=yyyy
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
+ source 'https://rubygems.org'
2
+
3
+ # Specify your gem's dependencies in s3_utils.gemspec
4
+ gemspec
data/LICENSE ADDED
@@ -0,0 +1,22 @@
1
+ The MIT License (MIT)
2
+
3
+ Copyright (c) 2014 MOGI Hiromu
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining a copy
6
+ of this software and associated documentation files (the "Software"), to deal
7
+ in the Software without restriction, including without limitation the rights
8
+ to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
9
+ copies of the Software, and to permit persons to whom the Software is
10
+ furnished to do so, subject to the following conditions:
11
+
12
+ The above copyright notice and this permission notice shall be included in all
13
+ copies or substantial portions of the Software.
14
+
15
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
16
+ IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
17
+ FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
18
+ AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
19
+ LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
20
+ OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
21
+ SOFTWARE.
22
+
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
1
+ Copyright (c) 2014 mgi166
2
+
3
+ MIT License
4
+
5
+ Permission is hereby granted, free of charge, to any person obtaining
6
+ a copy of this software and associated documentation files (the
7
+ "Software"), to deal in the Software without restriction, including
8
+ without limitation the rights to use, copy, modify, merge, publish,
9
+ distribute, sublicense, and/or sell copies of the Software, and to
10
+ permit persons to whom the Software is furnished to do so, subject to
11
+ the following conditions:
12
+
13
+ The above copyright notice and this permission notice shall be
14
+ included in all copies or substantial portions of the Software.
15
+
16
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
17
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
18
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
19
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
20
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
21
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
22
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,147 @@
1
+ [![Coverage Status](https://coveralls.io/repos/mgi166/s3_utils/badge.png?branch=master)](https://coveralls.io/r/mgi166/s3_utils?branch=master)
2
+ [![Code Climate](https://codeclimate.com/github/mgi166/s3_utils/badges/gpa.svg)](https://codeclimate.com/github/mgi166/s3_utils)
3
+
4
+ # S3Utils
5
+ A simple S3 utility module for downloading, uploading, copying, and deleting files on S3.
6
+ It is a thin wrapper around `aws-sdk`.
7
+
8
+ ## Installation
9
+
10
+ Add this line to your application's Gemfile:
11
+
12
+ ```ruby
13
+ gem 's3_utils'
14
+ ```
15
+
16
+ And then execute:
17
+
18
+ $ bundle
19
+
20
+ Or install it yourself as:
21
+
22
+ $ gem install s3_utils
23
+
24
+ ## Dependency
25
+
26
+ * [aws-sdk](https://github.com/aws/aws-sdk-ruby)
27
+
28
+ ## Usage
29
+
30
+ ```ruby
31
+ require 's3_utils'
32
+ ```
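+
+ S3Utils does not manage credentials itself; they are resolved by `aws-sdk`. A minimal sketch of one option, reading the same environment variables used in `.travis.yml` above (any of `aws-sdk`'s usual credential sources also work):
+
+ ```ruby
+ require 's3_utils'
+
+ # aws-sdk (v1) global configuration
+ AWS.config(
+   access_key_id:     ENV['AWS_ACCESS_KEY_ID'],
+   secret_access_key: ENV['AWS_SECRET_ACCESS_KEY']
+ )
+ ```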
33
+
34
+ This module provides the following methods.
35
+
36
+ * `#upload_to_s3`
37
+ * uploads a local file to S3
38
+ * `#download_from_s3`
39
+ * downloads a file from S3 to a local path
40
+ * `#copy_on_s3`
41
+ * copies a file on S3 to another S3 path, like `FileUtils.cp`
42
+ * `#delete_on_s3`
43
+ * deletes a file on S3, like `FileUtils.rm`
44
+ * `#create_on_s3`
45
+ * creates a file on S3, like `File.open`
46
+ * `#read_on_s3`
47
+ * reads a file on S3, like `File.read`
48
+
49
+ ### Using module inclusion
50
+
51
+ ```ruby
52
+ require 's3_utils'
53
+
54
+ include S3Utils
55
+
56
+ upload_to_s3('path/to/local_file.txt', 's3.bucket.name/path/to/upload_file.txt')
57
+ #=> upload to s3!
58
+
59
+ download_from_s3('s3.bucket.name/path/to/upload_file.txt', 'path/to/local_file.txt')
60
+ #=> download from s3!
61
+ ```
62
+
63
+ ### Using module function
64
+ ```ruby
65
+ require 's3_utils'
66
+
67
+ S3Utils.upload_to_s3("path/to/local_file.txt", "s3.bucket.name/path/to/dir/")
68
+ #=> upload to "s3://s3.bucket.name/path/to/dir/local_file.txt"
69
+
70
+ S3Utils.create_on_s3("s3.bucket.name/path/to/test.txt") do |f|
71
+ f.puts "This is the sample text"
72
+ end
73
+ #=> creates the file "s3.bucket.name/path/to/test.txt" with the contents "This is the sample text"
74
+ ```
75
+
76
+ ## Methods
77
+ ### upload_to_s3
78
+ Uploads a local file to S3.
79
+ When the destination URL ends with "/", the file is uploaded under that directory, keeping its basename.
80
+
81
+ ```ruby
82
+ S3Utils.upload_to_s3('path/to/local_file.txt', 's3.bucket.name/path/to/upload_file.txt')
83
+ #=> Upload from "path/to/local_file.txt" to "s3.bucket.name/path/to/upload_file.txt"
84
+
85
+ S3Utils.upload_to_s3("path/to/local_file.txt", "s3.bucket.name/path/to/dir/")
86
+ #=> Upload from "path/to/local_file.txt" to "s3://s3.bucket.name/path/to/dir/local_file.txt"
87
+ ```
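+
+ Judging from the implementation and its specs, the source may also be a local directory (uploaded recursively) or a glob pattern; note that the matched local paths are reproduced under the destination key. A sketch:
+
+ ```ruby
+ # uploads every file under the local directory; keys become "backup/<full local path>"
+ S3Utils.upload_to_s3('path/to/dir', 's3.bucket.name/backup')
+
+ # uploads only the files that match the glob pattern
+ S3Utils.upload_to_s3('path/to/dir/*.log', 's3.bucket.name/backup')
+ ```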
88
+
89
+ ### download_from_s3
90
+ Downloads a file from S3 to a local path.
91
+ When the local path is a directory, the file is downloaded into that directory.
92
+
93
+ ```ruby
94
+ S3Utils.download_from_s3('s3.bucket.name/path/to/upload_file.txt', 'path/to/local_file.txt')
95
+ #=> Download from "s3.bucket.name/path/to/upload_file.txt" to "path/to/local_file.txt"
96
+
97
+ # path/to/dir is directory
98
+ S3Utils.download_from_s3('s3.bucket.name/path/to/upload_file.txt', 'path/to/dir')
99
+ #=> Download from "s3.bucket.name/path/to/upload_file.txt" to "path/to/dir/upload_file.txt"
100
+ ```
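+
+ The implementation also appears to accept an S3 key prefix ("directory") as the source, downloading every file beneath it into the local destination directory:
+
+ ```ruby
+ # path/to/local_dir must already exist
+ S3Utils.download_from_s3('s3.bucket.name/path/to/dir', 'path/to/local_dir')
+ #=> downloads each file under the prefix into "path/to/local_dir/dir/"
+ ```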
101
+
102
+ ### copy_on_s3
103
+ Copies a file on S3 to another S3 path.
104
+
105
+ ```ruby
106
+ S3Utils.copy_on_s3('s3.bucket.com/path/to/source.txt', 's3.bucket.com/path/to/dest.txt')
107
+ #=> Copy from "s3.bucket.com/path/to/source.txt" to "s3.bucket.com/path/to/dest.txt"
108
+ ```
109
+
110
+ ### delete_on_s3
111
+ Deletes a file on S3.
112
+
113
+ ```ruby
114
+ S3Utils.delete_on_s3('s3.bucket.com/path/to/source.txt')
115
+ #=> Delete "s3.bucket.com/path/to/source.txt"
116
+ ```
117
+
118
+ ### create_on_s3
119
+ Creates a file on S3.
120
+ If a block is given, it is passed a File object, and the file's contents are uploaded to S3 after the block returns.
121
+
122
+ ```ruby
123
+ S3Utils.create_on_s3('s3.bucket.com/path/to/file.txt')
124
+ #=> Create "s3.bucket.com/path/to/source.txt" but it is empty file
125
+
126
+ S3Utils.create_on_s3('s3.bucket.com/path/to/file.txt') do |f|
127
+ f.puts "the file in s3"
128
+ end
129
+ #=> Create "s3.bucket.com/path/to/source.txt" and it has the contents "the file in s3"
130
+ ```
131
+
132
+ ### read_on_s3
133
+ Reads a file on S3 and returns its contents.
134
+
135
+ ```ruby
136
+ # s3.bucket.com/path/to/file.txt has contents "abcdefg"
137
+ S3Utils.read_on_s3('s3.bucket.com/path/to/file.txt')
138
+ #=> abcdefg
139
+ ```
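+
+ Note that a trailing newline, if any, is stripped (the implementation calls `String#chomp` on the object body):
+
+ ```ruby
+ S3Utils.create_on_s3('s3.bucket.com/path/to/file.txt') { |f| f.puts 'abcdefg' }
+ S3Utils.read_on_s3('s3.bucket.com/path/to/file.txt')
+ #=> "abcdefg"   (no trailing "\n")
+ ```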
140
+
141
+ ## Contributing
142
+
143
+ 1. Fork it ( https://github.com/[my-github-username]/s3_utils/fork )
144
+ 2. Create your feature branch (`git checkout -b my-new-feature`)
145
+ 3. Commit your changes (`git commit -am 'Add some feature'`)
146
+ 4. Push to the branch (`git push origin my-new-feature`)
147
+ 5. Create a new Pull Request
data/Rakefile ADDED
@@ -0,0 +1,6 @@
1
+ require "bundler/gem_tasks"
2
+ require 'rspec/core/rake_task'
3
+
4
+ RSpec::Core::RakeTask.new(:spec)
5
+
6
+ task :default => :spec
data/lib/s3_utils.rb ADDED
@@ -0,0 +1,8 @@
1
+ require 'aws-sdk'
2
+ require 's3_utils/path'
3
+ require 's3_utils/generator'
4
+ require 's3_utils/method'
5
+
6
+ module S3Utils
7
+ include Method
8
+ end
data/lib/s3_utils/generator.rb ADDED
@@ -0,0 +1,32 @@
1
+ module S3Utils
2
+ class Generator
3
+ def initialize(path)
4
+ @path = Path.new(path)
5
+ end
6
+
7
+ def bucket
8
+ s3.buckets[@path.bucket_name]
9
+ end
10
+
11
+ def s3_object(path=nil)
12
+ base_path = @path.path_without_bucket
13
+ dest_path = path ? File.join(base_path, path) : base_path
14
+ bucket.objects[dest_path]
15
+ end
16
+
17
+ def s3_object_collection(path=nil)
18
+ base_path = @path.path_without_bucket
19
+ bucket.objects.with_prefix(base_path)
20
+ end
21
+
22
+ def tree
23
+ bucket.as_tree(prefix: @path.path_without_bucket)
24
+ end
25
+
26
+ private
27
+
28
+ def s3
29
+ ::AWS::S3.new
30
+ end
31
+ end
32
+ end
data/lib/s3_utils/method.rb ADDED
@@ -0,0 +1,86 @@
1
+ require 'tempfile'
2
+
3
+ module S3Utils
4
+ module Method
5
+ def self.included(klass)
6
+ klass.extend(self)
7
+ end
8
+
9
+ def upload_to_s3(src, dest)
10
+ g = Generator.new(dest)
11
+
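+ # Three source types are handled: a single file, a directory (uploaded recursively), and a glob pattern.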
12
+ case
13
+ when File.file?(src)
14
+ filename = File.basename(src.to_s) if dest.to_s.end_with?('/')
15
+ g.s3_object(filename).write(file: src)
16
+ when File.directory?(src)
17
+ Dir[File.join(src, '**', '*')].each do |path|
18
+ next if File.directory?(path)
19
+ g.s3_object(path).write(file: path)
20
+ end
21
+ else
22
+ Dir[src].each do |path|
23
+ g.s3_object(path).write(file: path)
24
+ end
25
+ end
26
+ end
27
+
28
+ def download_from_s3(src, dest)
29
+ g = Generator.new(src)
30
+
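+ # A single existing object is streamed straight to dest (or dest/<basename> when dest is a directory);
+ # otherwise src is treated as a key prefix and every leaf object under it is downloaded.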
31
+ if g.s3_object.exists?
32
+ download_path = File.directory?(dest) ? File.join(dest, File.basename(src)) : dest
33
+ File.open(download_path, 'w') do |f|
34
+ g.s3_object.read { |chunk| f.write(chunk) }
35
+ end
36
+ else
37
+ file_objects = g.tree.children.select(&:leaf?).map(&:object)
38
+
39
+ file_objects.each do |obj|
40
+ next unless obj.exists?
41
+
42
+ base_dir = File.basename(File.dirname(obj.key))
43
+ obj_name = File.basename(obj.key)
44
+
45
+ unless Dir.exist?(File.join(dest, base_dir))
46
+ Dir.mkdir(File.join(dest, base_dir))
47
+ end
48
+
49
+ File.open(File.join(dest, base_dir, obj_name), 'w') do |f|
50
+ obj.read { |chunk| f.write(chunk) }
51
+ end
52
+ end
53
+ end
54
+ end
55
+
56
+ def copy_on_s3(src, dest)
57
+ gs = Generator.new(src)
58
+ gd = Generator.new(dest)
59
+
60
+ gs.s3_object.copy_to(gd.s3_object)
61
+ end
62
+
63
+ def delete_on_s3(path)
64
+ g = Generator.new(path)
65
+ g.s3_object.delete
66
+ end
67
+
68
+ def create_on_s3(path)
69
+ @tmp = Tempfile.new('')
70
+ g = Generator.new(path)
71
+
72
+ File.open(@tmp, "w") do |f|
73
+ yield f if block_given?
74
+ end
75
+
76
+ g.s3_object.write(file: @tmp.path)
77
+ ensure
78
+ @tmp.close! if @tmp
79
+ end
80
+
81
+ def read_on_s3(path)
82
+ g = Generator.new(path)
83
+ g.s3_object.read.chomp
84
+ end
85
+ end
86
+ end
data/lib/s3_utils/path.rb ADDED
@@ -0,0 +1,24 @@
1
+ require 'pathname'
2
+
3
+ module S3Utils
4
+ class Path
5
+ def initialize(path)
6
+ @path = Pathname.new(path)
7
+ end
8
+
9
+ def bucket_name
10
+ return '' if @path.to_s.empty? || @path.to_s == '.'
11
+
12
+ element[0].to_s.empty? ? element[1] : element[0]
13
+ end
14
+
15
+ def path_without_bucket
16
+ ele = element.drop_while(&:empty?).drop(1)
17
+ File.join(ele)
18
+ end
19
+
20
+ def element
21
+ @element ||= @path.cleanpath.to_s.split(Pathname::SEPARATOR_PAT)
22
+ end
23
+ end
24
+ end
data/lib/s3_utils/version.rb ADDED
@@ -0,0 +1,3 @@
1
+ module S3Utils
2
+ VERSION = "0.0.1"
3
+ end
data/s3_utils.gemspec ADDED
@@ -0,0 +1,28 @@
1
+ # coding: utf-8
2
+ lib = File.expand_path('../lib', __FILE__)
3
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
4
+ require 's3_utils/version'
5
+
6
+ Gem::Specification.new do |spec|
7
+ spec.name = "s3_utils"
8
+ spec.version = S3Utils::VERSION
9
+ spec.authors = ["mgi166"]
10
+ spec.email = ["skskoari@gmail.com"]
11
+ spec.summary = %q{Simple s3 modules in order to download, upload, copy and delete the file on s3.}
12
+ spec.description = %q{Simple s3 modules in order to download, upload, copy and delete the file on s3.}
13
+ spec.homepage = "https://github.com/mgi166/s3_utils"
14
+ spec.license = "MIT"
15
+
16
+ spec.files = `git ls-files -z`.split("\x0")
17
+ spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
18
+ spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
19
+ spec.require_paths = ["lib"]
20
+
21
+ spec.add_dependency "aws-sdk"
22
+
23
+ spec.add_development_dependency "bundler", "~> 1.7"
24
+ spec.add_development_dependency "rake", "~> 10.0"
25
+ spec.add_development_dependency "rspec"
26
+ spec.add_development_dependency "fakes3"
27
+ spec.add_development_dependency "coveralls"
28
+ end
data/spec/lib/s3_utils/generator_spec.rb ADDED
@@ -0,0 +1,102 @@
1
+ require 'fakes3/server'
2
+
3
+ describe S3Utils::Generator do
4
+ before(:all) do
5
+ @pid = fork do
6
+ FakeS3::Server.new('0.0.0.0', 12345, FakeS3::FileStore.new('/var/tmp/fakes3'), 'localhost').serve
7
+ end
8
+ AWS.config(s3_endpoint: 'localhost', s3_force_path_style: true, s3_port: 12345, use_ssl: false)
9
+ end
10
+
11
+ after(:all) do
12
+ Process.kill(:TERM, @pid) rescue nil
13
+ end
14
+
15
+ describe '#bucket' do
16
+ let(:generator) { S3Utils::Generator.new('bucket/fuga/hoge') }
17
+
18
+ it 'returns AWS::S3::Bucket instance' do
19
+ expect(generator.bucket).to be_instance_of AWS::S3::Bucket
20
+ end
21
+
22
+ it 'has the bucket name' do
23
+ expect(generator.bucket.name).to eq('bucket')
24
+ end
25
+ end
26
+
27
+ describe '#s3_object' do
28
+ let(:generator) { S3Utils::Generator.new('bucket/fuga/hoge') }
29
+
30
+ context 'when no path argument is given' do
31
+ it 'returns AWS::S3::S3Object' do
32
+ expect(generator.s3_object).to be_instance_of AWS::S3::S3Object
33
+ end
34
+
35
+ it 'has the path' do
36
+ expect(generator.s3_object.key).to eq('fuga/hoge')
37
+ end
38
+
39
+ it 'has the bucket' do
40
+ expect(generator.s3_object.bucket).to eq(::AWS::S3.new.buckets['bucket'])
41
+ end
42
+ end
43
+
44
+ context 'when a path argument is given' do
45
+ it 'returns AWS::S3::S3Object' do
46
+ expect(generator.s3_object('bazz/spec.txt')).to be_instance_of AWS::S3::S3Object
47
+ end
48
+
49
+ it 'has the path' do
50
+ expect(generator.s3_object('bazz/spec.txt').key).to eq('fuga/hoge/bazz/spec.txt')
51
+ end
52
+
53
+ it 'has the bucket' do
54
+ expect(generator.s3_object.bucket).to eq(::AWS::S3.new.buckets['bucket'])
55
+ end
56
+ end
57
+ end
58
+
59
+ describe '#s3_object_collection' do
60
+ let(:generator) { S3Utils::Generator.new('s3.bucket.com/fuga') }
61
+
62
+ context 'when the objects exist on s3' do
63
+ before do
64
+ create_on_s3('s3.bucket.com/fuga/hoge.txt') {|f| f.puts '' }
65
+ create_on_s3('s3.bucket.com/fuga/fuga.txt') {|f| f.puts '' }
66
+ end
67
+
68
+ after do
69
+ delete_on_s3('s3.bucket.com/fuga/hoge.txt')
70
+ delete_on_s3('s3.bucket.com/fuga/fuga.txt')
71
+ end
72
+
73
+ it 'returns the instance of AWS::S3::ObjectCollection' do
74
+ expect(generator.s3_object_collection).to be_instance_of AWS::S3::ObjectCollection
75
+ end
76
+
77
+ it 'returns the s3 objects under the directory' do
78
+ expect(
79
+ generator.s3_object_collection.to_a
80
+ ).to eq([S3Utils::Generator.new('s3.bucket.com/fuga/fuga.txt').s3_object, S3Utils::Generator.new('s3.bucket.com/fuga/hoge.txt').s3_object])
81
+ end
82
+ end
83
+ end
84
+
85
+ describe '#tree' do
86
+ let(:generator) { S3Utils::Generator.new('s3.bucket.com/fuga') }
87
+
88
+ before do
89
+ create_on_s3('s3.bucket.com/fuga/hoge.txt') {|f| f.puts '' }
90
+ end
91
+
92
+ it 'returns the instance of AWS::S3::Tree' do
93
+ expect(generator.tree).to be_instance_of AWS::S3::Tree
94
+ end
95
+
96
+ it 'returns the tree that has files in the argument directory' do
97
+ expect(
98
+ generator.tree.children.map(&:key)
99
+ ).to eq(['fuga/hoge.txt'])
100
+ end
101
+ end
102
+ end
data/spec/lib/s3_utils/path_spec.rb ADDED
@@ -0,0 +1,109 @@
1
+ require 'tmpdir'
2
+
3
+ describe S3Utils::Path do
4
+ def path(p)
5
+ described_class.new(p)
6
+ end
7
+
8
+ describe '.initialize' do
9
+ context 'when the argument is the Object#to_str' do
10
+ it 'returns S3Utils::Path instance' do
11
+ expect(described_class.new('dev.spec.bucket.com')).to be_instance_of S3Utils::Path
12
+ end
13
+ end
14
+
15
+ context 'when the argument is nil (does not respond to #to_str)' do
16
+ it 'raises TypeError' do
17
+ expect do
18
+ described_class.new(nil)
19
+ end.to raise_error TypeError
20
+ end
21
+ end
22
+ end
23
+
24
+ describe '#bucket_name' do
25
+ context 'when the path looks like a file path' do
26
+ it 'returns the first of dirname' do
27
+ expect(path('bucket/fuga/hoge').bucket_name).to eq('bucket')
28
+ end
29
+ end
30
+
31
+ context 'when the path includes "//"' do
32
+ it 'returns the first of dirname' do
33
+ expect(path('bucket//fuga/hoge').bucket_name).to eq('bucket')
34
+ end
35
+ end
36
+
37
+ context 'when the path includes ".."' do
38
+ it 'returns the first of dirname with cleanpath' do
39
+ expect(path('bucket/../fuga/hoge').bucket_name).to eq('fuga')
40
+ end
41
+ end
42
+
43
+ context 'when the path includes "."' do
44
+ it 'returns the first of dirname with cleanpath' do
45
+ expect(path('./bucket/./fuga/hoge').bucket_name).to eq('bucket')
46
+ end
47
+ end
48
+
49
+ context 'when the path starts with "/"' do
50
+ it 'returns the first of dirname removed the "/"' do
51
+ expect(path('/bucket/fuga/hoge').bucket_name).to eq('bucket')
52
+ end
53
+ end
54
+
55
+ context 'when the path is empty string' do
56
+ it 'returns the empty' do
57
+ expect(path('').bucket_name).to be_empty
58
+ end
59
+ end
60
+
61
+ context 'when the path is "."' do
62
+ it 'returns the empty' do
63
+ expect(path('.').bucket_name).to be_empty
64
+ end
65
+ end
66
+ end
67
+
68
+ describe '#path_without_bucket' do
69
+ it 'returns the dirname without bucket' do
70
+ expect(path('bucket/fuga/hoge').path_without_bucket).to eq('fuga/hoge')
71
+ end
72
+
73
+ context 'when the path includes "//"' do
74
+ it 'returns the first of dirname' do
75
+ expect(path('bucket//fuga/hoge').path_without_bucket).to eq('fuga/hoge')
76
+ end
77
+ end
78
+
79
+ context 'when the path includes ".."' do
80
+ it 'returns the first of dirname with cleanpath' do
81
+ expect(path('bucket/../fuga/hoge').path_without_bucket).to eq('hoge')
82
+ end
83
+ end
84
+
85
+ context 'when the path includes "."' do
86
+ it 'returns the first of dirname with cleanpath' do
87
+ expect(path('./bucket/./fuga/hoge').path_without_bucket).to eq('fuga/hoge')
88
+ end
89
+ end
90
+
91
+ context 'when the path starts with "/"' do
92
+ it 'returns the first of dirname removed the "/"' do
93
+ expect(path('/bucket/fuga/hoge').path_without_bucket).to eq('fuga/hoge')
94
+ end
95
+ end
96
+
97
+ context 'when the path is empty string' do
98
+ it 'returns the empty' do
99
+ expect(path('').path_without_bucket).to be_empty
100
+ end
101
+ end
102
+
103
+ context 'when the path is "."' do
104
+ it 'returns the empty' do
105
+ expect(path('.').path_without_bucket).to be_empty
106
+ end
107
+ end
108
+ end
109
+ end
data/spec/lib/s3_utils_spec.rb ADDED
@@ -0,0 +1,322 @@
1
+ require 'tmpdir'
2
+ require 'fakes3/server'
3
+
4
+ describe S3Utils do
5
+ before(:all) do
6
+ @pid = fork do
7
+ FakeS3::Server.new('0.0.0.0', 12345, FakeS3::FileStore.new('/var/tmp/fakes3'), 'localhost').serve
8
+ end
9
+ AWS.config(s3_endpoint: 'localhost', s3_force_path_style: true, s3_port: 12345, use_ssl: false)
10
+ end
11
+
12
+ after(:all) do
13
+ Process.kill(:TERM, @pid) rescue nil
14
+ end
15
+
16
+ def create_tempfile(string)
17
+ src = Tempfile.new('src')
18
+ src.write(string)
19
+ src.close
20
+ src
21
+ end
22
+
23
+ describe '.upload_to_s3' do
24
+ context 'when source is a file (dest path does not end with "/")' do
25
+ before do
26
+ delete_on_s3('s3.bucket.com/spec/path')
27
+ end
28
+
29
+ it 'creates the uploaded file on s3' do
30
+ src = create_tempfile("aaa")
31
+
32
+ expect do
33
+ S3Utils.upload_to_s3(src.path, 's3.bucket.com/spec/path')
34
+ end.to change {
35
+ s3_object('s3.bucket.com/spec/path').exists?
36
+ }.from(false).to(true)
37
+ end
38
+
39
+ it 'uploads the file to dest path' do
40
+ src = create_tempfile("hoge\nfuga")
41
+
42
+ S3Utils.upload_to_s3(src.path, 's3.bucket.com/spec/path')
43
+
44
+ expect(
45
+ read_on_s3('s3.bucket.com/spec/path')
46
+ ).to eq("hoge\nfuga")
47
+ end
48
+ end
49
+
50
+ context 'when source is a file (dest path ends with "/")' do
51
+ before do
52
+ delete_on_s3('s3.bucket.com/spec/path')
53
+ end
54
+
55
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
56
+
57
+ it 'uploads the file to under the dest path' do
58
+ @dir = Dir.mktmpdir
59
+ File.open(File.join(@dir, '1.txt'), 'w') {|f| f.puts "hogehoge" }
60
+
61
+ S3Utils.upload_to_s3(File.join(@dir, '1.txt'), 's3.bucket.com/spec/path/')
62
+
63
+ expect(
64
+ read_on_s3('s3.bucket.com/spec/path/1.txt')
65
+ ).to eq('hogehoge')
66
+ end
67
+ end
68
+
69
+ context 'when source is directory' do
70
+ before do
71
+ delete_on_s3('s3.bucket.com/spec/path')
72
+
73
+ @dir = Dir.mktmpdir
74
+ File.open(File.join(@dir, '1.txt'), 'w') {|f| f.puts "The one" }
75
+ File.open(File.join(@dir, '2.txt'), 'w') {|f| f.puts "The two" }
76
+ end
77
+
78
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
79
+
80
+ it 'uploads the files with their directory to the dest path' do
81
+ S3Utils.upload_to_s3(@dir, 's3.bucket.com/spec/path')
82
+
83
+ expect(
84
+ read_on_s3("s3.bucket.com/spec/path/#{@dir}/1.txt")
85
+ ).to eq('The one')
86
+
87
+ expect(
88
+ read_on_s3("s3.bucket.com/spec/path/#{@dir}/2.txt")
89
+ ).to eq('The two')
90
+ end
91
+ end
92
+
93
+ context 'when source includes "*"' do
94
+ before do
95
+ delete_on_s3('s3.bucket.com/spec/path')
96
+
97
+ @dir = Dir.mktmpdir
98
+ File.open(File.join(@dir, 'abc1.txt'), 'w') {|f| f.puts "The abc1" }
99
+ File.open(File.join(@dir, 'def1.txt'), 'w') {|f| f.puts "The def" }
100
+ File.open(File.join(@dir, 'abc2.txt'), 'w') {|f| f.puts "The abc2" }
101
+ end
102
+
103
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
104
+
105
+ it "uploads the fnmatch file and doesn't upload not fmatch file" do
106
+ S3Utils.upload_to_s3("{#@dir}/abc*.txt", 's3.bucket.com/spec/path')
107
+
108
+ expect(
109
+ read_on_s3("s3.bucket.com/spec/path/#{@dir}/abc1.txt")
110
+ ).to eq('The abc1')
111
+
112
+ expect(
113
+ read_on_s3("s3.bucket.com/spec/path/#{@dir}/abc2.txt")
114
+ ).to eq('The abc2')
115
+
116
+ expect(
117
+ s3_object("s3.bucket.com/spec/path/#{@dir}/def1").exists?
118
+ ).to be false
119
+ end
120
+ end
121
+ end
122
+
123
+ describe '.download_from_s3' do
124
+ context 'when dest path is directory' do
125
+ before do
126
+ delete_on_s3('s3.bucket.com/spec/path')
127
+ create_on_s3('s3.bucket.com/spec/path/hoge.txt') {|f| f.write "hoge"}
128
+ @dir = Dir.mktmpdir
129
+ end
130
+
131
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
132
+
133
+ it 'downloads the file in the directory' do
134
+ S3Utils.download_from_s3('s3.bucket.com/spec/path/hoge.txt', @dir)
135
+
136
+ expect(File.read("#{@dir}/hoge.txt")).to eq('hoge')
137
+ end
138
+ end
139
+
140
+ context 'when dest path is file' do
141
+ before do
142
+ delete_on_s3('s3.bucket.com/spec/path')
143
+ create_on_s3('s3.bucket.com/spec/path/fuga.txt') {|f| f.write "fuga"}
144
+ @dir = Dir.mktmpdir
145
+ end
146
+
147
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
148
+
149
+ it 'downloads the file as local file' do
150
+ dest_file = File.join(@dir, 'fuga.txt')
151
+ S3Utils.download_from_s3('s3.bucket.com/spec/path/fuga.txt', dest_file)
152
+
153
+ expect(File.read(dest_file)).to eq('fuga')
154
+ end
155
+ end
156
+
157
+ describe 'when the src is directory' do
158
+ context 'when the dest directory already exists' do
159
+ before do
160
+ delete_on_s3('s3.bucket.com/spec/path')
161
+ create_on_s3('s3.bucket.com/spec/path/fuga.txt') {|f| f.write "fuga"}
162
+ create_on_s3('s3.bucket.com/spec/path/bazz.txt') {|f| f.write "bazz"}
163
+ @dir = Dir.mktmpdir
164
+ end
165
+
166
+ after { FileUtils.remove_entry_secure(@dir) if Dir.exist?(@dir) }
167
+
168
+ it 'downloads the directory in dest directory' do
169
+ S3Utils.download_from_s3('s3.bucket.com/spec/path', @dir)
170
+ expect(Dir["#{@dir}/path/**/*"]).to eq([ "#{@dir}/path/bazz.txt", "#{@dir}/path/fuga.txt"])
171
+ end
172
+ end
173
+ end
174
+ end
175
+
176
+ describe '.copy_on_s3' do
177
+ before do
178
+ delete_on_s3('s3.bucket.com/spec/path')
179
+ create_on_s3('s3.bucket.com/spec/path/hoge.txt') {|f| f.write "hoge"}
180
+ end
181
+
182
+ it 'copies the src object to dest' do
183
+ S3Utils.copy_on_s3('s3.bucket.com/spec/path/hoge.txt', 's3.bucket.com/spec/path/fuga.txt')
184
+
185
+ expect(
186
+ read_on_s3("s3.bucket.com/spec/path/fuga.txt")
187
+ ).to eq('hoge')
188
+ end
189
+ end
190
+
191
+ describe '.delete_on_s3' do
192
+ context 'when the argument is file on s3' do
193
+ before do
194
+ create_on_s3('s3.bucket.com/spec/path/hoge.txt') {|f| f.write "hoge"}
195
+ end
196
+
197
+ it 'returns nil' do
198
+ expect(
199
+ S3Utils.delete_on_s3('s3.bucket.com/spec/path/dir/hoge.txt')
200
+ ).to be_nil
201
+ end
202
+
203
+ it 'deletes the argument file on s3' do
204
+ expect do
205
+ S3Utils.delete_on_s3('s3.bucket.com/spec/path/hoge.txt')
206
+ end.to change {
207
+ s3_object('s3.bucket.com/spec/path/hoge.txt').exists?
208
+ }.from(true).to(false)
209
+ end
210
+ end
211
+
212
+ context 'when the argument is directory on s3' do
213
+ before do
214
+ create_on_s3('s3.bucket.com/spec/path/dir/hoge.txt') {|f| f.write "hoge"}
215
+ end
216
+
217
+ it 'deletes the argument directory on s3' do
218
+ expect do
219
+ S3Utils.delete_on_s3('s3.bucket.com/spec/path/dir')
220
+ end.to change {
221
+ s3_object('s3.bucket.com/spec/path/dir/hoge.txt').exists?
222
+ }.from(true).to(false)
223
+ end
224
+ end
225
+
226
+ context "when the argument doesn't exist on s3" do
227
+ before do
228
+ delete_on_s3('s3.bucket.com/spec/path/dir/hoge.txt')
229
+ end
230
+
231
+ it 'returns nil' do
232
+ expect(
233
+ S3Utils.delete_on_s3('s3.bucket.com/spec/path/dir/hoge.txt')
234
+ ).to be_nil
235
+ end
236
+
237
+ it 'keeps the file nonexistent' do
238
+ expect do
239
+ S3Utils.delete_on_s3('s3.bucket.com/spec/path/dir/hoge.txt')
240
+ end.to_not change {
241
+ s3_object('s3.bucket.com/spec/path/dir/hoge.txt').exists?
242
+ }.from(false)
243
+ end
244
+ end
245
+ end
246
+
247
+ describe '.create_on_s3' do
248
+ context "when the file doesn't exist on s3" do
249
+ before do
250
+ delete_on_s3('s3.bucket.com/spec/path')
251
+ end
252
+
253
+ it 'creates the file on s3' do
254
+ S3Utils.create_on_s3('s3.bucket.com/spec/path/test.txt') do |f|
255
+ f.puts "aaaa"
256
+ f.puts "bbbb"
257
+ f.puts "cccc"
258
+ end
259
+
260
+ expect(
261
+ read_on_s3('s3.bucket.com/spec/path/test.txt')
262
+ ).to eq("aaaa\nbbbb\ncccc")
263
+ end
264
+ end
265
+
266
+ context 'when the file already exists on s3' do
267
+ before do
268
+ create_on_s3('s3.bucket.com/spec/path/test.txt') do |f|
269
+ f.puts "already exist"
270
+ end
271
+ end
272
+
273
+ it 'overwrites the contents' do
274
+ S3Utils.create_on_s3('s3.bucket.com/spec/path/test.txt') do |f|
275
+ f.puts "overwrite the contents"
276
+ end
277
+
278
+ expect(
279
+ read_on_s3('s3.bucket.com/spec/path/test.txt')
280
+ ).to eq("overwrite the contents")
281
+ end
282
+ end
283
+
284
+ context 'when no block given' do
285
+ before do
286
+ delete_on_s3('s3.bucket.com/spec/path/test.txt')
287
+ end
288
+
289
+ it 'creates empty file on s3' do
290
+ S3Utils.create_on_s3('s3.bucket.com/spec/path/test.txt')
291
+
292
+ expect(read_on_s3('s3.bucket.com/spec/path/test.txt')).to be_empty
293
+ end
294
+ end
295
+ end
296
+
297
+ describe '.read_on_s3' do
298
+ context 'when the file exists' do
299
+ before do
300
+ create_on_s3('s3.bucket.com/spec/path/test.txt') {|f| f.puts "test" }
301
+ end
302
+
303
+ it 'returns the String that the file contains' do
304
+ expect(
305
+ S3Utils.read_on_s3('s3.bucket.com/spec/path/test.txt')
306
+ ).to eq('test')
307
+ end
308
+ end
309
+
310
+ context "when the file doesn't exists" do
311
+ before do
312
+ delete_on_s3('s3.bucket.com/spec/path/test.txt')
313
+ end
314
+
315
+ it 'raises error' do
316
+ expect do
317
+ S3Utils.read_on_s3('s3.bucket.com/spec/path/test.txt')
318
+ end.to raise_error AWS::S3::Errors::NoSuchKey
319
+ end
320
+ end
321
+ end
322
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,14 @@
1
+ require 'coveralls'
2
+ Coveralls.wear!
3
+
4
+ require 's3_utils'
5
+ require 'support/s3_helper'
6
+
7
+ RSpec.configure do |config|
8
+ config.include S3Helper
9
+
10
+ config.filter_run :focus
11
+ config.run_all_when_everything_filtered = true
12
+ config.warnings = true
13
+ config.order = :random
14
+ end
data/spec/support/s3_helper.rb ADDED
@@ -0,0 +1,42 @@
1
+ module S3Helper
2
+ def read_on_s3(path)
3
+ s3_object(path).read.chomp
4
+ end
5
+
6
+ def delete_on_s3(path)
7
+ s3_object(path).delete
8
+ end
9
+
10
+ def create_on_s3(path, &block)
11
+ @tmp = Tempfile.new('')
12
+
13
+ File.open(@tmp, "w") do |f|
14
+ yield f
15
+ end
16
+
17
+ s3_object(path).write(file: @tmp.path)
18
+ ensure
19
+ @tmp.close
20
+ end
21
+
22
+ def s3_object(path)
23
+ bucket = bucket(path)
24
+ s3_path = s3_path(path)
25
+
26
+ s3.buckets[bucket].objects[s3_path]
27
+ end
28
+
29
+ private
30
+
31
+ def bucket(path)
32
+ path.split('/', -1).first
33
+ end
34
+
35
+ def s3_path(path)
36
+ path.split('/', -1).drop(1).join('/')
37
+ end
38
+
39
+ def s3
40
+ @s3 ||= ::AWS::S3.new
41
+ end
42
+ end
metadata ADDED
@@ -0,0 +1,154 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: s3_utils
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.0.1
5
+ platform: ruby
6
+ authors:
7
+ - mgi166
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2014-11-13 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: aws-sdk
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ">="
18
+ - !ruby/object:Gem::Version
19
+ version: '0'
20
+ type: :runtime
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: '0'
27
+ - !ruby/object:Gem::Dependency
28
+ name: bundler
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '1.7'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '1.7'
41
+ - !ruby/object:Gem::Dependency
42
+ name: rake
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - "~>"
46
+ - !ruby/object:Gem::Version
47
+ version: '10.0'
48
+ type: :development
49
+ prerelease: false
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "~>"
53
+ - !ruby/object:Gem::Version
54
+ version: '10.0'
55
+ - !ruby/object:Gem::Dependency
56
+ name: rspec
57
+ requirement: !ruby/object:Gem::Requirement
58
+ requirements:
59
+ - - ">="
60
+ - !ruby/object:Gem::Version
61
+ version: '0'
62
+ type: :development
63
+ prerelease: false
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - ">="
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ - !ruby/object:Gem::Dependency
70
+ name: fakes3
71
+ requirement: !ruby/object:Gem::Requirement
72
+ requirements:
73
+ - - ">="
74
+ - !ruby/object:Gem::Version
75
+ version: '0'
76
+ type: :development
77
+ prerelease: false
78
+ version_requirements: !ruby/object:Gem::Requirement
79
+ requirements:
80
+ - - ">="
81
+ - !ruby/object:Gem::Version
82
+ version: '0'
83
+ - !ruby/object:Gem::Dependency
84
+ name: coveralls
85
+ requirement: !ruby/object:Gem::Requirement
86
+ requirements:
87
+ - - ">="
88
+ - !ruby/object:Gem::Version
89
+ version: '0'
90
+ type: :development
91
+ prerelease: false
92
+ version_requirements: !ruby/object:Gem::Requirement
93
+ requirements:
94
+ - - ">="
95
+ - !ruby/object:Gem::Version
96
+ version: '0'
97
+ description: Simple s3 modules in order to download, upload, copy and delete the file
98
+ on s3.
99
+ email:
100
+ - skskoari@gmail.com
101
+ executables: []
102
+ extensions: []
103
+ extra_rdoc_files: []
104
+ files:
105
+ - ".gitignore"
106
+ - ".rspec"
107
+ - ".travis.yml"
108
+ - Gemfile
109
+ - LICENSE
110
+ - LICENSE.txt
111
+ - README.md
112
+ - Rakefile
113
+ - lib/s3_utils.rb
114
+ - lib/s3_utils/generator.rb
115
+ - lib/s3_utils/method.rb
116
+ - lib/s3_utils/path.rb
117
+ - lib/s3_utils/version.rb
118
+ - s3_utils.gemspec
119
+ - spec/lib/s3_utils/generator_spec.rb
120
+ - spec/lib/s3_utils/path_spec.rb
121
+ - spec/lib/s3_utils_spec.rb
122
+ - spec/spec_helper.rb
123
+ - spec/support/s3_helper.rb
124
+ homepage: https://github.com/mgi166/s3_utils
125
+ licenses:
126
+ - MIT
127
+ metadata: {}
128
+ post_install_message:
129
+ rdoc_options: []
130
+ require_paths:
131
+ - lib
132
+ required_ruby_version: !ruby/object:Gem::Requirement
133
+ requirements:
134
+ - - ">="
135
+ - !ruby/object:Gem::Version
136
+ version: '0'
137
+ required_rubygems_version: !ruby/object:Gem::Requirement
138
+ requirements:
139
+ - - ">="
140
+ - !ruby/object:Gem::Version
141
+ version: '0'
142
+ requirements: []
143
+ rubyforge_project:
144
+ rubygems_version: 2.2.2
145
+ signing_key:
146
+ specification_version: 4
147
+ summary: Simple s3 modules in order to download, upload, copy and delete the file
148
+ on s3.
149
+ test_files:
150
+ - spec/lib/s3_utils/generator_spec.rb
151
+ - spec/lib/s3_utils/path_spec.rb
152
+ - spec/lib/s3_utils_spec.rb
153
+ - spec/spec_helper.rb
154
+ - spec/support/s3_helper.rb