encrypted_s3_copy 0.0.2

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA1:
+   metadata.gz: 630866427aec087a5ac9c0cd78b8babee5992381
+   data.tar.gz: 1132645949fb57d4de5e2ddb563da6852873704c
+ SHA512:
+   metadata.gz: f78af23a9e5adeba60972266fb37ecdeee0c188dd3e37a58292a6c55331d6dea8b9d182cbabedaa37b3c6348b5107f1c73bbe7702fa982dc3a92b849947c3769
+   data.tar.gz: 11fd5db629f94738b13f9d73e3d261043bab0597b4b05fcdf1efcbaa3913ae3e4bf702127706f3561945f724ff50a3e7fd9a57f86d4e8e8158940f81fa5e0665
data/.gitignore ADDED
@@ -0,0 +1,25 @@
+ *.gem
+ *.rbc
+ .bundle
+ .config
+ .yardoc
+ Gemfile.lock
+ InstalledFiles
+ _yardoc
+ coverage
+ doc/
+ lib/bundler/man
+ pkg
+ rdoc
+ spec/reports
+ test/tmp
+ test/version_tmp
+ tmp
+ *.bundle
+ *.so
+ *.o
+ *.a
+ mkmf.log
+
+ *.sw[p|o|n|m]
+ vendor/bundle
data/.rspec ADDED
@@ -0,0 +1,2 @@
+ --format documentation
+ --color
data/.simplecov ADDED
@@ -0,0 +1,4 @@
+ SimpleCov.start do
+   add_filter 'vendor/bundle'
+   coverage_dir '/var/www/html/coverage'
+ end
data/.travis.yml ADDED
@@ -0,0 +1,3 @@
+ language: ruby
+ rvm:
+ - 2.0.0
data/Gemfile ADDED
@@ -0,0 +1,7 @@
+ source 'https://rubygems.org'
+
+ group :test, :development do
+   gem 'simplecov'
+ end
+ # Specify your gem's dependencies in encrypted_s3_copy.gemspec
+ gemspec
data/LICENSE.txt ADDED
@@ -0,0 +1,22 @@
+ Copyright (c) 2014 nabewata07
+
+ MIT License
+
+ Permission is hereby granted, free of charge, to any person obtaining
+ a copy of this software and associated documentation files (the
+ "Software"), to deal in the Software without restriction, including
+ without limitation the rights to use, copy, modify, merge, publish,
+ distribute, sublicense, and/or sell copies of the Software, and to
+ permit persons to whom the Software is furnished to do so, subject to
+ the following conditions:
+
+ The above copyright notice and this permission notice shall be
+ included in all copies or substantial portions of the Software.
+
+ THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
+ EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
+ MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
+ NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
+ LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
+ OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
+ WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
data/README.md ADDED
@@ -0,0 +1,46 @@
+ # EncryptedS3Copy
+
+ Tool to upload files to AWS S3 with client-side encryption and download client-side encrypted files.
+
+ ## Installation
+
+ Add this line to your application's Gemfile:
+
+     gem 'encrypted_s3_copy'
+
+ And then execute:
+
+     $ bundle install
+
+ Or install it yourself as:
+
+     $ gem install encrypted_s3_copy
+
+ ## Usage
+
+ command options
+
+     -k, --key-file=KEY_FILE_PATH
+     -s, --source=SOURCE_PATH
+     -d, --dest=DEST_PATH
+     -r, --recursive
+
+ ### upload
+ #### single file
+     $ encrypted_s3_copy -k /path/to/symmetric/key/file -s /path/to/local/file -d s3://bucket/suffix/to/file
+ #### recursive mode
+     $ encrypted_s3_copy -k /path/to/symmetric/key/file -s /path/to/local/directory/ -d s3://bucket/suffix/to/directory/ --recursive
+
+ ### download
+ #### single file
+     $ encrypted_s3_copy -k /path/to/symmetric/key/file -s s3://bucket/suffix/to/file -d /path/to/local/file
+ #### recursive mode
+     $ encrypted_s3_copy -k /path/to/symmetric/key/file -s s3://bucket/suffix/to/directory/ -d /path/to/local/directory/ --recursive
+
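The README never states what the key file must contain; judging from client.rb further down, it holds a Base64-encoded symmetric key that is read, decoded, and passed to `AWS.config(s3_encryption_key: ...)`. A minimal sketch for producing such a key file, assuming a randomly generated AES-256 key (the exact key type expected by aws-sdk v1 client-side encryption is not spelled out here):

    $ ruby -ropenssl -rbase64 \
        -e 'print Base64.encode64(OpenSSL::Cipher.new("AES-256-CBC").random_key)' \
        > /path/to/symmetric/key/file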
+ ## Contributing
+
+ 1. Fork it ( https://github.com/nabewata07/encrypted_s3_copy/fork )
+ 2. Create your feature branch (`git checkout -b my-new-feature`)
+ 3. Commit your changes (`git commit -am 'Add some feature'`)
+ 4. Push to the branch (`git push origin my-new-feature`)
+ 5. Create a new Pull Request
data/Rakefile ADDED
@@ -0,0 +1,7 @@
+ require "bundler/gem_tasks"
+ require "rspec/core/rake_task"
+
+ RSpec::Core::RakeTask.new(:spec)
+
+ task :default => :spec
+
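With this Rakefile, RSpec is wired into rake and `spec` is the default task, so the test suite can be run with either of the following (assuming the development dependencies from the gemspec are installed):

    $ bundle exec rake spec
    $ bundle exec rake        # :default points at :spec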
data/bin/encrypted_s3_copy ADDED
@@ -0,0 +1,5 @@
+ #!/usr/bin/env ruby
+ require 'encrypted_s3_copy'
+
+ client = EncryptedS3Copy::Client.new
+ client.execute
data/encrypted_s3_copy.gemspec ADDED
@@ -0,0 +1,28 @@
+ # coding: utf-8
+ lib = File.expand_path('../lib', __FILE__)
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+ require 'encrypted_s3_copy/version'
+
+ Gem::Specification.new do |spec|
+   spec.name = "encrypted_s3_copy"
+   spec.version = EncryptedS3Copy::VERSION
+   spec.authors = ["nabewata07"]
+   spec.email = ["channel.momo@gmail.com"]
+   spec.summary = %q{upload and download encrypted files to/from AWS S3}
+   spec.description = %q{upload and download encrypted files to/from AWS S3}
+   spec.homepage = "https://github.com/nabewata07/encrypted_s3_copy"
+   spec.license = "MIT"
+
+   spec.required_ruby_version = '>= 2.0'
+
+   spec.files = `git ls-files -z`.split("\x0")
+   spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
+   spec.test_files = spec.files.grep(%r{^(test|spec|features)/})
+   spec.require_paths = ["lib"]
+
+   spec.add_dependency "aws-sdk", "~> 1.0"
+
+   spec.add_development_dependency "bundler", "~> 1.6"
+   spec.add_development_dependency "rake"
+   spec.add_development_dependency "rspec", ">= 2.99"
+ end
data/lib/encrypted_s3_copy.rb ADDED
@@ -0,0 +1,5 @@
+ require "encrypted_s3_copy/version"
+ require "encrypted_s3_copy/client"
+
+ module EncryptedS3Copy
+ end
data/lib/encrypted_s3_copy/client.rb ADDED
@@ -0,0 +1,129 @@
+ require 'aws-sdk'
+ require 'json'
+ require 'optparse'
+ require 'base64'
+
+ module EncryptedS3Copy
+   class Client
+     # s3://(a_bucket)/(path/to/target_file)
+     S3_PATH = /^s3:\/\/([^\/]+)\/(.+)/
+     def before
+       opt = OptionParser.new
+       opt.on('-k', '--key-file=KEY_FILE_PATH') do |path|
+         encoded_key = File.read(path)
+         AWS.config(s3_encryption_key: Base64.decode64(encoded_key.chomp))
+       end
+       opt.on('-s', '--source=SOURCE_PATH') do |path|
+         @source = path
+       end
+       opt.on('-d', '--dest=DEST_PATH') do |path|
+         @dest = path
+       end
+       opt.on('-r', '--recursive') do |is_recursive|
+         @is_recursive = is_recursive
+       end
+       opt.parse(ARGV)
+     end
+
+     def execute
+       before
+       handle
+     end
+
+     private
+
+     def handle
+       if !(@source =~ S3_PATH) && @dest =~ S3_PATH
+         if @is_recursive
+           recursive_upload($1, $2)
+         else
+           single_upload($1, $2)
+         end
+       elsif !(@dest =~ S3_PATH) && @source =~ S3_PATH
+         if @is_recursive
+           recursive_download($1, $2)
+         else
+           obj = get_s3_object($1, $2)
+           single_download(obj)
+         end
+       else
+         raise 'either source path or destination path or both are wrong'
+       end
+     end
+
+     def recursive_download(bucket_name, suffix)
+       suffix += '/' unless suffix =~ /\/$/
+
+       s3_objects = get_s3_objects(bucket_name)
+       s3_objects.with_prefix(suffix).each do |obj|
+         next if obj.content_length < 1
+         single_download(obj)
+       end
+     end
+
+     def recursive_upload(bucket_name, suffix)
+       wildcard = '**/*'
+       source_dir = (@source[-1] == '/') ? @source : @source + '/'
+       suffix += '/' if suffix[-1] != '/'
+       files_dirs = Dir.glob(source_dir + wildcard)
+
+       files_dirs.each do |path|
+         next if File.directory?(path)
+         @source = path
+         input_dir_size = source_dir.size
+         additional_path = path[input_dir_size..-1]
+
+         single_upload(bucket_name, suffix + additional_path)
+       end
+     end
+
+     def get_s3_object(bucket_name, suffix)
+       s3 = AWS::S3.new
+       s3.buckets[bucket_name].objects[suffix]
+     end
+
+     def get_s3_objects(bucket_name)
+       s3 = AWS::S3.new
+       s3.buckets[bucket_name].objects
+     end
+
+     def single_upload(bucket_name, suffix)
+       suffix += File.basename(@source) if suffix =~ /\/$/
+
+       s3_obj = get_s3_object(bucket_name, suffix)
+       fp = File.open(@source)
+       s3_obj.write(fp)
+       fp.close
+     end
+
+     def single_download(s3_obj)
+       dest_path = get_dest_path(s3_obj)
+       FileUtils.mkdir_p(File.dirname(dest_path))
+       File.open(dest_path, 'wb') do |file|
+         s3_obj.read do |chunk|
+           file.write(chunk)
+         end
+       end
+     end
+
+     def get_dest_path(s3_obj)
+       unless @is_recursive
+         return @dest + File.basename(@source) if @dest =~ /\/$/
+         @dest
+       else
+         @dest += '/' unless @dest =~ /\/$/
+         @source += '/' unless @source =~ /\/$/
+
+         source_prefix = @source.gsub(/s3:\/\/([^\/])+\//, '')
+         key = s3_obj.key
+         diff = key[source_prefix.size..key.length]
+         return @dest + diff
+       end
+     end
+   end
+ end
+
+ if $0 == __FILE__
+   client = EncryptedS3Copy::Client.new
+   client.execute
+ end
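`handle` decides between upload and download by matching `@source` and `@dest` against `S3_PATH`, then pulls the bucket name and key suffix out of the match globals `$1` and `$2`. A minimal illustration of what the regex captures, using a hypothetical bucket and key:

    S3_PATH = /^s3:\/\/([^\/]+)\/(.+)/
    '/path/to/local/file' =~ S3_PATH        # => nil, treated as a local path
    's3://my-bucket/dir/file.txt' =~ S3_PATH
    $1  # => "my-bucket"     (bucket name)
    $2  # => "dir/file.txt"  (key suffix within the bucket)

One quirk worth noting: `single_download` calls `FileUtils.mkdir_p`, but this file never requires 'fileutils', so it relies on an earlier require having loaded that constant.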
data/lib/encrypted_s3_copy/version.rb ADDED
@@ -0,0 +1,3 @@
+ module EncryptedS3Copy
+   VERSION = "0.0.2"
+ end
data/spec/encrypted_s3_copy/client_spec.rb ADDED
@@ -0,0 +1,288 @@
+ # require 'simplecov'
+ require_relative '../../lib/encrypted_s3_copy/client'
+
+ describe EncryptedS3Copy::Client do
+   let(:bucket_name) { 'test_bkt' }
+   let(:source_s3_suffix) { 'path/to/source_file_name' }
+   let(:local_source_path) { '/path/to/source_file_name' }
+   let(:remote_source_path) { "s3://#{bucket_name}/#{source_s3_suffix}" }
+   let(:remote_dest_path) { "s3://#{bucket_name}/#{dest_s3_suffix}" }
+   let(:dest_s3_suffix) { 'path/to/dest_file_name' }
+   let(:local_dest_path) { '/path/to/dest_file_name' }
+   let(:obj_double) { double('double of s3 object') }
+   describe '#before' do
+
+     context 'when key file path option is set' do
+       let(:opt_double) { double('double of OptionParser') }
+       before :each do
+         allow(OptionParser).to receive(:new).and_return(opt_double)
+         allow(opt_double).to receive(:on)
+         allow(opt_double).to receive(:parse)
+       end
+       it 'should set key file path argument to instance variable' do
+         key_double = double('double of encoded_key')
+         decoded_key_double = double('double of decoded_key')
+         expect(File).to receive(:read).with('/path/to/key').
+           and_return(key_double)
+         expect(key_double).to receive(:chomp).and_return(decoded_key_double)
+         expect(Base64).to receive(:decode64).with(decoded_key_double).
+           and_return('decoded_key_string')
+         expect(opt_double).to receive(:on).
+           with('-k', '--key-file=KEY_FILE_PATH').and_yield('/path/to/key')
+         allow(AWS).to receive(:config).
+           with(s3_encryption_key: 'decoded_key_string')
+         subject.before
+       end
+       it 'should be given argument of source file path' do
+         expect(opt_double).to receive(:on).with('-s', '--source=SOURCE_PATH')
+         subject.before
+       end
+       it 'should set source file path to instance variable' do
+         allow(opt_double).to receive(:on).with('-s', '--source=SOURCE_PATH').
+           and_yield('source_file_path')
+         subject.before
+         path = subject.instance_variable_get(:@source)
+         expect(path).to eq('source_file_path')
+       end
+       it 'should be given argument of destination file path' do
+         expect(opt_double).to receive(:on).with('-d', '--dest=DEST_PATH')
+         subject.before
+       end
+       it 'should set destination file path to instance variable' do
+         allow(opt_double).to receive(:on).with('-d', '--dest=DEST_PATH').
+           and_yield('dest_file_path')
+         subject.before
+         path = subject.instance_variable_get(:@dest)
+         expect(path).to eq('dest_file_path')
+       end
+       it 'should prepare parsing option of recursive' do
+         expect(opt_double).to receive(:on).with('-r', '--recursive')
+         subject.before
+       end
+       context 'when recursive option is set' do
+         it 'should set recursive option true' do
+           allow(opt_double).to receive(:on).with('-r', '--recursive').
+             and_yield(true)
+           subject.before
+           r_flag = subject.instance_variable_get(:@is_recursive)
+           expect(r_flag).to be true
+         end
+       end
+     end
+   end
+
+   describe '#handle' do
+     let(:file_double) { double('double of File object') }
+
+     before :each do
+       allow(FileUtils).to receive(:mkdir_p)
+     end
+
+     context 'when recursive option is set' do
+
+       before :each do
+         subject.instance_variable_set(:@is_recursive, true)
+       end
+
+       context 'copy local files to S3' do
+         it 'should call single_upload multiple times' do
+           files_dirs = [
+             '/source/dir/file01.txt',
+             '/source/dir/file02.txt',
+             '/source/dir/dir2',
+             '/source/dir/dir2/file01.txt'
+           ]
+           subject.instance_variable_set(:@source, '/source/dir')
+           subject.instance_variable_set(:@dest, 's3://dest/dir')
+           allow(Dir).to receive(:glob).with('/source/dir/**/*').
+             and_return(files_dirs)
+           allow(File).to receive(:directory?)
+           expect(File).to receive(:directory?).with('/source/dir/dir2').
+             and_return(true)
+           expect(subject).to receive(:single_upload).with('dest', 'dir/file01.txt')
+           expect(subject).to receive(:single_upload).with('dest', 'dir/file02.txt')
+           expect(subject).to receive(:single_upload).
+             with('dest', 'dir/dir2/file01.txt')
+           subject.send(:handle)
+         end
+       end
+
+       context 'copy S3 files to local' do
+         it 'should call single_download multiple times' do
+           s3_objects_double = double("double of S3 objects")
+           s3_dir01_double = double("double of <AWS::S3::S3Object:dest/dir/>")
+           s3_file01_double = double("double of <AWS::S3::S3Object:dest/dir/sample.txt>")
+           s3_file02_double = double("double of <AWS::S3::S3Object:dest/dir/test>")
+           s3_dir02_double = double("double of <AWS::S3::S3Object:dest/dir/test/>")
+           s3_file03_double = double("double of <AWS::S3::S3Object:dest/dir/test/myfile>")
+           s3_file04_double = double("double of <AWS::S3::S3Object:dest/hoge.txt>")
+           source_objs = [
+             s3_dir01_double, s3_dir02_double, s3_file01_double, s3_file02_double,
+             s3_file03_double, s3_file04_double
+           ]
+           subject.instance_variable_set(:@source, 's3://dest/dir')
+           subject.instance_variable_set(:@dest, '/source/dir')
+
+           allow(s3_dir01_double).to receive(:key).and_return('dir/')
+           allow(s3_dir02_double).to receive(:key).and_return('dir/test/')
+           allow(s3_file01_double).to receive(:key).and_return('dir/sample.txt')
+           allow(s3_file02_double).to receive(:key).and_return('dir/test')
+           allow(s3_file03_double).to receive(:key).and_return('dir/test/myfile')
+           allow(s3_file04_double).to receive(:key).and_return('hoge.txt')
+
+           allow(s3_dir01_double).to receive(:content_length).and_return(0)
+           allow(s3_dir02_double).to receive(:content_length).and_return(0)
+           allow(s3_file01_double).to receive(:content_length).and_return(10)
+           allow(s3_file02_double).to receive(:content_length).and_return(10)
+           allow(s3_file03_double).to receive(:content_length).and_return(10)
+           allow(s3_file04_double).to receive(:content_length).and_return(10)
+
+           allow(subject).to receive(:get_s3_objects).with('dest').
+             and_return(s3_objects_double)
+
+           chunk_double = double('double of chunk of s3 obj')
+           expect(s3_dir01_double).not_to receive(:read)
+           expect(s3_dir02_double).not_to receive(:read)
+           expect(s3_file01_double).to receive(:read).and_yield(chunk_double)
+           expect(s3_file02_double).to receive(:read).and_yield(chunk_double)
+           expect(s3_file03_double).to receive(:read).and_yield(chunk_double)
+           expect(s3_file04_double).to receive(:read).and_yield(chunk_double)
+
+           expect(s3_objects_double).to receive(:with_prefix).with('dir/').
+             and_return(source_objs)
+
+           file_double = double('double of file object')
+           allow(File).to receive(:open).and_yield(file_double)
+
+           expect(file_double).to receive(:write).with(chunk_double).exactly(4).times
+
+           subject.send(:handle)
+         end
+       end
+     end
+
+     context 'when copy single local file to s3' do
+       before :each do
+         subject.instance_variable_set(:@source, local_source_path)
+         subject.instance_variable_set(:@dest, remote_dest_path)
+
+         allow(obj_double).to receive(:write)
+         allow(File).to receive(:open).with(local_source_path).and_return(file_double)
+         allow(file_double).to receive(:close)
+         allow(subject).to receive(:get_s3_object).and_return(obj_double)
+       end
+       it 'should get bucket object' do
+         expect(subject).to receive(:get_s3_object).with(bucket_name, dest_s3_suffix)
+         subject.send(:handle)
+       end
+       context 'when destination path is directory path' do
+         let(:remote_dest_path) { "s3://#{bucket_name}/#{dest_s3_suffix}" }
+         let(:dest_s3_suffix) { 'path/to/dest_dir/' }
+
+         before :each do
+           subject.instance_variable_set(:@dest, remote_dest_path)
+         end
+         it 'should complement file name' do
+           expected_dest = dest_s3_suffix + 'source_file_name'
+           expect(subject).to receive(:get_s3_object).with(bucket_name, expected_dest)
+           subject.send(:handle)
+         end
+       end
+       it 'should open source file' do
+         expect(File).to receive(:open).with(local_source_path)
+         subject.send(:handle)
+       end
+       it 'should write file contents to s3 object' do
+         expect(obj_double).to receive(:write).with(file_double)
+         subject.send(:handle)
+       end
+       it 'should close file pointer' do
+         expect(file_double).to receive(:close)
+         subject.send(:handle)
+       end
+     end
+
+     context 'when copy single s3 file to local' do
+       before :each do
+         subject.instance_variable_set(:@source, remote_source_path)
+         subject.instance_variable_set(:@dest, local_dest_path)
+         allow(File).to receive(:open)
+         allow(subject).to receive(:get_s3_object).and_return(obj_double)
+       end
+       it 'should execute single_download' do
+         expect(subject).to receive(:single_download)
+         subject.send(:handle)
+       end
+       it 'should get bucket' do
+         expect(subject).to receive(:get_s3_object)
+         subject.send(:handle)
+       end
+       context 'when destination path is full path' do
+         it 'should open local destination file' do
+           expect(File).to receive(:open).with(local_dest_path, 'wb')
+           subject.send(:handle)
+         end
+       end
+       context 'when destination path is directory path' do
+         let(:local_dest_path) { '/path/to/dest/file_name/' }
+         it 'should complement file name' do
+           expected_dest = local_dest_path + 'source_file_name'
+           expect(File).to receive(:open).with(expected_dest, 'wb')
+           subject.send(:handle)
+         end
+       end
+       it 'should read s3 object' do
+         allow(File).to receive(:open).and_yield(file_double)
+         expect(obj_double).to receive(:read)
+         subject.send(:handle)
+       end
+       it 'should write contents of s3 object to local file' do
+         allow(File).to receive(:open).and_yield(file_double)
+         allow(obj_double).to receive(:read).and_yield('chunk')
+         expect(file_double).to receive(:write).with('chunk')
+         subject.send(:handle)
+       end
+     end
+
+     context 'when local to local' do
+       it 'should raise RuntimeError' do
+         subject.instance_variable_set(:@source, local_source_path)
+         subject.instance_variable_set(:@dest, local_dest_path)
+         message = 'either source path or destination path or both are wrong'
+         expect{ subject.send(:handle) }.to raise_error(RuntimeError, message)
+       end
+     end
+   end
+
+   describe 'get_s3_object' do
+     let(:s3_double) { double('double of s3 client') }
+     before :each do
+       allow(AWS::S3).to receive(:new).and_return(s3_double)
+       allow(s3_double).
+         to receive_message_chain(:buckets, :[], :objects, :[]) { obj_double }
+     end
+     it 'should create s3 client' do
+       expect(AWS::S3).to receive(:new)
+       subject.send(:get_s3_object, bucket_name, dest_s3_suffix)
+     end
+     it 'should get s3 obj' do
+       buckets_double = double('double of s3 buckets').as_null_object
+       bucket_double = double('double of s3 bucket').as_null_object
+       objects_double = double('double of s3 objects').as_null_object
+       expect(s3_double).to receive(:buckets).and_return(buckets_double)
+       expect(buckets_double).to receive(:[]).with(bucket_name).and_return(bucket_double)
+       expect(bucket_double).to receive(:objects).and_return(objects_double)
+       expect(objects_double).to receive(:[]).with(dest_s3_suffix)
+
+       subject.send(:get_s3_object, bucket_name, dest_s3_suffix)
+     end
+   end
+
+   describe '#execute' do
+     it 'should call handle before and handle' do
+       expect(subject).to receive(:before).ordered
+       expect(subject).to receive(:handle).ordered
+       subject.execute
+     end
+   end
+ end
data/spec/encrypted_s3_copy_spec.rb ADDED
@@ -0,0 +1,7 @@
+ require 'spec_helper'
+
+ describe EncryptedS3Copy do
+   it 'has a version number' do
+     expect(EncryptedS3Copy::VERSION).not_to be nil
+   end
+ end
data/spec/spec_helper.rb ADDED
@@ -0,0 +1,2 @@
+ $LOAD_PATH.unshift File.expand_path('../../lib', __FILE__)
+ require 'encrypted_s3_copy'
metadata ADDED
@@ -0,0 +1,120 @@
+ --- !ruby/object:Gem::Specification
+ name: encrypted_s3_copy
+ version: !ruby/object:Gem::Version
+   version: 0.0.2
+ platform: ruby
+ authors:
+ - nabewata07
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-07-27 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: aws-sdk
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.0'
+ - !ruby/object:Gem::Dependency
+   name: bundler
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.6'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ~>
+       - !ruby/object:Gem::Version
+         version: '1.6'
+ - !ruby/object:Gem::Dependency
+   name: rake
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: rspec
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '>='
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ description: upload and download encrypted files to/from AWS S3
+ email:
+ - channel.momo@gmail.com
+ executables:
+ - encrypted_s3_copy
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - .rspec
+ - .simplecov
+ - .travis.yml
+ - Gemfile
+ - LICENSE.txt
+ - README.md
+ - Rakefile
+ - bin/encrypted_s3_copy
+ - encrypted_s3_copy.gemspec
+ - lib/encrypted_s3_copy.rb
+ - lib/encrypted_s3_copy/client.rb
+ - lib/encrypted_s3_copy/version.rb
+ - spec/encrypted_s3_copy/client_spec.rb
+ - spec/encrypted_s3_copy_spec.rb
+ - spec/spec_helper.rb
+ homepage: https://github.com/nabewata07/encrypted_s3_copy
+ licenses:
+ - MIT
+ metadata: {}
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '2.0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.2.2
+ signing_key:
+ specification_version: 4
+ summary: upload and download encrypted files to/from AWS S3
+ test_files:
+ - spec/encrypted_s3_copy/client_spec.rb
+ - spec/encrypted_s3_copy_spec.rb
+ - spec/spec_helper.rb