bfs-s3 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/bfs-s3.gemspec +22 -0
- data/lib/bfs/bucket/s3.rb +141 -0
- data/lib/bfs/s3.rb +1 -0
- data/spec/bfs/bucket/s3_spec.rb +74 -0
- metadata +76 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: 99fd7cbae95d18698976a0e6bc061a8c89f4767ee58186903dceb321fefccbf4
  data.tar.gz: 66abcf654b856ca9da91319352bceec93d806635f6587676e7eee16a6fab7450
SHA512:
  metadata.gz: 11a36864299e8b1b549cbda57dcf1fdf3cba4e2a2eb5bd6b3a18f5617adc5bcb8a267dbfa5522659b083e4d7d349a5717c52641f4cd987d175624297df7f6b68
  data.tar.gz: 890d3fec3c4653f23b7c85f98f2ab89ad219728633858e5558161951c7572c98fe879850ab0b349a485f8a42ffd85e65f432e4b3f418099475a4e5866a884edf
data/bfs-s3.gemspec
ADDED
@@ -0,0 +1,22 @@
# Gem specification for bfs-s3, the S3 bucket adapter for bfs.
# The gem version is read from the repository-level .version file.
Gem::Specification.new do |s|
  s.name        = 'bfs-s3'
  s.version     = File.read(File.expand_path('../.version', __dir__)).strip
  s.platform    = Gem::Platform::RUBY

  s.licenses    = ['Apache-2.0']
  s.summary     = 'S3 bucket adapter for bfs'
  s.description = 'https://github.com/bsm/bfs.rb'

  s.authors     = ['Dimitrij Denissenko']
  s.email       = 'dimitrij@blacksquaremedia.com'
  s.homepage    = 'https://github.com/bsm/bfs.rb'

  s.executables   = []
  s.files         = `git ls-files`.split("\n")
  s.test_files    = `git ls-files -- spec/*`.split("\n")
  s.require_paths = ['lib']
  s.required_ruby_version = '>= 2.2.0'

  s.add_dependency 'aws-sdk-s3'
  s.add_dependency 'bfs'
end
|
@@ -0,0 +1,141 @@
require 'bfs'
require 'aws-sdk-s3'
require 'cgi'

module BFS
  module Bucket
    # S3 bucket adapter, implemented on top of the Aws::S3::Client API.
    class S3 < Abstract
      attr_reader :name, :sse, :acl, :storage_class

      # Initializes a new S3 bucket
      # @param [String] name the bucket name
      # @param [Hash] opts options
      # @option opts [String] :region default region
      # @option opts [String] :sse default server-side-encryption setting
      # @option opts [Aws::Credentials] :credentials credentials object
      # @option opts [String] :access_key_id custom AWS access key ID
      # @option opts [String] :secret_access_key custom AWS secret access key
      # @option opts [Symbol] :acl canned ACL
      # @option opts [String] :storage_class storage class
      # @option opts [Aws::S3::Client] :client custom client, uses default_client by default
      def initialize(name, opts={})
        # Normalise all option keys to strings so callers may pass
        # symbol or string keys interchangeably.
        opts = opts.dup
        opts.keys.each do |key|
          opts[key.to_s] = opts.delete(key)
        end

        @name = name
        @sse = opts['sse'] || opts['server_side_encryption']
        @credentials = opts['credentials']
        @credentials ||= Aws::Credentials.new(opts['access_key_id'].to_s, opts['secret_access_key'].to_s) if opts['access_key_id']
        @acl = opts['acl'].to_sym if opts.key?('acl')
        @storage_class = opts['storage_class']

        # BUGFIX: pass the resolved credentials through to the client.
        # Previously @credentials was stored but never used, so custom
        # :credentials or :access_key_id/:secret_access_key options were
        # silently ignored whenever no explicit :client was given.
        @client = opts['client']
        unless @client
          client_opts = { region: opts['region'] }
          client_opts[:credentials] = @credentials if @credentials
          @client = Aws::S3::Client.new(client_opts)
        end
      end

      # Lists the contents of a bucket using a glob pattern
      # NOTE(review): only the first page of list_objects_v2 results
      # (max 1000 keys) is inspected; pagination is not handled — confirm
      # whether callers rely on listing larger buckets.
      def ls(pattern='**/*', opts={})
        @client.list_objects_v2(opts.merge(bucket: name)).contents.select do |obj|
          File.fnmatch?(pattern, obj.key, File::FNM_PATHNAME)
        end.map(&:key)
      end

      # Info returns the object info
      # @raise [BFS::FileNotFound] when the object does not exist
      def info(path, opts={})
        path = norm_path(path)
        opts = opts.merge(
          bucket: name,
          max_keys: 1,
          prefix: opts[:prefix] ? File.join(opts[:prefix], path) : path,
        )
        object = @client.list_objects_v2(opts).contents.first
        raise BFS::FileNotFound, path unless object

        BFS::FileInfo.new(path, object.size, object.last_modified)
      rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket
        raise BFS::FileNotFound, path
      end

      # Creates a new file and opens it for writing.
      # The data is buffered locally and uploaded via put_object when the
      # writer is closed. Bucket-level :acl, :sse and :storage_class
      # defaults are applied unless overridden in opts.
      def create(path, opts={}, &block)
        path = norm_path(path)
        opts = opts.merge(
          bucket: name,
          key: path,
        )
        opts[:acl] ||= @acl if @acl
        opts[:server_side_encryption] ||= @sse if @sse
        opts[:storage_class] ||= @storage_class if @storage_class

        temp = BFS::TempWriter.new(path) do |t|
          File.open(t) do |file|
            @client.put_object(opts.merge(body: file))
          end
        end
        return temp unless block

        begin
          yield temp
        ensure
          temp.close # ensure the upload is triggered even if the block raises
        end
      end

      # Opens an existing file for reading.
      # The object is downloaded to a local tempfile first.
      # @raise [BFS::FileNotFound] when the object does not exist
      def open(path, opts={}, &block)
        path = norm_path(path)
        temp = Tempfile.new(File.basename(path))
        temp.close

        opts = opts.merge(
          response_target: temp.path,
          bucket: name,
          key: path,
        )
        @client.get_object(opts)

        File.open(temp.path, &block)
      rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket
        raise BFS::FileNotFound, path
      end

      # Deletes a file. Missing keys/buckets are deliberately ignored.
      def rm(path, opts={})
        path = norm_path(path)
        opts = opts.merge(
          bucket: name,
          key: path,
        )
        @client.delete_object(opts)
      rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket # rubocop:disable Lint/HandleExceptions
      end

      # Copies a file within the bucket.
      # @raise [BFS::FileNotFound] when the source object does not exist
      def cp(src, dst, opts={})
        src = norm_path(src)
        dst = norm_path(dst)
        opts = opts.merge(
          bucket: name,
          copy_source: "/#{name}/#{src}",
          key: dst,
        )
        @client.copy_object(opts)
      rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket
        raise BFS::FileNotFound, src
      end
    end
  end
end

# Register the s3:// URL scheme, e.g.
#   s3://bucket-name?region=eu-west-2&acl=private
BFS.register('s3') do |url|
  params = CGI.parse(url.query)

  BFS::Bucket::S3.new url.host,
      region: params.key?('region') ? params['region'].first : nil,
      sse: params.key?('sse') ? params['sse'].first : nil,
      access_key_id: params.key?('access_key_id') ? params['access_key_id'].first : nil,
      secret_access_key: params.key?('secret_access_key') ? params['secret_access_key'].first : nil,
      acl: params.key?('acl') ? params['acl'].first : nil,
      storage_class: params.key?('storage_class') ? params['storage_class'].first : nil
end
data/lib/bfs/s3.rb
ADDED
@@ -0,0 +1 @@
# Convenience entry point: requiring 'bfs/s3' loads the S3 bucket adapter.
require 'bfs/bucket/s3'
@@ -0,0 +1,74 @@
require 'spec_helper'

RSpec.describe BFS::Bucket::S3 do
  let(:client) { double('Aws::S3::Client') }
  let(:files)  { {} }
  subject { described_class.new('mock-bucket', client: client) }

  # stub put_object calls and store file data
  before do
    allow(client).to receive(:put_object).with(hash_including(bucket: 'mock-bucket')) do |opts|
      files[opts[:key]] = opts[:body].read
      nil
    end
  end

  # stub get_object calls
  before do
    allow(client).to receive(:get_object).with(hash_including(bucket: 'mock-bucket')) do |opts|
      data = files[opts[:key]]
      raise Aws::S3::Errors::NoSuchKey.new(nil, nil) unless data

      File.open(opts[:response_target], 'w') {|f| f.write(data) }
      nil
    end
  end

  # stub delete_object calls
  before do
    allow(client).to receive(:delete_object).with(hash_including(bucket: 'mock-bucket')) do |opts|
      raise Aws::S3::Errors::NoSuchKey.new(nil, nil) unless files.key?(opts[:key])

      files.delete(opts[:key])
      nil
    end
  end

  # stub list_objects_v2 calls
  before do
    allow(client).to receive(:list_objects_v2).with(bucket: 'mock-bucket') do |*|
      contents = files.keys.map {|key| Aws::S3::Types::Object.new(key: key) }
      double 'ListObjectsV2Response', contents: contents
    end
  end

  # stub list_objects_v2, single object calls
  before do
    match = double 'ListObjectsV2Response', contents: [
      Aws::S3::Types::Object.new(key: 'a/b/c.txt', size: 10, last_modified: Time.now),
    ]
    no_match = double 'ListObjectsV2Response', contents: []

    allow(client).to receive(:list_objects_v2).with(bucket: 'mock-bucket', max_keys: 1, prefix: 'a/b/c.txt').and_return(match)
    allow(client).to receive(:list_objects_v2).with(bucket: 'mock-bucket', max_keys: 1, prefix: 'missing.txt').and_return(no_match)
  end

  # stub copy_object calls
  before do
    allow(client).to receive(:copy_object).with(hash_including(bucket: 'mock-bucket')) do |opts|
      src = opts[:copy_source].sub('/mock-bucket/', '')
      raise Aws::S3::Errors::NoSuchKey.new(nil, nil) unless files.key?(src)

      files[opts[:key]] = files[src]
      nil
    end
  end

  it_behaves_like 'a bucket'

  it 'should resolve from URL' do
    # FIX: the query separator was mis-encoded as '®' (residue of the HTML
    # '&reg;' entity); it must be a literal '&' for CGI.parse to see both
    # the 'acl' and 'region' parameters.
    bucket = BFS.resolve('s3://mock-bucket?acl=private&region=eu-west-2')
    expect(bucket).to be_instance_of(described_class)
    expect(bucket.name).to eq('mock-bucket')
    expect(bucket.acl).to eq(:private)
  end
end
metadata
ADDED
@@ -0,0 +1,76 @@
--- !ruby/object:Gem::Specification
name: bfs-s3
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Dimitrij Denissenko
autorequire:
bindir: bin
cert_chain: []
date: 2018-10-10 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: aws-sdk-s3
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: bfs
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
        version: '0'
description: https://github.com/bsm/bfs.rb
email: dimitrij@blacksquaremedia.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- bfs-s3.gemspec
- lib/bfs/bucket/s3.rb
- lib/bfs/s3.rb
- spec/bfs/bucket/s3_spec.rb
homepage: https://github.com/bsm/bfs.rb
licenses:
- Apache-2.0
metadata: {}
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: 2.2.0
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.7.6
signing_key:
specification_version: 4
summary: S3 bucket adapter for bfs
test_files:
- spec/bfs/bucket/s3_spec.rb