bfs-s3 0.6.1 → 0.7.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: d45908f3dcc0517ee7beab0c08841a516c140069f2a4e3a6166ae0333ca2c5f0
- data.tar.gz: 2c845b22f0fb5776ba1e8be311f1de7cd0ff9caf5cd88d61f8b0782712810dc0
+ metadata.gz: 1e2a56db1b3250b2054d01e38788642c887f4c747c17570b49e0c5efed3e28e2
+ data.tar.gz: 43462a7a5cb5b7d1eb86a45422f1f4ed2d2dab5409309a39be859d02027c4a39
  SHA512:
- metadata.gz: 64de3e661d1bf09aa264ad045eaeeff4c5a86320ab90be4b96e4c46fb9675379babd0ebe24cf98c56552db05fa7dec2ae7530283b7f65fdb27c1819d929199ca
- data.tar.gz: 7a4707fbfed527b5e3f3ebd48887cbdd759c78bd28951647579a213d49ab418fb9db2e37584c28964a292c0dd180c9f3f73e3a5d6c6de1f087da0a4a63d6aadc
+ metadata.gz: 72798b2b002302d858556669318977a4b74ea32a7c76fd703e1cb368e0b3f426150d6eb760f7fe77f59e34c36db7d3ce36d45b4a72198c7dd8a1b21b4fa32277
+ data.tar.gz: f9a7376141e5aaab374ac62887245a7458e04de47d9208222559b6b041a28e965cc18d7dd81ee3128f39b748515511ab40ae4b5e48ed6c1aa1d5a1d5503342bf
@@ -15,7 +15,7 @@ Gem::Specification.new do |s|
  s.files = `git ls-files`.split("\n")
  s.test_files = `git ls-files -- spec/*`.split("\n")
  s.require_paths = ['lib']
- s.required_ruby_version = '>= 2.5.0'
+ s.required_ruby_version = '>= 2.6.0'

  s.add_dependency 'aws-sdk-s3', '~> 1.38'
  s.add_dependency 'bfs', s.version
@@ -1,6 +1,5 @@
  require 'bfs'
  require 'aws-sdk-s3'
- require 'cgi'

  module BFS
  module Bucket
@@ -21,13 +20,7 @@ module BFS
  # @option opts [Symbol] :acl canned ACL
  # @option opts [String] :storage_class storage class
  # @option opts [Aws::S3::Client] :client custom client, uses default_client by default
- # @option opts [String] :encoding Custom encoding.
  def initialize(name, **opts)
- opts = opts.dup
- opts.keys.each do |key|
- val = opts.delete(key)
- opts[key.to_sym] = val unless val.nil?
- end
  super(**opts)

  @name = name
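
With the string-key normalisation loop removed, options are forwarded to `super` as plain keywords, so callers are expected to pass symbol keys. A minimal sketch of direct instantiation (the bucket name and option values are illustrative, and loading via `require 'bfs/s3'` is an assumption):

```ruby
require 'bfs/s3'

# String keys are no longer converted; pass symbols.
bucket = BFS::Bucket::S3.new 'my-bucket',
                             prefix: 'reports/',
                             region: 'eu-west-1',
                             acl: :private,
                             storage_class: 'STANDARD'
```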
@@ -67,7 +60,7 @@ module BFS
  info = @client.head_object(**opts)
  raise BFS::FileNotFound, path unless info

- BFS::FileInfo.new(path, info.content_length, info.last_modified, info.content_type, norm_meta(info.metadata))
+ BFS::FileInfo.new(path: path, size: info.content_length, mtime: info.last_modified, content_type: info.content_type, metadata: norm_meta(info.metadata))
  rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket, Aws::S3::Errors::NotFound
  raise BFS::FileNotFound, path
  end
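
`BFS::FileInfo` is now built with keyword arguments rather than positionals; the readers used below mirror those keywords and the surrounding `info` call, but treat this as a hedged sketch with an illustrative path:

```ruby
info = bucket.info('reports/2020-05.csv')
info.size          # content length in bytes
info.mtime         # last-modified time
info.content_type  # e.g. "text/csv"
info.metadata      # normalised S3 object metadata
```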
@@ -79,9 +72,8 @@ module BFS
  # @option opts [String] :acl custom ACL override
  # @option opts [String] :server_side_encryption SSE override
  # @option opts [String] :storage_class storage class override
- def create(path, **opts, &block)
+ def create(path, encoding: self.encoding, perm: self.perm, **opts, &block)
  path = full_path(path)
- enc = opts.delete(:encoding) || @encoding
  opts = opts.merge(
  bucket: name,
  key: path,
@@ -90,8 +82,8 @@ module BFS
  opts[:server_side_encryption] ||= @sse if @sse
  opts[:storage_class] ||= @storage_class if @storage_class

- temp = BFS::TempWriter.new(path, encoding: enc) do |t|
- File.open(t, encoding: enc) do |file|
+ temp = BFS::TempWriter.new(path, encoding: encoding, perm: perm) do |t|
+ File.open(t, encoding: encoding) do |file|
  @client.put_object(opts.merge(body: file))
  end
  end
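
`create` now exposes `encoding:` and `perm:` as explicit keywords with bucket-level defaults instead of deleting `:encoding` from the opts hash. A hedged sketch of the block form, assuming the usual BFS convention of yielding the temp writer (the path, permissions and content are illustrative):

```ruby
bucket.create('reports/2020-05.csv', encoding: 'utf-8', perm: 0o640) do |w|
  w.write("id,total\n")
end
# Remaining opts such as acl:, server_side_encryption: and storage_class:
# are still merged into the put_object call as before.
```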
@@ -108,10 +100,10 @@ module BFS
  # @param [String] path
  # @param [Hash] opts options
  # @option opts [String] :encoding Custom encoding.
- def open(path, **opts, &block)
+ # @option opts [String] :tempdir Custom temp dir.
+ def open(path, encoding: self.encoding, tempdir: nil, **opts, &block)
  path = full_path(path)
- enc = opts.delete(:encoding) || @encoding
- temp = Tempfile.new(File.basename(path), encoding: enc)
+ temp = Tempfile.new(File.basename(path), tempdir, encoding: encoding)
  temp.close

  opts = opts.merge(
@@ -121,7 +113,7 @@ module BFS
  )
  @client.get_object(**opts)

- File.open(temp.path, encoding: enc, &block)
+ File.open(temp.path, encoding: encoding, &block)
  rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket, Aws::S3::Errors::NotFound
  raise BFS::FileNotFound, trim_prefix(path)
  end
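
`open` gains a `tempdir:` keyword that is handed to `Tempfile.new` for the local copy, alongside the now-explicit `encoding:` keyword. A hedged usage sketch (the path and directory are illustrative):

```ruby
bucket.open('reports/2020-05.csv', encoding: 'utf-8', tempdir: '/mnt/scratch') do |f|
  f.each_line { |line| puts line }
end
```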
@@ -134,7 +126,7 @@ module BFS
  key: path,
  )
  @client.delete_object(**opts)
- rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket, Aws::S3::Errors::NotFound # rubocop:disable Lint/SuppressedException
+ rescue Aws::S3::Errors::NoSuchKey, Aws::S3::Errors::NoSuchBucket, Aws::S3::Errors::NotFound
  end

  # Copies a file.
@@ -166,17 +158,9 @@ module BFS
  end
  end

- BFS.register('s3') do |url|
- params = CGI.parse(url.query.to_s)
- prefix = BFS.norm_path(params.key?('prefix') ? params['prefix'].first : url.path)
- prefix = nil if prefix.empty?
-
- BFS::Bucket::S3.new url.host,
- prefix: prefix,
- region: params.key?('region') ? params['region'].first : nil,
- sse: params.key?('sse') ? params['sse'].first : nil,
- access_key_id: params.key?('access_key_id') ? params['access_key_id'].first : nil,
- secret_access_key: params.key?('secret_access_key') ? params['secret_access_key'].first : nil,
- acl: params.key?('acl') ? params['acl'].first : nil,
- storage_class: params.key?('storage_class') ? params['storage_class'].first : nil
+ BFS.register('s3') do |url, opts|
+ prefix = BFS.norm_path(opts[:prefix] || url.path)
+ opts[:prefix] = prefix.empty? ? nil : prefix
+
+ BFS::Bucket::S3.new url.host, **opts.slice(:prefix, :region, :sse, :access_key_id, :secret_access_key, :acl, :storage_class, :encoding)
  end
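
The registration block now receives a pre-parsed, symbol-keyed `opts` hash alongside the URL, so the manual `CGI.parse` step (and the `require 'cgi'` above) is gone; query parameters such as `encoding` flow through `opts.slice(...)` into the bucket. A sketch of resolving a connection URL, with an illustrative bucket name and the accessors the spec below relies on:

```ruby
bucket = BFS.resolve('s3://my-bucket/some/prefix?region=eu-west-1&acl=private&encoding=binary')
bucket.acl      # => :private
bucket.encoding # => "binary"
```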
@@ -25,10 +25,11 @@ RSpec.describe BFS::Bucket::S3, if: run_spec do
  it_behaves_like 'a bucket'

  it 'should resolve from URL' do
- bucket = BFS.resolve("s3://#{sandbox[:bucket]}/?acl=private")
+ bucket = BFS.resolve("s3://#{sandbox[:bucket]}/?acl=private&encoding=binary")
  expect(bucket).to be_instance_of(described_class)
  expect(bucket.name).to eq(sandbox[:bucket])
  expect(bucket.acl).to eq(:private)
+ expect(bucket.encoding).to eq('binary')
  expect(bucket.instance_variable_get(:@prefix)).to be_nil

  bucket = BFS.resolve("s3://#{sandbox[:bucket]}/a/b/")
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: bfs-s3
  version: !ruby/object:Gem::Version
- version: 0.6.1
+ version: 0.7.0
  platform: ruby
  authors:
  - Dimitrij Denissenko
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-02-11 00:00:00.000000000 Z
+ date: 2020-05-28 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: aws-sdk-s3
@@ -30,14 +30,14 @@ dependencies:
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 0.6.1
+ version: 0.7.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - '='
  - !ruby/object:Gem::Version
- version: 0.6.1
+ version: 0.7.0
  description: https://github.com/bsm/bfs.rb
  email: dimitrij@blacksquaremedia.com
  executables: []
@@ -60,7 +60,7 @@ required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
  - !ruby/object:Gem::Version
- version: 2.5.0
+ version: 2.6.0
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="