shrine 2.10.1 → 2.11.0

@@ -11,7 +11,7 @@ class Shrine
  # and the result is automatically written to the `mime_type` metadata
  # field. You can choose a different built-in MIME type analyzer:
  #
- # plugin :determine_mime_type, analyzer: :filemagic
+ # plugin :determine_mime_type, analyzer: :marcel
  #
  # The following analyzers are accepted:
  #
@@ -166,6 +166,8 @@ class Shrine
  def extract_with_file(io)
  require "open3"

+ return nil if io.eof? # file command returns "application/x-empty" for empty files
+
  Open3.popen3(*%W[file --mime-type --brief -]) do |stdin, stdout, stderr, thread|
  begin
  IO.copy_stream(io, stdin.binmode)
@@ -178,8 +180,7 @@ class Shrine
  raise Error, stderr.read unless status.success?
  $stderr.print(stderr.read)

- mime_type = stdout.read.strip
- mime_type unless mime_type == "application/x-empty"
+ stdout.read.strip
  end
  rescue Errno::ENOENT
  raise Error, "The `file` command-line tool is not installed"
@@ -195,9 +196,10 @@ class Shrine
  def extract_with_filemagic(io)
  require "filemagic"

+ return nil if io.eof? # FileMagic returns "application/x-empty" for empty files
+
  FileMagic.open(FileMagic::MAGIC_MIME_TYPE) do |filemagic|
- mime_type = filemagic.buffer(io.read(MAGIC_NUMBER).to_s)
- mime_type unless mime_type == "application/x-empty"
+ filemagic.buffer(io.read(MAGIC_NUMBER))
  end
  end

@@ -1,5 +1,7 @@
  # frozen_string_literal: true

+ Shrine.deprecation("The direct_upload plugin has been deprecated in favor of upload_endpoint and presign_endpoint plugins. The direct_upload plugin will be removed in Shrine 3.")
+
  require "roda"
  require "json"

@@ -159,7 +161,7 @@ class Shrine
  # [Roda]: https://github.com/jeremyevans/roda
  # [Uppy]: https://uppy.io
  # [Roda request]: http://roda.jeremyevans.net/rdoc/classes/Roda/RodaPlugins/Base/RequestMethods.html
- # [Direct Uploads to S3]: http://shrinerb.com/rdoc/files/doc/direct_s3_md.html
+ # [Direct Uploads to S3]: https://shrinerb.com/rdoc/files/doc/direct_s3_md.html
  module DirectUpload
  def self.load_dependencies(uploader, *)
  uploader.plugin :rack_file
@@ -30,7 +30,7 @@ class Shrine
  #
  # plugin :infer_extension, inferrer: -> (mime_type, inferrers) do
  # # don't add extension if the file is a text file
- # inferrrs[:rack_mime].call(mime_type) unless mime_type == "text/plain"
+ # inferrers[:rack_mime].call(mime_type) unless mime_type == "text/plain"
  # end
  #
  # You can also use methods for inferring extension directly:
@@ -9,7 +9,7 @@ class Shrine
  # plugin :metadata_attributes, :size => :size, :mime_type => :type
  # # or
  # plugin :metadata_attributes
- # Attacher.metadata_attributes, :size => :size, :mime_type => :type
+ # Attacher.metadata_attributes :size => :size, :mime_type => :type
  #
  # The above configuration will sync `size` metadata field to
  # `<attachment>_size` record attribute, and `mime_type` metadata field to
@@ -28,7 +28,7 @@ class Shrine
  # If you want to specify the full record attribute name, pass the record
  # attribute name as a string instead of a symbol.
  #
- # Attacher.metadata_attributes, :filename => "original_filename"
+ # Attacher.metadata_attributes :filename => "original_filename"
  #
  # # ...
  #
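For reference, a minimal sketch of the corrected call above (the `image` attachment name and column names are illustrative; the plugin maps metadata to `<attachment>_<attribute>` columns):

    plugin :metadata_attributes

    # with an `image` attachment this copies the `size` and `mime_type`
    # metadata into the `image_size` and `image_type` columns on assignment
    Attacher.metadata_attributes :size => :size, :mime_type => :type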
@@ -36,16 +36,18 @@ class Shrine
  # the default synchronous workflow, so we want to generate parameters for
  # uploads to the temporary (`:cache`) storage.
  #
- # The above will create a `GET /presign` endpoint, which generates presign
- # URL, fields, and headers using the specified storage, and returns it in
+ # The above will create a `GET /images/presign` endpoint, which calls
+ # `#presign` on the storage and returns the HTTP verb, URL, fields, and
+ # headers needed for a single upload directly to the storage service, in
  # JSON format.
  #
  # # GET /images/presign
  # {
+ # "method": "post",
  # "url": "https://my-bucket.s3-eu-west-1.amazonaws.com",
  # "fields": {
  # "key": "b7d575850ba61b44c8a9ff889dfdb14d88cdc25f8dd121004c8",
- # "policy": "eyJleHBpcmF0aW9uIjoiMjAxNS0QwMToxMToyOVoiLCJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJzaHJpbmUtdGVzdGluZyJ9LHsia2V5IjoiYjdkNTc1ODUwYmE2MWI0NGU3Y2M4YTliZmY4OGU5ZGZkYjE2NTQ0ZDk4OGNkYzI1ZjhkZDEyMTAwNGM4In0seyJ4LWFtei1jcmVkZW50aWFsIjoiQUtJQUlKRjU1VE1aWlk0NVVUNlEvMjAxNTEwMjQvZXUtd2VzdC0xL3MzL2F3czRfcmVxdWVzdCJ9LHsieC1hbXotYWxnb3JpdGhtIjoiQVdTNC1ITUFDLVNIQTI1NiJ9LHsieC1hbXotZGF0ZSI6IjIwMTUxMDI0VDAwMTEyOVoifV19",
+ # "policy": "eyJleHBpcmF0aW9uIjoiMjAxNS0QwMToxMToyOVoiLCJjb25kaXRpb25zIjpbeyJidWNrZXQiOiJ...",
  # "x-amz-credential": "AKIAIJF55TMZYT6Q/20151024/eu-west-1/s3/aws4_request",
  # "x-amz-algorithm": "AWS4-HMAC-SHA256",
  # "x-amz-date": "20151024T001129Z",
@@ -54,17 +56,17 @@ class Shrine
  # "headers": {}
  # }
  #
- # This gives the client all the information it needs to make the upload
- # request to the selected file to the storage service. The `url` field is
- # the request URL, `fields` are the required POST parameters, and `headers`
- # are the required request headers.
+ # * `method` HTTP verb
+ # * `url` request URL
+ # * `fields` POST parameters
+ # * `headers` request headers
  #
  # ## Location
  #
  # By default the generated location won't have any file extension, but you
  # can specify one by sending the `filename` query parameter:
  #
- # GET /images/presign?filename=nature.jpg
+ # # GET /images/presign?filename=nature.jpg
  #
  # It's also possible to customize how the presign location is generated:
  #
@@ -89,13 +91,15 @@ class Shrine
  # }
  # end
  #
+ # The `:presign_options` can be a Proc or a Hash.
+ #
  # ## Presign
  #
  # You can also customize how the presign itself is generated via the
  # `:presign` option:
  #
  # plugin :presign_endpoint, presign: -> (id, options, request) do
- # # return an object that responds to #url, #fields, and #headers
+ # # return a Hash with :url, :fields, and :headers keys
  # end
  #
  # ## Response
@@ -103,8 +107,8 @@ class Shrine
  # The response returned by the endpoint can be customized via the
  # `:rack_response` option:
  #
- # plugin :presign_endpoint, rack_response: -> (hash, request) do
- # body = { endpoint: hash[:url], params: hash[:fields], headers: hash[:headers] }.to_json
+ # plugin :presign_endpoint, rack_response: -> (data, request) do
+ # body = { endpoint: data[:url], params: data[:fields], headers: data[:headers] }.to_json
  # [201, { "Content-Type" => "application/json" }, [body]]
  # end
  #
@@ -215,16 +219,22 @@ class Shrine
  # option is given, calls that instead of calling `#presign`.
  def generate_presign(location, options, request)
  if @presign
- presign = @presign.call(location, options, request)
+ data = @presign.call(location, options, request)
  else
- presign = storage.presign(location, options)
+ data = storage.presign(location, options)
  end

- url = presign.url
- fields = presign.fields
- headers = presign.headers if presign.respond_to?(:headers)
+ if data.respond_to?(:to_h)
+ { fields: {}, headers: {} }.merge(data.to_h)
+ else
+ Shrine.deprecation("Returning a custom object in Storage#presign is deprecated, presign_endpoint will not support it in Shrine 3. Storage#presign should return a Hash instead.")
+
+ url = data.url
+ fields = data.fields
+ headers = data.headers if data.respond_to?(:headers)

- { url: url, fields: fields.to_h, headers: headers.to_h }
+ { url: url, fields: fields.to_h, headers: headers.to_h }
+ end
  end

  # Transforms the presign hash into a JSON response. It returns a Rack
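A minimal sketch of the new Hash-based contract introduced above (the URL is illustrative): a custom `:presign` block can now return a plain Hash, and `generate_presign` fills in empty `:fields` and `:headers` when they are omitted.

    plugin :presign_endpoint, presign: -> (id, options, request) do
      # :fields and :headers default to {} when omitted from the returned Hash
      { method: :put, url: "https://example.com/upload/#{id}" }
    end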
@@ -29,11 +29,11 @@ class Shrine
  # class FilesController < ActionController::Base
  # def download
  # # ...
- # file_response = record.attachment.to_rack_response
+ # file_status, file_headers, file_body = record.attachment.to_rack_response
  #
- # response.status = file_response[0]
- # response.headers.merge!(file_response[1])
- # self.response_body = file_response[2]
+ # response.status = file_status
+ # response.headers.merge!(file_headers)
+ # self.response_body = file_body
  # end
  # end
  #
@@ -36,7 +36,7 @@ class Shrine
  #
  # You can also choose which format will the calculated hash be encoded in:
  #
- # Shrine.calculate_signature(io, :sha256, format: :hex)
+ # Shrine.calculate_signature(io, :sha256, format: :base64)
  #
  # The supported encoding formats are `hex` (default), `base64`, and `none`.
  module Signature
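For reference, a short sketch of the documented API above (assuming the `signature` plugin is loaded and `io` is any IO object):

    Shrine.calculate_signature(io, :sha256)                  # hex-encoded digest (default)
    Shrine.calculate_signature(io, :sha256, format: :base64) # base64-encoded digest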
@@ -3,7 +3,8 @@
  class Shrine
  module Plugins
  # The `store_dimensions` plugin extracts dimensions of uploaded images and
- # stores them into the metadata hash.
+ # stores them into the metadata hash (by default it uses the [fastimage]
+ # gem).
  #
  # plugin :store_dimensions
  #
@@ -29,11 +30,11 @@ class Shrine
  # The following analyzers are supported:
  #
  # :fastimage
- # : (Default). Uses the [FastImage] gem to extract dimensions from any IO
+ # : (Default). Uses the [fastimage] gem to extract dimensions from any IO
  # object.
  #
  # :mini_magick
- # : Uses the [MiniMagick] gem to extract dimensions from File objects. If
+ # : Uses the [mini_magick] gem to extract dimensions from File objects. If
  # non-file IO object is given it will be temporarily downloaded to disk.
  #
  # :ruby_vips
@@ -61,8 +62,8 @@ class Shrine
  # Shrine.dimensions_analyzers[:fastimage].call(io) # calls a built-in analyzer
  # #=> [300, 400]
  #
- # [FastImage]: https://github.com/sdsykes/fastimage
- # [MiniMagick]: https://github.com/minimagick/minimagick
+ # [fastimage]: https://github.com/sdsykes/fastimage
+ # [mini_magick]: https://github.com/minimagick/minimagick
  # [ruby-vips]: https://github.com/jcupitt/ruby-vips
  module StoreDimensions
  def self.configure(uploader, opts = {})
@@ -157,23 +158,14 @@ class Shrine

  def extract_with_mini_magick(io)
  require "mini_magick"
- ensure_file(io) { |file| MiniMagick::Image.new(file.path).dimensions }
+ Shrine.with_file(io) { |file| MiniMagick::Image.new(file.path).dimensions }
+ rescue MiniMagick::Error
  end

  def extract_with_ruby_vips(io)
  require "vips"
- ensure_file(io) { |file| Vips::Image.new_from_file(file.path).size }
- end
-
- def ensure_file(io)
- if io.respond_to?(:path)
- yield io
- else
- Tempfile.create("shrine-store_dimensions") do |tempfile|
- IO.copy_stream(io, tempfile.path)
- yield tempfile
- end
- end
+ Shrine.with_file(io) { |file| Vips::Image.new_from_file(file.path).size }
+ rescue Vips::Error
  end
  end
  end
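A small sketch of the resulting behavior (assuming the mini_magick analyzer, using the `Shrine.dimensions_analyzers` registry shown in the hunk above): analyzer errors are now rescued, so unreadable files simply yield nil dimensions instead of raising.

    plugin :store_dimensions, analyzer: :mini_magick

    Shrine.dimensions_analyzers[:mini_magick].call(io) #=> [300, 400], or nil if MiniMagick fails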
@@ -3,6 +3,7 @@
  require "rack"

  require "json"
+ require "digest"

  class Shrine
  module Plugins
@@ -62,6 +63,17 @@ class Shrine
  # If the uploaded file is larger than the specified value, a `413 Payload
  # Too Large` response will be returned.
  #
+ # ## Checksum
+ #
+ # If you want the upload endpoint to verify the integrity of the uploaded
+ # file, you can include the `Content-MD5` header in the request filled with
+ # the base64-encoded MD5 hash of the file that was calculated prior to the
+ # upload, and the endpoint will automatically use it to verify the uploaded
+ # data.
+ #
+ # If the checksums don't match, a `460 Checksum Mismatch` response is
+ # returned.
+ #
  # ## Context
  #
  # The upload context will *not* contain `:record` and `:name` values, as
@@ -187,6 +199,8 @@ class Shrine
  error!(400, "Upload Not Found") unless file.is_a?(Hash) && file[:tempfile]
  error!(413, "Upload Too Large") if @max_size && file[:tempfile].size > @max_size

+ verify_checksum!(file[:tempfile], request.env["HTTP_CONTENT_MD5"]) if request.env["HTTP_CONTENT_MD5"]
+
  @shrine_class.rack_file(file)
  end

@@ -222,6 +236,14 @@ class Shrine
  end
  end

+ # Verifies the provided checksum against the received file.
+ def verify_checksum!(file, provided_checksum)
+ error!(400, "The Content-MD5 you specified was invalid") if provided_checksum.length != 24
+
+ calculated_checksum = Digest::MD5.file(file.path).base64digest
+ error!(460, "The Content-MD5 you specified did not match what was recieved") if provided_checksum != calculated_checksum
+ end
+
  # Used for early returning an error response.
  def error!(status, message)
  throw :halt, [status, {"Content-Type" => "text/plain"}, [message]]
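A hedged sketch of the client side of the checksum verification added above (the file name is illustrative): the header value is the base64-encoded MD5 digest of the file, which is always 24 characters long.

    require "digest"

    # compute the checksum before uploading
    checksum = Digest::MD5.file("photo.jpg").base64digest

    # then send it with the upload request as a raw header:
    #   Content-MD5: <checksum>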
@@ -105,21 +105,17 @@ class Shrine
  # example, you might want to split a PDf into pages:
  #
  # process(:store) do |io, context|
- # pdf = io.download
- # image = MiniMagick::Image.new(pdf.path)
- # versions = []
- #
- # image.pages.each_with_index do |page, index|
- # page_file = Tempfile.new("version-#{index}", binmode: true)
- # MiniMagick::Tool::Convert.new do |convert|
- # convert << page.path
- # convert << page_file.path
- # end
- # page_file.open # refresh updated file
- # versions << page_file
+ # pdf = io.download
+ # page_count = MiniMagick::Image.new(pdf.path).pages.count
+ # pipeline = ImageProcessing::MiniMagick.source(pdf).convert("jpg")
+ #
+ # pages = page_count.times.map do |page_number|
+ # pipeline.loader(page: page_number).call
  # end
  #
- # versions # array of pages
+ # pdf.close!
+ #
+ # { pages: pages } # array of pages
  # end
  #
  # You can also combine Hashes and Arrays, there is no limit to the level of
@@ -158,7 +154,7 @@ class Shrine
  #
  # If you want to re-create a single or all versions, refer to the [reprocessing versions] guide for details.
  #
- # [reprocessing versions]: http://shrinerb.com/rdoc/files/doc/regenerating_versions_md.html
+ # [reprocessing versions]: https://shrinerb.com/rdoc/files/doc/regenerating_versions_md.html
  # [image_processing]: https://github.com/janko-m/image_processing
  module Versions
  def self.load_dependencies(uploader, *)
@@ -53,6 +53,10 @@ class Shrine
  storage.upload(io_factory.call, id = "quux".dup)
  lint_clear(id)
  end
+
+ if storage.respond_to?(:presign)
+ lint_presign(id)
+ end
  end

  def lint_download(id)
@@ -101,6 +105,13 @@ class Shrine
  error :clear!, "file still #exists? after clearing" if storage.exists?(id)
  end

+ def lint_presign(id)
+ data = storage.presign(id, {})
+ error :presign, "result should be a Hash" unless data.respond_to?(:to_h)
+ error :presign, "result should include :method key" unless data.to_h.key?(:method)
+ error :presign, "result should include :url key" unless data.to_h.key?(:url)
+ end
+
  private

  attr_reader :storage
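A minimal sketch of exercising the new lint (the io factory below is illustrative and `storage` is any configured storage object): when the storage responds to `#presign`, the linter now also checks that the presign result is Hash-like with `:method` and `:url` keys.

    require "shrine/storage/linter"
    require "stringio"

    linter = Shrine::Storage::Linter.new(storage)
    linter.call(-> { StringIO.new("file content") }) # raises if any lint, including the presign one, fails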
@@ -28,7 +28,7 @@ class Shrine
  #
  # gem "aws-sdk-s3", "~> 1.2"
  #
- # It is initialized with the following 4 required options:
+ # It can be initialized by providing the bucket name and credentials:
  #
  # s3 = Shrine::Storage::S3.new(
  # access_key_id: "abc",
@@ -37,6 +37,11 @@ class Shrine
  # bucket: "my-app",
  # )
  #
+ # The core features of this storage requires the following AWS permissions:
+ # `s3:ListBucket`, `s3:PutObject`, `s3:GetObject`, and `s3:DeleteObject`.
+ # If you have additional upload options configured such as setting object
+ # ACLs, then additional permissions may be required.
+ #
  # The storage exposes the underlying Aws objects:
  #
  # s3.client #=> #<Aws::S3::Client>
@@ -56,14 +61,13 @@ class Shrine
  # cache and store:
  #
  # Shrine::Storage::S3.new(prefix: "cache", **s3_options)
- # Shrine::Storage::S3.new(prefix: "store", **s3_options)
  #
  # ## Upload options
  #
  # Sometimes you'll want to add additional upload options to all S3 uploads.
  # You can do that by passing the `:upload` option:
  #
- # Shrine::Storage::S3.new(upload_options: {acl: "private"}, **s3_options)
+ # Shrine::Storage::S3.new(upload_options: { acl: "private" }, **s3_options)
  #
  # These options will be passed to aws-sdk-s3's methods for [uploading],
  # [copying] and [presigning].
@@ -83,7 +87,7 @@ class Shrine
  #
  # or when using the uploader directly
  #
- # uploader.upload(file, upload_options: {acl: "private"})
+ # uploader.upload(file, upload_options: { acl: "private" })
  #
  # Note that, unlike the `:upload_options` storage option, upload options
  # given on the uploader level won't be forwarded for generating presigns,
@@ -145,15 +149,15 @@ class Shrine
  # multipart copy if they're larger than 150MB, but you can change the
  # thresholds via `:multipart_threshold`.
  #
- # thresholds = {upload: 30*1024*1024, copy: 200*1024*1024}
+ # thresholds = { upload: 30*1024*1024, copy: 200*1024*1024 }
  # Shrine::Storage::S3.new(multipart_threshold: thresholds, **s3_options)
  #
  # If you want to change how many threads aws-sdk-s3 will use for multipart
  # upload/copy, you can use the `upload_options` plugin to specify
  # `:thread_count`.
  #
- # plugin :upload_options, store: ->(io, context) do
- # {thread_count: 5}
+ # plugin :upload_options, store: -> (io, context) do
+ # { thread_count: 5 }
  # end
  #
  # ## Clearing cache
@@ -178,34 +182,37 @@ class Shrine

  attr_reader :client, :bucket, :prefix, :host, :upload_options

- # Initializes a storage for uploading to S3.
+ # Initializes a storage for uploading to S3. All options are forwarded to
+ # [`Aws::S3::Client#initialize`], except the following:
  #
- # :access_key_id
- # :secret_access_key
- # :region
  # :bucket
- # : Credentials required by the `aws-sdk-s3` gem.
+ # : (Required). Name of the S3 bucket.
  #
  # :prefix
- # : "Folder" name inside the bucket to store files into.
+ # : "Directory" inside the bucket to store files into.
  #
  # :upload_options
- # : Additional options that will be used for uploading files, they will
- # be passed to [`Aws::S3::Object#put`], [`Aws::S3::Object#copy_from`]
- # and [`Aws::S3::Bucket#presigned_post`].
+ # : Additional options that will be used for uploading files, they will
+ # be passed to [`Aws::S3::Object#put`], [`Aws::S3::Object#copy_from`]
+ # and [`Aws::S3::Bucket#presigned_post`].
  #
  # :multipart_threshold
- # : If the input file is larger than the specified size, a parallelized
- # multipart will be used for the upload/copy. Defaults to
- # `{upload: 15*1024*1024, copy: 100*1024*1024}` (15MB for upload
- # requests, 100MB for copy requests).
+ # : If the input file is larger than the specified size, a parallelized
+ # multipart will be used for the upload/copy. Defaults to
+ # `{upload: 15*1024*1024, copy: 100*1024*1024}` (15MB for upload
+ # requests, 100MB for copy requests).
  #
- # All other options are forwarded to [`Aws::S3::Client#initialize`].
+ # In addition to specifying the `:bucket`, you'll also need to provide
+ # AWS credentials. The most common way is to provide them directly via
+ # `:access_key_id`, `:secret_access_key`, and `:region` options. But you
+ # can also use any other way of authentication specified in the [AWS SDK
+ # documentation][configuring AWS SDK].
  #
  # [`Aws::S3::Object#put`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Object.html#put-instance_method
  # [`Aws::S3::Object#copy_from`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Object.html#copy_from-instance_method
  # [`Aws::S3::Bucket#presigned_post`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Object.html#presigned_post-instance_method
  # [`Aws::S3::Client#initialize`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Client.html#initialize-instance_method
+ # [configuring AWS SDK]: https://docs.aws.amazon.com/sdk-for-ruby/v3/developer-guide/setup-config.html
  def initialize(bucket:, prefix: nil, host: nil, upload_options: {}, multipart_threshold: {}, **s3_options)
  Shrine.deprecation("The :host option to Shrine::Storage::S3#initialize is deprecated and will be removed in Shrine 3. Pass :host to S3#url instead, you can also use default_url_options plugin.") if host
  resource = Aws::S3::Resource.new(**s3_options)
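A brief sketch of the two initialization styles described above (bucket name and credentials are placeholders):

    # explicit credentials
    Shrine::Storage::S3.new(
      bucket:            "my-app",
      access_key_id:     "abc",
      secret_access_key: "xyz",
      region:            "eu-west-1",
    )

    # or let the AWS SDK resolve credentials on its own (ENV variables,
    # shared credentials file, instance profile, ...)
    Shrine::Storage::S3.new(bucket: "my-app")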
@@ -322,23 +329,44 @@ class Shrine

  if host
  uri = URI.parse(url)
- uri.path = uri.path.match(/^\/#{bucket.name}/).post_match unless uri.host.include?(bucket.name)
+ uri.path = uri.path.match(/^\/#{bucket.name}/).post_match unless uri.host.include?(bucket.name) || client.config.force_path_style
  url = URI.join(host, uri.request_uri).to_s
  end

  url
  end

- # Returns a signature for direct uploads. Internally it calls
- # [`Aws::S3::Bucket#presigned_post`], and forwards any additional options
- # to it.
+ # Returns URL, params and headers for direct uploads. By default it
+ # generates data for a POST request, calling [`Aws::S3::Object#presigned_post`].
+ # You can also specify `method: :put` to generate data for a PUT request,
+ # using [`Aws::S3::Object#presigned_url`]. Any additional options are
+ # forwarded to the underlying AWS SDK method.
  #
- # [`Aws::S3::Bucket#presigned_post`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Bucket.html#presigned_post-instance_method
- def presign(id, **options)
+ # [`Aws::S3::Object#presigned_post`]: http://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Object.html#presigned_post-instance_method
+ # [`Aws::S3::Object#presigned_url`]: https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/S3/Object.html#presigned_url-instance_method
+ def presign(id, method: :post, **options)
  options = @upload_options.merge(options)
  options[:content_disposition] = encode_content_disposition(options[:content_disposition]) if options[:content_disposition]

- object(id).presigned_post(options)
+ if method == :post
+ presigned_post = object(id).presigned_post(options)
+
+ Struct.new(:method, :url, :fields).new(method, presigned_post.url, presigned_post.fields)
+ else
+ url = object(id).presigned_url(method, options)
+
+ # When any of these options are specified, the corresponding request
+ # headers must be included in the upload request.
+ headers = {}
+ headers["Content-Length"] = options[:content_length] if options[:content_length]
+ headers["Content-Type"] = options[:content_type] if options[:content_type]
+ headers["Content-Disposition"] = options[:content_disposition] if options[:content_disposition]
+ headers["Content-Encoding"] = options[:content_encoding] if options[:content_encoding]
+ headers["Content-Language"] = options[:content_language] if options[:content_language]
+ headers["Content-MD5"] = options[:content_md5] if options[:content_md5]
+
+ { method: method, url: url, headers: headers }
+ end
  end

  # Deletes the file from the storage.
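A hedged usage sketch of the two presign modes added above (bucket, key, and `s3_options` are placeholders):

    s3 = Shrine::Storage::S3.new(bucket: "my-app", **s3_options)

    # default POST presign: returns an object responding to #method, #url and #fields
    s3.presign("cache/image.jpg").fields #=> { "key" => "cache/image.jpg", "policy" => "...", ... }

    # PUT presign: returns a Hash with :method, :url and :headers
    data = s3.presign("cache/image.jpg", method: :put, content_type: "image/jpeg")
    data[:headers] #=> { "Content-Type" => "image/jpeg" }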
@@ -355,8 +383,7 @@ class Shrine
  def clear!(&block)
  objects_to_delete = Enumerator.new do |yielder|
  bucket.objects(prefix: prefix).each do |object|
- condition = block.call(object) if block
- yielder << object unless condition == false
+ yielder << object if block.nil? || block.call(object)
  end
  end