shrine 2.19.4 → 3.0.0.alpha
- checksums.yaml +4 -4
- data/CHANGELOG.md +299 -11
- data/README.md +9 -3
- data/doc/advantages.md +1 -1
- data/doc/carrierwave.md +4 -4
- data/doc/creating_persistence_plugins.md +172 -0
- data/doc/creating_plugins.md +1 -1
- data/doc/creating_storages.md +3 -1
- data/doc/design.md +2 -2
- data/doc/direct_s3.md +0 -22
- data/doc/paperclip.md +3 -3
- data/doc/plugins/activerecord.md +211 -42
- data/doc/plugins/atomic_helpers.md +153 -0
- data/doc/plugins/column.md +90 -0
- data/doc/plugins/derivation_endpoint.md +54 -62
- data/doc/plugins/derivatives.md +752 -0
- data/doc/plugins/entity.md +204 -0
- data/doc/plugins/infer_extension.md +8 -8
- data/doc/plugins/instrumentation.md +33 -13
- data/doc/plugins/keep_files.md +5 -15
- data/doc/plugins/model.md +157 -0
- data/doc/plugins/presign_endpoint.md +2 -1
- data/doc/plugins/refresh_metadata.md +44 -7
- data/doc/plugins/sequel.md +190 -33
- data/doc/plugins/{default_url_options.md → url_options.md} +5 -5
- data/doc/processing.md +1 -1
- data/doc/release_notes/1.1.0.md +2 -2
- data/doc/release_notes/2.15.0.md +1 -1
- data/doc/storage/s3.md +2 -2
- data/doc/testing.md +1 -1
- data/lib/shrine.rb +72 -138
- data/lib/shrine/attacher.rb +272 -176
- data/lib/shrine/attachment.rb +2 -42
- data/lib/shrine/plugins/activerecord.rb +103 -26
- data/lib/shrine/plugins/add_metadata.rb +9 -10
- data/lib/shrine/plugins/atomic_helpers.rb +111 -0
- data/lib/shrine/plugins/attacher_options.rb +55 -0
- data/lib/shrine/plugins/backgrounding.rb +147 -115
- data/lib/shrine/plugins/cached_attachment_data.rb +6 -9
- data/lib/shrine/plugins/column.rb +104 -0
- data/lib/shrine/plugins/data_uri.rb +35 -38
- data/lib/shrine/plugins/default_storage.rb +18 -12
- data/lib/shrine/plugins/default_url.rb +11 -21
- data/lib/shrine/plugins/default_url_options.rb +3 -30
- data/lib/shrine/plugins/delete_raw.rb +9 -13
- data/lib/shrine/plugins/derivation_endpoint.rb +75 -114
- data/lib/shrine/plugins/derivatives.rb +576 -0
- data/lib/shrine/plugins/determine_mime_type.rb +3 -15
- data/lib/shrine/plugins/download_endpoint.rb +83 -131
- data/lib/shrine/plugins/dynamic_storage.rb +4 -8
- data/lib/shrine/plugins/entity.rb +128 -0
- data/lib/shrine/plugins/form_assign.rb +107 -0
- data/lib/shrine/plugins/included.rb +4 -3
- data/lib/shrine/plugins/infer_extension.rb +10 -17
- data/lib/shrine/plugins/instrumentation.rb +45 -25
- data/lib/shrine/plugins/keep_files.rb +2 -12
- data/lib/shrine/plugins/metadata_attributes.rb +15 -14
- data/lib/shrine/plugins/model.rb +137 -0
- data/lib/shrine/plugins/module_include.rb +2 -0
- data/lib/shrine/plugins/presign_endpoint.rb +1 -15
- data/lib/shrine/plugins/pretty_location.rb +5 -5
- data/lib/shrine/plugins/processing.rb +21 -6
- data/lib/shrine/plugins/rack_file.rb +1 -39
- data/lib/shrine/plugins/rack_response.rb +14 -7
- data/lib/shrine/plugins/recache.rb +5 -2
- data/lib/shrine/plugins/refresh_metadata.rb +12 -8
- data/lib/shrine/plugins/remote_url.rb +44 -53
- data/lib/shrine/plugins/remove_attachment.rb +7 -2
- data/lib/shrine/plugins/remove_invalid.rb +8 -4
- data/lib/shrine/plugins/restore_cached_data.rb +12 -4
- data/lib/shrine/plugins/sequel.rb +115 -27
- data/lib/shrine/plugins/signature.rb +2 -7
- data/lib/shrine/plugins/store_dimensions.rb +13 -27
- data/lib/shrine/plugins/upload_endpoint.rb +14 -15
- data/lib/shrine/plugins/upload_options.rb +9 -8
- data/lib/shrine/plugins/url_options.rb +33 -0
- data/lib/shrine/plugins/validation.rb +87 -0
- data/lib/shrine/plugins/validation_helpers.rb +33 -54
- data/lib/shrine/plugins/versions.rb +106 -84
- data/lib/shrine/storage/file_system.rb +32 -57
- data/lib/shrine/storage/linter.rb +9 -1
- data/lib/shrine/storage/memory.rb +42 -0
- data/lib/shrine/storage/s3.rb +38 -146
- data/lib/shrine/uploaded_file.rb +22 -29
- data/lib/shrine/version.rb +4 -4
- data/shrine.gemspec +2 -3
- metadata +27 -54
- data/doc/plugins/backup.md +0 -31
- data/doc/plugins/copy.md +0 -24
- data/doc/plugins/delete_promoted.md +0 -12
- data/doc/plugins/direct_upload.md +0 -172
- data/doc/plugins/hooks.md +0 -58
- data/doc/plugins/logging.md +0 -42
- data/doc/plugins/migration_helpers.md +0 -60
- data/doc/plugins/moving.md +0 -19
- data/doc/plugins/multi_delete.md +0 -20
- data/doc/plugins/parallelize.md +0 -16
- data/doc/plugins/parsed_json.md +0 -23
- data/lib/shrine/plugins/background_helpers.rb +0 -5
- data/lib/shrine/plugins/backup.rb +0 -90
- data/lib/shrine/plugins/copy.rb +0 -50
- data/lib/shrine/plugins/delete_promoted.rb +0 -20
- data/lib/shrine/plugins/direct_upload.rb +0 -217
- data/lib/shrine/plugins/hooks.rb +0 -90
- data/lib/shrine/plugins/logging.rb +0 -142
- data/lib/shrine/plugins/migration_helpers.rb +0 -70
- data/lib/shrine/plugins/moving.rb +0 -57
- data/lib/shrine/plugins/multi_delete.rb +0 -32
- data/lib/shrine/plugins/parallelize.rb +0 -78
- data/lib/shrine/plugins/parsed_json.rb +0 -29
@@ -130,7 +130,8 @@ option:
 
 ```rb
 plugin :presign_endpoint, presign: -> (id, options, request) do
-  # return a Hash with :url, :fields, and :headers keys
+  # return a Hash with :method, :url, :fields, and :headers keys
+  Shrine.storages[:cache].presign(id, options)
 end
 ```
 
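For context, this is roughly how a custom presign block can satisfy the new contract — a sketch assuming the S3 cache storage (whose `#presign` returns a Hash here) and an illustrative extra header, neither of which comes from this diff:

```rb
class ImageUploader < Shrine
  plugin :presign_endpoint, presign: -> (id, options, request) do
    # start from the storage's own presign data (Hash with :method, :url, :fields, :headers)
    data = Shrine.storages[:cache].presign(id, options)
    # add an extra response header (header name is illustrative, not part of the diff)
    data.merge(headers: data[:headers].to_h.merge("X-Request-Id" => request.env["HTTP_X_REQUEST_ID"].to_s))
  end
end
```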
data/doc/plugins/refresh_metadata.md
CHANGED
@@ -7,27 +7,64 @@ metadata from an uploaded file.
 plugin :refresh_metadata
 ```
 
-It provides
-
-
+It provides `#refresh_metadata!` method, which triggers metadata extraction
+(calls `Shrine#extract_metadata`) with the uploaded file opened for reading,
+and updates the existing metadata hash with the results. This can be done
+on the attacher or the uploaded file level.
+
+## Attacher
+
+Calling `#refresh_metadata!` on a `Shrine::Attacher` object will re-extract
+metadata of the attached file. When used with a [model], it will write new file
+data back into the attachment attribute.
+
+```rb
+attacher.refresh_metadata!
+attacher.file.metadata # re-extracted metadata
+```
+
+The `Attacher#context` hash will be forwarded to metadata extraction, as well
+as any options that you pass in.
+
+```rb
+# via context
+attacher.context[:foo] = "bar"
+attacher.refresh_metadata! # passes `{ foo: "bar" }` options to metadata extraction
+
+# via arguments
+attacher.refresh_metadata!(foo: "bar") # passes `{ foo: "bar" }` options to metadata extraction
+```
+
+## Uploaded File
+
+The `#refresh_metadata!` method can be called on a `Shrine::UploadedFile` object
+as well.
 
 ```rb
 uploaded_file.refresh_metadata!
 uploaded_file.metadata # re-extracted metadata
 ```
 
-
-
-
+If the uploaded file is not open, it is opened before and closed after metadata
+extraction. For remote storage services this will make an HTTP request.
+However, only the portion of the file needed for extracting metadata will be
+downloaded.
 
 If the uploaded file is already open, it is passed to metadata extraction as
 is.
 
 ```rb
 uploaded_file.open do
-  uploaded_file.refresh_metadata!
+  uploaded_file.refresh_metadata! # uses the already opened file
   # ...
 end
 ```
 
+Any options passed in will be forwarded to metadata extraction:
+
+```rb
+uploaded_file.refresh_metadata!(foo: "bar") # passes `{ foo: "bar" }` options to metadata extraction
+```
+
 [refresh_metadata]: /lib/shrine/plugins/refresh_metadata.rb
+[model]: /doc/plugins/model.md#readme
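As a quick illustration, refreshing metadata on an already attached file might look like this — a sketch assuming a `Photo` model with an `image` attachment (stored in an `image_data` column) and the `model` plugin loaded; none of these names come from the diff:

```rb
photo    = Photo.first
attacher = photo.image_attacher

attacher.refresh_metadata!          # re-extracts metadata from the attached file
attacher.file.metadata["mime_type"] # refreshed value
photo.save                          # persists the updated image_data column
```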
data/doc/plugins/sequel.md
CHANGED
@@ -1,67 +1,224 @@
 # Sequel
 
-The [`sequel`][sequel] plugin
-
+The [`sequel`][sequel] plugin adds [Sequel] integration to the attachment
+interface. It is built on top of the [`model`][model] plugin.
 
 ```rb
 plugin :sequel
 ```
 
-##
+## Attachment
 
-
+When `Shrine::Attachment` module is included into a `Sequel::Model` subclass,
+additional [hooks] are added to tie the attachment process to the record
+lifecycle.
 
-
-
-
+```rb
+class Photo < Sequel::Model
+  include ImageUploader::Attachment(:image) # adds callbacks & validations
+end
+```
 
-
-`backgrounding` plugin.
+### Callbacks
 
-
-
+#### Save
+
+After a record is saved and the transaction is committed, `Attacher#finalize`
+is called, which promotes cached file to permanent storage and deletes previous
+file if any.
 
 ```rb
-
-  include ImageUploader::Attachment.new(:avatar)
+photo = Photo.new
 
-
-
+photo.image = file
+photo.image.storage_key #=> :cache
 
-
-
-    elsif changed_columns.include?(:avatar) && avatar_attacher.stored?
-      # promoted
-    end
-  end
-end
+photo.save
+photo.image.storage_key #=> :store
 ```
 
-
-
+#### Destroy
+
+After a record is destroyed and the transaction is committed,
+`Attacher#destroy_attached` method is called, which deletes stored attached
+file if any.
+
+```rb
+photo = Photo.find(photo_id)
+photo.image #=> #<Shrine::UploadedFile>
+photo.image.exists? #=> true
+
+photo.destroy
+photo.image.exists? #=> false
+```
+
+#### Skipping
+
+If you don't want the attachment module to add any callbacks to your Sequel
+model, you can set `:callbacks` to `false`:
 
 ```rb
 plugin :sequel, callbacks: false
 ```
 
-
+### Validations
+
+If you're using the [`validation`][validation] plugin, the attachment module
+will automatically merge attacher errors with model errors.
+
+```rb
+class ImageUploader < Shrine
+  plugin :validation_helpers
+
+  Attacher.validate do
+    validate_max_size 10 * 1024 * 1024
+  end
+end
+```
+```rb
+photo = Photo.new
+photo.image = file
+photo.valid?
+photo.errors #=> { image: ["size must not be greater than 10.0 MB"] }
+```
+
+#### Presence
 
-
-
-can do it directly on the model.
+If you want to validate presence of the attachment, you can use Sequel's
+presence validator:
 
 ```rb
-class
-  include ImageUploader::Attachment.new(:
-
+class Photo < Sequel::Model
+  include ImageUploader::Attachment.new(:image)
+
+  plugin :validation_helpers
+
+  def validate
+    super
+    validates_presence :image
+  end
 end
 ```
 
-
-
+#### Skipping
+
+If don't want the attachment module to merge file validations errors into
+model errors, you can set `:validations` to `false`:
 
 ```rb
 plugin :sequel, validations: false
 ```
 
+## Attacher
+
+This section will cover methods added to the `Shrine::Attacher` instance. If
+you're not familar with how to obtain it, see the [`model`][model] plugin docs.
+
+### Atomic promotion
+
+If you're promoting cached file to permanent storage
+[asynchronously][backgrounding], you might want to handle the possibility of
+the attachment changing during promotion. You can do that with
+`Attacher#atomic_promote`:
+
+```rb
+# in your controller
+attacher.attach_cached(io)
+attacher.cached? #=> true
+```
+```rb
+# in a background job
+attacher.atomic_promote # promotes cached file and persists
+attacher.stored? #=> true
+```
+
+After cached file is uploaded to permanent storage, the record is reloaded in
+order to check whether the attachment hasn't changed, and if it hasn't the
+attachment is persisted. If the attachment has changed,
+`Shrine::AttachmentChanged` exception is raised.
+
+Additional options are passed to `Attacher#promote`.
+
+#### Reloader & persister
+
+You can change how the record is reloaded or persisted during atomic promotion:
+
+```rb
+# reloader
+attacher.atomic_promote(reload: :lock)    # uses database locking (default)
+attacher.atomic_promote(reload: :fetch)   # reloads with no locking
+attacher.atomic_promote(reload: ->(&b){}) # custom reloader (see atomic_helpers plugin docs)
+attacher.atomic_promote(reload: false)    # skips reloading
+
+# persister
+attacher.atomic_promote(persist: :save) # persists stored file (default)
+attacher.atomic_promote(persist: ->{})  # custom persister (see atomic_helpers plugin docs)
+attacher.atomic_promote(persist: false) # skips persistence
+```
+
+For more details, see the [`atomic_helpers`][atomic_helpers] plugin docs.
+
+### Atomic persistence
+
+If you're updating something based on the attached file
+[asynchronously][backgrounding], you might want to handle the possibility of
+the attachment changing in the meanwhile. You can do that with
+`Attacher#atomic_persist`:
+
+```rb
+# in a background job
+attacher.refresh_metadata! # refresh_metadata plugin
+attacher.atomic_persist    # persists attachment data
+```
+
+The record is first reloaded in order to check whether the attachment hasn't
+changed, and if it hasn't the attachment is persisted. If the attachment has
+changed, `Shrine::AttachmentChanged` exception is raised.
+
+#### Reloader & persister
+
+You can change how the record is reloaded or persisted during atomic
+persistence:
+
+```rb
+# reloader
+attacher.atomic_persist(reload: :lock)       # uses database locking (default)
+attacher.atomic_persist(reload: :fetch)      # reloads with no locking
+attacher.atomic_persist(reload: ->(&b){...}) # custom reloader (see atomic_helpers plugin docs)
+attacher.atomic_persist(reload: false)       # skips reloading
+
+# persister
+attacher.atomic_persist(persist: :save)   # persists stored file (default)
+attacher.atomic_persist(persist: ->{...}) # custom persister (see atomic_helpers plugin docs)
+attacher.atomic_persist(persist: false)   # skips persistence
+```
+
+For more details, see the [`atomic_helpers`][atomic_helpers] plugin docs.
+
+### Persistence
+
+You can call `Attacher#persist` to save any changes to the underlying record:
+
+```rb
+attacher.attach(io)
+attacher.persist # saves the underlying record
+```
+
+### With other database plugins
+
+If you have another database plugin loaded together with the `sequel` plugin,
+you can prefix any method above with `sequel_*` to avoid naming clashes:
+
+```rb
+attacher.sequel_atomic_promote
+attacher.sequel_atomic_persist
+attacher.sequel_persist
+```
+
 [sequel]: /lib/shrine/plugins/sequel.rb
+[Sequel]: https://sequel.jeremyevans.net/
+[model]: /doc/plugins/model.md#readme
+[hooks]: http://sequel.jeremyevans.net/rdoc/files/doc/model_hooks_rdoc.html
+[validation]: /doc/plugins/validation.md#readme
+[atomic_helpers]: /doc/plugins/atomic_helpers.md#readme
+[backgrounding]: /doc/plugins/backgrounding.md#readme
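To show how atomic promotion is typically reached from the `backgrounding` plugin, here is a rough sketch of a promotion job for Sequel records — it assumes Sidekiq and a `PromoteJob` class of your own, with argument names chosen for illustration:

```rb
class PromoteJob
  include Sidekiq::Worker

  def perform(attacher_class, record_class, record_id, name, file_data)
    attacher_class = Object.const_get(attacher_class)
    record         = Object.const_get(record_class).with_pk!(record_id) # Sequel primary key lookup

    attacher = attacher_class.retrieve(model: record, name: name, file: file_data)
    attacher.atomic_promote
  rescue Shrine::AttachmentChanged, Sequel::NoMatchingRow
    # the attachment changed, or the record was deleted, before promotion finished
  end
end
```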
data/doc/plugins/{default_url_options.md → url_options.md}
CHANGED
@@ -1,11 +1,11 @@
-#
+# URL Options
 
-The [`
+The [`url_options`][url_options] plugin allows you to specify
 URL options that will be applied by default for uploaded files of specified
 storages.
 
 ```rb
-plugin :
+plugin :url_options, store: { expires_in: 24*60*60 }
 ```
 
 You can also generate the default URL options dynamically by using a block,
@@ -13,7 +13,7 @@ which will receive the UploadedFile object along with any options that were
 passed to `UploadedFile#url`.
 
 ```rb
-plugin :
+plugin :url_options, store: -> (io, options) do
   { response_content_disposition: ContentDisposition.attachment(io.original_filename) }
 end
 ```
@@ -22,4 +22,4 @@ In both cases the default options are merged with options passed to
 `UploadedFile#url`, and the latter will always have precedence over default
 options.
 
-[
+[url_options]: /lib/shrine/plugins/url_options.rb
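As a brief illustration of the precedence rule above, assuming an S3 storage whose URLs accept `:expires_in`:

```rb
uploaded_file.url                 # uses the plugin default (expires_in: 24*60*60)
uploaded_file.url(expires_in: 60) # the per-call option overrides the default
```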
data/doc/processing.md
CHANGED
@@ -230,7 +230,7 @@ Now you can generate thumbnail URLs from attached files, and the actual
 thumbnail will be generated when the URL is requested:
 
 ```rb
-photo.image.derivation_url(:thumbnail,
+photo.image.derivation_url(:thumbnail, 600, 400)
 #=> "/derivations/image/thumbnail/600/400/eyJpZCI6ImZvbyIsInN0b3JhZ2UiOiJzdG9yZSJ9?signature=..."
 ```
 
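For context, the `:thumbnail` derivation that such a URL resolves to could be declared roughly as follows — a sketch assuming the `derivation_endpoint` plugin and the image_processing gem, with illustrative names:

```rb
require "image_processing/mini_magick"

class ImageUploader < Shrine
  plugin :derivation_endpoint, prefix: "derivations/image"

  derivation :thumbnail do |file, width, height|
    ImageProcessing::MiniMagick
      .source(file)
      .resize_to_limit!(width.to_i, height.to_i)
  end
end
```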
data/doc/release_notes/1.1.0.md
CHANGED
@@ -39,11 +39,11 @@ plugin :keep_location, :cache => :store
 ```rb
 user = User.new
 user.avatar = image
-user.avatar.storage_key #=>
+user.avatar.storage_key #=> :cache
 user.avatar.id #=> "abc123.jpg"
 
 user.save
-user.avatar.storage_key #=>
+user.avatar.storage_key #=> :store
 user.avatar.id #=> "abc123.jpg"
 ```
 
data/doc/release_notes/2.15.0.md
CHANGED
@@ -10,7 +10,7 @@
 "derivation" blocks, passing any arguments you need for the processing.
 
 ```rb
-photo.image.derivation_url(:thumbnail,
+photo.image.derivation_url(:thumbnail, 600, 400)
 #=> "derivations/image/thumbnail/600/400/eyJpZCI6ImZvbyIsInN0b3JhZ2UiOiJzdG9yZSJ9?signature=..."
 ```
 
data/doc/storage/s3.md
CHANGED
@@ -131,10 +131,10 @@ s3.url("image.jpg", host: "https://your-s3-host.com/prefix/") # needs to end wit
 ```
 
 To have the `:host` option passed automatically for every URL, use the
-`
+`url_options` plugin.
 
 ```rb
-plugin :
+plugin :url_options, store: { host: "http://abc123.cloudfront.net" }
 ```
 
 If you would like to [serve private content via CloudFront], you need to sign
data/doc/testing.md
CHANGED
@@ -158,7 +158,7 @@ Regular routing tests in Rails use [Rack::Test], in which case you can create
 `Rack::Test::UploadedFile` objects and pass them as form parameters:
 
 ```rb
-post "/photos", photo: {image: Rack::Test::UploadedFile.new("test/files/image.jpg", "image/jpeg")}
+post "/photos", photo: { image: Rack::Test::UploadedFile.new("test/files/image.jpg", "image/jpeg") }
 ```
 
 ### Rack::TestApp
data/lib/shrine.rb
CHANGED
@@ -20,21 +20,13 @@ class Shrine
   # Raised when a file is not a valid IO.
   class InvalidFile < Error
     def initialize(io, missing_methods)
-      super "#{io.inspect} is not a valid IO object (it doesn't respond to
-      #{missing_methods.map{|m, _|"##{m}"}.join(", ")})"
+      super "#{io.inspect} is not a valid IO object (it doesn't respond to #{missing_methods.map{|m, _|"##{m}"}.join(", ")})"
     end
   end
 
-  #
-
-  IO_METHODS = {
-    read: [:length, :outbuf],
-    eof?: [],
-    rewind: [],
-    size: [],
-    close: [],
-  }
-  deprecate_constant(:IO_METHODS)
+  # Raised by the storage in the #open method.
+  class FileNotFound < Error
+  end
 
   @opts = {}
   @storages = {}
@@ -54,12 +46,7 @@ class Shrine
     # When inheriting Shrine, copy the instance variables into the subclass,
     # and create subclasses of core classes.
    def inherited(subclass)
-      subclass.instance_variable_set(:@opts, opts
-      subclass.opts.each do |key, value|
-        if value.is_a?(Enumerable) && !value.frozen?
-          subclass.opts[key] = value.dup
-        end
-      end
+      subclass.instance_variable_set(:@opts, deep_dup(opts))
       subclass.instance_variable_set(:@storages, storages.dup)
 
       file_class = Class.new(self::UploadedFile)
@@ -117,25 +104,26 @@ class Shrine
     # Uploads the file to the specified storage. It delegates to `Shrine#upload`.
     #
     #     Shrine.upload(io, :store) #=> #<Shrine::UploadedFile>
-    def upload(io, storage,
-      new(storage).upload(io,
+    def upload(io, storage, **options)
+      new(storage).upload(io, **options)
     end
 
     # Instantiates a Shrine::UploadedFile from a hash, and optionally
     # yields the returned object.
     #
-    #     data = {"storage" => "cache", "id" => "abc123.jpg", "metadata" => {}}
+    #     data = { "storage" => "cache", "id" => "abc123.jpg", "metadata" => {} }
     #     Shrine.uploaded_file(data) #=> #<Shrine::UploadedFile>
-    def uploaded_file(object
+    def uploaded_file(object)
       case object
       when String
-        uploaded_file(JSON.parse(object)
+        uploaded_file(JSON.parse(object))
       when Hash
-
+        object = JSON.parse(object.to_json) if object.keys.grep(Symbol).any? # deep stringify keys
+        self::UploadedFile.new(object)
       when self::UploadedFile
-        object
+        object
      else
-
+        fail ArgumentError, "cannot convert #{object.inspect} to a #{self}::UploadedFile"
      end
     end
 
@@ -169,6 +157,21 @@ class Shrine
     def deprecation(message)
       Shrine.logger.warn "SHRINE DEPRECATION WARNING: #{message}"
     end
+
+    private
+
+    # Deep duplicates a nested hash of options.
+    def deep_dup(collection)
+      duplicate_collection = collection.dup
+
+      if duplicate_collection.is_a?(Hash)
+        duplicate_collection.each do |key, value|
+          duplicate_collection[key] = deep_dup(value) if value.is_a?(Enumerable)
+        end
+      end
+
+      duplicate_collection
+    end
   end
 
   module InstanceMethods
@@ -182,16 +185,10 @@ class Shrine
     #
     #     Shrine.new(:store)
     def initialize(storage_key)
-      @storage
+      @storage = self.class.find_storage(storage_key)
       @storage_key = storage_key.to_sym
     end
 
-    # The class-level options hash. This should probably not be modified at
-    # the instance level.
-    def opts
-      self.class.opts
-    end
-
     # The main method for uploading files. Takes an IO-like object and an
     # optional context hash (used internally by Shrine::Attacher). It calls
     # user-defined #process, and afterwards it calls #store. The `io` is
@@ -201,55 +198,31 @@ class Shrine
     #     uploader.upload(io, metadata: { "foo" => "bar" })           # add metadata
     #     uploader.upload(io, location: "path/to/file")               # specify location
     #     uploader.upload(io, upload_options: { acl: "public-read" }) # add upload options
-    def upload(io,
-
-      store(io, context)
-    end
-
-    # User is expected to perform processing inside this method, and
-    # return the processed files. Returning nil signals that no proccessing
-    # has been done and that the original file should be used.
-    #
-    #     class ImageUploader < Shrine
-    #       def process(io, context)
-    #         # do processing and return processed files
-    #       end
-    #     end
-    def process(io, context = {})
-    end
+    def upload(io, **options)
+      _enforce_io(io)
 
-
-
-    # \#generate_location, but you can pass in `:location` to upload to
-    # a specific location.
-    #
-    #     uploader.store(io)
-    def store(io, context = {})
-      _store(io, context)
-    end
+      metadata = get_metadata(io, **options)
+      location = get_location(io, **options, metadata: metadata)
 
-
-    # storage of this uploader.
-    def uploaded?(uploaded_file)
-      uploaded_file.storage_key == storage_key.to_s
-    end
+      _upload(io, **options, location: location, metadata: metadata)
 
-
-
-
-
+      self.class::UploadedFile.new(
+        id: location,
+        storage: storage_key,
+        metadata: metadata,
+      )
     end
 
     # Generates a unique location for the uploaded file, preserving the
     # file extension. Can be overriden in uploaders for generating custom
     # location.
-    def generate_location(io,
-      basic_location(io, metadata:
+    def generate_location(io, metadata: {}, **options)
+      basic_location(io, metadata: metadata)
    end
 
     # Extracts filename, size and MIME type from the file, which is later
     # accessible through UploadedFile#metadata.
-    def extract_metadata(io,
+    def extract_metadata(io, **options)
       {
         "filename" => extract_filename(io),
         "size" => extract_size(io),
@@ -257,8 +230,21 @@ class Shrine
       }
     end
 
+    # The class-level options hash. This should probably not be modified at
+    # the instance level.
+    def opts
+      self.class.opts
+    end
+
     private
 
+    def _upload(io, location:, metadata:, upload_options: {}, close: true, delete: false, **)
+      storage.upload(io, location, shrine_metadata: metadata, **upload_options)
+    ensure
+      io.close if close
+      File.unlink(io.path) if delete && io.respond_to?(:path) && File.exist?(io.path)
+    end
+
     # Attempts to extract the appropriate filename from the IO object.
     def extract_filename(io)
       if io.respond_to?(:original_filename)
@@ -281,61 +267,6 @@ class Shrine
       io.size if io.respond_to?(:size)
     end
 
-    # It first asserts that `io` is a valid IO object. It then extracts
-    # metadata and generates the location, before calling the storage to
-    # upload the IO object, passing the extracted metadata and location.
-    # Finally it returns a Shrine::UploadedFile object which represents the
-    # file that was uploaded.
-    def _store(io, context)
-      _enforce_io(io)
-
-      metadata = get_metadata(io, context)
-      metadata = metadata.merge(context[:metadata]) if context[:metadata].is_a?(Hash)
-
-      location = get_location(io, context.merge(metadata: metadata))
-
-      put(io, context.merge(location: location, metadata: metadata))
-
-      self.class.uploaded_file(
-        "id" => location,
-        "storage" => storage_key.to_s,
-        "metadata" => metadata,
-      )
-    end
-
-    # Delegates to #remove.
-    def _delete(uploaded_file, context)
-      remove(uploaded_file, context)
-    end
-
-    # Delegates to #copy.
-    def put(io, context)
-      copy(io, context)
-    end
-
-    # Calls `#upload` on the storage, passing to it the location, metadata
-    # and any upload options. The storage might modify the location or
-    # metadata that were passed in. The uploaded IO is then closed.
-    def copy(io, context)
-      location = context[:location]
-      metadata = context[:metadata]
-      upload_options = context[:upload_options] || {}
-
-      storage.upload(io, location, shrine_metadata: metadata, **upload_options)
-    ensure
-      io.close rescue nil
-    end
-
-    # Delegates to `UploadedFile#delete`.
-    def remove(uploaded_file, context)
-      uploaded_file.delete
-    end
-
-    # Delegates to #process.
-    def processed(io, context)
-      process(io, context)
-    end
-
     # Generates a basic location for an uploaded file
     def basic_location(io, metadata:)
       extension = ".#{io.extension}" if io.is_a?(UploadedFile) && io.extension
@@ -345,23 +276,26 @@ class Shrine
       basename + extension
     end
 
-    # Retrieves the location for the given IO and context. First it looks
-    # for the `:location` option, otherwise it calls #generate_location.
-    def get_location(io, context)
-      location = context[:location] || generate_location(io, context)
-      location or raise Error, "location generated for #{io.inspect} was nil (context = #{context})"
-    end
-
     # If the IO object is a Shrine::UploadedFile, it simply copies over its
     # metadata, otherwise it calls #extract_metadata.
-    def get_metadata(io,
-      if io.is_a?(UploadedFile) &&
-        io.metadata.dup
-      elsif
-        extract_metadata(io,
+    def get_metadata(io, metadata: nil, **options)
+      if io.is_a?(UploadedFile) && metadata != true
+        result = io.metadata.dup
+      elsif metadata != false
+        result = extract_metadata(io, **options)
       else
-        {}
+        result = {}
      end
+
+      result = result.merge(metadata) if metadata.is_a?(Hash)
+      result
+    end
+
+    # Retrieves the location for the given IO and context. First it looks
+    # for the `:location` option, otherwise it calls #generate_location.
+    def get_location(io, location: nil, **options)
+      location ||= generate_location(io, options)
+      location or fail Error, "location generated for #{io.inspect} was nil"
     end
 
     # Asserts that the object is a valid IO object, specifically that it