activestorage_legacy 0.1
- checksums.yaml +7 -0
- data/.babelrc +5 -0
- data/.codeclimate.yml +7 -0
- data/.eslintrc +19 -0
- data/.github/workflows/gem-push.yml +29 -0
- data/.github/workflows/ruby-tests.yml +37 -0
- data/.gitignore +9 -0
- data/.rubocop.yml +125 -0
- data/.travis.yml +25 -0
- data/Gemfile +33 -0
- data/Gemfile.lock +271 -0
- data/MIT-LICENSE +20 -0
- data/README.md +160 -0
- data/Rakefile +12 -0
- data/activestorage.gemspec +27 -0
- data/app/assets/javascripts/activestorage.js +1 -0
- data/app/controllers/active_storage/blobs_controller.rb +22 -0
- data/app/controllers/active_storage/direct_uploads_controller.rb +21 -0
- data/app/controllers/active_storage/disk_controller.rb +52 -0
- data/app/controllers/active_storage/variants_controller.rb +28 -0
- data/app/helpers/active_storage/file_field_with_direct_upload_helper.rb +18 -0
- data/app/javascript/activestorage/blob_record.js +54 -0
- data/app/javascript/activestorage/blob_upload.js +34 -0
- data/app/javascript/activestorage/direct_upload.js +42 -0
- data/app/javascript/activestorage/direct_upload_controller.js +67 -0
- data/app/javascript/activestorage/direct_uploads_controller.js +50 -0
- data/app/javascript/activestorage/file_checksum.js +53 -0
- data/app/javascript/activestorage/helpers.js +42 -0
- data/app/javascript/activestorage/index.js +11 -0
- data/app/javascript/activestorage/ujs.js +74 -0
- data/app/jobs/active_storage/purge_attachment_worker.rb +9 -0
- data/app/jobs/active_storage/purge_blob_worker.rb +9 -0
- data/app/models/active_storage/attachment.rb +33 -0
- data/app/models/active_storage/blob.rb +198 -0
- data/app/models/active_storage/filename.rb +49 -0
- data/app/models/active_storage/variant.rb +82 -0
- data/app/models/active_storage/variation.rb +53 -0
- data/config/routes.rb +9 -0
- data/config/storage_services.yml +34 -0
- data/lib/active_storage/attached/macros.rb +86 -0
- data/lib/active_storage/attached/many.rb +51 -0
- data/lib/active_storage/attached/one.rb +56 -0
- data/lib/active_storage/attached.rb +38 -0
- data/lib/active_storage/engine.rb +81 -0
- data/lib/active_storage/gem_version.rb +15 -0
- data/lib/active_storage/log_subscriber.rb +48 -0
- data/lib/active_storage/messages_metadata.rb +64 -0
- data/lib/active_storage/migration.rb +27 -0
- data/lib/active_storage/patches/active_record.rb +19 -0
- data/lib/active_storage/patches/delegation.rb +98 -0
- data/lib/active_storage/patches/secure_random.rb +26 -0
- data/lib/active_storage/patches.rb +4 -0
- data/lib/active_storage/service/azure_service.rb +115 -0
- data/lib/active_storage/service/configurator.rb +28 -0
- data/lib/active_storage/service/disk_service.rb +124 -0
- data/lib/active_storage/service/gcs_service.rb +79 -0
- data/lib/active_storage/service/mirror_service.rb +46 -0
- data/lib/active_storage/service/s3_service.rb +96 -0
- data/lib/active_storage/service.rb +113 -0
- data/lib/active_storage/verifier.rb +113 -0
- data/lib/active_storage/version.rb +8 -0
- data/lib/active_storage.rb +34 -0
- data/lib/tasks/activestorage.rake +20 -0
- data/package.json +33 -0
- data/test/controllers/direct_uploads_controller_test.rb +123 -0
- data/test/controllers/disk_controller_test.rb +57 -0
- data/test/controllers/variants_controller_test.rb +21 -0
- data/test/database/create_users_migration.rb +7 -0
- data/test/database/setup.rb +6 -0
- data/test/dummy/Rakefile +3 -0
- data/test/dummy/app/assets/config/manifest.js +5 -0
- data/test/dummy/app/assets/images/.keep +0 -0
- data/test/dummy/app/assets/javascripts/application.js +13 -0
- data/test/dummy/app/assets/stylesheets/application.css +15 -0
- data/test/dummy/app/controllers/application_controller.rb +3 -0
- data/test/dummy/app/controllers/concerns/.keep +0 -0
- data/test/dummy/app/helpers/application_helper.rb +2 -0
- data/test/dummy/app/jobs/application_job.rb +2 -0
- data/test/dummy/app/models/application_record.rb +3 -0
- data/test/dummy/app/models/concerns/.keep +0 -0
- data/test/dummy/app/views/layouts/application.html.erb +14 -0
- data/test/dummy/bin/bundle +3 -0
- data/test/dummy/bin/rails +4 -0
- data/test/dummy/bin/rake +4 -0
- data/test/dummy/bin/yarn +11 -0
- data/test/dummy/config/application.rb +22 -0
- data/test/dummy/config/boot.rb +5 -0
- data/test/dummy/config/database.yml +25 -0
- data/test/dummy/config/environment.rb +5 -0
- data/test/dummy/config/environments/development.rb +49 -0
- data/test/dummy/config/environments/production.rb +82 -0
- data/test/dummy/config/environments/test.rb +33 -0
- data/test/dummy/config/initializers/application_controller_renderer.rb +6 -0
- data/test/dummy/config/initializers/assets.rb +14 -0
- data/test/dummy/config/initializers/backtrace_silencers.rb +7 -0
- data/test/dummy/config/initializers/cookies_serializer.rb +5 -0
- data/test/dummy/config/initializers/filter_parameter_logging.rb +4 -0
- data/test/dummy/config/initializers/inflections.rb +16 -0
- data/test/dummy/config/initializers/mime_types.rb +4 -0
- data/test/dummy/config/initializers/secret_key.rb +3 -0
- data/test/dummy/config/initializers/wrap_parameters.rb +14 -0
- data/test/dummy/config/routes.rb +2 -0
- data/test/dummy/config/secrets.yml +32 -0
- data/test/dummy/config/spring.rb +6 -0
- data/test/dummy/config/storage_services.yml +3 -0
- data/test/dummy/config.ru +5 -0
- data/test/dummy/db/.keep +0 -0
- data/test/dummy/lib/assets/.keep +0 -0
- data/test/dummy/log/.keep +0 -0
- data/test/dummy/package.json +5 -0
- data/test/dummy/public/404.html +67 -0
- data/test/dummy/public/422.html +67 -0
- data/test/dummy/public/500.html +66 -0
- data/test/dummy/public/apple-touch-icon-precomposed.png +0 -0
- data/test/dummy/public/apple-touch-icon.png +0 -0
- data/test/dummy/public/favicon.ico +0 -0
- data/test/filename_test.rb +36 -0
- data/test/fixtures/files/racecar.jpg +0 -0
- data/test/models/attachments_test.rb +122 -0
- data/test/models/blob_test.rb +47 -0
- data/test/models/variant_test.rb +27 -0
- data/test/service/.gitignore +1 -0
- data/test/service/azure_service_test.rb +14 -0
- data/test/service/configurations-example.yml +31 -0
- data/test/service/configurator_test.rb +14 -0
- data/test/service/disk_service_test.rb +12 -0
- data/test/service/gcs_service_test.rb +42 -0
- data/test/service/mirror_service_test.rb +62 -0
- data/test/service/s3_service_test.rb +52 -0
- data/test/service/shared_service_tests.rb +66 -0
- data/test/sidekiq/minitest_support.rb +6 -0
- data/test/support/assertions.rb +20 -0
- data/test/test_helper.rb +69 -0
- data/webpack.config.js +27 -0
- data/yarn.lock +3164 -0
- metadata +330 -0

data/app/javascript/activestorage/file_checksum.js
ADDED
@@ -0,0 +1,53 @@
import SparkMD5 from "spark-md5"

const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice

export class FileChecksum {
  static create(file, callback) {
    const instance = new FileChecksum(file)
    instance.create(callback)
  }

  constructor(file) {
    this.file = file
    this.chunkSize = 2097152 // 2MB
    this.chunkCount = Math.ceil(this.file.size / this.chunkSize)
    this.chunkIndex = 0
  }

  create(callback) {
    this.callback = callback
    this.md5Buffer = new SparkMD5.ArrayBuffer
    this.fileReader = new FileReader
    this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event))
    this.fileReader.addEventListener("error", event => this.fileReaderDidError(event))
    this.readNextChunk()
  }

  fileReaderDidLoad(event) {
    this.md5Buffer.append(event.target.result)

    if (!this.readNextChunk()) {
      const binaryDigest = this.md5Buffer.end(true)
      const base64digest = btoa(binaryDigest)
      this.callback(null, base64digest)
    }
  }

  fileReaderDidError(event) {
    this.callback(`Error reading ${this.file.name}`)
  }

  readNextChunk() {
    if (this.chunkIndex < this.chunkCount) {
      const start = this.chunkIndex * this.chunkSize
      const end = Math.min(start + this.chunkSize, this.file.size)
      const bytes = fileSlice.call(this.file, start, end)
      this.fileReader.readAsArrayBuffer(bytes)
      this.chunkIndex++
      return true
    } else {
      return false
    }
  }
}
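
The base64-encoded MD5 digest computed here in the browser is the value the storage service later checks for transit integrity; the server side produces the same value in ActiveStorage::Blob#upload via compute_checksum_in_chunks. A minimal Ruby sketch of that equivalent server-side computation (the chunk size mirrors blob.rb; the file path is illustrative):

require "digest/md5"

# Compute the same base64-encoded MD5 digest that FileChecksum produces client-side,
# reading the file in chunks so large files never sit fully in memory.
def base64_md5_checksum(path, chunk_size: 5 * 1024 * 1024)
  digest = Digest::MD5.new
  File.open(path, "rb") do |io|
    while (chunk = io.read(chunk_size))
      digest << chunk
    end
  end
  digest.base64digest # => 24-character base64 string (16-byte MD5 digest)
end
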

data/app/javascript/activestorage/helpers.js
ADDED
@@ -0,0 +1,42 @@
export function getMetaValue(name) {
  const element = findElement(document.head, `meta[name="${name}"]`)
  if (element) {
    return element.getAttribute("content")
  }
}

export function findElements(root, selector) {
  if (typeof root == "string") {
    selector = root
    root = document
  }
  const elements = root.querySelectorAll(selector)
  return toArray(elements)
}

export function findElement(root, selector) {
  if (typeof root == "string") {
    selector = root
    root = document
  }
  return root.querySelector(selector)
}

export function dispatchEvent(element, type, eventInit = {}) {
  const { bubbles, cancelable, detail } = eventInit
  const event = document.createEvent("Event")
  event.initEvent(type, bubbles || true, cancelable || true)
  event.detail = detail || {}
  element.dispatchEvent(event)
  return event
}

export function toArray(value) {
  if (Array.isArray(value)) {
    return value
  } else if (Array.from) {
    return Array.from(value)
  } else {
    return [].slice.call(value)
  }
}

data/app/javascript/activestorage/ujs.js
ADDED
@@ -0,0 +1,74 @@
import { DirectUploadsController } from "./direct_uploads_controller"
import { findElement } from "./helpers"

const processingAttribute = "data-direct-uploads-processing"
let started = false

export function start() {
  if (!started) {
    started = true
    document.addEventListener("submit", didSubmitForm)
    document.addEventListener("ajax:before", didSubmitRemoteElement)
  }
}

function didSubmitForm(event) {
  handleFormSubmissionEvent(event)
}

function didSubmitRemoteElement(event) {
  if (event.target.tagName == "FORM") {
    handleFormSubmissionEvent(event)
  }
}

function handleFormSubmissionEvent(event) {
  const form = event.target

  if (form.hasAttribute(processingAttribute)) {
    event.preventDefault()
    return
  }

  const controller = new DirectUploadsController(form)
  const { inputs } = controller

  if (inputs.length) {
    event.preventDefault()
    form.setAttribute(processingAttribute, "")
    inputs.forEach(disable)
    controller.start(error => {
      form.removeAttribute(processingAttribute)
      if (error) {
        inputs.forEach(enable)
      } else {
        submitForm(form)
      }
    })
  }
}

function submitForm(form) {
  let button = findElement(form, "input[type=submit]")
  if (button) {
    const { disabled } = button
    button.disabled = false
    button.click()
    button.disabled = disabled
  } else {
    button = document.createElement("input")
    button.type = "submit"
    button.style = "display:none"
    form.appendChild(button)
    button.click()
    form.removeChild(button)
  }
}

function disable(input) {
  input.disabled = true
}

function enable(input) {
  input.disabled = false
}

data/app/models/active_storage/attachment.rb
ADDED
@@ -0,0 +1,33 @@
require "active_storage/blob"
require "active_storage/patches/delegation"
require "strong_parameters"

# Attachments associate records with blobs. Usually that's a one record-many blobs relationship,
# but it is possible to associate many different records with the same blob. If you're doing that,
# you'll want to declare with `has_one/many_attached :thingy, dependent: false`, so that destroying
# any one record won't destroy the blob as well. (Then you'll need to do your own garbage collecting, though.)
class ActiveStorage::Attachment < ActiveRecord::Base
  self.table_name = "active_storage_attachments"

  attr_protected
  include ActiveModel::ForbiddenAttributesProtection

  belongs_to :record, polymorphic: true
  belongs_to :blob, class_name: "ActiveStorage::Blob"

  delegate_missing_to :blob

  # Purging an attachment will purge the blob (delete the file on the service, then destroy the record)
  # and then destroy the attachment itself.
  def purge
    blob.purge
    destroy
  end

  # Purging an attachment means purging the blob, which means talking to the service, which means
  # talking over the internet. Whenever you're doing that, it's a good idea to put that work in a job,
  # so it doesn't hold up other operations. That's what #purge_later provides.
  def purge_later
    ActiveStorage::PurgeAttachmentWorker.perform_async(self.id)
  end
end
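
A short sketch of how an attachment is typically created and purged, assuming the `has_one_attached` macro from lib/active_storage/attached/macros.rb and Attached::One#attach behave like their upstream Rails counterparts (the User model and :avatar name are illustrative):

class User < ActiveRecord::Base
  has_one_attached :avatar
end

user = User.first
user.avatar.attach(io: File.open("racecar.jpg"), filename: "racecar.jpg", content_type: "image/jpeg")

# Removing the attachment talks to the storage service, so prefer the async form
# outside of a console session; per attachment.rb above it enqueues
# ActiveStorage::PurgeAttachmentWorker, which calls #purge.
user.avatar.purge_later
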

data/app/models/active_storage/blob.rb
ADDED
@@ -0,0 +1,198 @@
require "active_storage/service"
require "active_storage/filename"
require "active_storage/purge_blob_worker"
require "active_storage/purge_attachment_worker"
require "active_storage/variant"
require "active_storage/variation"
require "strong_parameters"

# A blob is a record that contains the metadata about a file and a key for where that file resides on the service.
# Blobs can be created in two ways:
#
# 1) Subsequent to the file being uploaded server-side to the service via #create_after_upload!
# 2) Ahead of the file being directly uploaded client-side to the service via #create_before_direct_upload!
#
# The first option doesn't require any client-side JavaScript integration, and can be used by any other back-end
# service that deals with files. The second option is faster, since you're not using your own server as a staging
# point for uploads, and can work with deployments like Heroku that do not provide large amounts of disk space.
#
# Blobs are intended to be immutable insofar as their reference to a specific file goes. You're allowed to
# update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file.
# If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old one.
class ActiveStorage::Blob < ActiveRecord::Base
  self.table_name = "active_storage_blobs"

  attr_protected
  include ActiveModel::ForbiddenAttributesProtection

  has_secure_token :key
  store :metadata, coder: JSON

  class_attribute :service

  class << self
    # You can use the signed id of a blob to refer to it on the client side without fear of tampering.
    # This is particularly helpful for direct uploads where the client side needs to refer to the blob
    # that was created ahead of the upload itself on form submission.
    #
    # The signed id is also used to create stable URLs for the blob through the BlobsController.
    def find_signed(id)
      find ActiveStorage.verifier.verify(id, purpose: :blob_id)
    end

    # Returns a new, unsaved blob instance after the `io` has been uploaded to the service.
    def build_after_upload(io:, filename:, content_type: nil, metadata: nil)
      new.tap do |blob|
        blob.filename = filename
        blob.content_type = content_type
        blob.metadata = metadata

        blob.upload io
      end
    end

    # Returns a saved blob instance after the `io` has been uploaded to the service. Note, the blob is first built,
    # then the `io` is uploaded, then the blob is saved. This is done to avoid opening a transaction and talking to
    # the service inside it (which is a bad idea and leads to deadlocks).
    def create_after_upload!(io:, filename:, content_type: nil, metadata: nil)
      build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
    end

    # Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is
    # no file yet. It's intended to be used together with a client-side upload, which will first create the blob
    # in order to produce the signed URL for uploading. This signed URL points to the key generated by the blob.
    # Once the form using the direct upload is submitted, the blob can be associated with the right record using
    # the signed ID.
    def create_before_direct_upload!(filename:, byte_size:, checksum:, content_type: nil, metadata: nil)
      create! filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type, metadata: metadata
    end
  end

  # Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
  # It uses the framework-wide verifier on `ActiveStorage.verifier`, but with a dedicated purpose.
  def signed_id
    ActiveStorage.verifier.generate(id, purpose: :blob_id)
  end

  # Returns the key pointing to the file on the service that's associated with this blob. The key is in the
  # standard secure-token format from Rails. So it'll look like: XTAPjJCJiuDrLk3TmwyJGpUo. This key is not intended
  # to be revealed directly to the user. Always refer to blobs using the signed_id or a verified form of the key.
  def key
    # We can't wait until the record is first saved to have a key for it
    self[:key] ||= self.class.generate_unique_secure_token
  end

  # Returns an `ActiveStorage::Filename` instance of the filename that can be queried for basename, extension, and
  # a sanitized version of the filename that's safe to use in URLs.
  def filename
    ActiveStorage::Filename.new(self[:filename])
  end

  # Returns true if the content_type of this blob is in the image range, like image/png.
  def image?() content_type.start_with?('image') end

  # Returns true if the content_type of this blob is in the audio range, like audio/mpeg.
  def audio?() content_type.start_with?('audio') end

  # Returns true if the content_type of this blob is in the video range, like video/mp4.
  def video?() content_type.start_with?('video') end

  # Returns true if the content_type of this blob is in the text range, like text/plain.
  def text?() content_type.start_with?('text') end

  # Returns an `ActiveStorage::Variant` instance with the set of `transformations` passed in. This is only relevant
  # for image files, and it allows any image to be transformed for size, colors, and the like. Example:
  #
  #   avatar.variant(resize: "100x100").processed.service_url
  #
  # This will create and process a variant of the avatar blob that's constrained to a height and width of 100.
  # Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
  #
  # Frequently, though, you don't actually want to transform the variant right away, but rather simply refer to a
  # specific variant that can be created by a controller on-demand. Like so:
  #
  #   <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %>
  #
  # This will create a URL for that specific blob with that specific variant, which the `ActiveStorage::VariantsController`
  # can then produce on-demand.
  def variant(transformations)
    ActiveStorage::Variant.new(self, ActiveStorage::Variation.new(transformations))
  end

  # Returns the URL of the blob on the service. This URL is intended to be short-lived for security and not used directly
  # with users. Instead, the `service_url` should only be exposed as a redirect from a stable, possibly authenticated URL.
  # Hiding the `service_url` behind a redirect also gives you the power to change services without updating all URLs. And
  # it allows permanent URLs that redirect to the `service_url` to be cached in the view.
  def service_url(expires_in: 5.minutes, disposition: :inline)
    service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type
  end

  # Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be
  # short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading.
  def service_url_for_direct_upload(expires_in: 5.minutes)
    service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum
  end

  # Returns a Hash of headers for `service_url_for_direct_upload` requests.
  def service_headers_for_direct_upload
    service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum
  end

  # Uploads the `io` to the service on the `key` for this blob. Blobs are intended to be immutable, so you shouldn't be
  # using this method after a file has already been uploaded to fit with a blob. If you want to create a derivative blob,
  # you should instead simply create a new blob based on the old one.
  #
  # Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the
  # checksum does not match what the service receives, an exception will be raised. We also measure the size of the `io`
  # and store that in `byte_size` on the blob record.
  #
  # Normally, you do not have to call this method directly at all. Use the factory class methods of `build_after_upload`
  # and `create_after_upload!` instead.
  def upload(io)
    self.checksum = compute_checksum_in_chunks(io)
    self.byte_size = io.size

    service.upload(key, io, checksum: checksum)
  end

  # Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
  # That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks.
  def download(&block)
    service.download key, &block
  end

  # Deletes the file on the service that's associated with this blob. This should only be done if the blob is going to be
  # deleted as well or you will essentially have a dead reference. It's recommended to use the `#purge` and `#purge_later`
  # methods in most circumstances.
  def delete
    service.delete key
  end

  # Deletes the file on the service and then destroys the blob record. This is the recommended way to dispose of unwanted
  # blobs. Note, though, that deleting the file off the service will initiate an HTTP connection to the service, which may
  # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use `#purge_later` instead.
  def purge
    delete
    destroy
  end

  # Enqueues an `ActiveStorage::PurgeBlobWorker` job that'll call `#purge`. This is the recommended way to purge blobs when
  # the call needs to be made from a transaction, a callback, or any other real-time scenario.
  def purge_later
    ActiveStorage::PurgeBlobWorker.perform_async(self.id)
  end

  private
    def compute_checksum_in_chunks(io)
      Digest::MD5.new.tap do |checksum|
        while chunk = io.read(5.megabytes)
          checksum << chunk
        end

        io.rewind
      end.base64digest
    end
end
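
A brief sketch of the two creation paths described in the comments above, assuming a configured service (the filename and variable names are illustrative):

# Server-side upload: build, upload to the service, then save -- no transaction
# is held open while talking to the service.
blob = ActiveStorage::Blob.create_after_upload!(
  io: File.open("racecar.jpg"),
  filename: "racecar.jpg",
  content_type: "image/jpeg"
)

# The signed id is safe to hand to the client; it round-trips through the verifier.
sid = blob.signed_id
ActiveStorage::Blob.find_signed(sid) == blob # => true

# Direct upload: create the metadata record first, then let the client PUT the file
# to blob.service_url_for_direct_upload using blob.service_headers_for_direct_upload.
blob = ActiveStorage::Blob.create_before_direct_upload!(
  filename: "racecar.jpg", byte_size: 1_124_062, checksum: "base64-md5-from-the-client",
  content_type: "image/jpeg"
)
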

data/app/models/active_storage/filename.rb
ADDED
@@ -0,0 +1,49 @@
# Encapsulates a string representing a filename to provide convenience access to parts of it and a sanitized version.
# This is what's returned by `ActiveStorage::Blob#filename`. A Filename instance is comparable so it can be used for sorting.
class ActiveStorage::Filename
  include Comparable

  def initialize(filename)
    @filename = filename
  end

  # Filename.new("racecar.jpg").extname # => ".jpg"
  def extname
    File.extname(@filename)
  end

  # Filename.new("racecar.jpg").extension # => "jpg"
  def extension
    extname.from(1)
  end

  # Filename.new("racecar.jpg").base # => "racecar"
  def base
    File.basename(@filename, extname)
  end

  # Filename.new("foo:bar.jpg").sanitized # => "foo-bar.jpg"
  # Filename.new("foo/bar.jpg").sanitized # => "foo-bar.jpg"
  #
  # ...and any other character unsafe for URLs or storage is converted or stripped.
  def sanitized
    @filename.encode(Encoding::UTF_8, invalid: :replace, undef: :replace, replace: "�").strip.tr("\u{202E}%$|:;/\t\r\n\\", "-")
  end

  # Returns the sanitized version of the filename.
  def to_s
    sanitized.to_s
  end

  def as_json(*)
    to_s
  end

  def to_json
    to_s
  end

  def <=>(other)
    to_s.downcase <=> other.to_s.downcase
  end
end
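
Because Comparable is included and #<=> compares the downcased sanitized strings, filenames sort case-insensitively; a small sketch (filenames are illustrative):

names = ["Zebra.png", "apple.jpg", "Mango.gif"].map { |f| ActiveStorage::Filename.new(f) }
names.sort.map(&:to_s) # => ["apple.jpg", "Mango.gif", "Zebra.png"]
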

data/app/models/active_storage/variant.rb
ADDED
@@ -0,0 +1,82 @@
require "active_storage/blob"

# Image blobs can have variants that are the result of a set of transformations applied to the original.
# These variants are used to create thumbnails, fixed-size avatars, or any other derivative image from the
# original.
#
# Variants rely on `MiniMagick` for the actual transformations of the file, so you must add `gem "mini_magick"`
# to your Gemfile if you wish to use variants.
#
# Note that to create a variant it's necessary to download the entire blob file from the service and load it
# into memory. The larger the image, the more memory is used. Because of this process, you also want to be
# considerate about when the variant is actually processed. You shouldn't be processing variants inline in a
# template, for example. Delay the processing to an on-demand controller, like the one provided in
# `ActiveStorage::VariantsController`.
#
# To refer to such a delayed on-demand variant, simply link to the variant through the resolved route provided
# by Active Storage like so:
#
#   <%= image_tag url_for(Current.user.avatar.variant(resize: "100x100")) %>
#
# This will create a URL for that specific blob with that specific variant, which the `ActiveStorage::VariantsController`
# can then produce on-demand.
#
# When you do want to actually produce the variant needed, call `#processed`. This will check that the variant
# has already been processed and uploaded to the service, and, if so, just return that. Otherwise it will perform
# the transformations, upload the variant to the service, and return itself again. Example:
#
#   avatar.variant(resize: "100x100").processed.service_url
#
# This will create and process a variant of the avatar blob that's constrained to a height and width of 100.
# Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
#
# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php. You can
# combine as many as you like freely:
#
#   avatar.variant(resize: "100x100", monochrome: true, flip: "-90")
class ActiveStorage::Variant
  attr_reader :blob, :variation
  delegate :service, to: :blob

  def initialize(blob, variation)
    @blob, @variation = blob, variation
  end

  # Returns the variant instance itself after it's been processed, or immediately if an already-processed
  # variant is found on the service.
  def processed
    process unless processed?
    self
  end

  # Returns a combination key of the blob and the variation that together identifies a specific variant.
  def key
    "variants/#{blob.key}/#{variation.key}"
  end

  # Returns the URL of the variant on the service. This URL is intended to be short-lived for security and not used directly
  # with users. Instead, the `service_url` should only be exposed as a redirect from a stable, possibly authenticated URL.
  # Hiding the `service_url` behind a redirect also gives you the power to change services without updating all URLs. And
  # it allows permanent URLs that redirect to the `service_url` to be cached in the view.
  #
  # Use `url_for(variant)` (or the implied form, like `link_to variant` or `redirect_to variant`) to get the stable URL
  # for a variant that points to the `ActiveStorage::VariantsController`, which in turn will use this `#service_url` method
  # for its redirection.
  def service_url(expires_in: 5.minutes, disposition: :inline)
    service.url key, expires_in: expires_in, disposition: disposition, filename: blob.filename, content_type: blob.content_type
  end

  private
    def processed?
      service.exist?(key)
    end

    def process
      service.upload key, transform(service.download(blob.key))
    end

    def transform(io)
      require "mini_magick"
      File.open MiniMagick::Image.read(io).tap { |image| variation.transform(image) }.path
    end
end

data/app/models/active_storage/variation.rb
ADDED
@@ -0,0 +1,53 @@
require "active_support/core_ext/object/inclusion"

# A set of transformations that can be applied to a blob to create a variant. This class is exposed via
# the `ActiveStorage::Blob#variant` method and should rarely be used directly.
#
# In case you do need to use this directly, it's instantiated using a hash of transformations where
# the key is the command and the value is the arguments. Example:
#
#   ActiveStorage::Variation.new(resize: "100x100", monochrome: true, trim: true, rotate: "-90")
#
# A list of all possible transformations is available at https://www.imagemagick.org/script/mogrify.php.
class ActiveStorage::Variation
  attr_reader :transformations

  class << self
    # Returns a variation instance with the transformations that were encoded by `#encode`.
    def decode(key)
      new ActiveStorage.verifier.verify(key, purpose: :variation)
    end

    # Returns a signed key for the `transformations`, which can be used to refer to a specific
    # variation in a URL or combined key (like `ActiveStorage::Variant#key`).
    def encode(transformations)
      ActiveStorage.verifier.generate(transformations, purpose: :variation)
    end
  end

  def initialize(transformations)
    @transformations = transformations
  end

  # Accepts an open MiniMagick image instance, like what's returned by `MiniMagick::Image.read(io)`,
  # and performs the `transformations` against it. The transformed image instance is then returned.
  def transform(image)
    transformations.each do |(method, argument)|
      if eligible_argument?(argument)
        image.public_send(method, argument)
      else
        image.public_send(method)
      end
    end
  end

  # Returns a signed key for all the `transformations` that this variation was instantiated with.
  def key
    self.class.encode(transformations)
  end

  private
    def eligible_argument?(argument)
      argument.present? && argument != true
    end
end
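
A short sketch of the signed-key round trip, assuming `ActiveStorage.verifier` has already been configured (as the engine does at boot):

variation = ActiveStorage::Variation.new(resize: "100x100", monochrome: true)

key = variation.key                            # signed, tamper-proof string
decoded = ActiveStorage::Variation.decode(key) # verifies and rebuilds the variation
decoded.transformations                        # => { resize: "100x100", monochrome: true }
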
data/config/routes.rb
ADDED
@@ -0,0 +1,9 @@
Rails.application.routes.draw do
  get "/rails/active_storage/blobs/:signed_id/*filename" => "active_storage/blobs#show", as: :rails_service_blob

  get "/rails/active_storage/variants/:signed_blob_id/:variation_key/*filename" => "active_storage/variants#show", as: :rails_blob_variation

  get "/rails/active_storage/disk/:encoded_key/*filename" => "active_storage/disk#show", as: :rails_disk_service
  put "/rails/active_storage/disk/:encoded_token" => "active_storage/disk#update", as: :update_rails_disk_service
  post "/rails/active_storage/direct_uploads" => "active_storage/direct_uploads#create", as: :rails_direct_uploads
end
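
Given these route names, the standard Rails URL helpers apply; a hedged sketch of how a view or controller might build the stable, redirecting URLs (the `blob` and `variant` variables are illustrative):

# Stable blob URL that redirects to the short-lived service_url:
rails_service_blob_path(blob.signed_id, blob.filename)

# Stable variant URL, resolved on demand by ActiveStorage::VariantsController:
rails_blob_variation_path(variant.blob.signed_id, variant.variation.key, variant.blob.filename)
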

data/config/storage_services.yml
ADDED
@@ -0,0 +1,34 @@
test:
  service: Disk
  root: <%= Rails.root.join("tmp/storage") %>

local:
  service: Disk
  root: <%= Rails.root.join("storage") %>

# Use rails secrets:edit to set the AWS secrets (as shared:aws:access_key_id|secret_access_key)
amazon:
  service: S3
  access_key_id: <%= Rails.application.secrets.dig(:aws, :access_key_id) %>
  secret_access_key: <%= Rails.application.secrets.dig(:aws, :secret_access_key) %>
  region: us-east-1
  bucket: your_own_bucket

# Remember not to check in your GCS keyfile to a repository
google:
  service: GCS
  project: your_project
  keyfile: <%= Rails.root.join("path/to/gcs.keyfile") %>
  bucket: your_own_bucket

microsoft:
  service: Azure
  path: your_azure_storage_path
  storage_account_name: your_account_name
  storage_access_key: <%= Rails.application.secrets.azure[:secret_access_key] %>
  container: your_container_name

mirror:
  service: Mirror
  primary: local
  mirrors: [ amazon, google ]