activestorage 0.1 → 5.2.0.beta1

Files changed (85)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +5 -0
  3. data/README.md +94 -25
  4. data/app/assets/javascripts/activestorage.js +1 -0
  5. data/app/controllers/active_storage/blobs_controller.rb +16 -0
  6. data/app/controllers/active_storage/direct_uploads_controller.rb +23 -0
  7. data/app/controllers/active_storage/disk_controller.rb +51 -0
  8. data/app/controllers/active_storage/previews_controller.rb +12 -0
  9. data/app/controllers/active_storage/variants_controller.rb +16 -0
  10. data/app/javascript/activestorage/blob_record.js +54 -0
  11. data/app/javascript/activestorage/blob_upload.js +35 -0
  12. data/app/javascript/activestorage/direct_upload.js +42 -0
  13. data/app/javascript/activestorage/direct_upload_controller.js +67 -0
  14. data/app/javascript/activestorage/direct_uploads_controller.js +50 -0
  15. data/app/javascript/activestorage/file_checksum.js +53 -0
  16. data/app/javascript/activestorage/helpers.js +42 -0
  17. data/app/javascript/activestorage/index.js +11 -0
  18. data/app/javascript/activestorage/ujs.js +75 -0
  19. data/app/jobs/active_storage/analyze_job.rb +8 -0
  20. data/app/jobs/active_storage/base_job.rb +5 -0
  21. data/app/jobs/active_storage/purge_job.rb +11 -0
  22. data/app/models/active_storage/attachment.rb +35 -0
  23. data/app/models/active_storage/blob.rb +313 -0
  24. data/app/models/active_storage/filename.rb +73 -0
  25. data/app/models/active_storage/filename/parameters.rb +36 -0
  26. data/app/models/active_storage/preview.rb +90 -0
  27. data/app/models/active_storage/variant.rb +86 -0
  28. data/app/models/active_storage/variation.rb +67 -0
  29. data/config/routes.rb +43 -0
  30. data/lib/active_storage.rb +37 -2
  31. data/lib/active_storage/analyzer.rb +33 -0
  32. data/lib/active_storage/analyzer/image_analyzer.rb +36 -0
  33. data/lib/active_storage/analyzer/null_analyzer.rb +13 -0
  34. data/lib/active_storage/analyzer/video_analyzer.rb +79 -0
  35. data/lib/active_storage/attached.rb +28 -22
  36. data/lib/active_storage/attached/macros.rb +89 -16
  37. data/lib/active_storage/attached/many.rb +53 -21
  38. data/lib/active_storage/attached/one.rb +74 -20
  39. data/lib/active_storage/downloading.rb +26 -0
  40. data/lib/active_storage/engine.rb +72 -0
  41. data/lib/active_storage/gem_version.rb +17 -0
  42. data/lib/active_storage/log_subscriber.rb +52 -0
  43. data/lib/active_storage/previewer.rb +58 -0
  44. data/lib/active_storage/previewer/pdf_previewer.rb +17 -0
  45. data/lib/active_storage/previewer/video_previewer.rb +23 -0
  46. data/lib/active_storage/service.rb +112 -24
  47. data/lib/active_storage/service/azure_storage_service.rb +124 -0
  48. data/lib/active_storage/service/configurator.rb +32 -0
  49. data/lib/active_storage/service/disk_service.rb +103 -44
  50. data/lib/active_storage/service/gcs_service.rb +87 -29
  51. data/lib/active_storage/service/mirror_service.rb +38 -22
  52. data/lib/active_storage/service/s3_service.rb +83 -38
  53. data/lib/active_storage/version.rb +10 -0
  54. data/lib/tasks/activestorage.rake +4 -15
  55. metadata +64 -108
  56. data/.gitignore +0 -1
  57. data/Gemfile +0 -11
  58. data/Gemfile.lock +0 -235
  59. data/Rakefile +0 -11
  60. data/activestorage.gemspec +0 -21
  61. data/lib/active_storage/attachment.rb +0 -30
  62. data/lib/active_storage/blob.rb +0 -80
  63. data/lib/active_storage/disk_controller.rb +0 -28
  64. data/lib/active_storage/download.rb +0 -90
  65. data/lib/active_storage/filename.rb +0 -31
  66. data/lib/active_storage/migration.rb +0 -28
  67. data/lib/active_storage/purge_job.rb +0 -10
  68. data/lib/active_storage/railtie.rb +0 -56
  69. data/lib/active_storage/storage_services.yml +0 -27
  70. data/lib/active_storage/verified_key_with_expiration.rb +0 -24
  71. data/test/attachments_test.rb +0 -95
  72. data/test/blob_test.rb +0 -28
  73. data/test/database/create_users_migration.rb +0 -7
  74. data/test/database/setup.rb +0 -6
  75. data/test/disk_controller_test.rb +0 -34
  76. data/test/filename_test.rb +0 -36
  77. data/test/service/.gitignore +0 -1
  78. data/test/service/configurations-example.yml +0 -11
  79. data/test/service/disk_service_test.rb +0 -8
  80. data/test/service/gcs_service_test.rb +0 -20
  81. data/test/service/mirror_service_test.rb +0 -50
  82. data/test/service/s3_service_test.rb +0 -11
  83. data/test/service/shared_service_tests.rb +0 -68
  84. data/test/test_helper.rb +0 -28
  85. data/test/verified_key_with_expiration_test.rb +0 -19

data/app/javascript/activestorage/direct_upload_controller.js
@@ -0,0 +1,67 @@
+ import { DirectUpload } from "./direct_upload"
+ import { dispatchEvent } from "./helpers"
+
+ export class DirectUploadController {
+   constructor(input, file) {
+     this.input = input
+     this.file = file
+     this.directUpload = new DirectUpload(this.file, this.url, this)
+     this.dispatch("initialize")
+   }
+
+   start(callback) {
+     const hiddenInput = document.createElement("input")
+     hiddenInput.type = "hidden"
+     hiddenInput.name = this.input.name
+     this.input.insertAdjacentElement("beforebegin", hiddenInput)
+
+     this.dispatch("start")
+
+     this.directUpload.create((error, attributes) => {
+       if (error) {
+         hiddenInput.parentNode.removeChild(hiddenInput)
+         this.dispatchError(error)
+       } else {
+         hiddenInput.value = attributes.signed_id
+       }
+
+       this.dispatch("end")
+       callback(error)
+     })
+   }
+
+   uploadRequestDidProgress(event) {
+     const progress = event.loaded / event.total * 100
+     if (progress) {
+       this.dispatch("progress", { progress })
+     }
+   }
+
+   get url() {
+     return this.input.getAttribute("data-direct-upload-url")
+   }
+
+   dispatch(name, detail = {}) {
+     detail.file = this.file
+     detail.id = this.directUpload.id
+     return dispatchEvent(this.input, `direct-upload:${name}`, { detail })
+   }
+
+   dispatchError(error) {
+     const event = this.dispatch("error", { error })
+     if (!event.defaultPrevented) {
+       alert(error)
+     }
+   }
+
+   // DirectUpload delegate
+
+   directUploadWillCreateBlobWithXHR(xhr) {
+     this.dispatch("before-blob-request", { xhr })
+   }
+
+   directUploadWillStoreFileWithXHR(xhr) {
+     this.dispatch("before-storage-request", { xhr })
+     xhr.upload.addEventListener("progress", event => this.uploadRequestDidProgress(event))
+   }
+ }
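
Each DirectUploadController above dispatches bubbling direct-upload:* events on its file input, carrying detail.file, detail.id and, for progress events, detail.progress. As an illustrative sketch (not part of the gem diff), a page could listen for these events to render per-file progress; the #uploads container and element ids below are assumptions:

// Sketch only: listening to the events dispatched by DirectUploadController.
addEventListener("direct-upload:initialize", event => {
  const { id, file } = event.detail
  const element = document.createElement("div")
  element.id = `direct-upload-${id}`                        // one element per DirectUpload id
  element.textContent = `${file.name}: 0%`
  document.querySelector("#uploads").appendChild(element)   // assumed container element
})

addEventListener("direct-upload:progress", event => {
  const { id, progress } = event.detail                     // progress is a percentage (0-100)
  const element = document.querySelector(`#direct-upload-${id}`)
  element.textContent = `${file ? "" : ""}${Math.round(progress)}%`.replace("", "") || `${Math.round(progress)}%`
})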

data/app/javascript/activestorage/direct_uploads_controller.js
@@ -0,0 +1,50 @@
+ import { DirectUploadController } from "./direct_upload_controller"
+ import { findElements, dispatchEvent, toArray } from "./helpers"
+
+ const inputSelector = "input[type=file][data-direct-upload-url]:not([disabled])"
+
+ export class DirectUploadsController {
+   constructor(form) {
+     this.form = form
+     this.inputs = findElements(form, inputSelector).filter(input => input.files.length)
+   }
+
+   start(callback) {
+     const controllers = this.createDirectUploadControllers()
+
+     const startNextController = () => {
+       const controller = controllers.shift()
+       if (controller) {
+         controller.start(error => {
+           if (error) {
+             callback(error)
+             this.dispatch("end")
+           } else {
+             startNextController()
+           }
+         })
+       } else {
+         callback()
+         this.dispatch("end")
+       }
+     }
+
+     this.dispatch("start")
+     startNextController()
+   }
+
+   createDirectUploadControllers() {
+     const controllers = []
+     this.inputs.forEach(input => {
+       toArray(input.files).forEach(file => {
+         const controller = new DirectUploadController(input, file)
+         controllers.push(controller)
+       })
+     })
+     return controllers
+   }
+
+   dispatch(name, detail = {}) {
+     return dispatchEvent(this.form, `direct-uploads:${name}`, { detail })
+   }
+ }
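
DirectUploadsController wraps every eligible file input in a form and dispatches direct-uploads:start and direct-uploads:end on that form while the uploads run one at a time. A sketch (again, not part of the gem) of using those form-level events to disable the submit button during uploads:

// Sketch only: event.target is the form that DirectUploadsController was built for.
document.addEventListener("direct-uploads:start", event => {
  const submit = event.target.querySelector("input[type=submit]")
  if (submit) submit.disabled = true
})

document.addEventListener("direct-uploads:end", event => {
  const submit = event.target.querySelector("input[type=submit]")
  if (submit) submit.disabled = false
})

Note that submitForm in ujs.js (later in this diff) briefly clears the button's disabled flag before clicking it, so disabling the button here does not block the eventual submission.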

data/app/javascript/activestorage/file_checksum.js
@@ -0,0 +1,53 @@
+ import SparkMD5 from "spark-md5"
+
+ const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
+
+ export class FileChecksum {
+   static create(file, callback) {
+     const instance = new FileChecksum(file)
+     instance.create(callback)
+   }
+
+   constructor(file) {
+     this.file = file
+     this.chunkSize = 2097152 // 2MB
+     this.chunkCount = Math.ceil(this.file.size / this.chunkSize)
+     this.chunkIndex = 0
+   }
+
+   create(callback) {
+     this.callback = callback
+     this.md5Buffer = new SparkMD5.ArrayBuffer
+     this.fileReader = new FileReader
+     this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event))
+     this.fileReader.addEventListener("error", event => this.fileReaderDidError(event))
+     this.readNextChunk()
+   }
+
+   fileReaderDidLoad(event) {
+     this.md5Buffer.append(event.target.result)
+
+     if (!this.readNextChunk()) {
+       const binaryDigest = this.md5Buffer.end(true)
+       const base64digest = btoa(binaryDigest)
+       this.callback(null, base64digest)
+     }
+   }
+
+   fileReaderDidError(event) {
+     this.callback(`Error reading ${this.file.name}`)
+   }
+
+   readNextChunk() {
+     if (this.chunkIndex < this.chunkCount) {
+       const start = this.chunkIndex * this.chunkSize
+       const end = Math.min(start + this.chunkSize, this.file.size)
+       const bytes = fileSlice.call(this.file, start, end)
+       this.fileReader.readAsArrayBuffer(bytes)
+       this.chunkIndex++
+       return true
+     } else {
+       return false
+     }
+   }
+ }
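
FileChecksum reads the file in 2 MB chunks and yields a base64-encoded binary MD5 digest, which the direct-upload flow sends along when creating the blob so the service can validate transit integrity. A standalone usage sketch (not part of the gem), assuming the relative module path:

// Sketch only: wrap FileChecksum.create in a Promise for standalone use.
import { FileChecksum } from "./file_checksum"

function checksumFor(file) {
  return new Promise((resolve, reject) => {
    FileChecksum.create(file, (error, checksum) => {
      if (error) {
        reject(error)       // e.g. "Error reading <name>" from fileReaderDidError
      } else {
        resolve(checksum)   // base64-encoded MD5 digest of the file contents
      }
    })
  })
}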

data/app/javascript/activestorage/helpers.js
@@ -0,0 +1,42 @@
+ export function getMetaValue(name) {
+   const element = findElement(document.head, `meta[name="${name}"]`)
+   if (element) {
+     return element.getAttribute("content")
+   }
+ }
+
+ export function findElements(root, selector) {
+   if (typeof root == "string") {
+     selector = root
+     root = document
+   }
+   const elements = root.querySelectorAll(selector)
+   return toArray(elements)
+ }
+
+ export function findElement(root, selector) {
+   if (typeof root == "string") {
+     selector = root
+     root = document
+   }
+   return root.querySelector(selector)
+ }
+
+ export function dispatchEvent(element, type, eventInit = {}) {
+   const { bubbles, cancelable, detail } = eventInit
+   const event = document.createEvent("Event")
+   event.initEvent(type, bubbles || true, cancelable || true)
+   event.detail = detail || {}
+   element.dispatchEvent(event)
+   return event
+ }
+
+ export function toArray(value) {
+   if (Array.isArray(value)) {
+     return value
+   } else if (Array.from) {
+     return Array.from(value)
+   } else {
+     return [].slice.call(value)
+   }
+ }

data/app/javascript/activestorage/index.js
@@ -0,0 +1,11 @@
+ import { start } from "./ujs"
+ import { DirectUpload } from "./direct_upload"
+ export { start, DirectUpload }
+
+ function autostart() {
+   if (window.ActiveStorage) {
+     start()
+   }
+ }
+
+ setTimeout(autostart, 1)
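
The autostart above only runs when a global window.ActiveStorage already exists; when the module is pulled in through a bundler instead, start() can be called explicitly. A sketch, with the package name assumed for this release:

// Sketch only: the "activestorage" package name is an assumption here.
import * as ActiveStorage from "activestorage"

ActiveStorage.start()   // installs the submit and ajax:before listeners from ujs.js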

data/app/javascript/activestorage/ujs.js
@@ -0,0 +1,75 @@
+ import { DirectUploadsController } from "./direct_uploads_controller"
+ import { findElement } from "./helpers"
+
+ const processingAttribute = "data-direct-uploads-processing"
+ let started = false
+
+ export function start() {
+   if (!started) {
+     started = true
+     document.addEventListener("submit", didSubmitForm)
+     document.addEventListener("ajax:before", didSubmitRemoteElement)
+   }
+ }
+
+ function didSubmitForm(event) {
+   handleFormSubmissionEvent(event)
+ }
+
+ function didSubmitRemoteElement(event) {
+   if (event.target.tagName == "FORM") {
+     handleFormSubmissionEvent(event)
+   }
+ }
+
+ function handleFormSubmissionEvent(event) {
+   const form = event.target
+
+   if (form.hasAttribute(processingAttribute)) {
+     event.preventDefault()
+     return
+   }
+
+   const controller = new DirectUploadsController(form)
+   const { inputs } = controller
+
+   if (inputs.length) {
+     event.preventDefault()
+     form.setAttribute(processingAttribute, "")
+     inputs.forEach(disable)
+     controller.start(error => {
+       form.removeAttribute(processingAttribute)
+       if (error) {
+         inputs.forEach(enable)
+       } else {
+         submitForm(form)
+       }
+     })
+   }
+ }
+
+ function submitForm(form) {
+   let button = findElement(form, "input[type=submit]")
+   if (button) {
+     const { disabled } = button
+     button.disabled = false
+     button.focus()
+     button.click()
+     button.disabled = disabled
+   } else {
+     button = document.createElement("input")
+     button.type = "submit"
+     button.style = "display:none"
+     form.appendChild(button)
+     button.click()
+     form.removeChild(button)
+   }
+ }
+
+ function disable(input) {
+   input.disabled = true
+ }
+
+ function enable(input) {
+   input.disabled = false
+ }
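
If an upload in this flow fails, DirectUploadController#dispatchError (earlier in this diff) falls back to alert() only when the direct-upload:error event is not default-prevented. A sketch of opting out of the alert and marking the failed upload instead; the element id and CSS class are assumptions:

// Sketch only: cancel the default alert and surface the error in the page.
addEventListener("direct-upload:error", event => {
  event.preventDefault()                                           // suppress the default alert()
  const { id, error } = event.detail
  const element = document.querySelector(`#direct-upload-${id}`)   // assumed element id
  if (element) {
    element.classList.add("direct-upload--error")                  // assumed CSS class
    element.setAttribute("title", error)
  }
})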

data/app/jobs/active_storage/analyze_job.rb
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ # Provides asynchronous analysis of ActiveStorage::Blob records via ActiveStorage::Blob#analyze_later.
+ class ActiveStorage::AnalyzeJob < ActiveStorage::BaseJob
+   def perform(blob)
+     blob.analyze
+   end
+ end

data/app/jobs/active_storage/base_job.rb
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ class ActiveStorage::BaseJob < ActiveJob::Base
+   queue_as { ActiveStorage.queue }
+ end

data/app/jobs/active_storage/purge_job.rb
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ # Provides asynchronous purging of ActiveStorage::Blob records via ActiveStorage::Blob#purge_later.
+ class ActiveStorage::PurgeJob < ActiveStorage::BaseJob
+   # FIXME: Limit this to a custom ActiveStorage error
+   retry_on StandardError
+
+   def perform(blob)
+     blob.purge
+   end
+ end

data/app/models/active_storage/attachment.rb
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ require "active_support/core_ext/module/delegation"
+
+ # Attachments associate records with blobs. Usually that's a one record-many blobs relationship,
+ # but it is possible to associate many different records with the same blob. If you're doing that,
+ # you'll want to declare with <tt>has_one/many_attached :thingy, dependent: false</tt>, so that destroying
+ # any one record won't destroy the blob as well. (Then you'll need to do your own garbage collecting, though).
+ class ActiveStorage::Attachment < ActiveRecord::Base
+   self.table_name = "active_storage_attachments"
+
+   belongs_to :record, polymorphic: true, touch: true
+   belongs_to :blob, class_name: "ActiveStorage::Blob"
+
+   delegate_missing_to :blob
+
+   after_create_commit :analyze_blob_later
+
+   # Synchronously purges the blob (deletes it from the configured service) and destroys the attachment.
+   def purge
+     blob.purge
+     destroy
+   end
+
+   # Destroys the attachment and asynchronously purges the blob (deletes it from the configured service).
+   def purge_later
+     blob.purge_later
+     destroy
+   end
+
+   private
+     def analyze_blob_later
+       blob.analyze_later unless blob.analyzed?
+     end
+ end

data/app/models/active_storage/blob.rb
@@ -0,0 +1,313 @@
+ # frozen_string_literal: true
+
+ require "active_storage/analyzer/null_analyzer"
+
+ # A blob is a record that contains the metadata about a file and a key for where that file resides on the service.
+ # Blobs can be created in two ways:
+ #
+ # 1. Subsequent to the file being uploaded server-side to the service via <tt>create_after_upload!</tt>.
+ # 2. Ahead of the file being directly uploaded client-side to the service via <tt>create_before_direct_upload!</tt>.
+ #
+ # The first option doesn't require any client-side JavaScript integration, and can be used by any other back-end
+ # service that deals with files. The second option is faster, since you're not using your own server as a staging
+ # point for uploads, and can work with deployments like Heroku that do not provide large amounts of disk space.
+ #
+ # Blobs are intended to be immutable insofar as their reference to a specific file goes. You're allowed to
+ # update a blob's metadata on a subsequent pass, but you should not update the key or change the uploaded file.
+ # If you need to create a derivative or otherwise change the blob, simply create a new blob and purge the old one.
+ class ActiveStorage::Blob < ActiveRecord::Base
+   class UnpreviewableError < StandardError; end
+   class UnrepresentableError < StandardError; end
+
+   self.table_name = "active_storage_blobs"
+
+   has_secure_token :key
+   store :metadata, accessors: [ :analyzed ], coder: JSON
+
+   class_attribute :service
+
+   has_many :attachments
+
+   has_one_attached :preview_image
+
+   class << self
+     # You can use the signed ID of a blob to refer to it on the client side without fear of tampering.
+     # This is particularly helpful for direct uploads where the client-side needs to refer to the blob
+     # that was created ahead of the upload itself on form submission.
+     #
+     # The signed ID is also used to create stable URLs for the blob through the BlobsController.
+     def find_signed(id)
+       find ActiveStorage.verifier.verify(id, purpose: :blob_id)
+     end
+
+     # Returns a new, unsaved blob instance after the +io+ has been uploaded to the service.
+     def build_after_upload(io:, filename:, content_type: nil, metadata: nil)
+       new.tap do |blob|
+         blob.filename = filename
+         blob.content_type = content_type
+         blob.metadata = metadata
+
+         blob.upload io
+       end
+     end
+
+     # Returns a saved blob instance after the +io+ has been uploaded to the service. Note, the blob is first built,
+     # then the +io+ is uploaded, then the blob is saved. This is done this way to avoid uploading (which may take
+     # time) while holding an open database transaction.
+     def create_after_upload!(io:, filename:, content_type: nil, metadata: nil)
+       build_after_upload(io: io, filename: filename, content_type: content_type, metadata: metadata).tap(&:save!)
+     end
+
+     # Returns a saved blob _without_ uploading a file to the service. This blob will point to a key where there is
+     # no file yet. It's intended to be used together with a client-side upload, which will first create the blob
+     # in order to produce the signed URL for uploading. This signed URL points to the key generated by the blob.
+     # Once the form using the direct upload is submitted, the blob can be associated with the right record using
+     # the signed ID.
+     def create_before_direct_upload!(filename:, byte_size:, checksum:, content_type: nil, metadata: nil)
+       create! filename: filename, byte_size: byte_size, checksum: checksum, content_type: content_type, metadata: metadata
+     end
+   end
+
+
+   # Returns a signed ID for this blob that's suitable for reference on the client-side without fear of tampering.
+   # It uses the framework-wide verifier on <tt>ActiveStorage.verifier</tt>, but with a dedicated purpose.
+   def signed_id
+     ActiveStorage.verifier.generate(id, purpose: :blob_id)
+   end
+
+   # Returns the key pointing to the file on the service that's associated with this blob. The key is in the
+   # standard secure-token format from Rails. So it'll look like: XTAPjJCJiuDrLk3TmwyJGpUo. This key is not intended
+   # to be revealed directly to the user. Always refer to blobs using the signed_id or a verified form of the key.
+   def key
+     # We can't wait until the record is first saved to have a key for it
+     self[:key] ||= self.class.generate_unique_secure_token
+   end
+
+   # Returns an ActiveStorage::Filename instance of the filename that can be
+   # queried for basename, extension, and a sanitized version of the filename
+   # that's safe to use in URLs.
+   def filename
+     ActiveStorage::Filename.new(self[:filename])
+   end
+
+   # Returns true if the content_type of this blob is in the image range, like image/png.
+   def image?
+     content_type.start_with?("image")
+   end
+
+   # Returns true if the content_type of this blob is in the audio range, like audio/mpeg.
+   def audio?
+     content_type.start_with?("audio")
+   end
+
+   # Returns true if the content_type of this blob is in the video range, like video/mp4.
+   def video?
+     content_type.start_with?("video")
+   end
+
+   # Returns true if the content_type of this blob is in the text range, like text/plain.
+   def text?
+     content_type.start_with?("text")
+   end
+
+   # Returns an ActiveStorage::Variant instance with the set of +transformations+ provided. This is only relevant for image
+   # files, and it allows any image to be transformed for size, colors, and the like. Example:
+   #
+   #   avatar.variant(resize: "100x100").processed.service_url
+   #
+   # This will create and process a variant of the avatar blob that's constrained to a height and width of 100px.
+   # Then it'll upload said variant to the service according to a derivative key of the blob and the transformations.
+   #
+   # Frequently, though, you don't actually want to transform the variant right away. But rather simply refer to a
+   # specific variant that can be created by a controller on-demand. Like so:
+   #
+   #   <%= image_tag Current.user.avatar.variant(resize: "100x100") %>
+   #
+   # This will create a URL for that specific blob with that specific variant, which the ActiveStorage::VariantsController
+   # can then produce on-demand.
+   def variant(transformations)
+     ActiveStorage::Variant.new(self, ActiveStorage::Variation.wrap(transformations))
+   end
+
+
+   # Returns an ActiveStorage::Preview instance with the set of +transformations+ provided. A preview is an image generated
+   # from a non-image blob. Active Storage comes with built-in previewers for videos and PDF documents. The video previewer
+   # extracts the first frame from a video and the PDF previewer extracts the first page from a PDF document.
+   #
+   #   blob.preview(resize: "100x100").processed.service_url
+   #
+   # Avoid processing previews synchronously in views. Instead, link to a controller action that processes them on demand.
+   # Active Storage provides one, but you may want to create your own (for example, if you need authentication). Here's
+   # how to use the built-in version:
+   #
+   #   <%= image_tag video.preview(resize: "100x100") %>
+   #
+   # This method raises ActiveStorage::Blob::UnpreviewableError if no previewer accepts the receiving blob. To determine
+   # whether a blob is accepted by any previewer, call ActiveStorage::Blob#previewable?.
+   def preview(transformations)
+     if previewable?
+       ActiveStorage::Preview.new(self, ActiveStorage::Variation.wrap(transformations))
+     else
+       raise UnpreviewableError
+     end
+   end
+
+   # Returns true if any registered previewer accepts the blob. By default, this will return true for videos and PDF documents.
+   def previewable?
+     ActiveStorage.previewers.any? { |klass| klass.accept?(self) }
+   end
+
+
+   # Returns an ActiveStorage::Preview instance for a previewable blob or an ActiveStorage::Variant instance for an image blob.
+   #
+   #   blob.representation(resize: "100x100").processed.service_url
+   #
+   # Raises ActiveStorage::Blob::UnrepresentableError if the receiving blob is neither an image nor previewable. Call
+   # ActiveStorage::Blob#representable? to determine whether a blob is representable.
+   #
+   # See ActiveStorage::Blob#preview and ActiveStorage::Blob#variant for more information.
+   def representation(transformations)
+     case
+     when previewable?
+       preview transformations
+     when image?
+       variant transformations
+     else
+       raise UnrepresentableError
+     end
+   end
+
+   # Returns true if the blob is an image or is previewable.
+   def representable?
+     image? || previewable?
+   end
+
+
+   # Returns the URL of the blob on the service. This URL is intended to be short-lived for security and not used directly
+   # with users. Instead, the +service_url+ should only be exposed as a redirect from a stable, possibly authenticated URL.
+   # Hiding the +service_url+ behind a redirect also gives you the power to change services without updating all URLs. And
+   # it allows permanent URLs that redirect to the +service_url+ to be cached in the view.
+   def service_url(expires_in: service.url_expires_in, disposition: "inline")
+     service.url key, expires_in: expires_in, disposition: disposition, filename: filename, content_type: content_type
+   end
+
+   # Returns a URL that can be used to directly upload a file for this blob on the service. This URL is intended to be
+   # short-lived for security and only generated on-demand by the client-side JavaScript responsible for doing the uploading.
+   def service_url_for_direct_upload(expires_in: service.url_expires_in)
+     service.url_for_direct_upload key, expires_in: expires_in, content_type: content_type, content_length: byte_size, checksum: checksum
+   end
+
+   # Returns a Hash of headers for +service_url_for_direct_upload+ requests.
+   def service_headers_for_direct_upload
+     service.headers_for_direct_upload key, filename: filename, content_type: content_type, content_length: byte_size, checksum: checksum
+   end
+
+   # Uploads the +io+ to the service on the +key+ for this blob. Blobs are intended to be immutable, so you shouldn't be
+   # using this method after a file has already been uploaded to fit with a blob. If you want to create a derivative blob,
+   # you should instead simply create a new blob based on the old one.
+   #
+   # Prior to uploading, we compute the checksum, which is sent to the service for transit integrity validation. If the
+   # checksum does not match what the service receives, an exception will be raised. We also measure the size of the +io+
+   # and store that in +byte_size+ on the blob record.
+   #
+   # Normally, you do not have to call this method directly at all. Use the factory class methods of +build_after_upload+
+   # and +create_after_upload!+.
+   def upload(io)
+     self.checksum = compute_checksum_in_chunks(io)
+     self.byte_size = io.size
+
+     service.upload(key, io, checksum: checksum)
+   end
+
+   # Downloads the file associated with this blob. If no block is given, the entire file is read into memory and returned.
+   # That'll use a lot of RAM for very large files. If a block is given, then the download is streamed and yielded in chunks.
+   def download(&block)
+     service.download key, &block
+   end
+
+
+   # Extracts and stores metadata from the file associated with this blob using a relevant analyzer. Active Storage comes
+   # with built-in analyzers for images and videos. See ActiveStorage::Analyzer::ImageAnalyzer and
+   # ActiveStorage::Analyzer::VideoAnalyzer for information about the specific attributes they extract and the third-party
+   # libraries they require.
+   #
+   # To choose the analyzer for a blob, Active Storage calls +accept?+ on each registered analyzer in order. It uses the
+   # first analyzer for which +accept?+ returns true when given the blob. If no registered analyzer accepts the blob, no
+   # metadata is extracted from it.
+   #
+   # In a Rails application, add or remove analyzers by manipulating +Rails.application.config.active_storage.analyzers+
+   # in an initializer:
+   #
+   #   # Add a custom analyzer for Microsoft Office documents:
+   #   Rails.application.config.active_storage.analyzers.append DOCXAnalyzer
+   #
+   #   # Remove the built-in video analyzer:
+   #   Rails.application.config.active_storage.analyzers.delete ActiveStorage::Analyzer::VideoAnalyzer
+   #
+   # Outside of a Rails application, manipulate +ActiveStorage.analyzers+ instead.
+   #
+   # You won't ordinarily need to call this method from a Rails application. New blobs are automatically and asynchronously
+   # analyzed via #analyze_later when they're attached for the first time.
+   def analyze
+     update! metadata: metadata.merge(extract_metadata_via_analyzer)
+   end
+
+   # Enqueues an ActiveStorage::AnalyzeJob which calls #analyze.
+   #
+   # This method is automatically called for a blob when it's attached for the first time. You can call it to analyze a blob
+   # again (e.g. if you add a new analyzer or modify an existing one).
+   def analyze_later
+     ActiveStorage::AnalyzeJob.perform_later(self)
+   end
+
+   # Returns true if the blob has been analyzed.
+   def analyzed?
+     analyzed
+   end
+
+
+   # Deletes the file on the service that's associated with this blob. This should only be done if the blob is going to be
+   # deleted as well or you will essentially have a dead reference. It's recommended to use the +#purge+ and +#purge_later+
+   # methods in most circumstances.
+   def delete
+     service.delete key
+   end
+
+   # Deletes the file on the service and then destroys the blob record. This is the recommended way to dispose of unwanted
+   # blobs. Note, though, that deleting the file off the service will initiate an HTTP connection to the service, which may
+   # be slow or prevented, so you should not use this method inside a transaction or in callbacks. Use +#purge_later+ instead.
+   def purge
+     delete
+     destroy
+   end
+
+   # Enqueues an ActiveStorage::PurgeJob that'll call +purge+. This is the recommended way to purge blobs when the call
+   # needs to be made from a transaction, a callback, or any other real-time scenario.
+   def purge_later
+     ActiveStorage::PurgeJob.perform_later(self)
+   end
+
+   private
+     def compute_checksum_in_chunks(io)
+       Digest::MD5.new.tap do |checksum|
+         while chunk = io.read(5.megabytes)
+           checksum << chunk
+         end
+
+         io.rewind
+       end.base64digest
+     end
+
+
+     def extract_metadata_via_analyzer
+       analyzer.metadata.merge(analyzed: true)
+     end
+
+     def analyzer
+       analyzer_class.new(self)
+     end
+
+     def analyzer_class
+       ActiveStorage.analyzers.detect { |klass| klass.accept?(self) } || ActiveStorage::Analyzer::NullAnalyzer
+     end
+ end