activestorage_qinium 0.1.0 → 0.2.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +4 -0
- data/Gemfile.lock +3 -3
- data/activestorage_qinium.gemspec +2 -4
- data/lib/active_storage/service/qinium_service.rb +38 -24
- data/lib/active_storage_qinium/version.rb +1 -1
- data/lib/active_storage_qinium.rb +1 -1
- metadata +5 -12
- data/app/javascript/active_storage_qinium/direct_upload_controller/blob_record.js +0 -73
- data/app/javascript/active_storage_qinium/direct_upload_controller/blob_upload.js +0 -45
- data/app/javascript/active_storage_qinium/direct_upload_controller/direct_upload.js +0 -48
- data/app/javascript/active_storage_qinium/direct_upload_controller/file_checksum.js +0 -53
- data/app/javascript/active_storage_qinium/direct_upload_controller/helpers.js +0 -51
- data/app/javascript/active_storage_qinium/direct_upload_controller.js +0 -55
- data/lib/active_storage_qinium/engine.rb +0 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '00099f76b5d04bf4937fa1a7ba08b6372c27438cb6a743b14b28c178f7be6d23'
+  data.tar.gz: 60cd2922c6d18ef2efe74b81445ec5a74046f33fd1ba3d269321642bf5fefa14
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 3c41a1e9a044866659fb3528d3a4d8ee5d93144e737e891fc8578aa70385ec72f405b531df0de8edf69b32e7f555e8686b7f30f23267b04f7ad3742ec3b1a852
+  data.tar.gz: 9d6c3a98ddf542e32def899fa36fc1f6f6566fa957aff73009016c19484acf92c838fa4d7b327a9448c08044d1a35ca6cbe509b08033ec3904166b897a7b7deb
data/CHANGELOG.md
CHANGED
data/Gemfile.lock
CHANGED
@@ -1,8 +1,8 @@
 PATH
   remote: .
   specs:
-    activestorage_qinium (0.1.0)
-      qinium (~> 0.
+    activestorage_qinium (0.2.1)
+      qinium (~> 0.2.0)

 GEM
   remote: https://rubygems.org/
@@ -12,7 +12,7 @@ GEM
     parallel (1.22.1)
     parser (3.1.1.0)
       ast (~> 2.4.1)
-    qinium (0.
+    qinium (0.2.0)
     rainbow (3.1.1)
     rake (13.0.6)
     regexp_parser (2.5.0)
data/activestorage_qinium.gemspec
CHANGED
@@ -15,11 +15,9 @@ Gem::Specification.new do |spec|
   spec.required_ruby_version = ">= 2.6.0"

   spec.metadata["homepage_uri"] = spec.homepage
-  spec.metadata["source_code_uri"] =spec.homepage
+  spec.metadata["source_code_uri"] = spec.homepage
   spec.metadata["changelog_uri"] = "#{spec.homepage}/CHANGELOG.md"

-  # Specify which files should be added to the gem when it is released.
-  # The `git ls-files -z` loads the files in the RubyGem that have been added into git.
   spec.files = Dir.chdir(File.expand_path(__dir__)) do
     `git ls-files -z`.split("\x0").reject do |f|
       (f == __FILE__) || f.match(%r{\A(?:(?:bin|test|spec|features)/|\.(?:git|travis|circleci)|appveyor)})
@@ -28,5 +26,5 @@ Gem::Specification.new do |spec|

   spec.require_paths = ["lib"]

-  spec.add_dependency "qinium", "~> 0.
+  spec.add_dependency "qinium", "~> 0.2.0"
 end
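For a consuming application, the practical effect of the gemspec change is that upgrading this gem also moves the runtime dependency to qinium 0.2.x. A purely illustrative Gemfile entry follows; the exact constraint is the application's choice, not something dictated by this diff:

    # Gemfile (illustrative)
    gem "activestorage_qinium", "~> 0.2.1"  # resolves qinium ~> 0.2.0 transitively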
data/lib/active_storage/service/qinium_service.rb
CHANGED
@@ -1,10 +1,12 @@
+require "open-uri"
+require "active_storage/analyzer/qinium_image_analyzer"
 module ActiveStorage
   class Service::QiniumService < Service
     attr_reader :qiniu

     delegate :config, :client, to: :qiniu
-    delegate :settings, :
-
+    delegate :settings, :bucket, :access_key, :secret_key, :domain,
+             :protocol, :put_policy_options,
              to: :config

     def self.analyzers
@@ -15,7 +17,7 @@ module ActiveStorage
       @qiniu = Qinium.new(options)
     end

-    def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:)
+    def url_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, custom_metadata:)
       instrument :url, key: key do |payload|
         url = config.up_host
         payload[:url] = url
@@ -23,15 +25,25 @@ module ActiveStorage
       end
     end

-    def
+    def http_method_for_direct_upload
+      "POST"
+    end
+
+    def http_response_type_for_direct_upload
+      "json"
+    end
+
+    def form_data_for_direct_upload(key, expires_in:, content_type:, content_length:, checksum:, **)
       put_policy = Qinium::PutPolicy.new(config, key: key, expires_in: expires_in)
       put_policy.fsize_limit = content_length.to_i + 1000
-
+      # OPTIMIZE: disable the MIME-type limit for now so that xmind files can be uploaded
+      put_policy.mime_limit = nil
       put_policy.detect_mime = 1
       put_policy.insert_only = 1
       {
         key: key,
-        token: put_policy.to_token
+        token: put_policy.to_token,
+        ':file': "file"
       }
     end

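The three hooks added above wire the service into Active Storage's form-based direct upload flow: clients POST (http_method_for_direct_upload), expect a JSON response (http_response_type_for_direct_upload), and send the key and upload token as form fields, with the blob itself carried under a "file" field, which is what the ':file': "file" entry appears to signal. A rough sketch of the resulting request as a plain Ruby client might issue it; the host, key, token, and file name below are placeholders rather than values from this diff:

    require "net/http"
    require "uri"

    # Placeholders standing in for config.up_host and for the values that
    # url_for_direct_upload / form_data_for_direct_upload would produce.
    up_host = "https://upload.example.com"
    fields  = { "key" => "some/object/key", "token" => "UPLOAD_TOKEN" }

    uri = URI(up_host)
    request = Net::HTTP::Post.new(uri)
    # Multipart form upload: ordinary fields plus the file under "file".
    request.set_form(fields.to_a + [["file", File.open("example.bin")]],
                     "multipart/form-data")

    response = Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == "https") do |http|
      http.request(request)
    end
    puts response.body # JSON, matching http_response_type_for_direct_upload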
@@ -46,19 +58,19 @@ module ActiveStorage
         host = nil
         while (blk = io.read(config.block_size))
           data = upload_blk(blk, token: up_token, host: host)
-          ctx = data.fetch(
-          host = data.fetch(
+          ctx = data.fetch("ctx")
+          host = data.fetch("host")
           file_size += blk.size
           blocks.push(ctx)
         end

-        _code, data, _headers = qiniu.object.mkfile(token: up_token, file_size: file_size, key: key,
+        _code, data, _headers = qiniu.object.mkfile(token: up_token, file_size: file_size, key: key,
+                                                    mime_type: content_type, blocks: blocks)
         data
       end
     end

-    def update_metadata(key, **metadata)
-    end
+    def update_metadata(key, **metadata); end

     def download(key)
       if block_given?
@@ -81,7 +93,7 @@ module ActiveStorage
         uri = URI(url(key, disposition: :attachment))
         Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == "https") do |client|
           client.get(uri,
-
+                     "Range" => "bytes=#{range.begin}-#{range.exclude_end? ? range.end - 1 : range.end}").body
         end
       end
     end
@@ -94,7 +106,7 @@ module ActiveStorage

     def delete_prefixed(prefix)
       instrument :delete_prefixed, prefix: prefix do
-        items_for(prefix).each { |item| delete item[
+        items_for(prefix).each { |item| delete item["key"] }
       end
     end

@@ -110,20 +122,21 @@ module ActiveStorage
       instrument :url, key: key do |payload|
         fop = if options[:fop].present? # content processing (fop)
                 options[:fop]
-              elsif options[:disposition].to_s ==
-                attname = URI.encode_www_form_component
+              elsif options[:disposition].to_s == "attachment" # download as attachment
+                attname = URI.encode_www_form_component (options[:filename] || key).to_s
                 "attname=#{attname}"
              end

-        url = if
+        url = if config.public
+                url_encoded_key = key.split("/").map { |x| CGI.escape(x) }.join("/")
+                ["#{protocol}://#{domain}/#{url_encoded_key}", fop].compact.join("?")
+              else
                 expires_in = options[:expires_in] ||
                   Rails.application.config.active_storage.service_urls_expire_in ||
                   3600
-
-
-
-                url_encoded_key = key.split('/').map { |x| CGI.escape(x) }.join('/')
-                ["#{protocol}://#{domain}/#{url_encoded_key}", fop].compact.join('?')
+                Qinium::Auth.authorize_download_url(domain, key,
+                                                    access_key, secret_key,
+                                                    schema: protocol, fop: fop, expires_in: expires_in)
               end

         payload[:url] = url
@@ -133,9 +146,9 @@ module ActiveStorage

     private

-    def items_for(prefix =
+    def items_for(prefix = "")
       _code, data, _headers = qiniu.object.list(prefix: prefix)
-      data[
+      data["items"]
     end

     def upload_blk(blk, token:, host: nil)
@@ -147,8 +160,9 @@ module ActiveStorage

     def with_retries(max: 3)
       yield
-    rescue
+    rescue StandardError
       raise if max.zero?
+
       max -= 1
       retry
     end
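The reworked url method now branches on config.public: public buckets get a plain "#{protocol}://#{domain}/<escaped key>" URL (each path segment CGI-escaped, with an optional fop or attname query), while private buckets are routed through Qinium::Auth.authorize_download_url to produce a time-limited signed URL. A hedged sketch of how calling code might observe the two behaviours; the service lookup, key, and filename are illustrative assumptions, not values from this diff:

    # Assumes the application has configured this gem as its Active Storage
    # service (for example via config/storage.yml); key and filename are made up.
    service = ActiveStorage::Blob.service

    # config.public == true: plain CDN-style URL, plus attname=... when the
    # disposition is :attachment.
    service.url("reports/2024 05.pdf",
                disposition: :attachment,
                filename: "monthly report.pdf")

    # config.public == false: the same call goes through
    # Qinium::Auth.authorize_download_url(domain, key, access_key, secret_key,
    #   schema: protocol, fop: fop, expires_in: expires_in)
    # honouring options[:expires_in] or Rails' service_urls_expire_in (default 3600 s).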
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: activestorage_qinium
 version: !ruby/object:Gem::Version
-  version: 0.1.0
+  version: 0.2.1
 platform: ruby
 authors:
 - xiaohui
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2024-05-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: qinium
@@ -16,14 +16,14 @@ dependencies:
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.2.0
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - "~>"
       - !ruby/object:Gem::Version
-        version: 0.
+        version: 0.2.0
 description: Wraps the Qiniu Storage Service as an Active Storage service, support
   muti-tenant settings. https://www.qiniu.com
 email:
@@ -41,16 +41,9 @@ files:
 - README.md
 - Rakefile
 - activestorage_qinium.gemspec
-- app/javascript/active_storage_qinium/direct_upload_controller.js
-- app/javascript/active_storage_qinium/direct_upload_controller/blob_record.js
-- app/javascript/active_storage_qinium/direct_upload_controller/blob_upload.js
-- app/javascript/active_storage_qinium/direct_upload_controller/direct_upload.js
-- app/javascript/active_storage_qinium/direct_upload_controller/file_checksum.js
-- app/javascript/active_storage_qinium/direct_upload_controller/helpers.js
 - lib/active_storage/analyzer/qinium_image_analyzer.rb
 - lib/active_storage/service/qinium_service.rb
 - lib/active_storage_qinium.rb
-- lib/active_storage_qinium/engine.rb
 - lib/active_storage_qinium/version.rb
 - lib/activestorage_qinium.rb
 - sig/activestorage_qinium.rbs
@@ -76,7 +69,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     - !ruby/object:Gem::Version
       version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.4.19
 signing_key:
 specification_version: 4
 summary: A muti-tenant SDK wrap the Qiniu Storage Service as an Active Storage service
data/app/javascript/active_storage_qinium/direct_upload_controller/blob_record.js
DELETED
@@ -1,73 +0,0 @@
-import { getMetaValue } from "./helpers"
-
-export class BlobRecord {
-  constructor(file, checksum, url) {
-    this.file = file
-
-    this.attributes = {
-      filename: file.name,
-      content_type: file.type || "application/octet-stream",
-      byte_size: file.size,
-      checksum: checksum
-    }
-
-    this.xhr = new XMLHttpRequest
-    this.xhr.open("POST", url, true)
-    this.xhr.responseType = "json"
-    this.xhr.setRequestHeader("Content-Type", "application/json")
-    this.xhr.setRequestHeader("Accept", "application/json")
-    this.xhr.setRequestHeader("X-Requested-With", "XMLHttpRequest")
-
-    const csrfToken = getMetaValue("csrf-token")
-    if (csrfToken != undefined) {
-      this.xhr.setRequestHeader("X-CSRF-Token", csrfToken)
-    }
-
-    this.xhr.addEventListener("load", event => this.requestDidLoad(event))
-    this.xhr.addEventListener("error", event => this.requestDidError(event))
-  }
-
-  get status() {
-    return this.xhr.status
-  }
-
-  get response() {
-    const { responseType, response } = this.xhr
-    if (responseType == "json") {
-      return response
-    } else {
-      // Shim for IE 11: https://connect.microsoft.com/IE/feedback/details/794808
-      return JSON.parse(response)
-    }
-  }
-
-  create(callback) {
-    this.callback = callback
-    this.xhr.send(JSON.stringify({ blob: this.attributes }))
-  }
-
-  requestDidLoad(event) {
-    if (this.status >= 200 && this.status < 300) {
-      const { response } = this
-      const { direct_upload } = response
-      delete response.direct_upload
-      this.attributes = response
-      this.directUploadData = direct_upload
-      this.callback(null, this.toJSON())
-    } else {
-      this.requestDidError(event)
-    }
-  }
-
-  requestDidError(event) {
-    this.callback(event, this)
-  }
-
-  toJSON() {
-    const result = {}
-    for (const key in this.attributes) {
-      result[key] = this.attributes[key]
-    }
-    return result
-  }
-}
|
|
1
|
-
export class BlobUpload {
|
2
|
-
constructor(blob) {
|
3
|
-
this.blob = blob
|
4
|
-
this.file = blob.file
|
5
|
-
this.dataBuilder = blob.dataBuilder || ((ctx) => ctx.file.slice())
|
6
|
-
const { url, headers } = blob.directUploadData
|
7
|
-
this.xhr = new XMLHttpRequest
|
8
|
-
this.xhr.open("POST", url, true)
|
9
|
-
// this.xhr.responseType = "text"
|
10
|
-
for (const key in headers) {
|
11
|
-
this.xhr.setRequestHeader(key, headers[key])
|
12
|
-
}
|
13
|
-
this.xhr.addEventListener("load", event => this.requestDidLoad(event))
|
14
|
-
this.xhr.addEventListener("error", event => this.requestDidError(event))
|
15
|
-
}
|
16
|
-
|
17
|
-
create(callback) {
|
18
|
-
// debugger
|
19
|
-
this.callback = callback
|
20
|
-
if(this.blob.directUploadData.formData){
|
21
|
-
var formData
|
22
|
-
formData = new FormData()
|
23
|
-
for(const key in this.blob.directUploadData.formData){
|
24
|
-
formData.append(key, this.blob.directUploadData.formData[key])
|
25
|
-
}
|
26
|
-
formData.append('file', this.file)
|
27
|
-
this.xhr.send(formData)
|
28
|
-
}else{
|
29
|
-
this.xhr.send(this.dataBuilder(this.blob))
|
30
|
-
}
|
31
|
-
}
|
32
|
-
|
33
|
-
requestDidLoad(event) {
|
34
|
-
const { status, response } = this.xhr
|
35
|
-
if (status >= 200 && status < 300) {
|
36
|
-
this.callback(null, response)
|
37
|
-
} else {
|
38
|
-
this.requestDidError(event)
|
39
|
-
}
|
40
|
-
}
|
41
|
-
|
42
|
-
requestDidError(event) {
|
43
|
-
this.callback(event, this)
|
44
|
-
}
|
45
|
-
}
|
data/app/javascript/active_storage_qinium/direct_upload_controller/direct_upload.js
DELETED
@@ -1,48 +0,0 @@
-import { FileChecksum } from "./file_checksum"
-import { BlobRecord } from "./blob_record"
-import { BlobUpload } from "./blob_upload"
-
-let id = 0
-
-export class DirectUpload {
-  constructor(file, url, delegate) {
-    this.id = ++id
-    this.file = file
-    this.url = url
-    this.delegate = delegate
-  }
-
-  create(callback) {
-    FileChecksum.create(this.file, (error, checksum) => {
-      if (error) {
-        callback(error)
-        return
-      }
-
-      const blob = new BlobRecord(this.file, checksum, this.url)
-      notify(this.delegate, "directUploadWillCreateBlobWithXHR", blob.xhr)
-
-      blob.create(error => {
-        if (error) {
-          callback(error)
-        } else {
-          const upload = new BlobUpload(blob)
-          notify(this.delegate, "directUploadWillStoreFileWithXHR", upload.xhr)
-          upload.create(error => {
-            if (error) {
-              callback(error)
-            } else {
-              callback(null, blob.toJSON())
-            }
-          })
-        }
-      })
-    })
-  }
-}
-
-function notify(object, methodName, ...messages) {
-  if (object && typeof object[methodName] == "function") {
-    return object[methodName](...messages)
-  }
-}
data/app/javascript/active_storage_qinium/direct_upload_controller/file_checksum.js
DELETED
@@ -1,53 +0,0 @@
-import SparkMD5 from "spark-md5"
-
-const fileSlice = File.prototype.slice || File.prototype.mozSlice || File.prototype.webkitSlice
-
-export class FileChecksum {
-  static create(file, callback) {
-    const instance = new FileChecksum(file)
-    instance.create(callback)
-  }
-
-  constructor(file) {
-    this.file = file
-    this.chunkSize = 2097152 // 2MB
-    this.chunkCount = Math.ceil(this.file.size / this.chunkSize)
-    this.chunkIndex = 0
-  }
-
-  create(callback) {
-    this.callback = callback
-    this.md5Buffer = new SparkMD5.ArrayBuffer
-    this.fileReader = new FileReader
-    this.fileReader.addEventListener("load", event => this.fileReaderDidLoad(event))
-    this.fileReader.addEventListener("error", event => this.fileReaderDidError(event))
-    this.readNextChunk()
-  }
-
-  fileReaderDidLoad(event) {
-    this.md5Buffer.append(event.target.result)
-
-    if (!this.readNextChunk()) {
-      const binaryDigest = this.md5Buffer.end(true)
-      const base64digest = btoa(binaryDigest)
-      this.callback(null, base64digest)
-    }
-  }
-
-  fileReaderDidError(event) {
-    this.callback(`Error reading ${this.file.name}`)
-  }
-
-  readNextChunk() {
-    if (this.chunkIndex < this.chunkCount || (this.chunkIndex == 0 && this.chunkCount == 0)) {
-      const start = this.chunkIndex * this.chunkSize
-      const end = Math.min(start + this.chunkSize, this.file.size)
-      const bytes = fileSlice.call(this.file, start, end)
-      this.fileReader.readAsArrayBuffer(bytes)
-      this.chunkIndex++
-      return true
-    } else {
-      return false
-    }
-  }
-}
data/app/javascript/active_storage_qinium/direct_upload_controller/helpers.js
DELETED
@@ -1,51 +0,0 @@
-export function getMetaValue(name) {
-  const element = findElement(document.head, `meta[name="${name}"]`)
-  if (element) {
-    return element.getAttribute("content")
-  }
-}
-
-export function findElements(root, selector) {
-  if (typeof root == "string") {
-    selector = root
-    root = document
-  }
-  const elements = root.querySelectorAll(selector)
-  return toArray(elements)
-}
-
-export function findElement(root, selector) {
-  if (typeof root == "string") {
-    selector = root
-    root = document
-  }
-  return root.querySelector(selector)
-}
-
-export function dispatchEvent(element, type, eventInit = {}) {
-  const { disabled } = element
-  const { bubbles, cancelable, detail } = eventInit
-  const event = document.createEvent("Event")
-
-  event.initEvent(type, bubbles || true, cancelable || true)
-  event.detail = detail || {}
-
-  try {
-    element.disabled = false
-    element.dispatchEvent(event)
-  } finally {
-    element.disabled = disabled
-  }
-
-  return event
-}
-
-export function toArray(value) {
-  if (Array.isArray(value)) {
-    return value
-  } else if (Array.from) {
-    return Array.from(value)
-  } else {
-    return [].slice.call(value)
-  }
-}
data/app/javascript/active_storage_qinium/direct_upload_controller.js
DELETED
@@ -1,55 +0,0 @@
-import { Controller } from "stimulus";
-import { DirectUpload } from "./direct_upload_controller/direct_upload";
-
-export default class extends Controller {
-  static targets = ['file'];
-  static values = {
-    'url': String
-  }
-
-  initialize(){
-    this.onFileChange = this.onFileChange.bind(this)
-  }
-
-  connect(){
-    this.hiddenInput = document.createElement("input")
-    this.hiddenInput.type = "hidden"
-    this.hiddenInput.name = this.fileTarget.name
-    this.fileTarget.removeAttribute('name')
-    this.fileTarget.insertAdjacentElement("beforebegin", this.hiddenInput)
-    this.fileTarget.addEventListener('change', this.onFileChange)
-  }
-
-  disconnect(){
-    this.fileTarget.removeEventListener('change', this.onFileChange)
-  }
-
-  onFileChange(event){
-    const { target } = event
-    const { files } = target
-    const directUpload = new DirectUpload(files[0], this.urlValue, this)
-
-    directUpload.create((error, attributes) => {
-      if(error){
-        this.hiddenInput.removeAttribute('value')
-      }else{
-        this.hiddenInput.setAttribute('value', attributes.signed_id)
-      }
-    })
-  }
-
-  // DirectUpload delegate
-
-  directUploadWillCreateBlobWithXHR(xhr) {
-    // this.dispatch("before-blob-request", { xhr })
-  }
-
-  directUploadWillStoreFileWithXHR(xhr) {
-    // this.dispatch("before-storage-request", { xhr })
-    xhr.upload.addEventListener("progress", event => this.uploadRequestDidProgress(event))
-  }
-
-  uploadRequestDidProgress(event){
-
-  }
-}