attache 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +181 -0
- data/Rakefile +5 -0
- data/exe/attache +54 -0
- data/lib/attache.rb +21 -0
- data/lib/attache/base.rb +63 -0
- data/lib/attache/boot.rb +36 -0
- data/lib/attache/delete.rb +31 -0
- data/lib/attache/deleteme.log +800 -0
- data/lib/attache/download.rb +86 -0
- data/lib/attache/file_response_body.rb +20 -0
- data/lib/attache/job.rb +32 -0
- data/lib/attache/resize_job.rb +44 -0
- data/lib/attache/tus.rb +53 -0
- data/lib/attache/tus/upload.rb +102 -0
- data/lib/attache/upload.rb +44 -0
- data/lib/attache/version.rb +3 -0
- data/lib/attache/vhost.rb +89 -0
- metadata +272 -0
data/lib/attache/download.rb
ADDED
@@ -0,0 +1,86 @@
require 'connection_pool'

class Attache::Download < Attache::Base
  REMOTE_GEOMETRY = ENV.fetch('REMOTE_GEOMETRY') { 'remote' }
  OUTPUT_EXTENSIONS = %w[png jpg jpeg gif]
  RESIZE_JOB_POOL = ConnectionPool.new(JSON.parse(ENV.fetch('RESIZE_POOL') { '{ "size": 2, "timeout": 60 }' }).symbolize_keys) { Attache::ResizeJob.new }

  def initialize(app)
    @app = app
  end

  def _call(env, config)
    case env['PATH_INFO']
    when %r{\A/view/}
      parse_path_info(env['PATH_INFO']['/view/'.length..-1]) do |dirname, geometry, basename, relpath|
        if geometry == REMOTE_GEOMETRY && config.storage && config.bucket
          headers = config.download_headers.merge({
            'Location' => config.storage_url(relpath: relpath),
            'Cache-Control' => 'private, no-cache',
          })
          return [302, headers, []]
        end

        file = begin
          cachekey = File.join(request_hostname(env), relpath)
          Attache.cache.fetch(cachekey) do
            config.storage_get(relpath: relpath) if config.storage && config.bucket
          end
        rescue Exception # Errno::ECONNREFUSED, OpenURI::HTTPError, Excon::Errors, Fog::Errors::Error
          Attache.logger.error $@
          Attache.logger.error $!
          Attache.logger.error "ERROR REFERER #{env['HTTP_REFERER'].inspect}"
          nil
        end

        unless file
          return [404, config.download_headers, []]
        end

        thumbnail = if geometry == 'original' || geometry == REMOTE_GEOMETRY
          file
        else
          extension = basename.split(/\W+/).last
          extension = OUTPUT_EXTENSIONS.first unless OUTPUT_EXTENSIONS.index(extension.to_s.downcase)
          make_thumbnail_for(file.tap(&:close), geometry, extension)
        end

        headers = {
          'Content-Type' => content_type_of(thumbnail.path),
        }.merge(config.download_headers)

        [200, headers, rack_response_body_for(thumbnail)].tap do
          unless file == thumbnail # cleanup
            File.unlink(thumbnail.path) rescue Errno::ENOENT
          end
        end
      end
    else
      @app.call(env)
    end
  rescue Exception
    Attache.logger.error $@
    Attache.logger.error $!
    Attache.logger.error "ERROR REFERER #{env['HTTP_REFERER'].inspect}"
    [500, { 'X-Exception' => $!.to_s }, []]
  end

  private

  def parse_path_info(geometrypath)
    parts = geometrypath.split('/')
    basename = CGI.unescape parts.pop
    geometry = CGI.unescape parts.pop
    dirname = parts.join('/')
    relpath = File.join(dirname, basename)
    yield dirname, geometry, basename, relpath
  end

  def make_thumbnail_for(file, geometry, extension)
    Attache.logger.info "[POOL] new job"
    RESIZE_JOB_POOL.with do |job|
      job.perform(file, geometry, extension)
    end
  end

end
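
For reference, the `/view/...` route above splits the path from the right: the last segment is the basename, the one before it the geometry, and everything earlier the directory. A minimal standalone sketch of that parsing, with a made-up path:

require 'cgi'

# Restates parse_path_info above with an illustrative path; not part of the gem.
geometrypath = 'users/123/100x100%23/my%20photo.jpg' # PATH_INFO minus the leading "/view/"

parts    = geometrypath.split('/')
basename = CGI.unescape(parts.pop)      # => "my photo.jpg"
geometry = CGI.unescape(parts.pop)      # => "100x100#"
dirname  = parts.join('/')              # => "users/123"
relpath  = File.join(dirname, basename) # => "users/123/my photo.jpg"

puts [dirname, geometry, basename, relpath].inspect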
data/lib/attache/file_response_body.rb
ADDED
@@ -0,0 +1,20 @@
class Attache::FileResponseBody
  def initialize(file, range_start = nil, range_end = nil)
    @file = file
    @range_start = range_start || 0
    @range_end = range_end || File.size(@file.path)
  end

  # adapted from rack/file.rb
  def each
    @file.seek(@range_start)
    remaining_len = @range_end
    while remaining_len > 0
      part = @file.read([8192, remaining_len].min)
      break unless part
      remaining_len -= part.length

      yield part
    end
  end
end
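
This class is a plain Rack response body: the server drives `each` and receives the file in chunks of at most 8 KB. A small usage sketch, assuming `require 'attache'` makes the class available (it will also pull in the gem's other dependencies):

require 'tempfile'
require 'attache' # assumption: the gem's entry point loads Attache::FileResponseBody

Tempfile.create('demo') do |f|
  f.write('x' * 20_000)
  f.flush
  f.rewind

  body = Attache::FileResponseBody.new(f)
  sizes = []
  body.each { |part| sizes << part.bytesize } # same loop a Rack server runs
  p sizes # => [8192, 8192, 3616]
end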
data/lib/attache/job.rb
ADDED
@@ -0,0 +1,32 @@
class Attache::Job
  RETRY_DURATION = ENV.fetch('CACHE_EVICTION_INTERVAL_SECONDS') { 60 }.to_i / 3

  def perform(method, env, args)
    config = Attache::VHost.new(env)
    config.send(method, args.symbolize_keys)
  rescue Exception
    Attache.logger.error $@
    Attache.logger.error $!
    Attache.logger.error [method, args].inspect
    self.class.perform_in(RETRY_DURATION, method, env, args)
  end

  # Background processing setup

  if defined?(::SuckerPunch::Job)
    include ::SuckerPunch::Job
    def later(sec, *args)
      after(sec) { perform(*args) }
    end
    def self.perform_async(*args)
      self.new.async.perform(*args)
    end
    def self.perform_in(duration, *args)
      self.new.async.later(duration, *args)
    end
  else
    include Sidekiq::Worker
    sidekiq_options :queue => :attache_vhost_jobs
    sidekiq_retry_in {|count| RETRY_DURATION} # uncaught exception, retry after RETRY_DURATION
  end
end
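
The worker simply rebuilds an `Attache::VHost` from the serialized `env` hash and re-sends the method. A hedged sketch of the enqueue side (the argument values are illustrative; whether SuckerPunch or Sidekiq executes the job depends on which gem is loaded):

require 'attache' # assumption: loads Attache::Job plus one of the two backends

env  = { 'REMOTE_DIR' => 'myapp' }                        # per-vhost settings, as read by Attache::VHost
args = { 'relpath'  => 'users/42/avatar.jpg',
         'cachekey' => 'example.org/users/42/avatar.jpg' }

# Equivalent to Attache::VHost#async(:storage_create, args): the worker later runs
# Attache::VHost.new(env).send(:storage_create, args.symbolize_keys)
Attache::Job.perform_async(:storage_create, env, args)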
data/lib/attache/resize_job.rb
ADDED
@@ -0,0 +1,44 @@
require 'digest/sha1'

class Attache::ResizeJob
  def perform(closed_file, target_geometry_string, extension)
    t = Time.now
    Attache.logger.info "[POOL] start"
    thumbnail = thumbnail_for(closed_file: closed_file, target_geometry_string: target_geometry_string, extension: extension)
    thumbnail.instance_variable_set('@basename', thumbnail.instance_variable_get('@basename').gsub(/[^\w\.]/, '_'))
    thumbnail.make.tap do
      Attache.logger.info "[POOL] done in #{Time.now - t}s"
    end
  rescue Paperclip::Errors::NotIdentifiedByImageMagickError
    closed_file
  end

  private

  def thumbnail_for(closed_file:, target_geometry_string:, extension:, max: 2048)
    thumbnail = Paperclip::Thumbnail.new(closed_file, geometry: target_geometry_string, format: extension)
    current_geometry = current_geometry_for(thumbnail)
    target_geometry = Paperclip::GeometryParser.new(target_geometry_string).make
    if target_geometry.larger <= max && current_geometry.larger > max
      # optimization:
      #   when users upload "super big files", we can speed things up
      #   by working from a "reasonably large 2048x2048 thumbnail" (<2 seconds)
      #   instead of operating on the original (>10 seconds)
      #   we store this reusably in Attache.cache to persist reboot, but not uploaded to cloud
      working_geometry = "#{max}x#{max}>"
      working_file = Attache.cache.fetch(Digest::SHA1.hexdigest(working_geometry + closed_file.path)) do
        Attache.logger.info "[POOL] generate working_file"
        Paperclip::Thumbnail.new(closed_file, geometry: working_geometry, format: extension).make
      end
      Attache.logger.info "[POOL] use working_file #{working_file.path}"
      thumbnail = Paperclip::Thumbnail.new(working_file.tap(&:close), geometry: target_geometry_string, format: extension)
    end
    thumbnail
  end

  # allow stub in spec
  def current_geometry_for(thumbnail)
    thumbnail.current_geometry.tap(&:auto_orient)
  end

end
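
Resize work is checked out of the `RESIZE_JOB_POOL` defined in download.rb above, so at most `size` conversions run at once. A standalone sketch of that checkout pattern with a stand-in worker (the real pool holds `Attache::ResizeJob` instances and hands `perform` a closed tempfile):

require 'connection_pool'

# Stand-in worker; the gem pools Attache::ResizeJob objects instead.
FakeResizeJob = Struct.new(:name) do
  def perform(path, geometry, extension)
    "#{path} -> #{geometry} as #{extension} (via #{name})"
  end
end

pool = ConnectionPool.new(size: 2, timeout: 60) { FakeResizeJob.new("worker-#{rand(100)}") }

pool.with do |job| # blocks up to 60s waiting for a free worker
  puts job.perform('photo.jpg', '100x100#', 'jpg')
end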
data/lib/attache/tus.rb
ADDED
@@ -0,0 +1,53 @@
class Attache::Tus
  LENGTH_KEYS = %w[Upload-Length Entity-Length]
  OFFSET_KEYS = %w[Upload-Offset Offset]
  METADATA_KEYS = %w[Upload-Metadata Metadata]

  attr_accessor :env, :config

  def initialize(env, config)
    @env = env
    @config = config
  end

  def header_value(keys)
    value = nil
    keys.find {|k| value = env["HTTP_#{k.gsub('-', '_').upcase}"]}
    value
  end

  def upload_length
    header_value LENGTH_KEYS
  end

  def upload_offset
    header_value OFFSET_KEYS
  end

  def upload_metadata
    value = header_value METADATA_KEYS
    Hash[*value.split(/[, ]/)].inject({}) do |h, (k, v)|
      h.merge(k => Base64.decode64(v))
    end
  end

  def resumable_version
    header_value ["Tus-Resumable"]
  end

  def headers_with_cors(headers = {}, offset: nil)
    tus_headers = {
      "Access-Control-Allow-Methods" => "PATCH",
      "Access-Control-Allow-Headers" => "Tus-Resumable, #{LENGTH_KEYS.join(', ')}, #{METADATA_KEYS.join(', ')}, #{OFFSET_KEYS.join(', ')}",
      "Access-Control-Expose-Headers" => "Location, #{OFFSET_KEYS.join(', ')}",
    }
    OFFSET_KEYS.each do |k|
      tus_headers[k] = offset
    end if offset

    # append
    tus_headers.inject(config.headers_with_cors.merge(headers)) do |sum, (k, v)|
      sum.merge(k => [*sum[k], v].join(', '))
    end
  end
end
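
The helpers above only translate tus request headers into the `HTTP_*` keys Rack exposes, and Base64-decode `Upload-Metadata`. A standalone sketch of both conversions with made-up header values:

require 'base64'

env = {
  'HTTP_UPLOAD_LENGTH'   => '12345',
  'HTTP_UPLOAD_OFFSET'   => '0',
  'HTTP_UPLOAD_METADATA' => "filename #{Base64.strict_encode64('cat photo.jpg')}",
  'HTTP_TUS_RESUMABLE'   => '1.0.0',
}

# header_value tries each candidate, e.g. 'Upload-Length' => env['HTTP_UPLOAD_LENGTH']
key = "HTTP_#{'Upload-Length'.gsub('-', '_').upcase}"
puts env[key] # => "12345"

# upload_metadata splits "key base64value" pairs and decodes the values
pairs    = env['HTTP_UPLOAD_METADATA'].split(/[, ]/)
metadata = Hash[*pairs].inject({}) { |h, (k, v)| h.merge(k => Base64.decode64(v)) }
p metadata # => {"filename"=>"cat photo.jpg"}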
data/lib/attache/tus/upload.rb
ADDED
@@ -0,0 +1,102 @@
class Attache::Tus::Upload < Attache::Base
  def initialize(app)
    @app = app
  end

  def _call(env, config)
    case env['PATH_INFO']
    when '/tus/files'
      tus = ::Attache::Tus.new(env, config)
      params = params_of(env) # avoid unnecessary `invalid byte sequence in UTF-8` on `request.params`
      return config.unauthorized unless config.authorized?(params)

      case env['REQUEST_METHOD']
      when 'POST'
        if positive_number?(tus.upload_length)
          relpath = generate_relpath(Attache::Upload.sanitize(tus.upload_metadata['filename'] || params['file']))
          cachekey = File.join(request_hostname(env), relpath)

          bytes_wrote = Attache.cache.write(cachekey, StringIO.new)
          uri = URI.parse(Rack::Request.new(env).url)
          uri.query = (uri.query ? "#{uri.query}&" : '') + "relpath=#{CGI.escape relpath}"
          [201, tus.headers_with_cors('Location' => uri.to_s), []]
        else
          [400, tus.headers_with_cors('X-Exception' => "Bad upload length"), []]
        end

      when 'PATCH'
        relpath = params['relpath']
        cachekey = File.join(request_hostname(env), relpath)
        http_offset = tus.upload_offset
        if positive_number?(env['CONTENT_LENGTH']) &&
           positive_number?(http_offset) &&
           (env['CONTENT_TYPE'] == 'application/offset+octet-stream') &&
           tus.resumable_version.to_s == '1.0.0' &&
           current_offset(cachekey, relpath, config) >= http_offset.to_i

          append_to(cachekey, http_offset, env['rack.input'])
          config.storage_create(relpath: relpath, cachekey: cachekey) if config.storage && config.bucket

          [200,
            tus.headers_with_cors({'Content-Type' => 'text/json'}, offset: current_offset(cachekey, relpath, config)),
            [json_of(relpath, cachekey)],
          ]
        else
          [400, tus.headers_with_cors('X-Exception' => 'Bad headers'), []]
        end

      when 'OPTIONS'
        [201, tus.headers_with_cors, []]

      when 'HEAD'
        relpath = params['relpath']
        cachekey = File.join(request_hostname(env), relpath)
        [200,
          tus.headers_with_cors({'Content-Type' => 'text/json'}, offset: current_offset(cachekey, relpath, config)),
          [json_of(relpath, cachekey)],
        ]

      when 'GET'
        relpath = params['relpath']
        uri = URI.parse(Rack::Request.new(env).url)
        uri.query = nil
        uri.path = File.join('/view', File.dirname(relpath), 'original', CGI.escape(File.basename(relpath)))
        [302, tus.headers_with_cors('Location' => uri.to_s), []]
      end
    else
      @app.call(env)
    end
  rescue Exception
    Attache.logger.error $@
    Attache.logger.error $!
    Attache.logger.error "ERROR REFERER #{env['HTTP_REFERER'].inspect}"
    [500, { 'X-Exception' => $!.to_s }, []]
  end

  private

  def current_offset(cachekey, relpath, config)
    file = Attache.cache.fetch(cachekey) do
      config.storage_get(relpath: relpath) if config.storage && config.bucket
    end
    file.size
  rescue
    Attache.cache.write(cachekey, StringIO.new)
  ensure
    file.tap(&:close)
  end

  def append_to(cachekey, offset, io)
    f = File.open(path_of(cachekey), 'r+b')
    f.sync = true
    f.seek(offset.to_i)
    f.write(io.read)
  ensure
    f.close
  end

  def positive_number?(value)
    (value.to_s == "0" || value.to_i > 0)
  end

end
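
End to end, the endpoint implements the tus 1.0 create-then-append flow: POST announces the length and returns a Location carrying `relpath`, PATCH streams the bytes at the given offset, and HEAD/GET report the offset or redirect to `/view/...`. A hedged client sketch; the host, port, and filename are assumptions, and no SECRET_KEY is configured, so the request is not signed:

require 'net/http'
require 'base64'
require 'uri'

base = URI('http://localhost:9292/tus/files') # assumption: attache mounted locally

create = Net::HTTP::Post.new(base)
create['Tus-Resumable']   = '1.0.0'
create['Upload-Length']   = '11'
create['Upload-Metadata'] = "filename #{Base64.strict_encode64('hello.txt')}"
location = Net::HTTP.start(base.host, base.port) { |http| http.request(create) }['Location']

patch = Net::HTTP::Patch.new(URI(location))
patch['Tus-Resumable'] = '1.0.0'
patch['Upload-Offset'] = '0'
patch['Content-Type']  = 'application/offset+octet-stream'
patch.body = 'hello world'
puts Net::HTTP.start(base.host, base.port) { |http| http.request(patch) }.body # JSON for the stored file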
data/lib/attache/upload.rb
ADDED
@@ -0,0 +1,44 @@
class Attache::Upload < Attache::Base
  def initialize(app)
    @app = app
  end

  def _call(env, config)
    case env['PATH_INFO']
    when '/upload'
      case env['REQUEST_METHOD']
      when 'POST', 'PUT', 'PATCH'
        request = Rack::Request.new(env)
        params = request.params
        return config.unauthorized unless config.authorized?(params)

        relpath = generate_relpath(Attache::Upload.sanitize params['file'])
        cachekey = File.join(request_hostname(env), relpath)

        bytes_wrote = Attache.cache.write(cachekey, request.body)
        if bytes_wrote == 0
          return [500, config.headers_with_cors.merge('X-Exception' => 'Local file failed'), []]
        end

        config.storage_create(relpath: relpath, cachekey: cachekey) if config.storage && config.bucket

        [200, config.headers_with_cors.merge('Content-Type' => 'text/json'), [json_of(relpath, cachekey)]]
      when 'OPTIONS'
        [200, config.headers_with_cors, []]
      else
        [400, config.headers_with_cors, []]
      end
    else
      @app.call(env)
    end
  rescue Exception
    Attache.logger.error $@
    Attache.logger.error $!
    Attache.logger.error "ERROR REFERER #{env['HTTP_REFERER'].inspect}"
    [500, { 'X-Exception' => $!.to_s }, []]
  end

  def self.sanitize(filename)
    filename.to_s.gsub(/\%/, '_')
  end
end
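
When a `SECRET_KEY` is configured, the client must sign its request with the scheme checked by `VHost#hmac_valid?` below: HMAC-SHA1 over the concatenated `uuid` and `expiration`. A sketch of building those query parameters (the secret and filename are illustrative):

require 'openssl'
require 'securerandom'
require 'cgi'

secret_key = 'shared-secret'       # same value as the vhost's SECRET_KEY
uuid       = SecureRandom.uuid
expiration = (Time.now + 600).to_i # must still be in the future when checked
hmac       = OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), secret_key, "#{uuid}#{expiration}")

query = { 'file' => 'avatar.jpg', 'uuid' => uuid, 'expiration' => expiration, 'hmac' => hmac }
          .map { |k, v| "#{k}=#{CGI.escape(v.to_s)}" }.join('&')

puts "PUT /upload?#{query}  (request body = raw file bytes)"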
data/lib/attache/vhost.rb
ADDED
@@ -0,0 +1,89 @@
class Attache::VHost
  attr_accessor :remotedir,
                :secret_key,
                :bucket,
                :storage,
                :download_headers,
                :headers_with_cors,
                :env

  def initialize(hash)
    self.env = hash || {}
    self.remotedir = env['REMOTE_DIR'] # nil means no fixed top level remote directory, and that's fine.
    self.secret_key = env['SECRET_KEY'] # nil means no auth check; anyone can upload a file
    if env['FOG_CONFIG']
      self.bucket = env['FOG_CONFIG'].fetch('bucket')
      self.storage = Fog::Storage.new(env['FOG_CONFIG'].except('bucket').symbolize_keys)
    end
    self.download_headers = {
      "Cache-Control" => "public, max-age=31536000"
    }.merge(env['DOWNLOAD_HEADERS'] || {})
    self.headers_with_cors = {
      'Access-Control-Allow-Origin' => '*',
      'Access-Control-Allow-Methods' => 'POST, PUT',
      'Access-Control-Allow-Headers' => 'Content-Type',
    }.merge(env['UPLOAD_HEADERS'] || {})
  end

  def hmac_for(content)
    OpenSSL::HMAC.hexdigest(OpenSSL::Digest.new('sha1'), secret_key, content)
  end

  def hmac_valid?(params)
    params['uuid'] &&
      params['hmac'] &&
      params['expiration'] &&
      Time.at(params['expiration'].to_i) > Time.now &&
      Rack::Utils.secure_compare(params['hmac'], hmac_for("#{params['uuid']}#{params['expiration']}"))
  end

  def storage_url(args)
    remote_api.new({
      key: File.join(*remotedir, args[:relpath]),
    }).url(Time.now + 60)
  end

  def storage_get(args)
    open storage_url(args)
  end

  def storage_create(args)
    Attache.logger.info "[JOB] uploading #{args[:cachekey].inspect}"
    body = begin
      Attache.cache.read(args[:cachekey])
    rescue Errno::ENOENT
      :no_entry # upload file no longer exist; likely deleted immediately after upload
    end
    unless body == :no_entry
      remote_api.create({
        key: File.join(*remotedir, args[:relpath]),
        body: body,
      })
      Attache.logger.info "[JOB] uploaded #{args[:cachekey]}"
    end
  end

  def storage_destroy(args)
    Attache.logger.info "[JOB] deleting #{args[:relpath]}"
    remote_api.new({
      key: File.join(*remotedir, args[:relpath]),
    }).destroy
    Attache.logger.info "[JOB] deleted #{args[:relpath]}"
  end

  def remote_api
    storage.directories.new(key: bucket).files
  end

  def async(method, args)
    ::Attache::Job.perform_async(method, env, args)
  end

  def authorized?(params)
    secret_key.blank? || hmac_valid?(params)
  end

  def unauthorized
    [401, headers_with_cors.merge('X-Exception' => 'Authorization failed'), []]
  end
end
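
A hedged sketch of the settings hash a vhost is built from. The key names come from the code above; the credential keys inside `FOG_CONFIG` follow fog-aws conventions and, like every value here, are placeholders rather than anything documented in this diff:

vhost_env = {
  'SECRET_KEY' => 'shared-secret',    # nil would disable the HMAC check
  'REMOTE_DIR' => 'myapp-production', # optional top-level prefix inside the bucket
  'DOWNLOAD_HEADERS' => { 'Cache-Control' => 'public, max-age=86400' },
  'UPLOAD_HEADERS'   => { 'Access-Control-Allow-Origin' => 'https://myapp.example' },
  'FOG_CONFIG' => {                   # everything except 'bucket' is passed to Fog::Storage.new
    'provider'              => 'AWS',
    'aws_access_key_id'     => 'AKIA...',
    'aws_secret_access_key' => '...',
    'region'                => 'us-east-1',
    'bucket'                => 'my-attache-bucket',
  },
}

# config = Attache::VHost.new(vhost_env) # needs fog and activesupport loaded
# config.authorized?(params)             # true when params carry a valid uuid/expiration/hmac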