shrine-transloadit 0.5.1 → 1.0.0.beta
This diff compares the contents of two publicly released versions of the package, as they appear in their public registry. It is provided for informational purposes only.
- checksums.yaml +4 -4
- data/README.md +469 -329
- data/lib/shrine/plugins/transloadit.rb +167 -326
- data/shrine-transloadit.gemspec +5 -7
- metadata +19 -43
- data/lib/shrine/plugins/transloadit2.rb +0 -48
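The biggest user-facing change in this release is how the plugin is configured: 0.5.1 took top-level `:auth_key`/`:auth_secret` options (see the removed `self.configure` below), while 1.0.0.beta nests them under a single `:auth` hash and adds a `:credentials` registry that the new export/import steps look up per storage. A rough before/after sketch, assuming S3 storage and a Transloadit template credential named `"s3_store"` (both names are illustrative, not taken from this diff):

```rb
# 0.5.1: credentials are passed as top-level plugin options
plugin :transloadit,
  auth_key:    ENV["TRANSLOADIT_KEY"],
  auth_secret: ENV["TRANSLOADIT_SECRET"]

# 1.0.0.beta: auth is nested under :auth, and each storage maps to a
# Transloadit "Template Credentials" name used by the generated
# /s3/import and /s3/store steps
plugin :transloadit,
  auth:        { key: ENV["TRANSLOADIT_KEY"], secret: ENV["TRANSLOADIT_SECRET"] },
  credentials: { store: :s3_store }
```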
data/lib/shrine/plugins/transloadit.rb

@@ -1,3 +1,5 @@
+# frozen_string_literal: true
+
 require "transloadit"

 require "uri"
@@ -7,405 +9,244 @@ require "openssl"
 class Shrine
   module Plugins
     module Transloadit
-
+      # Transloadit's default destination path for export robots.
+      DEFAULT_PATH = "${unique_prefix}/${file.url_name}"

-      class
-
+      class Error < Shrine::Error
+      end

-
-          @response = response
-          super("#{response["error"]}: #{response["reason"] || response["message"]}")
-        end
+      class InvalidSignature < Error
       end

-
-
-
-
-
-      # If promoting was not yet overriden, it is set to automatically trigger
-      # Transloadit processing defined in `Shrine#transloadit_process`.
-      def self.configure(uploader, opts = {})
-        uploader.opts[:transloadit_auth_key] = opts.fetch(:auth_key, uploader.opts[:transloadit_auth_key])
-        uploader.opts[:transloadit_auth_secret] = opts.fetch(:auth_secret, uploader.opts[:transloadit_auth_secret])
-
-        raise Error, "The :auth_key is required for transloadit plugin" if uploader.opts[:transloadit_auth_key].nil?
-        raise Error, "The :auth_secret is required for transloadit plugin" if uploader.opts[:transloadit_auth_secret].nil?
-
-        uploader.opts[:backgrounding_promote] ||= proc { transloadit_process }
+      LOG_SUBSCRIBER = -> (event) do
+        Shrine.logger.info "Transloadit (#{event.duration}ms) – #{{
+          processor: event[:processor],
+          uploader: event[:uploader],
+        }.inspect}"
       end

-      #
-      def self.
-        uploader.
+      # Accepts Transloadit credentials via `:auth_key` and `:auth_secret`.
+      def self.configure(uploader, log_subscriber: LOG_SUBSCRIBER, **opts)
+        uploader.opts[:transloadit] ||= { processors: {}, savers: {}, credentials: {} }
+        uploader.opts[:transloadit].merge!(opts)
+
+        fail Error, "The :auth option is required" unless uploader.opts[:transloadit][:auth]
+
+        # instrumentation plugin integration
+        uploader.subscribe(:transloadit, &log_subscriber) if uploader.respond_to?(:subscribe)
       end

       module AttacherClassMethods
-
-
-
-
-
-
-
+        def transloadit_processor(name, &block)
+          if block
+            shrine_class.opts[:transloadit][:processors][name.to_sym] = block
+          else
+            shrine_class.opts[:transloadit][:processors][name.to_sym] or
+              fail Error, "transloadit processor #{name.inspect} not registered"
+          end
         end

-
-
-
-
-
-
-
-          attacher = self.load(data)
-          cached_file = attacher.uploaded_file(data["attachment"])
-          attacher.transloadit_save(response, valid: attacher.get == cached_file)
-          attacher
+        def transloadit_saver(name, &block)
+          if block
+            shrine_class.opts[:transloadit][:savers][name.to_sym] = block
+          else
+            shrine_class.opts[:transloadit][:savers][name.to_sym] or
+              fail Error, "transloadit saver #{name.inspect} not registered"
+          end
         end
       end

       module AttacherMethods
-
-
-
-
-
-        # After the Transloadit assembly was submitted, the response is saved
-        # into cached file's metadata, which can then be reloaded at will for
-        # checking progress of the assembly.
-        #
-        # Raises a `Shrine::Plugins::Transloadit::ResponseError` if Transloadit returned an error.
-        def transloadit_process(cached_file = get)
-          assembly = store.transloadit_process(cached_file, context)
-          assembly.options[:fields] ||= {}
-          assembly.options[:fields]["attacher"] = self.dump.merge("attachment" => cached_file.to_json)
-          response = assembly.create!
-          raise ResponseError.new(response.body) if response["error"]
-          cached_file.metadata["transloadit_response"] = response.body.to_json
-          swap(cached_file) or _set(cached_file)
+        def transloadit_process(name, *args)
+          processor = self.class.transloadit_processor(name)
+          instrument_transloadit(name) do
+            instance_exec(*args, &processor)
+          end
         end

-
-
-
-
-        # If attachment has changed in the meanwhile, meaning the result of
-        # this processing is no longer valid, it deletes the processed files
-        # from the main storage.
-        #
-        # Raises a `Shrine::Plugins::Transloadit::ResponseError` if Transloadit returned an error.
-        def transloadit_save(response, valid: true)
-          raise ResponseError.new(response) if response["error"]
-
-          if versions = response["fields"]["versions"]
-            stored_file = versions.inject({}) do |hash, (name, step_name)|
-              results = response["results"].fetch(step_name) { [] }
-              uploaded_files = results.map { |result| store.transloadit_uploaded_file(result) }
-              multiple = response["fields"]["multiple"].to_h[name]
-
-              if multiple == "list"
-                hash.merge!(name => uploaded_files)
-              elsif uploaded_files.one?
-                hash.merge!(name => uploaded_files[0])
-              elsif uploaded_files.empty?
-                hash
-              else
-                raise Error, "Step produced multiple files but wasn't marked as multiple"
-              end
-            end
-          else
-            results = response["results"].values.last
-            uploaded_files = results.map { |result| store.transloadit_uploaded_file(result) }
-            multiple = response["fields"]["multiple"]
-
-            if multiple == "list"
-              stored_file = uploaded_files
-            elsif uploaded_files.one?
-              stored_file = uploaded_files[0]
-            else
-              raise Error, "Step produced multiple files but wasn't marked as multiple"
-            end
-          end
+        def transloadit_save(name, *args)
+          saver = self.class.transloadit_saver(name)
+          instance_exec(*args, &saver)
+        end

-
-
-          else
-            _delete(stored_file, action: :abort)
-          end
+        def transloadit_step(*args)
+          shrine_class.transloadit_step(*args)
         end
-      end

-      module ClassMethods
-        # Creates a new Transloadit client, so that the expiration timestamp is
-        # refreshed on new processing requests.
         def transloadit
-
-            key: opts[:transloadit_auth_key],
-            secret: opts[:transloadit_auth_secret],
-          )
+          shrine_class.transloadit
         end

-
-
-
-
-
-
-          payload = params["transloadit"]
-          algorithm = OpenSSL::Digest.new('sha1')
-          secret = opts[:transloadit_auth_secret]
-          calculated_signature = OpenSSL::HMAC.hexdigest(algorithm, secret, payload)
-          raise Error, "Received signature doesn't match the calculated signature" if calculated_signature != sent_signature
+        private
+
+        def instrument_transloadit(processor, &block)
+          return yield unless shrine_class.respond_to?(:instrument)
+
+          shrine_class.instrument(:transloadit, processor: processor, &block)
         end
       end

-      module
-
-
-
-
-
-        # When doing direct uploads to Transloadit you will only get a
-        # temporary URL, which will be saved in the "id" attribute and it's
-        # expected that the URL storage is used.
-        def transloadit_uploaded_file(result)
-          case url = result.fetch("url")
-          when /amazonaws\.com/
-            raise Error, "Cannot save a processed file which wasn't exported: #{url.inspect}" if url.include?("tmp.transloadit.com")
-            path = URI(url).path
-            id = path.match(%r{^(/#{storage.prefix})?/}).post_match
+      module ClassMethods
+        def transloadit_step(name, robot, use: nil, **options)
+          if Array(use).first.is_a?(::Transloadit::Step)
+            step = transloadit.step(name, robot, **options)
+            step.use(use) if use
+            step
           else
-
+            transloadit.step(name, robot, use: use, **options)
           end
-
-          self.class::UploadedFile.new(
-            "id" => id,
-            "storage" => storage_key.to_s,
-            "metadata" => {
-              "filename" => result.fetch("name"),
-              "size" => result.fetch("size"),
-              "mime_type" => result.fetch("mime"),
-              "width" => (result["meta"] && result["meta"]["width"]),
-              "height" => (result["meta"] && result["meta"]["height"]),
-              "transloadit" => result["meta"],
-            }
-          )
         end

-        #
-        #
-        #
-        def
-
-
-          if defined?(Storage::S3) && io.storage.is_a?(Storage::S3)
-            step = transloadit.step(name, "/s3/import",
-              key: io.storage.client.config.access_key_id,
-              secret: io.storage.client.config.secret_access_key,
-              bucket: io.storage.bucket.name,
-              bucket_region: io.storage.client.config.region,
-              path: [*io.storage.prefix, io.id].join("/"),
-            )
-          elsif uri.scheme == "http" || uri.scheme == "https"
-            step = transloadit.step(name, "/http/import",
-              url: uri.to_s,
-            )
-          elsif uri.scheme == "ftp"
-            step = transloadit.step(name, "/ftp/import",
-              host: uri.host,
-              user: uri.user,
-              password: uri.password,
-              path: uri.path,
-            )
-          else
-            raise Error, "Cannot construct a transloadit import step from #{io.inspect}"
+        # Verifies the Transloadit signature of a webhook request. Raises
+        # `Shrine::Plugins::Transloadit::InvalidSignature` if signatures
+        # don't match.
+        def transloadit_verify!(params)
+          if transloadit_sign(params["transloadit"]) != params["signature"]
+            raise InvalidSignature, "received signature doesn't match calculated"
           end
+        end

-
+        # Creates a new Transloadit client each time. This way the expiration
+        # timestamp is refreshed on new processing requests.
+        def transloadit
+          ::Transloadit.new(**opts[:transloadit][:auth])
+        end

-
+        def transloadit_credentials(storage_key)
+          opts[:transloadit][:credentials][storage_key] or
+            fail Error, "credentials not registered for storage #{storage_key.inspect}"
         end

-
-        # At the moment only Amazon S3 is supported.
-        def transloadit_export_step(name, path: nil, **step_options)
-          if defined?(Storage::S3) && storage.is_a?(Storage::S3)
-            path ||= "${unique_prefix}/${file.url_name}" # Transloadit's default path
-
-            step = transloadit.step(name, "/s3/store",
-              key: storage.client.config.access_key_id,
-              secret: storage.client.config.secret_access_key,
-              bucket: storage.bucket.name,
-              bucket_region: storage.client.config.region,
-              path: [*storage.prefix, path].join("/"),
-            )
-          else
-            raise Error, "Cannot construct a transloadit export step from #{storage.inspect}"
-          end
+        private

-
+        # Signs given string with Transloadit secret key.
+        def transloadit_sign(string)
+          algorithm = OpenSSL::Digest::SHA1.new
+          secret_key = opts[:transloadit][:auth][:secret]

-
+          OpenSSL::HMAC.hexdigest(algorithm, secret_key, string)
         end
+      end

-
-
-
-          file = TransloaditFile.new(transloadit: transloadit)
-          file = file.add_step(transloadit_import_step("import", io)) if io
-          file
+      module InstanceMethods
+        def transloadit_files(results)
+          results.map { |result| transloadit_file(result) }
         end

-
-
-
-
-
-
-
-
-
-
-          when TransloaditFile then transloadit_assembly_update_single!(value, context, options)
-          when Hash then transloadit_assembly_update_versions!(value, context, options)
-          when String then transloadit_assembly_update_template!(value, context, options)
+        def transloadit_file(result)
+          result = result.first if result.is_a?(Array)
+          uri = URI.parse(result.fetch("url"))
+
+          if defined?(Storage::S3) && storage.is_a?(Storage::S3)
+            prefix = "#{storage.prefix}/" if storage.prefix
+            id = uri.path.match(%r{^/#{prefix}})&.post_match or
+              fail Error, "URL path doesn't start with storage prefix: #{uri}"
+          elsif defined?(Storage::Url) && storage.is_a?(Storage::Url)
+            id = uri.to_s
           else
-
+            fail Error, "storage not supported: #{storage.inspect}"
           end

-
-
-
+          metadata = {
+            "filename" => result.fetch("name"),
+            "size" => result.fetch("size"),
+            "mime_type" => result.fetch("mime"),
+          }

-          transloadit
-
+          # merge transloadit's meatadata, but don't let it override ours
+          metadata.merge!(result.fetch("meta")) { |k, v1, v2| v1 }

-
-
-
+          self.class::UploadedFile.new(
+            id: id,
+            storage: storage_key,
+            metadata: metadata,
+          )
         end

-
-
-
-        def transloadit_assembly_update_single!(transloadit_file, context, options)
-          raise Error, "The given TransloaditFile is missing an import step" if !transloadit_file.imported?
-          unless transloadit_file.exported?
-            path = generate_location(transloadit_file, context) + ".${file.ext}"
-            export_step = transloadit_export_step("export", path: path)
-            transloadit_file = transloadit_file.add_step(export_step)
+        def transloadit_export_step(name = "export", **options)
+          unless options.key?(:credentials)
+            options[:credentials] = self.class.transloadit_credentials(storage_key).to_s
           end
-          options[:steps] += transloadit_file.steps
-          options[:fields]["multiple"] = transloadit_file.multiple
-        end

-
-
-
-
-
-
-
-
-            unless transloadit_file.exported?
-              path = generate_location(transloadit_file, context.merge(version: name)) + ".${file.ext}"
-              export_step = transloadit_export_step("export_#{name}", path: path)
-              transloadit_file = transloadit_file.add_step(export_step)
-            end
-            options[:steps] |= transloadit_file.steps
-            options[:fields]["versions"][name] = transloadit_file.name
-            options[:fields]["multiple"][name] = transloadit_file.multiple
+          if defined?(Storage::S3) && storage.is_a?(Storage::S3)
+            transloadit_s3_store_step(name, **options)
+          elsif defined?(Storage::GoogleCloudStorage) && storage.is_a?(Storage::GoogleCloudStorage)
+            transloadit_google_store_step(name, **options)
+          elsif defined?(Storage::YouTube) && storage.is_a?(Storage::YouTube)
+            transloadit_youtube_store_step(name, **options)
+          else
+            fail Error, "cannot construct export step for #{storage.inspect}"
           end
         end

-
-        def transloadit_assembly_update_template!(template, context, options)
-          options[:template_id] = template
-        end
-      end
+        private

-
-
-
-
-        def transloadit_response
-          @transloadit_response ||= (
-            body = metadata["transloadit_response"] or return
-            body.instance_eval { def body; self; end }
-            response = ::Transloadit::Response.new(body)
-            response.extend ::Transloadit::Response::Assembly
-            response
-          )
+        def transloadit_s3_store_step(name, path: DEFAULT_PATH, **options)
+          transloadit_step name, "/s3/store",
+            path: [*storage.prefix, path].join("/"),
+            **options
         end
-      end

-
-
-
-
-          @transloadit = transloadit
-          @steps = steps
-          @multiple = multiple
+        def transloadit_google_store_step(name, path: DEFAULT_PATH, **options)
+          transloadit_step name, "/google/store",
+            path: [*storage.prefix, path].join("/"),
+            **options
         end

-
-
-
-
-        # It returns a new TransloaditFile with the step added.
-        def add_step(*args)
-          if args[0].is_a?(::Transloadit::Step)
-            step = args[0]
-          else
-            step = transloadit.step(*args)
-          end
-
-          unless step.options[:use]
-            step.use steps.last if steps.any?
-          end
+        def transloadit_youtube_store_step(name, **options)
+          transloadit_step name, "/youtube/store",
+            **options
+        end

-
+        def transloadit_step(*args)
+          self.class.transloadit_step(*args)
         end
+      end

-
-        def
-          if
-
+      module FileMethods
+        def transloadit_import_step(name = "import", **options)
+          if defined?(Storage::S3) && storage.is_a?(Storage::S3)
+            transloadit_s3_import_step(name, **options)
+          elsif url && URI(url).is_a?(URI::HTTP)
+            transloadit_http_import_step(name, **options)
+          elsif url && URI(url).is_a?(URI::FTP)
+            transloadit_ftp_import_step(name, **options)
           else
-
+            fail Error, "cannot construct import step from #{self.inspect}"
           end
         end

-
-
-
-
-
-            @steps[-2].name
-          else
-            @steps.last.name
+        private
+
+        def transloadit_s3_import_step(name, **options)
+          unless options.key?(:credentials)
+            options[:credentials] = shrine_class.transloadit_credentials(storage_key).to_s
           end
-        end

-
-
-
+          transloadit_step name, "/s3/import",
+            path: [*storage.prefix, id].join("/"),
+            **options
         end

-
-
-
+        def transloadit_http_import_step(name, **options)
+          transloadit_step name, "/http/import",
+            url: url,
+            **options
         end

-
+        def transloadit_ftp_import_step(name, **options)
+          uri = URI.parse(url)

-
-
-
-
-
+          transloadit_step name, "/ftp/import",
+            host: uri.host,
+            user: uri.user,
+            password: uri.password,
+            path: uri.path,
             **options
-
+        end
+
+        def transloadit_step(*args)
+          shrine_class.transloadit_step(*args)
         end
       end
     end
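Processing also moves from the implicit promote-time `transloadit_process` override to processors and savers that are registered on the attacher and invoked explicitly via `transloadit_process`/`transloadit_save`. A minimal usage sketch based on the methods added above (the `:thumbnail` name, the `/image/resize` step, and the webhook wiring are illustrative assumptions, not part of this diff):

```rb
class ImageUploader < Shrine
  # registered through AttacherClassMethods#transloadit_processor (added above)
  Attacher.transloadit_processor :thumbnail do
    import = file.transloadit_import_step                 # FileMethods
    resize = transloadit_step("resize", "/image/resize", width: 300, use: import)
    export = store.transloadit_export_step(use: resize)   # InstanceMethods

    # build and submit the assembly with the transloadit gem client
    transloadit.assembly(steps: [import, resize, export]).create!
  end

  # registered through AttacherClassMethods#transloadit_saver
  Attacher.transloadit_saver :thumbnail do |results|
    stored_file = store.transloadit_file(results["resize"])
    set(stored_file) # replace the cached attachment with the processed file
  end
end

# e.g. in a webhook endpoint, after ImageUploader.transloadit_verify!(params):
#   attacher.transloadit_process(:thumbnail)        # runs the registered processor
#   attacher.transloadit_save(:thumbnail, results)  # runs the registered saver
```

Both blocks are `instance_exec`'d on the attacher, which is why `file`, `store`, and `set` are available inside them.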