shrine-transloadit 0.5.0 → 1.0.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/README.md +532 -329
- data/lib/shrine/plugins/transloadit.rb +169 -324
- data/shrine-transloadit.gemspec +5 -7
- metadata +17 -47
- data/lib/shrine/plugins/transloadit2.rb +0 -48
data/README.md
CHANGED
@@ -1,474 +1,671 @@

# Shrine::Plugins::Transloadit

Provides [Transloadit] integration for [Shrine], using its [Ruby SDK].

Transloadit is a service that helps you handle file uploads, resize, crop and
watermark your images, make GIFs, transcode your videos, extract thumbnails,
generate audio waveforms and more.

## Contents

* [Installation](#installation)
* [Setup](#setup)
* [Usage](#usage)
* [Notifications](#notifications)
* [Direct uploads](#direct-uploads)
* [Promotion](#promotion)
* [Skipping exports](#skipping-exports)
* [API](#api)
  - [Processor](#processor)
  - [Saver](#saver)
  - [Step](#step)
  - [Import step](#import-step)
  - [Export step](#export-step)
  - [File](#file)
* [Instrumentation](#instrumentation)

## Installation

Put the gem in your Gemfile:

```rb
# Gemfile
gem "shrine-transloadit", "~> 1.0"
```

## Setup

You'll first need to create [credentials] for the storage service you want to
import from and export to. Let's assume you're using S3 and have named the
credentials `s3_store`. Now you can load the `transloadit` plugin, providing
Transloadit key & secret, and mapping credentials to Shrine storages:

```rb
# example storage configuration
Shrine.storages = {
  cache: Shrine::Storage::S3.new(prefix: "cache", **options),
  store: Shrine::Storage::S3.new(**options),
}

# transloadit plugin configuration
Shrine.plugin :transloadit,
  auth: {
    key:    "YOUR_TRANSLOADIT_KEY",
    secret: "YOUR_TRANSLOADIT_SECRET",
  },
  credentials: {
    cache: :s3_store, # use "s3_store" credentials for :cache storage
    store: :s3_store, # use "s3_store" credentials for :store storage
  }

# for storing processed files
Shrine.plugin :derivatives
```
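
The `options` hash above stands in for your S3 credentials. As a minimal
sketch (placeholder values, assuming the standard `Shrine::Storage::S3`
constructor options), it might look like:

```rb
options = {
  bucket:            "my-bucket",
  region:            "my-region",
  access_key_id:     "abc",
  secret_access_key: "xyz",
}
```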

## Usage

The `transloadit` plugin provides helper methods for creating [import][import
robots] and [export][export robots] steps, as well as for parsing out exported
files from results.

Here is a basic example where we kick off transcoding and thumbnail extraction
from an attached video, wait for the assembly to complete, then save the
processed files as derivatives:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor do
    import = file.transloadit_import_step
    encode = transloadit_step "encode", "/video/encode", use: import
    thumbs = transloadit_step "thumbs", "/video/thumbs", use: import
    export = store.transloadit_export_step use: [encode, thumbs]

    assembly = transloadit.assembly(steps: [import, encode, thumbs, export])
    assembly.create!
  end

  Attacher.transloadit_saver do |results|
    transcoded = store.transloadit_file(results["encode"])
    thumbnails = store.transloadit_files(results["thumbs"])

    merge_derivatives(transcoded: transcoded, thumbnails: thumbnails)
  end
end
```
```rb
response = attacher.transloadit_process
response.reload_until_finished!

if response.error?
  # handle error
end

attacher.transloadit_save(response["results"])
attacher.derivatives #=>
# {
#   transcoded: #<Shrine::UploadedFile storage_key=:store ...>,
#   thumbnails: [
#     #<Shrine::UploadedFile storage_key=:store ...>,
#     #<Shrine::UploadedFile storage_key=:store ...>,
#     ...
#   ]
# }
```
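
The `# handle error` branch above is left open. One possible approach, as a
sketch (the exact fields available depend on the assembly response your
assemblies return), is to surface the failure:

```rb
if response.error?
  # "error" and "message" are assumed keys on the assembly response;
  # adjust to whatever your assemblies actually return
  raise "Transloadit processing failed: #{response["error"]} (#{response["message"]})"
end
```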

### Backgrounding

When using [backgrounding], it's probably best to create the assembly after
promotion:

```rb
Shrine.plugin :backgrounding
Shrine::Attacher.promote_block do
  PromoteJob.perform_async(self.class.name, record.class.name, record.id, name, file_data)
end
```
```rb
class PromoteJob
  include Sidekiq::Worker

  def perform(attacher_class, record_class, record_id, name, file_data)
    attacher_class = Object.const_get(attacher_class)
    record = Object.const_get(record_class).find(record_id) # if using Active Record

    attacher = attacher_class.retrieve(model: record, name: name, file: file_data)
    attacher.atomic_promote
    attacher.transloadit_process
    # ...
  rescue Shrine::AttachmentChanged, ActiveRecord::RecordNotFound
  end
end
```

## Notifications

When using [assembly notifications], the attacher data can be sent to the
webhook via `:fields`:

```rb
Attacher.transloadit_processor do
  # ...
  assembly = transloadit.assembly(
    steps: [ ... ],
    notify_url: "https://example.com/webhooks/transloadit",
    fields: {
      attacher: {
        record_class: record.class,
        record_id:    record.id,
        name:         name,
        data:         file_data,
      }
    }
  )
  assembly.create!
end
```

Then in the webhook handler we can load the attacher and [atomically
persist][atomic_helpers] the assembly results. If the attachment has changed
or the record was deleted during processing, we make sure to delete the
processed files:

```rb
post "/transloadit/video" do
  Shrine.transloadit_verify!(params) # verify transloadit signature

  response = JSON.parse(params["transloadit"])

  record_class, record_id, name, file_data = response["fields"]["attacher"].values
  record_class = Object.const_get(record_class)

  attacher    = record_class.send(:"#{name}_attacher")
  derivatives = attacher.transloadit_save(response["results"])

  begin
    record   = record_class.find(record_id)
    attacher = Shrine::Attacher.retrieve(model: record, name: name, file: file_data)

    attacher.merge_derivatives(derivatives)
    attacher.atomic_persist
  rescue Shrine::AttachmentChanged, ActiveRecord::RecordNotFound
    attacher.destroy_attached # delete orphaned processed files
  end

  # return successful response for Transloadit
  status 200
end
```

Note that if you have CSRF protection, make sure that you skip verifying the
CSRF token for this route.
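
For example, in a Rails app (an assumption here, the plugin itself is
framework-agnostic, and the controller name below is made up) the webhook
endpoint could skip the check like this:

```rb
class TransloaditWebhooksController < ApplicationController
  # Transloadit cannot send a CSRF token, so skip the authenticity check here
  skip_before_action :verify_authenticity_token, only: :video

  def video
    # verify the signature and persist the results as shown above
    head :ok
  end
end
```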

## Direct uploads

Transloadit supports client side uploads via [Robodog], an [Uppy]-based
JavaScript library.

If you have an HTML form, you can use Robodog's [Form API][Robodog Form] to add
Transloadit's encoding capabilities to it:

```js
window.Robodog.form('form#myform', {
  params: {
    auth: { key: 'YOUR_TRANSLOADIT_KEY' },
    template_id: 'YOUR_TEMPLATE_ID',
  },
  waitForEncoding: true,
  // ...
})
```

With the above setup, Robodog will send the assembly results to your controller
in the `transloadit` param, which we can parse out and save to our record. See
the [demo app] for an example of doing this.
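
As a rough sketch of such a controller action (the controller, model, and the
exact shape of the `transloadit` param are assumptions here, check what your
form actually submits):

```rb
class VideosController < ApplicationController
  def create
    video = Video.new

    # Robodog submits the assembly result(s) as JSON in params["transloadit"]
    assembly = JSON.parse(params["transloadit"])
    assembly = assembly.first if assembly.is_a?(Array)

    video.file_attacher.transloadit_save(assembly["results"])
    video.save!

    redirect_to video
  end
end
```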

## Promotion

If you want Transloadit to also upload your cached original file to permanent
storage, you can skip promotion on the Shrine side:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor do
    import = file.transloadit_import_step
    encode = transloadit_step "encode", "/video/encode", use: import
    thumbs = transloadit_step "thumbs", "/video/thumbs", use: import
    export = store.transloadit_export_step use: [import, encode, thumbs] # include original

    assembly = transloadit.assembly(steps: [import, encode, thumbs, export])
    assembly.create!
  end

  Attacher.transloadit_saver do |results|
    stored     = store.transloadit_file(results["import"])
    transcoded = store.transloadit_file(results["encode"])
    thumbnails = store.transloadit_files(results["thumbs"])

    set(stored) # set promoted file
    merge_derivatives(transcoded: transcoded, thumbnails: thumbnails)
  end
end
```
```rb
class PromoteJob
  include Sidekiq::Worker

  def perform(attacher_class, record_class, record_id, name, file_data)
    attacher_class = Object.const_get(attacher_class)
    record = Object.const_get(record_class).find(record_id) # if using Active Record

    attacher = attacher_class.retrieve(model: record, name: name, file: file_data)

    response = attacher.transloadit_process
    response.reload_until_finished!

    if response.error?
      # handle error
    end

    attacher.transloadit_save(response["results"])
    attacher.atomic_persist attacher.uploaded_file(file_data)
  rescue Shrine::AttachmentChanged, ActiveRecord::RecordNotFound
    attacher&.destroy_attached # delete orphaned processed files
  end
end
```

## Skipping exports

If you want to use Transloadit only for processing, and prefer to store the
results yourself, you can do so with the help of the [shrine-url] gem.

```rb
# Gemfile
gem "shrine-url"
```
```rb
# ...
require "shrine/storage/url"

Shrine.storages = {
  # ...
  url: Shrine::Storage::Url.new,
}
```

If you don't specify an export step, Transloadit will return processed files
uploaded to Transloadit's temporary storage. You can load these results using
the `:url` storage, and then upload them to your permanent storage:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor do
    import = file.transloadit_import_step
    encode = transloadit_step "encode", "/video/encode", use: import
    thumbs = transloadit_step "thumbs", "/video/thumbs", use: import
    # no export step

    assembly = transloadit.assembly(steps: [import, encode, thumbs])
    assembly.create!
  end

  Attacher.transloadit_saver do |results|
    url        = shrine_class.new(:url)
    transcoded = url.transloadit_file(results["encode"])
    thumbnails = url.transloadit_files(results["thumbs"])

    # results are uploaded to Transloadit's temporary storage
    transcoded #=> #<Shrine::UploadedFile @storage_key=:url @id="https://tmp.transloadit.com/..." ...>
    thumbnails #=> [#<Shrine::UploadedFile @storage_key=:url @id="https://tmp.transloadit.com/..." ...>, ...]

    # upload results to permanent storage
    add_derivatives(transcoded: transcoded, thumbnails: thumbnails)
  end
end
```
```rb
response = attacher.transloadit_process
response.reload_until_finished!

if response.error?
  # handle error
end

attacher.transloadit_save(response["results"])
attacher.derivatives #=>
# {
#   transcoded: #<Shrine::UploadedFile storage_key=:store ...>,
#   thumbnails: [
#     #<Shrine::UploadedFile storage_key=:store ...>,
#     #<Shrine::UploadedFile storage_key=:store ...>,
#     ...
#   ]
# }
```

## API

### Processor

The processor is just a block registered under an identifier, which is expected
to create a Transloadit assembly:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor :video do
    # ...
  end
end
```

It is executed when `Attacher#transloadit_process` is called:

```rb
attacher.transloadit_process(:video) # calls :video processor
```

Any arguments passed to the processor will be given to the block:

```rb
attacher.transloadit_process(:video, foo: "bar")
```
```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor :video do |options|
    options #=> { :foo => "bar" }
  end
end
```

The processor block is executed in the context of a `Shrine::Attacher` instance:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_processor :video do
    self #=> #<Shrine::Attacher>

    record #=> #<Video>
    name   #=> :file
    file   #=> #<Shrine::UploadedFile>
  end
end
```

### Saver

The saver is just a block registered under an identifier, which is expected to
save the given Transloadit results into the attacher:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_saver :video do |results|
    # ...
  end
end
```

It is executed when `Attacher#transloadit_save` is called:

```rb
attacher.transloadit_save(:video, results) # calls :video saver
```

Any arguments passed to the saver will be given to the block:

```rb
attacher.transloadit_save(:video, results, foo: "bar")
```
```rb
class VideoUploader < Shrine
  Attacher.transloadit_saver :video do |results, options|
    options #=> { :foo => "bar" }
  end
end
```

The saver block is executed in the context of a `Shrine::Attacher` instance:

```rb
class VideoUploader < Shrine
  Attacher.transloadit_saver :video do |results|
    self #=> #<Shrine::Attacher>

    record #=> #<Video>
    name   #=> :file
    file   #=> #<Shrine::UploadedFile>
  end
end
```

### Step

You can generate `Transloadit::Step` objects with `Shrine.transloadit_step`:

```rb
Shrine.transloadit_step "my_name", "/my/robot", **options
#=> #<Transloadit::Step name="my_name", robot="/my/robot", options={...}>
```

This method adds the ability to pass another `Transloadit::Step` object as the
`:use` parameter:

```rb
step_one = Shrine.transloadit_step "one", "/robot/one"
step_two = Shrine.transloadit_step "two", "/robot/two", use: step_one
step_two.options[:use] #=> ["one"]
```
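
Continuing the example, passing an array of steps to `:use` (as the Usage
section does with `use: [encode, thumbs]`) should behave the same way; a small
sketch, with the resulting value assumed here:

```rb
step_three = Shrine.transloadit_step "three", "/robot/three", use: [step_one, step_two]
step_three.options[:use] #=> ["one", "two"]
```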

### Import step

The `Shrine::UploadedFile#transloadit_import_step` method generates an import
step for the uploaded file:

```rb
file = Shrine.upload(io, :store)
file.storage #=> #<Shrine::Storage::S3>
file.id      #=> "foo"

step = file.transloadit_import_step

step #=> #<Transloadit::Step ...>
step.name  #=> "import"
step.robot #=> "/s3/import"

step.options[:path]        #=> "foo"
step.options[:credentials] #=> :s3_store (inferred from the plugin setting)
```

You can change the default step name:

```rb
step = file.transloadit_import_step("my_import")
step.name #=> "my_import"
```

You can also pass step options:

```rb
step = file.transloadit_import_step(ignore_errors: ["meta"])
step.options[:ignore_errors] #=> ["meta"]
```

The following import robots are currently supported:

| Robot | Description |
| :----------- | :---------- |
| `/s3/import` | activated for `Shrine::Storage::S3` |
| `/http/import` | activated for any other storage which returns HTTP(S) URLs |
| `/ftp/import` | activated for any other storage which returns FTP URLs |
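
For example, based on the table above, a file on a URL-backed storage (such as
the `:url` storage from the earlier sections) should produce an `/http/import`
step; a sketch with an assumed file:

```rb
# a file whose storage serves HTTP(S) URLs
file = Shrine.uploaded_file("id" => "https://example.com/video.mp4", "storage" => "url")

step = file.transloadit_import_step
step.robot #=> "/http/import"
```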

### Export step

The `Shrine#transloadit_export_step` method generates an export step for the
underlying storage:

```rb
uploader = Shrine.new(:store)
uploader.storage #=> #<Shrine::Storage::S3>

step = uploader.transloadit_export_step

step #=> #<Transloadit::Step ...>
step.name  #=> "export"
step.robot #=> "/s3/store"

step.options[:credentials] #=> :s3_store (inferred from the plugin setting)
```

You can change the default step name:

```rb
step = uploader.transloadit_export_step("my_export")
step.name #=> "my_export"
```

You can also pass step options:

```rb
step = uploader.transloadit_export_step(acl: "public-read")
step.options[:acl] #=> "public-read"
```

The following export robots are currently supported:

| Robot | Description |
| :---- | :---------- |
| `/s3/store` | activated for `Shrine::Storage::S3` |
| `/google/store` | activated for [`Shrine::Storage::GoogleCloudStorage`][shrine-gcs] |
| `/youtube/store` | activated for [`Shrine::Storage::YouTube`][shrine-youtube] |

### File

The `Shrine#transloadit_file` method will convert a Transloadit result hash
into a `Shrine::UploadedFile` object:

```rb
uploader = Shrine.new(:store)
uploader.storage #=> #<Shrine::Storage::S3>

file = uploader.transloadit_file(
  "url" => "https://my-bucket.s3.amazonaws.com/foo",
  # ...
)

file #=> #<Shrine::UploadedFile @id="foo" @storage_key=:store ...>

file.storage #=> #<Shrine::Storage::S3>
file.id      #=> "foo"
```

You can use the plural `Shrine#transloadit_files` to convert an array of
results:

```rb
files = uploader.transloadit_files [
  { "url" => "https://my-bucket.s3.amazonaws.com/foo", ... },
  { "url" => "https://my-bucket.s3.amazonaws.com/bar", ... },
  { "url" => "https://my-bucket.s3.amazonaws.com/baz", ... },
]

files #=>
# [
#   #<Shrine::UploadedFile @id="foo" @storage_key=:store ...>,
#   #<Shrine::UploadedFile @id="bar" @storage_key=:store ...>,
#   #<Shrine::UploadedFile @id="baz" @storage_key=:store ...>,
# ]
```

It will include basic metadata:

```rb
file = uploader.transloadit_file(
  # ...
  "name" => "matrix.mp4",
  "size" => 44198,
  "mime" => "video/mp4",
)

file.original_filename #=> "matrix.mp4"
file.size              #=> 44198
file.mime_type         #=> "video/mp4"
```

It will also merge any custom metadata:

```rb
file = uploader.transloadit_file(
  # ...
  "meta" => { "duration" => 9000, ... },
)

file["duration"] #=> 9000
```

Currently only `Shrine::Storage::S3` is supported. However, you can still
handle other remote files using [`Shrine::Storage::Url`][shrine-url]:

```rb
Shrine.storages = {
  # ...
  url: Shrine::Storage::Url.new,
}
```
```rb
uploader = Shrine.new(:url)
uploader.storage #=> #<Shrine::Storage::Url>

file = uploader.transloadit_file(
  "url" => "https://example.com/foo",
  # ...
)

file.id #=> "https://example.com/foo"
```

## Instrumentation

If the `instrumentation` plugin has been loaded, the `transloadit` plugin adds
instrumentation around triggering processing.

```rb
# instrumentation plugin needs to be loaded *before* transloadit
plugin :instrumentation
plugin :transloadit
```

Calling the processor will trigger a `transloadit.shrine` event with the
following payload:

| Key | Description |
| :---- | :---------- |
| `:processor` | Name of the processor |
| `:uploader` | The uploader class that sent the event |
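
If you want to react to these events yourself, you can also subscribe to them
through the notifications backend; a sketch, assuming the instrumentation
plugin is using its default ActiveSupport::Notifications backend:

```rb
ActiveSupport::Notifications.subscribe("transloadit.shrine") do |name, start, finish, id, payload|
  # payload contains :processor and :uploader, as listed above
  Shrine.logger.info "#{payload[:uploader]} ran Transloadit processor #{payload[:processor]} in #{(finish - start).round(2)}s"
end
```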

A default log subscriber is added as well which logs these events:

```
Transloadit (1238ms) – {:processor=>:video, :uploader=>VideoUploader}
```

You can also use your own log subscriber:

```rb
plugin :transloadit, log_subscriber: -> (event) {
  Shrine.logger.info JSON.generate(name: event.name, duration: event.duration, **event.payload)
}
```
```
{"name":"transloadit","duration":1238,"processor":"video","uploader":"VideoUploader"}
```

Or disable logging altogether:

```rb
plugin :transloadit, log_subscriber: nil
```

## Contributing

Tests are run with:

```sh
$ bundle exec rake test
```

@@ -480,12 +677,18 @@

[Shrine]: https://github.com/shrinerb/shrine
[Transloadit]: https://transloadit.com/
[Ruby SDK]: https://github.com/transloadit/ruby-sdk
[credentials]: https://transloadit.com/docs/#16-template-credentials
[import robots]: https://transloadit.com/docs/transcoding/#overview-service-file-importing
[export robots]: https://transloadit.com/docs/transcoding/#overview-service-file-exporting
[derivatives]: https://github.com/shrinerb/shrine/blob/master/doc/plugins/derivatives.md#readme
[assembly notifications]: https://transloadit.com/docs/#24-assembly-notifications
[backgrounding]: https://github.com/shrinerb/shrine/blob/master/doc/plugins/backgrounding.md#readme
[shrine-url]: https://github.com/shrinerb/shrine-url
[Robodog]: https://uppy.io/docs/robodog/
[Robodog Form]: https://uppy.io/docs/robodog/form/
[Uppy]: https://uppy.io/
[atomic_helpers]: https://github.com/shrinerb/shrine/blob/master/doc/plugins/atomic_helpers.md#readme
[shrine-gcs]: https://github.com/renchap/shrine-google_cloud_storage
[shrine-youtube]: https://github.com/thedyrt/shrine-storage-you_tube
|