active_encode 0.7.0 → 0.8.0
This diff shows the changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/.rubocop.yml +1 -0
- data/.rubocop_todo.yml +1 -0
- data/Gemfile +2 -1
- data/README.md +69 -0
- data/active_encode.gemspec +5 -2
- data/lib/active_encode/engine_adapters.rb +1 -0
- data/lib/active_encode/engine_adapters/elastic_transcoder_adapter.rb +1 -1
- data/lib/active_encode/engine_adapters/ffmpeg_adapter.rb +12 -2
- data/lib/active_encode/engine_adapters/media_convert_adapter.rb +372 -0
- data/lib/active_encode/engine_adapters/media_convert_output.rb +104 -0
- data/lib/active_encode/polling.rb +1 -1
- data/lib/active_encode/spec/shared_specs.rb +2 -0
- data/{spec/shared_specs/engine_adapter_specs.rb → lib/active_encode/spec/shared_specs/engine_adapter.rb} +36 -36
- data/lib/active_encode/version.rb +1 -1
- data/lib/file_locator.rb +1 -1
- data/spec/fixtures/media_convert/endpoints.json +1 -0
- data/spec/fixtures/media_convert/job_canceled.json +412 -0
- data/spec/fixtures/media_convert/job_canceling.json +1 -0
- data/spec/fixtures/media_convert/job_completed.json +359 -0
- data/spec/fixtures/media_convert/job_completed_detail.json +1 -0
- data/spec/fixtures/media_convert/job_completed_detail_query.json +1 -0
- data/spec/fixtures/media_convert/job_created.json +408 -0
- data/spec/fixtures/media_convert/job_failed.json +406 -0
- data/spec/fixtures/media_convert/job_progressing.json +414 -0
- data/spec/integration/elastic_transcoder_adapter_spec.rb +4 -4
- data/spec/integration/ffmpeg_adapter_spec.rb +2 -2
- data/spec/integration/matterhorn_adapter_spec.rb +39 -39
- data/spec/integration/media_convert_adapter_spec.rb +126 -0
- data/spec/integration/pass_through_adapter_spec.rb +2 -2
- data/spec/integration/zencoder_adapter_spec.rb +198 -198
- data/spec/spec_helper.rb +0 -1
- data/spec/units/core_spec.rb +17 -17
- data/spec/units/file_locator_spec.rb +1 -1
- data/spec/units/global_id_spec.rb +8 -8
- data/spec/units/persistence_spec.rb +9 -9
- metadata +87 -23
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: e36b69edd440383a39009cc2a331f6cda32974e62fde4961353e5abd80e2bc0b
+  data.tar.gz: ce26e5a14a08c96b708d6fea08ff127606f9da7023a674444037374cf9812ef9
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d82bbc86cfc93cc33d699fb943c5b1caab27bc76b8149a599a61514f5257572aa3f0239ab617062f5b9ed4ef5b3874b9764f6786b0700fe2fa08625b917ead07
+  data.tar.gz: 53aabadb2c2337ac2cf8725ca0476b5afb9446aa8102aa156e93913d978317ceece57facf2bce0775fb8d5e3cf6e5a9ad47538c15006aed40d7bb94b98e301ce
data/.rubocop.yml
CHANGED
data/.rubocop_todo.yml
CHANGED
data/Gemfile
CHANGED
data/README.md
CHANGED
@@ -124,6 +124,75 @@ end
 
 Engine adapters are shims between ActiveEncode and the back end encoding service. You can add an additional engine by creating an engine adapter class that implements `:create`, `:find`, and `:cancel` and passes the shared specs.
 
+For example:
+```ruby
+# In your application at:
+# lib/active_encode/engine_adapters/my_custom_adapter.rb
+module ActiveEncode
+  module EngineAdapters
+    class MyCustomAdapter
+      def create(input_url, options = {})
+        # Start a new encoding job. This may be an external service, or a
+        # locally queued job.
+
+        # Return an instance of ActiveEncode::Base (or subclass) that represents
+        # the encoding job that was just started.
+      end
+
+      def find(id, opts = {})
+        # Find the encoding job for the given parameters.
+
+        # Return an instance of ActiveEncode::Base (or subclass) that represents
+        # the found encoding job.
+      end
+
+      def cancel(id)
+        # Cancel the encoding job for the given id.
+
+        # Return an instance of ActiveEncode::Base (or subclass) that represents
+        # the canceled job.
+      end
+    end
+  end
+end
+```
+Then, use the shared specs...
+```ruby
+# In your application at...
+# spec/lib/active_encode/engine_adapters/my_custom_adapter_spec.rb
+require 'spec_helper'
+require 'active_encode/spec/shared_specs'
+RSpec.describe MyCustomAdapter do
+  let(:created_job) {
+    # an instance of ActiveEncode::Base representing a newly created encode job
+  }
+  let(:running_job) {
+    # an instance of ActiveEncode::Base representing a running encode job
+  }
+  let(:canceled_job) {
+    # an instance of ActiveEncode::Base representing a canceled encode job
+  }
+  let(:completed_job) {
+    # an instance of ActiveEncode::Base representing a completed encode job
+  }
+  let(:failed_job) {
+    # an instance of ActiveEncode::Base representing a failed encode job
+  }
+  let(:completed_tech_metadata) {
+    # a hash representing completed technical metadata
+  }
+  let(:completed_output) {
+    # data representing completed output
+  }
+  let(:failed_tech_metadata) {
+    # a hash representing failed technical metadata
+  }
+
+  # Run the shared specs.
+  it_behaves_like 'an ActiveEncode::EngineAdapter'
+end
+```
+
 # Acknowledgments
 
 This software has been developed by and is brought to you by the Samvera community. Learn more at the
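The README example above shows the adapter class and its shared-spec coverage but not how an application selects it. A minimal wiring sketch, assuming the hypothetical `MyCustomAdapter` from the example and the usual symbol-based adapter lookup under `ActiveEncode::EngineAdapters`:

```ruby
# config/initializers/active_encode.rb (hypothetical location)
require 'active_encode/engine_adapters/my_custom_adapter'

# :my_custom is assumed to resolve to ActiveEncode::EngineAdapters::MyCustomAdapter.
ActiveEncode::Base.engine_adapter = :my_custom

# The standard API now delegates to the custom adapter's create/find/cancel.
encode = ActiveEncode::Base.create("file:///path/to/source.mp4")
ActiveEncode::Base.find(encode.id)
```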
data/active_encode.gemspec
CHANGED
@@ -22,7 +22,11 @@ Gem::Specification.new do |spec|
 
   spec.add_dependency "rails"
 
-  spec.add_development_dependency "aws-sdk"
+  spec.add_development_dependency "aws-sdk-cloudwatchevents"
+  spec.add_development_dependency "aws-sdk-cloudwatchlogs"
+  spec.add_development_dependency "aws-sdk-elastictranscoder"
+  spec.add_development_dependency "aws-sdk-mediaconvert"
+  spec.add_development_dependency "aws-sdk-s3"
   spec.add_development_dependency "bixby", '~> 1.0.0'
   spec.add_development_dependency "bundler"
   spec.add_development_dependency "coveralls"
@@ -30,7 +34,6 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency "engine_cart", "~> 2.2"
   spec.add_development_dependency "rake"
   spec.add_development_dependency "rspec"
-  spec.add_development_dependency "rspec-its"
   spec.add_development_dependency 'rspec_junit_formatter'
   spec.add_development_dependency "rspec-rails"
 
data/lib/active_encode/engine_adapters/ffmpeg_adapter.rb
CHANGED
@@ -34,7 +34,13 @@ module ActiveEncode
         FileUtils.mkdir_p working_path("outputs", new_encode.id)
 
         # Extract technical metadata from input file
-
+        curl_option = if options && options[:headers]
+                        headers = options[:headers].map { |k, v| "#{k}: #{v}" }
+                        (["--File_curl=HttpHeader"] + headers).join(",").yield_self { |s| "'#{s}'" }
+                      else
+                        ""
+                      end
+        `#{MEDIAINFO_PATH} #{curl_option} --Output=XML --LogFile=#{working_path("input_metadata", new_encode.id)} "#{input_url}"`
         new_encode.input = build_input new_encode
 
         if new_encode.input.duration.blank?
@@ -206,7 +212,11 @@ module ActiveEncode
           file_name = "outputs/#{sanitized_filename}-#{output[:label]}.#{output[:extension]}"
           " #{output[:ffmpeg_opt]} #{working_path(file_name, id)}"
         end.join(" ")
-
+        header_opt = Array(opts[:headers]).map do |k, v|
+          "#{k}: #{v}\r\n"
+        end.join
+        header_opt = "-headers '#{header_opt}'" if header_opt.present?
+        "#{FFMPEG_PATH} #{header_opt} -y -loglevel error -progress #{working_path('progress', id)} -i \"#{input_url}\" #{output_opt}"
       end
 
       def sanitize_base(input_url)
data/lib/active_encode/engine_adapters/media_convert_adapter.rb
ADDED
@@ -0,0 +1,372 @@
+# frozen_string_literal: true
+require 'active_encode/engine_adapters/media_convert_output.rb'
+require 'active_support/core_ext/integer/time'
+require 'addressable/uri'
+require 'aws-sdk-cloudwatchevents'
+require 'aws-sdk-cloudwatchlogs'
+require 'aws-sdk-mediaconvert'
+require 'file_locator'
+
+require 'active_support/json'
+require 'active_support/time'
+
+module ActiveEncode
+  module EngineAdapters
+    class MediaConvertAdapter
+      # [AWS Elemental MediaConvert](https://aws.amazon.com/mediaconvert/) doesn't provide detailed
+      # output information in the job description that can be pulled directly from the service.
+      # Instead, it provides that information along with the job status notification when the job
+      # status changes to `COMPLETE`. The only way to capture that notification is through an [Amazon
+      # Eventbridge](https://aws.amazon.com/eventbridge/) rule that forwards the status change
+      # notification to another service for capture and/or handling.
+      #
+      # `ActiveEncode::EngineAdapters::MediaConvert` does this by creating a [CloudWatch Logs]
+      # (https://aws.amazon.com/cloudwatch/) log group and an EventBridge rule to forward status
+      # change notifications to the log group. It can then find the log entry containing the output
+      # details later when the job is complete. This is accomplished by calling the idempotent
+      # `#setup!` method.
+      #
+      # The AWS user/role calling the `#setup!` method will require permissions to create the
+      # necessary CloudWatch and EventBridge resources, and the role passed to the engine adapter
+      # will need access to any S3 buckets where files will be read from or written to during
+      # transcoding.
+      #
+      # Configuration example:
+      #
+      #   ActiveEncode::Base.engine_adapter = :media_convert
+      #   ActiveEncode::Base.engine_adapter.role = 'arn:aws:iam::123456789012:role/service-role/MediaConvert_Default_Role'
+      #   ActiveEncode::Base.engine_adapter.output_bucket = 'output-bucket'
+      #   ActiveEncode::Base.engine_adapter.setup!
+
+      JOB_STATES = {
+        "SUBMITTED" => :running, "PROGRESSING" => :running, "CANCELED" => :cancelled,
+        "ERROR" => :failed, "COMPLETE" => :completed
+      }.freeze
+
+      OUTPUT_GROUP_TEMPLATES = {
+        hls: { min_segment_length: 0, segment_control: "SEGMENTED_FILES", segment_length: 10 },
+        dash_iso: { fragment_length: 2, segment_control: "SEGMENTED_FILES", segment_length: 30 },
+        file: {},
+        ms_smooth: { fragment_length: 2 },
+        cmaf: { fragment_length: 2, segment_control: "SEGMENTED_FILES", segment_length: 10 }
+      }.freeze
+
+      attr_accessor :role, :output_bucket
+      attr_writer :log_group, :queue
+
+      def setup!
+        rule_name = "active-encode-mediaconvert-#{queue}"
+        return true if event_rule_exists?(rule_name)
+
+        queue_arn = mediaconvert.get_queue(name: queue).queue.arn
+
+        event_pattern = {
+          source: ["aws.mediaconvert"],
+          "detail-type": ["MediaConvert Job State Change"],
+          detail: {
+            queue: [queue_arn]
+          }
+        }
+
+        log_group_arn = create_log_group(log_group).arn
+
+        cloudwatch_events.put_rule(
+          name: rule_name,
+          event_pattern: event_pattern.to_json,
+          state: "ENABLED",
+          description: "Forward MediaConvert job state changes from queue #{queue} to #{log_group}"
+        )
+
+        cloudwatch_events.put_targets(
+          rule: rule_name,
+          targets: [
+            {
+              id: "Id#{SecureRandom.uuid}",
+              arn: log_group_arn
+            }
+          ]
+        )
+        true
+      end
+
+      # Required options:
+      #
+      # * `output_prefix`: The S3 key prefix to use as the base for all outputs.
+      #
+      # * `outputs`: An array of `{preset, modifier}` options defining how to transcode and name the outputs.
+      #
+      # Optional options:
+      #
+      # * `masterfile_bucket`: The bucket to which file-based inputs will be copied before
+      #                        being passed to MediaConvert. Also used for S3-based inputs
+      #                        unless `use_original_url` is specified.
+      #
+      # * `use_original_url`: If `true`, any S3 URL passed in as input will be passed directly to
+      #                       MediaConvert as the file input instead of copying the source to
+      #                       the `masterfile_bucket`.
+      #
+      # Example:
+      #   {
+      #     output_prefix: "path/to/output/files",
+      #     outputs: [
+      #       {preset: "System-Avc_16x9_1080p_29_97fps_8500kbps", modifier: "-1080"},
+      #       {preset: "System-Avc_16x9_720p_29_97fps_5000kbps", modifier: "-720"},
+      #       {preset: "System-Avc_16x9_540p_29_97fps_3500kbps", modifier: "-540"}
+      #     ]
+      #   }
+      # }
+      def create(input_url, options = {})
+        input_url = s3_uri(input_url, options)
+
+        input = options[:media_type] == :audio ? make_audio_input(input_url) : make_video_input(input_url)
+
+        create_job_params = {
+          role: role,
+          settings: {
+            inputs: [input],
+            output_groups: make_output_groups(options)
+          }
+        }
+
+        response = mediaconvert.create_job(create_job_params)
+        job = response.job
+        build_encode(job)
+      end
+
+      def find(id, _opts = {})
+        response = mediaconvert.get_job(id: id)
+        job = response.job
+        build_encode(job)
+      rescue Aws::MediaConvert::Errors::NotFound
+        raise ActiveEncode::NotFound, "Job #{id} not found"
+      end
+
+      def cancel(id)
+        mediaconvert.cancel_job(id: id)
+        find(id)
+      end
+
+      def log_group
+        @log_group ||= "/aws/events/active-encode/mediaconvert/#{queue}"
+      end
+
+      def queue
+        @queue ||= "Default"
+      end
+
+      private
+
+      def build_encode(job)
+        return nil if job.nil?
+        encode = ActiveEncode::Base.new(job.settings.inputs.first.file_input, {})
+        encode.id = job.id
+        encode.input.id = job.id
+        encode.state = JOB_STATES[job.status]
+        encode.current_operations = [job.current_phase].compact
+        encode.created_at = job.timing.submit_time
+        encode.updated_at = job.timing.finish_time || job.timing.start_time || encode.created_at
+        encode.percent_complete = convert_percent_complete(job)
+        encode.errors = [job.error_message].compact
+
+        encode.input.created_at = encode.created_at
+        encode.input.updated_at = encode.updated_at
+
+        encode.output = encode.state == :completed ? convert_output(job) : []
+        encode
+      end
+
+      def convert_percent_complete(job)
+        case job.status
+        when "SUBMITTED"
+          5
+        when "PROGRESSING"
+          job.job_percent_complete
+        when "CANCELED", "ERROR"
+          50
+        when "COMPLETE"
+          100
+        else
+          0
+        end
+      end
+
+      def convert_output(job)
+        results = get_encode_results(job)
+        settings = job.settings.output_groups.first.outputs
+
+        outputs = results.dig('detail', 'outputGroupDetails', 0, 'outputDetails').map.with_index do |detail, index|
+          tech_md = MediaConvertOutput.tech_metadata(settings[index], detail)
+          output = ActiveEncode::Output.new
+
+          output.created_at = job.timing.submit_time
+          output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+
+          [:width, :height, :frame_rate, :duration, :checksum, :audio_codec, :video_codec,
+           :audio_bitrate, :video_bitrate, :file_size, :label, :url, :id].each do |field|
+            output.send("#{field}=", tech_md[field])
+          end
+          output.id ||= "#{job.id}-output#{tech_md[:suffix]}"
+          output
+        end
+
+        adaptive_playlist = results.dig('detail', 'outputGroupDetails', 0, 'playlistFilePaths', 0)
+        unless adaptive_playlist.nil?
+          output = ActiveEncode::Output.new
+          output.created_at = job.timing.submit_time
+          output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+          output.id = "#{job.id}-output-auto"
+
+          [:duration, :audio_codec, :video_codec].each do |field|
+            output.send("#{field}=", outputs.first.send(field))
+          end
+          output.label = File.basename(adaptive_playlist)
+          output.url = adaptive_playlist
+          outputs << output
+        end
+        outputs
+      end
+
+      def get_encode_results(job)
+        start_time = job.timing.submit_time
+        end_time = job.timing.finish_time || Time.now.utc
+
+        response = cloudwatch_logs.start_query(
+          log_group_name: log_group,
+          start_time: start_time.to_i,
+          end_time: end_time.to_i,
+          limit: 1,
+          query_string: "fields @message | filter detail.jobId = '#{job.id}' | filter detail.status = 'COMPLETE' | sort @ingestionTime desc"
+        )
+        query_id = response.query_id
+        response = cloudwatch_logs.get_query_results(query_id: query_id)
+        until response.status == "Complete"
+          sleep(0.5)
+          response = cloudwatch_logs.get_query_results(query_id: query_id)
+        end
+        raise ActiveEncode::NotFound, "Unable to load progress for job #{job.id}" if response.results.empty?
+
+        JSON.parse(response.results.first.first.value)
+      end
+
+      def cloudwatch_events
+        @cloudwatch_events ||= Aws::CloudWatchEvents::Client.new
+      end
+
+      def cloudwatch_logs
+        @cloudwatch_logs ||= Aws::CloudWatchLogs::Client.new
+      end
+
+      def mediaconvert
+        endpoint = Aws::MediaConvert::Client.new.describe_endpoints.endpoints.first.url
+        @mediaconvert ||= Aws::MediaConvert::Client.new(endpoint: endpoint)
+      end
+
+      def s3_uri(url, options = {})
+        bucket = options[:masterfile_bucket]
+
+        case Addressable::URI.parse(url).scheme
+        when nil, 'file'
+          upload_to_s3 url, bucket
+        when 's3'
+          return url if options[:use_original_url]
+          check_s3_bucket url, bucket
+        else
+          raise ArgumentError, "Cannot handle source URL: #{url}"
+        end
+      end
+
+      def check_s3_bucket(input_url, source_bucket)
+        # logger.info("Checking `#{input_url}'")
+        s3_object = FileLocator::S3File.new(input_url).object
+        if s3_object.bucket_name == source_bucket
+          # logger.info("Already in bucket `#{source_bucket}'")
+          s3_object.key
+        else
+          s3_key = File.join(SecureRandom.uuid, s3_object.key)
+          # logger.info("Copying to `#{source_bucket}/#{input_url}'")
+          target = Aws::S3::Object.new(bucket_name: source_bucket, key: input_url)
+          target.copy_from(s3_object, multipart_copy: s3_object.size > 15_728_640) # 15.megabytes
+          s3_key
+        end
+      end
+
+      def upload_to_s3(input_url, source_bucket)
+        # original_input = input_url
+        bucket = Aws::S3::Resource.new(client: s3client).bucket(source_bucket)
+        filename = FileLocator.new(input_url).location
+        s3_key = File.join(SecureRandom.uuid, File.basename(filename))
+        # logger.info("Copying `#{original_input}' to `#{source_bucket}/#{input_url}'")
+        obj = bucket.object(s3_key)
+        obj.upload_file filename
+
+        s3_key
+      end
+
+      def event_rule_exists?(rule_name)
+        rule = cloudwatch_events.list_rules(name_prefix: rule_name).rules.find do |existing_rule|
+          existing_rule.name == rule_name
+        end
+        !rule.nil?
+      end
+
+      def find_log_group(name)
+        cloudwatch_logs.describe_log_groups(log_group_name_prefix: name).log_groups.find do |group|
+          group.log_group_name == name
+        end
+      end
+
+      def create_log_group(name)
+        result = find_log_group(name)
+
+        return result unless result.nil?
+
+        cloudwatch_logs.create_log_group(log_group_name: name)
+        find_log_group(name)
+      end
+
+      def make_audio_input(input_url)
+        {
+          audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+          audio_selector_groups: {
+            "Audio Selector Group 1" => {
+              audio_selector_names: ["Audio Selector 1"]
+            }
+          },
+          file_input: input_url,
+          timecode_source: "ZEROBASED"
+        }
+      end
+
+      def make_video_input(input_url)
+        {
+          audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+          file_input: input_url,
+          timecode_source: "ZEROBASED",
+          video_selector: {}
+        }
+      end
+
+      def make_output_groups(options)
+        output_type = options[:output_type] || :hls
+        raise ArgumentError, "Unknown output type: #{output_type.inspect}" unless OUTPUT_GROUP_TEMPLATES.keys.include?(output_type)
+        output_group_settings_key = "#{output_type}_group_settings".to_sym
+        output_group_settings = OUTPUT_GROUP_TEMPLATES[output_type].merge(destination: "s3://#{output_bucket}/#{options[:output_prefix]}")
+
+        outputs = options[:outputs].map do |output|
+          {
+            preset: output[:preset],
+            name_modifier: output[:modifier]
+          }
+        end
+
+        [{
+          output_group_settings: {
+            type: output_group_settings_key.upcase,
+            output_group_settings_key => output_group_settings
+          },
+          outputs: outputs
+        }]
+      end
+    end
+  end
+end