active_encode 0.5.0 → 0.8.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.circleci/config.yml +80 -0
- data/.rubocop.yml +9 -70
- data/.rubocop_todo.yml +68 -0
- data/Gemfile +5 -4
- data/README.md +69 -0
- data/active_encode.gemspec +12 -3
- data/app/controllers/active_encode/encode_record_controller.rb +1 -0
- data/app/jobs/active_encode/polling_job.rb +1 -1
- data/app/models/active_encode/encode_record.rb +1 -0
- data/db/migrate/20180822021048_create_active_encode_encode_records.rb +1 -0
- data/db/migrate/20190702153755_add_create_options_to_active_encode_encode_records.rb +6 -0
- data/db/migrate/20190712174821_add_progress_to_active_encode_encode_records.rb +6 -0
- data/lib/active_encode.rb +1 -0
- data/lib/active_encode/base.rb +2 -2
- data/lib/active_encode/callbacks.rb +1 -0
- data/lib/active_encode/core.rb +4 -3
- data/lib/active_encode/engine.rb +1 -0
- data/lib/active_encode/engine_adapter.rb +1 -0
- data/lib/active_encode/engine_adapters.rb +4 -1
- data/lib/active_encode/engine_adapters/elastic_transcoder_adapter.rb +31 -29
- data/lib/active_encode/engine_adapters/ffmpeg_adapter.rb +138 -87
- data/lib/active_encode/engine_adapters/matterhorn_adapter.rb +5 -4
- data/lib/active_encode/engine_adapters/media_convert_adapter.rb +399 -0
- data/lib/active_encode/engine_adapters/media_convert_output.rb +104 -0
- data/lib/active_encode/engine_adapters/pass_through_adapter.rb +239 -0
- data/lib/active_encode/engine_adapters/test_adapter.rb +5 -4
- data/lib/active_encode/engine_adapters/zencoder_adapter.rb +3 -2
- data/lib/active_encode/errors.rb +6 -0
- data/lib/active_encode/global_id.rb +2 -1
- data/lib/active_encode/input.rb +3 -2
- data/lib/active_encode/output.rb +3 -2
- data/lib/active_encode/persistence.rb +11 -5
- data/lib/active_encode/polling.rb +3 -2
- data/lib/active_encode/spec/shared_specs.rb +2 -0
- data/{spec/shared_specs/engine_adapter_specs.rb → lib/active_encode/spec/shared_specs/engine_adapter.rb} +37 -38
- data/lib/active_encode/status.rb +1 -0
- data/lib/active_encode/technical_metadata.rb +3 -2
- data/lib/active_encode/version.rb +2 -1
- data/lib/file_locator.rb +8 -9
- data/spec/controllers/encode_record_controller_spec.rb +4 -3
- data/spec/fixtures/ffmpeg/cancelled-id/cancelled +0 -0
- data/spec/fixtures/file with space.low.mp4 +0 -0
- data/spec/fixtures/file with space.mp4 +0 -0
- data/spec/fixtures/fireworks.low.mp4 +0 -0
- data/spec/fixtures/media_convert/endpoints.json +1 -0
- data/spec/fixtures/media_convert/job_canceled.json +412 -0
- data/spec/fixtures/media_convert/job_canceling.json +1 -0
- data/spec/fixtures/media_convert/job_completed.json +359 -0
- data/spec/fixtures/media_convert/job_completed_detail.json +1 -0
- data/spec/fixtures/media_convert/job_completed_detail_query.json +1 -0
- data/spec/fixtures/media_convert/job_completed_empty_detail.json +1 -0
- data/spec/fixtures/media_convert/job_created.json +408 -0
- data/spec/fixtures/media_convert/job_failed.json +406 -0
- data/spec/fixtures/media_convert/job_progressing.json +414 -0
- data/spec/fixtures/pass_through/cancelled-id/cancelled +0 -0
- data/spec/fixtures/pass_through/cancelled-id/input_metadata +90 -0
- data/spec/fixtures/pass_through/completed-id/completed +0 -0
- data/spec/fixtures/pass_through/completed-id/input_metadata +102 -0
- data/spec/fixtures/pass_through/completed-id/output_metadata-high +90 -0
- data/spec/fixtures/pass_through/completed-id/output_metadata-low +90 -0
- data/spec/fixtures/pass_through/completed-id/video-high.mp4 +0 -0
- data/spec/fixtures/pass_through/completed-id/video-low.mp4 +0 -0
- data/spec/fixtures/pass_through/failed-id/error.log +1 -0
- data/spec/fixtures/pass_through/failed-id/input_metadata +90 -0
- data/spec/fixtures/pass_through/running-id/input_metadata +90 -0
- data/spec/integration/elastic_transcoder_adapter_spec.rb +30 -30
- data/spec/integration/ffmpeg_adapter_spec.rb +93 -25
- data/spec/integration/matterhorn_adapter_spec.rb +45 -44
- data/spec/integration/media_convert_adapter_spec.rb +152 -0
- data/spec/integration/pass_through_adapter_spec.rb +151 -0
- data/spec/integration/zencoder_adapter_spec.rb +210 -209
- data/spec/rails_helper.rb +1 -0
- data/spec/routing/encode_record_controller_routing_spec.rb +1 -0
- data/spec/spec_helper.rb +2 -2
- data/spec/test_app_templates/lib/generators/test_app_generator.rb +13 -12
- data/spec/units/callbacks_spec.rb +3 -2
- data/spec/units/core_spec.rb +26 -25
- data/spec/units/engine_adapter_spec.rb +1 -0
- data/spec/units/file_locator_spec.rb +20 -19
- data/spec/units/global_id_spec.rb +12 -11
- data/spec/units/input_spec.rb +8 -5
- data/spec/units/output_spec.rb +8 -5
- data/spec/units/persistence_spec.rb +15 -11
- data/spec/units/polling_job_spec.rb +7 -6
- data/spec/units/polling_spec.rb +1 -0
- data/spec/units/status_spec.rb +3 -3
- metadata +158 -14
- data/.travis.yml +0 -19
data/lib/active_encode/engine_adapters/matterhorn_adapter.rb

@@ -1,3 +1,4 @@
+# frozen_string_literal: true
 require 'rubyhorn'

 module ActiveEncode
@@ -11,7 +12,7 @@ module ActiveEncode
         build_encode(get_workflow(workflow_om))
       end

-      def find(id,
+      def find(id, _opts = {})
         build_encode(fetch_workflow(id))
       end

@@ -145,7 +146,7 @@ module ActiveEncode

       def convert_created_at(workflow)
         created_at = workflow.xpath('mediapackage/@start').last.to_s
-        created_at.present? ? Time.parse(created_at) : nil
+        created_at.present? ? Time.parse(created_at).utc : nil
       end

       def convert_updated_at(workflow)
@@ -156,13 +157,13 @@ module ActiveEncode
       def convert_output_created_at(track, workflow)
         quality = track.xpath('tags/tag[starts-with(text(),"quality")]/text()').to_s
         created_at = workflow.xpath("//operation[@id=\"compose\"][configurations/configuration[@key=\"target-tags\" and contains(text(), \"#{quality}\")]]/started/text()").to_s
-        created_at.present? ? Time.at(created_at.to_i / 1000.0) : nil
+        created_at.present? ? Time.at(created_at.to_i / 1000.0).utc : nil
       end

       def convert_output_updated_at(track, workflow)
         quality = track.xpath('tags/tag[starts-with(text(),"quality")]/text()').to_s
         updated_at = workflow.xpath("//operation[@id=\"compose\"][configurations/configuration[@key=\"target-tags\" and contains(text(), \"#{quality}\")]]/completed/text()").to_s
-        updated_at.present? ? Time.at(updated_at.to_i / 1000.0) : nil
+        updated_at.present? ? Time.at(updated_at.to_i / 1000.0).utc : nil
       end

       def convert_options(workflow)
data/lib/active_encode/engine_adapters/media_convert_adapter.rb

@@ -0,0 +1,399 @@
+# frozen_string_literal: true
+require 'active_encode/engine_adapters/media_convert_output.rb'
+require 'active_support/core_ext/integer/time'
+require 'addressable/uri'
+require 'aws-sdk-cloudwatchevents'
+require 'aws-sdk-cloudwatchlogs'
+require 'aws-sdk-mediaconvert'
+require 'file_locator'
+
+require 'active_support/json'
+require 'active_support/time'
+
+module ActiveEncode
+  module EngineAdapters
+    class MediaConvertAdapter
+      # [AWS Elemental MediaConvert](https://aws.amazon.com/mediaconvert/) doesn't provide detailed
+      # output information in the job description that can be pulled directly from the service.
+      # Instead, it provides that information along with the job status notification when the job
+      # status changes to `COMPLETE`. The only way to capture that notification is through an [Amazon
+      # Eventbridge](https://aws.amazon.com/eventbridge/) rule that forwards the status change
+      # notification to another service for capture and/or handling.
+      #
+      # `ActiveEncode::EngineAdapters::MediaConvert` does this by creating a [CloudWatch Logs]
+      # (https://aws.amazon.com/cloudwatch/) log group and an EventBridge rule to forward status
+      # change notifications to the log group. It can then find the log entry containing the output
+      # details later when the job is complete. This is accomplished by calling the idempotent
+      # `#setup!` method.
+      #
+      # The AWS user/role calling the `#setup!` method will require permissions to create the
+      # necessary CloudWatch and EventBridge resources, and the role passed to the engine adapter
+      # will need access to any S3 buckets where files will be read from or written to during
+      # transcoding.
+      #
+      # Configuration example:
+      #
+      #     ActiveEncode::Base.engine_adapter = :media_convert
+      #     ActiveEncode::Base.engine_adapter.role = 'arn:aws:iam::123456789012:role/service-role/MediaConvert_Default_Role'
+      #     ActiveEncode::Base.engine_adapter.output_bucket = 'output-bucket'
+      #     ActiveEncode::Base.engine_adapter.setup!
+
+      JOB_STATES = {
+        "SUBMITTED" => :running, "PROGRESSING" => :running, "CANCELED" => :cancelled,
+        "ERROR" => :failed, "COMPLETE" => :completed
+      }.freeze
+
+      OUTPUT_GROUP_TEMPLATES = {
+        hls: { min_segment_length: 0, segment_control: "SEGMENTED_FILES", segment_length: 10 },
+        dash_iso: { fragment_length: 2, segment_control: "SEGMENTED_FILES", segment_length: 30 },
+        file: {},
+        ms_smooth: { fragment_length: 2 },
+        cmaf: { fragment_length: 2, segment_control: "SEGMENTED_FILES", segment_length: 10 }
+      }.freeze
+
+      class ResultsNotAvailable < RuntimeError
+        attr_reader :encode
+
+        def initialize(msg = nil, encode = nil)
+          @encode = encode
+          super(msg)
+        end
+      end
+
+      attr_accessor :role, :output_bucket
+      attr_writer :log_group, :queue
+
+      def setup!
+        rule_name = "active-encode-mediaconvert-#{queue}"
+        return true if event_rule_exists?(rule_name)
+
+        queue_arn = mediaconvert.get_queue(name: queue).queue.arn
+
+        event_pattern = {
+          source: ["aws.mediaconvert"],
+          "detail-type": ["MediaConvert Job State Change"],
+          detail: {
+            queue: [queue_arn]
+          }
+        }
+
+        log_group_arn = create_log_group(log_group).arn
+
+        cloudwatch_events.put_rule(
+          name: rule_name,
+          event_pattern: event_pattern.to_json,
+          state: "ENABLED",
+          description: "Forward MediaConvert job state changes from queue #{queue} to #{log_group}"
+        )
+
+        cloudwatch_events.put_targets(
+          rule: rule_name,
+          targets: [
+            {
+              id: "Id#{SecureRandom.uuid}",
+              arn: log_group_arn
+            }
+          ]
+        )
+        true
+      end
+
+      # Required options:
+      #
+      # * `output_prefix`: The S3 key prefix to use as the base for all outputs.
+      #
+      # * `outputs`: An array of `{preset, modifier}` options defining how to transcode and name the outputs.
+      #
+      # Optional options:
+      #
+      # * `masterfile_bucket`: The bucket to which file-based inputs will be copied before
+      #                        being passed to MediaConvert. Also used for S3-based inputs
+      #                        unless `use_original_url` is specified.
+      #
+      # * `use_original_url`: If `true`, any S3 URL passed in as input will be passed directly to
+      #                       MediaConvert as the file input instead of copying the source to
+      #                       the `masterfile_bucket`.
+      #
+      # Example:
+      #     {
+      #       output_prefix: "path/to/output/files",
+      #       outputs: [
+      #         {preset: "System-Avc_16x9_1080p_29_97fps_8500kbps", modifier: "-1080"},
+      #         {preset: "System-Avc_16x9_720p_29_97fps_5000kbps", modifier: "-720"},
+      #         {preset: "System-Avc_16x9_540p_29_97fps_3500kbps", modifier: "-540"}
+      #       ]
+      #     }
+      #     }
+      def create(input_url, options = {})
+        input_url = s3_uri(input_url, options)
+
+        input = options[:media_type] == :audio ? make_audio_input(input_url) : make_video_input(input_url)
+
+        create_job_params = {
+          role: role,
+          settings: {
+            inputs: [input],
+            output_groups: make_output_groups(options)
+          }
+        }
+
+        response = mediaconvert.create_job(create_job_params)
+        job = response.job
+        build_encode(job)
+      end
+
+      def find(id, _opts = {})
+        response = mediaconvert.get_job(id: id)
+        job = response.job
+        build_encode(job)
+      rescue Aws::MediaConvert::Errors::NotFound
+        raise ActiveEncode::NotFound, "Job #{id} not found"
+      end
+
+      def cancel(id)
+        mediaconvert.cancel_job(id: id)
+        find(id)
+      end
+
+      def log_group
+        @log_group ||= "/aws/events/active-encode/mediaconvert/#{queue}"
+      end
+
+      def queue
+        @queue ||= "Default"
+      end
+
+      private
+
+      def build_encode(job)
+        return nil if job.nil?
+        encode = ActiveEncode::Base.new(job.settings.inputs.first.file_input, {})
+        encode.id = job.id
+        encode.input.id = job.id
+        encode.state = JOB_STATES[job.status]
+        encode.current_operations = [job.current_phase].compact
+        encode.created_at = job.timing.submit_time
+        encode.updated_at = job.timing.finish_time || job.timing.start_time || encode.created_at
+        encode.percent_complete = convert_percent_complete(job)
+        encode.errors = [job.error_message].compact
+        encode.output = []
+
+        encode.input.created_at = encode.created_at
+        encode.input.updated_at = encode.updated_at
+
+        encode = complete_encode(encode, job) if encode.state == :completed
+        encode
+      end
+
+      def complete_encode(encode, job)
+        result = convert_output(job)
+        if result.nil?
+          raise ResultsNotAvailable.new("Unable to load progress for job #{job.id}", encode) if job.timing.finish_time < 10.minutes.ago
+          encode.state = :running
+        else
+          encode.output = result
+        end
+        encode
+      end
+
+      def convert_percent_complete(job)
+        case job.status
+        when "SUBMITTED"
+          5
+        when "PROGRESSING"
+          job.job_percent_complete
+        when "CANCELED", "ERROR"
+          50
+        when "COMPLETE"
+          100
+        else
+          0
+        end
+      end
+
+      def convert_output(job)
+        results = get_encode_results(job)
+        return nil if results.nil?
+        convert_encode_results(job, results)
+      end
+
+      def convert_encode_results(job, results)
+        settings = job.settings.output_groups.first.outputs
+
+        outputs = results.dig('detail', 'outputGroupDetails', 0, 'outputDetails').map.with_index do |detail, index|
+          tech_md = MediaConvertOutput.tech_metadata(settings[index], detail)
+          output = ActiveEncode::Output.new
+
+          output.created_at = job.timing.submit_time
+          output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+
+          [:width, :height, :frame_rate, :duration, :checksum, :audio_codec, :video_codec,
+           :audio_bitrate, :video_bitrate, :file_size, :label, :url, :id].each do |field|
+            output.send("#{field}=", tech_md[field])
+          end
+          output.id ||= "#{job.id}-output#{tech_md[:suffix]}"
+          output
+        end
+
+        adaptive_playlist = results.dig('detail', 'outputGroupDetails', 0, 'playlistFilePaths', 0)
+        unless adaptive_playlist.nil?
+          output = ActiveEncode::Output.new
+          output.created_at = job.timing.submit_time
+          output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+          output.id = "#{job.id}-output-auto"
+
+          [:duration, :audio_codec, :video_codec].each do |field|
+            output.send("#{field}=", outputs.first.send(field))
+          end
+          output.label = File.basename(adaptive_playlist)
+          output.url = adaptive_playlist
+          outputs << output
+        end
+        outputs
+      end
+
+      def get_encode_results(job)
+        start_time = job.timing.submit_time
+        end_time = (job.timing.finish_time || Time.now.utc) + 10.minutes
+
+        response = cloudwatch_logs.start_query(
+          log_group_name: log_group,
+          start_time: start_time.to_i,
+          end_time: end_time.to_i,
+          limit: 1,
+          query_string: "fields @message | filter detail.jobId = '#{job.id}' | filter detail.status = 'COMPLETE' | sort @ingestionTime desc"
+        )
+        query_id = response.query_id
+        response = cloudwatch_logs.get_query_results(query_id: query_id)
+        until response.status == "Complete"
+          sleep(0.5)
+          response = cloudwatch_logs.get_query_results(query_id: query_id)
+        end
+
+        return nil if response.results.empty?
+
+        JSON.parse(response.results.first.first.value)
+      end
+
+      def cloudwatch_events
+        @cloudwatch_events ||= Aws::CloudWatchEvents::Client.new
+      end
+
+      def cloudwatch_logs
+        @cloudwatch_logs ||= Aws::CloudWatchLogs::Client.new
+      end
+
+      def mediaconvert
+        endpoint = Aws::MediaConvert::Client.new.describe_endpoints.endpoints.first.url
+        @mediaconvert ||= Aws::MediaConvert::Client.new(endpoint: endpoint)
+      end
+
+      def s3_uri(url, options = {})
+        bucket = options[:masterfile_bucket]
+
+        case Addressable::URI.parse(url).scheme
+        when nil, 'file'
+          upload_to_s3 url, bucket
+        when 's3'
+          return url if options[:use_original_url]
+          check_s3_bucket url, bucket
+        else
+          raise ArgumentError, "Cannot handle source URL: #{url}"
+        end
+      end
+
+      def check_s3_bucket(input_url, source_bucket)
+        # logger.info("Checking `#{input_url}'")
+        s3_object = FileLocator::S3File.new(input_url).object
+        if s3_object.bucket_name == source_bucket
+          # logger.info("Already in bucket `#{source_bucket}'")
+          s3_object.key
+        else
+          s3_key = File.join(SecureRandom.uuid, s3_object.key)
+          # logger.info("Copying to `#{source_bucket}/#{input_url}'")
+          target = Aws::S3::Object.new(bucket_name: source_bucket, key: input_url)
+          target.copy_from(s3_object, multipart_copy: s3_object.size > 15_728_640) # 15.megabytes
+          s3_key
+        end
+      end
+
+      def upload_to_s3(input_url, source_bucket)
+        # original_input = input_url
+        bucket = Aws::S3::Resource.new(client: s3client).bucket(source_bucket)
+        filename = FileLocator.new(input_url).location
+        s3_key = File.join(SecureRandom.uuid, File.basename(filename))
+        # logger.info("Copying `#{original_input}' to `#{source_bucket}/#{input_url}'")
+        obj = bucket.object(s3_key)
+        obj.upload_file filename
+
+        s3_key
+      end
+
+      def event_rule_exists?(rule_name)
+        rule = cloudwatch_events.list_rules(name_prefix: rule_name).rules.find do |existing_rule|
+          existing_rule.name == rule_name
+        end
+        !rule.nil?
+      end
+
+      def find_log_group(name)
+        cloudwatch_logs.describe_log_groups(log_group_name_prefix: name).log_groups.find do |group|
+          group.log_group_name == name
+        end
+      end
+
+      def create_log_group(name)
+        result = find_log_group(name)
+
+        return result unless result.nil?
+
+        cloudwatch_logs.create_log_group(log_group_name: name)
+        find_log_group(name)
+      end
+
+      def make_audio_input(input_url)
+        {
+          audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+          audio_selector_groups: {
+            "Audio Selector Group 1" => {
+              audio_selector_names: ["Audio Selector 1"]
+            }
+          },
+          file_input: input_url,
+          timecode_source: "ZEROBASED"
+        }
+      end
+
+      def make_video_input(input_url)
+        {
+          audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+          file_input: input_url,
+          timecode_source: "ZEROBASED",
+          video_selector: {}
+        }
+      end
+
+      def make_output_groups(options)
+        output_type = options[:output_type] || :hls
+        raise ArgumentError, "Unknown output type: #{output_type.inspect}" unless OUTPUT_GROUP_TEMPLATES.keys.include?(output_type)
+        output_group_settings_key = "#{output_type}_group_settings".to_sym
+        output_group_settings = OUTPUT_GROUP_TEMPLATES[output_type].merge(destination: "s3://#{output_bucket}/#{options[:output_prefix]}")
+
+        outputs = options[:outputs].map do |output|
+          {
+            preset: output[:preset],
+            name_modifier: output[:modifier]
+          }
+        end
+
+        [{
+          output_group_settings: {
+            type: output_group_settings_key.upcase,
+            output_group_settings_key => output_group_settings
+          },
+          outputs: outputs
+        }]
+      end
+    end
+  end
+end
data/lib/active_encode/engine_adapters/media_convert_output.rb

@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+module ActiveEncode
+  module EngineAdapters
+    module MediaConvertOutput
+      class << self
+        AUDIO_SETTINGS = {
+          "AAC" => :aac_settings,
+          "AC3" => :ac3_settings,
+          "AIFF" => :aiff_settings,
+          "EAC3_ATMOS" => :eac_3_atmos_settings,
+          "EAC3" => :eac_3_settings,
+          "MP2" => :mp_2_settings,
+          "MP3" => :mp_3_settings,
+          "OPUS" => :opus_settings,
+          "VORBIS" => :vorbis_settings,
+          "WAV" => :wav_settings
+        }.freeze
+
+        VIDEO_SETTINGS = {
+          "AV1" => :av_1_settings,
+          "AVC_INTRA" => :avc_intra_settings,
+          "FRAME_CAPTURE" => :frame_capture_settings,
+          "H_264" => :h264_settings,
+          "H_265" => :h265_settings,
+          "MPEG2" => :mpeg_2_settings,
+          "PRORES" => :prores_settings,
+          "VC3" => :vc_3_settings,
+          "VP8" => :vp_8_settings,
+          "VP9" => :vp_9_settings,
+          "XAVC" => :xavc_settings
+        }.freeze
+
+        def tech_metadata(settings, output)
+          url = output.dig('outputFilePaths', 0)
+          {
+            width: output.dig('videoDetails', 'widthInPx'),
+            height: output.dig('videoDetails', 'heightInPx'),
+            frame_rate: extract_video_frame_rate(settings),
+            duration: output['durationInMs'],
+            audio_codec: extract_audio_codec(settings),
+            video_codec: extract_video_codec(settings),
+            audio_bitrate: extract_audio_bitrate(settings),
+            video_bitrate: extract_video_bitrate(settings),
+            url: url,
+            label: File.basename(url),
+            suffix: settings.name_modifier
+          }
+        end
+
+        def extract_audio_codec(settings)
+          settings.audio_descriptions.first.codec_settings.codec
+        rescue
+          nil
+        end
+
+        def extract_audio_codec_settings(settings)
+          codec_key = AUDIO_SETTINGS[extract_audio_codec(settings)]
+          settings.audio_descriptions.first.codec_settings[codec_key]
+        end
+
+        def extract_video_codec(settings)
+          settings.video_description.codec_settings.codec
+        rescue
+          nil
+        end
+
+        def extract_video_codec_settings(settings)
+          codec_key = VIDEO_SETTINGS[extract_video_codec(settings)]
+          settings.video_description.codec_settings[codec_key]
+        rescue
+          nil
+        end
+
+        def extract_audio_bitrate(settings)
+          codec_settings = extract_audio_codec_settings(settings)
+          return nil if codec_settings.nil?
+          try(codec_settings, :bitrate)
+        end
+
+        def extract_video_bitrate(settings)
+          codec_settings = extract_video_codec_settings(settings)
+          return nil if codec_settings.nil?
+          try(codec_settings, :bitrate) || try(codec_settings, :max_bitrate)
+        end
+
+        def extract_video_frame_rate(settings)
+          codec_settings = extract_video_codec_settings(settings)
+          return nil if codec_settings.nil?
+          (codec_settings.framerate_numerator.to_f / codec_settings.framerate_denominator.to_f).round(2)
+        rescue
+          nil
+        end
+
+        private
+
+        def try(struct, key)
+          struct[key]
+        rescue
+          nil
+        end
+      end
+    end
+  end
+end