active_encode 0.8.2 → 1.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (47)
  1. checksums.yaml +4 -4
  2. data/.circleci/config.yml +26 -17
  3. data/.rubocop.yml +7 -3
  4. data/.rubocop_todo.yml +8 -1
  5. data/CONTRIBUTING.md +42 -12
  6. data/Gemfile +11 -11
  7. data/README.md +64 -10
  8. data/active_encode.gemspec +2 -4
  9. data/app/controllers/active_encode/encode_record_controller.rb +1 -1
  10. data/app/jobs/active_encode/polling_job.rb +1 -1
  11. data/app/models/active_encode/encode_record.rb +1 -1
  12. data/guides/media_convert_adapter.md +208 -0
  13. data/lib/active_encode/base.rb +1 -1
  14. data/lib/active_encode/core.rb +14 -14
  15. data/lib/active_encode/engine_adapter.rb +13 -13
  16. data/lib/active_encode/engine_adapters/elastic_transcoder_adapter.rb +158 -158
  17. data/lib/active_encode/engine_adapters/ffmpeg_adapter.rb +14 -3
  18. data/lib/active_encode/engine_adapters/matterhorn_adapter.rb +204 -202
  19. data/lib/active_encode/engine_adapters/media_convert_adapter.rb +421 -217
  20. data/lib/active_encode/engine_adapters/media_convert_output.rb +67 -5
  21. data/lib/active_encode/engine_adapters/pass_through_adapter.rb +3 -3
  22. data/lib/active_encode/engine_adapters/zencoder_adapter.rb +114 -114
  23. data/lib/active_encode/errors.rb +1 -1
  24. data/lib/active_encode/persistence.rb +19 -19
  25. data/lib/active_encode/version.rb +1 -1
  26. data/lib/file_locator.rb +6 -6
  27. data/spec/fixtures/ffmpeg/cancelled-id/exit_status.code +1 -0
  28. data/spec/fixtures/ffmpeg/completed-id/exit_status.code +1 -0
  29. data/spec/fixtures/ffmpeg/completed-with-warnings-id/error.log +3 -0
  30. data/spec/fixtures/ffmpeg/completed-with-warnings-id/exit_status.code +1 -0
  31. data/spec/fixtures/ffmpeg/completed-with-warnings-id/input_metadata +102 -0
  32. data/spec/fixtures/ffmpeg/completed-with-warnings-id/output_metadata-high +90 -0
  33. data/spec/fixtures/ffmpeg/completed-with-warnings-id/output_metadata-low +90 -0
  34. data/spec/fixtures/ffmpeg/completed-with-warnings-id/pid +1 -0
  35. data/spec/fixtures/ffmpeg/completed-with-warnings-id/progress +11 -0
  36. data/spec/fixtures/ffmpeg/completed-with-warnings-id/video-high.mp4 +0 -0
  37. data/spec/fixtures/ffmpeg/completed-with-warnings-id/video-low.mp4 +0 -0
  38. data/spec/fixtures/ffmpeg/failed-id/exit_status.code +1 -0
  39. data/spec/integration/ffmpeg_adapter_spec.rb +50 -1
  40. data/spec/integration/matterhorn_adapter_spec.rb +1 -2
  41. data/spec/integration/media_convert_adapter_spec.rb +91 -0
  42. data/spec/integration/pass_through_adapter_spec.rb +2 -2
  43. data/spec/integration/zencoder_adapter_spec.rb +3 -3
  44. data/spec/units/core_spec.rb +1 -1
  45. data/spec/units/file_locator_spec.rb +3 -3
  46. data/spec/units/status_spec.rb +1 -1
  47. metadata +50 -19
@@ -12,32 +12,72 @@ require 'active_support/time'
 
  module ActiveEncode
  module EngineAdapters
+ # An adapter for using [AWS Elemental MediaConvert](https://aws.amazon.com/mediaconvert/) to
+ # encode.
+ #
+ # Note: this adapter does not perform input characterization and does not provide technical
+ # metadata on inputs.
+ #
+ # ## Configuration
+ #
+ # ActiveEncode::Base.engine_adapter = :media_convert
+ #
+ # ActiveEncode::Base.engine_adapter.role = 'arn:aws:iam::123456789012:role/service-role/MediaConvert_Default_Role'
+ # ActiveEncode::Base.engine_adapter.output_bucket = 'output-bucket'
+ #
+ # # optionally and probably not needed
+ #
+ # ActiveEncode::Base.engine_adapter.queue = my_mediaconvert_queue_name
+ # ActiveEncode::Base.engine_adapter.log_group = my_log_group_name
+ #
+ # ## Capturing output information
+ #
+ # [AWS Elemental MediaConvert](https://aws.amazon.com/mediaconvert/) doesn't provide detailed
+ # output information in the job description that can be pulled directly from the service.
+ # Instead, it provides that information along with the job status notification when the job
+ # status changes to `COMPLETE`. The only way to capture that notification is through an [Amazon
+ # EventBridge](https://aws.amazon.com/eventbridge/) rule that forwards a MediaConvert job
+ # status change on `COMPLETE` to another service, such as a [CloudWatch Logs]
+ # (https://aws.amazon.com/cloudwatch/) log group.
+ #
+ # This adapter is written to get output information from a CloudWatch log group that has had
+ # MediaConvert COMPLETE events forwarded to it by an EventBridge rule. The `setup!` method
+ # can be used to create these for you, with the conventional names the adapter will use by default.
+ #
+ # ActiveEncode::Base.engine_adapter.setup!
+ #
+ # **OR**, there is experimental functionality to get what we can directly from the job without
+ # requiring a CloudWatch log. At present this is expected to be complete only for HLS output,
+ # where it seems to work well. To opt in and skip the CloudWatch logs requirement:
+ #
+ # ActiveEncode::Base.engine_adapter.direct_output_lookup = true
+ #
+ # ## Example
+ #
+ # ActiveEncode::Base.engine_adapter = :media_convert
+ # ActiveEncode::Base.engine_adapter.role = 'arn:aws:iam::123456789012:role/service-role/MediaConvert_Default_Role'
+ # ActiveEncode::Base.engine_adapter.output_bucket = 'output-bucket'
+ #
+ # ActiveEncode::Base.engine_adapter.setup!
+ #
+ # encode = ActiveEncode::Base.create(
+ # "file://path/to/file.mp4",
+ # {
+ # masterfile_bucket: "name-of-my-masterfile_bucket",
+ # output_prefix: "path/to/output/base_name_of_outputs",
+ # use_original_url: true,
+ # outputs: [
+ # { preset: "my-hls-preset-high", modifier: "_high" },
+ # { preset: "my-hls-preset-medium", modifier: "_medium" },
+ # { preset: "my-hls-preset-low", modifier: "_low" },
+ # ]
+ # }
+ # )
+ #
+ # ## More info
+ #
+ # A more detailed guide is available in the repo at [guides/media_convert_adapter.md](../../../guides/media_convert_adapter.md)
  class MediaConvertAdapter
- # [AWS Elemental MediaConvert](https://aws.amazon.com/mediaconvert/) doesn't provide detailed
- # output information in the job description that can be pulled directly from the service.
- # Instead, it provides that information along with the job status notification when the job
- # status changes to `COMPLETE`. The only way to capture that notification is through an [Amazon
- # Eventbridge](https://aws.amazon.com/eventbridge/) rule that forwards the status change
- # notification to another service for capture and/or handling.
- #
- # `ActiveEncode::EngineAdapters::MediaConvert` does this by creating a [CloudWatch Logs]
- # (https://aws.amazon.com/cloudwatch/) log group and an EventBridge rule to forward status
- # change notifications to the log group. It can then find the log entry containing the output
- # details later when the job is complete. This is accomplished by calling the idempotent
- # `#setup!` method.
- #
- # The AWS user/role calling the `#setup!` method will require permissions to create the
- # necessary CloudWatch and EventBridge resources, and the role passed to the engine adapter
- # will need access to any S3 buckets where files will be read from or written to during
- # transcoding.
- #
- # Configuration example:
- #
- # ActiveEncode::Base.engine_adapter = :media_convert
- # ActiveEncode::Base.engine_adapter.role = 'arn:aws:iam::123456789012:role/service-role/MediaConvert_Default_Role'
- # ActiveEncode::Base.engine_adapter.output_bucket = 'output-bucket'
- # ActiveEncode::Base.engine_adapter.setup!
-
  JOB_STATES = {
  "SUBMITTED" => :running, "PROGRESSING" => :running, "CANCELED" => :cancelled,
  "ERROR" => :failed, "COMPLETE" => :completed
@@ -51,6 +91,8 @@ module ActiveEncode
  cmaf: { fragment_length: 2, segment_control: "SEGMENTED_FILES", segment_length: 10 }
  }.freeze
 
+ SETUP_LOG_GROUP_RETENTION_DAYS = 3
+
  class ResultsNotAvailable < RuntimeError
  attr_reader :encode
 
@@ -60,9 +102,29 @@ module ActiveEncode
  end
  end
 
- attr_accessor :role, :output_bucket
+ # @!attribute [rw] role simple name of AWS role to pass to MediaConvert, eg `my-role-name`
+ # @!attribute [rw] output_bucket simple bucket name to write output to
+ # @!attribute [rw] direct_output_lookup if true, do NOT get output information from cloudwatch,
+ # instead retrieve and construct it only from job itself. Currently
+ # working only for HLS output. default false.
+ attr_accessor :role, :output_bucket, :direct_output_lookup
+
+ # @!attribute [w] log_group log_group_name that is being used to capture output
+ # @!attribute [w] queue name of MediaConvert queue to use.
  attr_writer :log_group, :queue
 
+ # Creates a [CloudWatch Logs]
+ # (https://aws.amazon.com/cloudwatch/) log group and an EventBridge rule to forward status
+ # change notifications to the log group, to catch result information from MediaConvert jobs.
+ #
+ # Will use the configured `queue` and `log_group` values.
+ #
+ # The active AWS user/role when calling the `#setup!` method will require permissions to create the
+ # necessary CloudWatch and EventBridge resources.
+ #
+ # This method chooses a conventional name for the EventBridge rule; if a rule by that
+ # name already exists, it will silently exit. So this method can be called in a boot process,
+ # to check if this infrastructure already exists, and create it only if it does not.
  def setup!
  rule_name = "active-encode-mediaconvert-#{queue}"
  return true if event_rule_exists?(rule_name)
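Where the conventional queue and log group are not appropriate, the writers documented above can be assigned before calling setup!; the names below are illustrative only.

    adapter = ActiveEncode::Base.engine_adapter
    adapter.queue = "my-mediaconvert-queue"          # optional, falls back to the adapter default
    adapter.log_group = "my-mediaconvert-log-group"  # optional, falls back to the adapter default
    adapter.setup!  # silently returns true if the EventBridge rule already exists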
@@ -73,17 +135,20 @@ module ActiveEncode
  source: ["aws.mediaconvert"],
  "detail-type": ["MediaConvert Job State Change"],
  detail: {
- queue: [queue_arn]
+ queue: [queue_arn],
+ status: ["COMPLETE"]
  }
  }
 
- log_group_arn = create_log_group(log_group).arn
+ # AWS is inconsistent about whether a cloudwatch ARN has :* appended
+ # to the end, and we need to make sure it doesn't in the rule target.
+ log_group_arn = create_log_group(log_group).arn.chomp(":*")
 
  cloudwatch_events.put_rule(
  name: rule_name,
  event_pattern: event_pattern.to_json,
  state: "ENABLED",
- description: "Forward MediaConvert job state changes from queue #{queue} to #{log_group}"
+ description: "Forward MediaConvert job state changes on COMPLETE from queue #{queue} to #{log_group}"
  )
 
  cloudwatch_events.put_targets(
@@ -100,20 +165,48 @@ module ActiveEncode
 
  # Required options:
  #
- # * `output_prefix`: The S3 key prefix to use as the base for all outputs.
+ # * `output_prefix`: The S3 key prefix to use as the base for all outputs. Will be
+ # combined with configured `output_bucket` to be passed to MediaConvert
+ # `destination`. Alternately see `destination` arg; one or the other
+ # is required.
  #
- # * `outputs`: An array of `{preset, modifier}` options defining how to transcode and name the outputs.
+ # * `destination`: The full s3:// URL to be passed to MediaConvert `destination` as output
+ # location and filename base. `output_bucket` config is ignored if you
+ # pass `destination`. Alternately see `output_prefix` arg; one or the
+ # other is required.
+ #
+ #
+ # * `outputs`: An array of `{preset, modifier}` options defining how to transcode and
+ # name the outputs. The "modifier" option will be passed as `name_modifier`
+ # to AWS, to be added as a suffix on to `output_prefix` to create the
+ # filenames for each output.
  #
  # Optional options:
  #
- # * `masterfile_bucket`: The bucket to which file-based inputs will be copied before
- # being passed to MediaConvert. Also used for S3-based inputs
- # unless `use_original_url` is specified.
+ # * `masterfile_bucket`: All input will first be copied to this bucket, before being passed
+ # to MediaConvert. You can skip this copy by passing the `use_original_url`
+ # option with an S3-based input. `masterfile_bucket` **is** required
+ # unless `use_original_url` is true and the input source is in S3.
  #
  # * `use_original_url`: If `true`, any S3 URL passed in as input will be passed directly to
  # MediaConvert as the file input instead of copying the source to
  # the `masterfile_bucket`.
  #
+ # * `media_type`: `audio` or `video`. Default `video`. Triggers use of a corresponding
+ # template for arguments sent to AWS create_job API.
+ #
+ #
+ # * `output_type`: One of: `hls`, `dash_iso`, `file`, `ms_smooth`, `cmaf`. Default `hls`.
+ # Triggers use of a corresponding template for arguments sent to AWS
+ # create_job API.
+ #
+ #
+ # * `output_group_destination_settings`: A hash of additional `destination_settings` to be
+ # sent to MediaConvert with the output_group. Can include `s3_settings` key
+ # with `access_control` and `encryption` settings. See examples at:
+ # https://docs.aws.amazon.com/sdk-for-ruby/v3/api/Aws/MediaConvert/Client.html#create_job-instance_method
+ #
+ #
  # Example:
  # {
  # output_prefix: "path/to/output/files",
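As a quick illustration of the destination and output_group_destination_settings options described above, a create call might look like the sketch below; the bucket, key, and preset names are placeholders, and the s3_settings values follow the aws-sdk create_job documentation linked above.

    ActiveEncode::Base.create(
      "s3://my-masterfiles/path/to/file.mp4",
      use_original_url: true,
      destination: "s3://my-output-bucket/path/to/output/base_name",  # used instead of output_bucket + output_prefix
      output_type: :hls,
      outputs: [
        { preset: "my-hls-preset-high", modifier: "_high" },
        { preset: "my-hls-preset-low", modifier: "_low" }
      ],
      output_group_destination_settings: {
        s3_settings: {
          access_control: { canned_acl: "PUBLIC_READ" },
          encryption: { encryption_type: "SERVER_SIDE_ENCRYPTION_S3" }
        }
      }
    )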
@@ -166,235 +259,346 @@ module ActiveEncode
 
  private
 
- def build_encode(job)
- return nil if job.nil?
- encode = ActiveEncode::Base.new(job.settings.inputs.first.file_input, {})
- encode.id = job.id
- encode.input.id = job.id
- encode.state = JOB_STATES[job.status]
- encode.current_operations = [job.current_phase].compact
- encode.created_at = job.timing.submit_time
- encode.updated_at = job.timing.finish_time || job.timing.start_time || encode.created_at
- encode.percent_complete = convert_percent_complete(job)
- encode.errors = [job.error_message].compact
- encode.output = []
-
- encode.input.created_at = encode.created_at
- encode.input.updated_at = encode.updated_at
-
- encode = complete_encode(encode, job) if encode.state == :completed
- encode
- end
+ def build_encode(job)
+ return nil if job.nil?
+ encode = ActiveEncode::Base.new(job.settings.inputs.first.file_input, {})
+ encode.id = job.id
+ encode.input.id = job.id
+ encode.state = JOB_STATES[job.status]
+ encode.current_operations = [job.current_phase].compact
+ encode.created_at = job.timing.submit_time
+ encode.updated_at = job.timing.finish_time || job.timing.start_time || encode.created_at
+ encode.percent_complete = convert_percent_complete(job)
+ encode.errors = [job.error_message].compact
+ encode.output = []
+
+ encode.input.created_at = encode.created_at
+ encode.input.updated_at = encode.updated_at
+
+ encode = complete_encode(encode, job) if encode.state == :completed
+ encode
+ end
 
- def complete_encode(encode, job)
- result = convert_output(job)
- if result.nil?
- raise ResultsNotAvailable.new("Unable to load progress for job #{job.id}", encode) if job.timing.finish_time < 10.minutes.ago
- encode.state = :running
- else
- encode.output = result
- end
- encode
+ # Called when job is complete to add output details; will mutate the encode object
+ # passed in to add #output details, an array of `ActiveEncode::Output` objects.
+ #
+ # @param encode [ActiveEncode::Output] encode object to mutate
+ # @param job [Aws::MediaConvert::Types::Job] corresponding MediaConvert Job object already looked up
+ #
+ # @return ActiveEncode::Output the same encode object passed in.
+ def complete_encode(encode, job)
+ output_result = convert_output(job)
+ if output_result.nil?
+ raise ResultsNotAvailable.new("Unable to load progress for job #{job.id}", encode) if job.timing.finish_time < 10.minutes.ago
+ encode.state = :running
+ else
+ encode.output = output_result
  end
+ encode
+ end
 
- def convert_percent_complete(job)
- case job.status
- when "SUBMITTED"
- 5
- when "PROGRESSING"
- job.job_percent_complete
- when "CANCELED", "ERROR"
- 50
- when "COMPLETE"
- 100
- else
- 0
- end
+ def convert_percent_complete(job)
+ case job.status
+ when "SUBMITTED"
+ 0
+ when "PROGRESSING"
+ job.job_percent_complete
+ when "CANCELED", "ERROR"
+ 50
+ when "COMPLETE"
+ 100
+ else
+ 0
  end
+ end
 
- def convert_output(job)
- results = get_encode_results(job)
- return nil if results.nil?
- convert_encode_results(job, results)
+ # extracts and looks up output information from an AWS MediaConvert job.
+ # Will also lookup corresponding CloudWatch log entry unless
+ # direct_output_lookup config is true.
+ #
+ # @param job [Aws::MediaConvert::Types::Job]
+ #
+ # @return [Array<ActiveEncode::Output>,nil]
+ def convert_output(job)
+ if direct_output_lookup
+ build_output_from_only_job(job)
+ else
+ logged_results = get_encode_results(job)
+ return nil if logged_results.nil?
+ build_output_from_logged_results(job, logged_results)
  end
+ end
 
- def convert_encode_results(job, results)
- settings = job.settings.output_groups.first.outputs
+ def build_output_from_only_job(job)
+ # we need to compile info from two places in job output, two arrays of things,
+ # that correspond.
+ output_group = job.dig("settings", "output_groups", 0)
+ output_group_settings = output_group.dig("output_group_settings")
+ output_settings = output_group.dig("outputs")
+
+ output_group_details = job.dig("output_group_details", 0, "output_details")
+ file_input_url = job.dig("settings", "inputs", 0, "file_input")
+
+ outputs = output_group_details.map.with_index do |output_group_detail, index|
+ # Right now we only know how to get a URL for hls output, although
+ # the others should be possible and very analogous, just not familiar with them.
+ if output_group_settings.type == "HLS_GROUP_SETTINGS"
+ output_url = MediaConvertOutput.construct_output_url(
+ destination: output_group_settings.hls_group_settings.destination,
+ file_input_url: file_input_url,
+ name_modifier: output_settings[index].name_modifier,
+ file_suffix: "m3u8"
+ )
+ end
 
- outputs = results.dig('detail', 'outputGroupDetails', 0, 'outputDetails').map.with_index do |detail, index|
- tech_md = MediaConvertOutput.tech_metadata(settings[index], detail)
- output = ActiveEncode::Output.new
+ tech_md = MediaConvertOutput.tech_metadata_from_settings(
+ output_url: output_url,
+ output_settings: output_settings[index],
+ output_detail_settings: output_group_detail
+ )
 
- output.created_at = job.timing.submit_time
- output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+ output = ActiveEncode::Output.new
 
- [:width, :height, :frame_rate, :duration, :checksum, :audio_codec, :video_codec,
- :audio_bitrate, :video_bitrate, :file_size, :label, :url, :id].each do |field|
- output.send("#{field}=", tech_md[field])
- end
- output.id ||= "#{job.id}-output#{tech_md[:suffix]}"
- output
- end
+ output.created_at = job.timing.submit_time
+ output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
 
- adaptive_playlist = results.dig('detail', 'outputGroupDetails', 0, 'playlistFilePaths', 0)
- unless adaptive_playlist.nil?
- output = ActiveEncode::Output.new
- output.created_at = job.timing.submit_time
- output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
- output.id = "#{job.id}-output-auto"
-
- [:duration, :audio_codec, :video_codec].each do |field|
- output.send("#{field}=", outputs.first.send(field))
- end
- output.label = File.basename(adaptive_playlist)
- output.url = adaptive_playlist
- outputs << output
+ [:width, :height, :frame_rate, :duration, :checksum, :audio_codec, :video_codec,
+ :audio_bitrate, :video_bitrate, :file_size, :label, :url, :id].each do |field|
+ output.send("#{field}=", tech_md[field])
  end
- outputs
+ output.id ||= "#{job.id}-output#{tech_md[:suffix]}"
+ output
  end
 
- def get_encode_results(job)
- start_time = job.timing.submit_time
- end_time = (job.timing.finish_time || Time.now.utc) + 10.minutes
-
- response = cloudwatch_logs.start_query(
- log_group_name: log_group,
- start_time: start_time.to_i,
- end_time: end_time.to_i,
- limit: 1,
- query_string: "fields @message | filter detail.jobId = '#{job.id}' | filter detail.status = 'COMPLETE' | sort @ingestionTime desc"
+ # For HLS, we need to add on the single master adaptive playlist URL, which
+ # we can predict what it will be. At the moment, we don't know what to do
+ # for other types.
+ if output_group_settings.type == "HLS_GROUP_SETTINGS"
+ adaptive_playlist_url = MediaConvertOutput.construct_output_url(
+ destination: output_group_settings.hls_group_settings.destination,
+ file_input_url: file_input_url,
+ name_modifier: nil,
+ file_suffix: "m3u8"
  )
- query_id = response.query_id
- response = cloudwatch_logs.get_query_results(query_id: query_id)
- until response.status == "Complete"
- sleep(0.5)
- response = cloudwatch_logs.get_query_results(query_id: query_id)
- end
 
- return nil if response.results.empty?
+ output = ActiveEncode::Output.new
+ output.created_at = job.timing.submit_time
+ output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+ output.id = "#{job.id}-output-auto"
 
- JSON.parse(response.results.first.first.value)
+ [:duration, :audio_codec, :video_codec].each do |field|
+ output.send("#{field}=", outputs.first.send(field))
+ end
+ output.label = File.basename(adaptive_playlist_url)
+ output.url = adaptive_playlist_url
+ outputs << output
  end
 
- def cloudwatch_events
- @cloudwatch_events ||= Aws::CloudWatchEvents::Client.new
- end
+ outputs
+ end
 
- def cloudwatch_logs
- @cloudwatch_logs ||= Aws::CloudWatchLogs::Client.new
- end
+ # Takes an AWS MediaConvert job object, and the fetched CloudWatch log results
+ # of MediaConvert completion event, and builds and returns ActiveEncode output
+ # from extracted data.
+ #
+ # @param job [Aws::MediaConvert::Types::Job]
+ # @param results [Hash] relevant AWS MediaConvert completion event, fetched from CloudWatch.
+ #
+ # @return [Array<ActiveEncode::Output>,nil]
+ def build_output_from_logged_results(job, logged_results)
+ output_settings = job.settings.output_groups.first.outputs
 
- def mediaconvert
- endpoint = Aws::MediaConvert::Client.new.describe_endpoints.endpoints.first.url
- @mediaconvert ||= Aws::MediaConvert::Client.new(endpoint: endpoint)
- end
+ outputs = logged_results.dig('detail', 'outputGroupDetails', 0, 'outputDetails').map.with_index do |logged_detail, index|
+ tech_md = MediaConvertOutput.tech_metadata_from_logged(output_settings[index], logged_detail)
+ output = ActiveEncode::Output.new
 
- def s3_uri(url, options = {})
- bucket = options[:masterfile_bucket]
-
- case Addressable::URI.parse(url).scheme
- when nil, 'file'
- upload_to_s3 url, bucket
- when 's3'
- return url if options[:use_original_url]
- check_s3_bucket url, bucket
- else
- raise ArgumentError, "Cannot handle source URL: #{url}"
+ output.created_at = job.timing.submit_time
+ output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+
+ [:width, :height, :frame_rate, :duration, :checksum, :audio_codec, :video_codec,
+ :audio_bitrate, :video_bitrate, :file_size, :label, :url, :id].each do |field|
+ output.send("#{field}=", tech_md[field])
  end
+ output.id ||= "#{job.id}-output#{tech_md[:suffix]}"
+ output
  end
 
- def check_s3_bucket(input_url, source_bucket)
- # logger.info("Checking `#{input_url}'")
- s3_object = FileLocator::S3File.new(input_url).object
- if s3_object.bucket_name == source_bucket
- # logger.info("Already in bucket `#{source_bucket}'")
- s3_object.key
- else
- s3_key = File.join(SecureRandom.uuid, s3_object.key)
- # logger.info("Copying to `#{source_bucket}/#{input_url}'")
- target = Aws::S3::Object.new(bucket_name: source_bucket, key: input_url)
- target.copy_from(s3_object, multipart_copy: s3_object.size > 15_728_640) # 15.megabytes
- s3_key
+ adaptive_playlist = logged_results.dig('detail', 'outputGroupDetails', 0, 'playlistFilePaths', 0)
+ unless adaptive_playlist.nil?
+ output = ActiveEncode::Output.new
+ output.created_at = job.timing.submit_time
+ output.updated_at = job.timing.finish_time || job.timing.start_time || output.created_at
+ output.id = "#{job.id}-output-auto"
+
+ [:duration, :audio_codec, :video_codec].each do |field|
+ output.send("#{field}=", outputs.first.send(field))
  end
+ output.label = File.basename(adaptive_playlist)
+ output.url = adaptive_playlist
+ outputs << output
  end
+ outputs
+ end
 
- def upload_to_s3(input_url, source_bucket)
- # original_input = input_url
- bucket = Aws::S3::Resource.new(client: s3client).bucket(source_bucket)
- filename = FileLocator.new(input_url).location
- s3_key = File.join(SecureRandom.uuid, File.basename(filename))
- # logger.info("Copying `#{original_input}' to `#{source_bucket}/#{input_url}'")
- obj = bucket.object(s3_key)
- obj.upload_file filename
+ # gets complete notification data from CloudWatch logs, returns the CloudWatch
+ # log value as a parsed hash.
+ #
+ # @return [Hash] parsed AWS Cloudwatch data from MediaConvert COMPLETE event.
+ def get_encode_results(job)
+ start_time = job.timing.submit_time
+ end_time = (job.timing.finish_time || Time.now.utc) + 10.minutes
+
+ response = cloudwatch_logs.start_query(
+ log_group_name: log_group,
+ start_time: start_time.to_i,
+ end_time: end_time.to_i,
+ limit: 1,
+ query_string: "fields @message | filter detail.jobId = '#{job.id}' | filter detail.status = 'COMPLETE' | sort @ingestionTime desc"
+ )
+ query_id = response.query_id
+ response = cloudwatch_logs.get_query_results(query_id: query_id)
+ until response.status == "Complete"
+ sleep(0.5)
+ response = cloudwatch_logs.get_query_results(query_id: query_id)
+ end
 
- s3_key
+ return nil if response.results.empty?
+
+ JSON.parse(response.results.first.first.value)
+ end
+
+ def cloudwatch_events
+ @cloudwatch_events ||= Aws::CloudWatchEvents::Client.new
+ end
+
+ def cloudwatch_logs
+ @cloudwatch_logs ||= Aws::CloudWatchLogs::Client.new
+ end
+
+ def mediaconvert
+ @mediaconvert ||= begin
+ endpoint = Aws::MediaConvert::Client.new.describe_endpoints.endpoints.first.url
+ Aws::MediaConvert::Client.new(endpoint: endpoint)
  end
+ end
 
- def event_rule_exists?(rule_name)
- rule = cloudwatch_events.list_rules(name_prefix: rule_name).rules.find do |existing_rule|
- existing_rule.name == rule_name
- end
- !rule.nil?
+ def s3_uri(url, options = {})
+ bucket = options[:masterfile_bucket]
+
+ case Addressable::URI.parse(url).scheme
+ when nil, 'file'
+ upload_to_s3 url, bucket
+ when 's3'
+ return url if options[:use_original_url]
+ check_s3_bucket url, bucket
+ else
+ raise ArgumentError, "Cannot handle source URL: #{url}"
  end
+ end
 
- def find_log_group(name)
- cloudwatch_logs.describe_log_groups(log_group_name_prefix: name).log_groups.find do |group|
- group.log_group_name == name
- end
+ def check_s3_bucket(input_url, source_bucket)
+ # logger.info("Checking `#{input_url}'")
+ s3_object = FileLocator::S3File.new(input_url).object
+ if s3_object.bucket_name == source_bucket
+ # logger.info("Already in bucket `#{source_bucket}'")
+ s3_object.key
+ else
+ s3_key = File.join(SecureRandom.uuid, s3_object.key)
+ # logger.info("Copying to `#{source_bucket}/#{input_url}'")
+ target = Aws::S3::Object.new(bucket_name: source_bucket, key: input_url)
+ target.copy_from(s3_object, multipart_copy: s3_object.size > 15_728_640) # 15.megabytes
+ s3_key
  end
+ end
 
- def create_log_group(name)
- result = find_log_group(name)
+ def upload_to_s3(input_url, source_bucket)
+ # original_input = input_url
+ bucket = Aws::S3::Resource.new(client: s3client).bucket(source_bucket)
+ filename = FileLocator.new(input_url).location
+ s3_key = File.join(SecureRandom.uuid, File.basename(filename))
+ # logger.info("Copying `#{original_input}' to `#{source_bucket}/#{input_url}'")
+ obj = bucket.object(s3_key)
+ obj.upload_file filename
 
- return result unless result.nil?
+ s3_key
+ end
 
- cloudwatch_logs.create_log_group(log_group_name: name)
- find_log_group(name)
+ def event_rule_exists?(rule_name)
+ rule = cloudwatch_events.list_rules(name_prefix: rule_name).rules.find do |existing_rule|
+ existing_rule.name == rule_name
  end
+ !rule.nil?
+ end
 
- def make_audio_input(input_url)
- {
- audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
- audio_selector_groups: {
- "Audio Selector Group 1" => {
- audio_selector_names: ["Audio Selector 1"]
- }
- },
- file_input: input_url,
- timecode_source: "ZEROBASED"
- }
+ def find_log_group(name)
+ cloudwatch_logs.describe_log_groups(log_group_name_prefix: name).log_groups.find do |group|
+ group.log_group_name == name
  end
+ end
 
- def make_video_input(input_url)
- {
- audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
- file_input: input_url,
- timecode_source: "ZEROBASED",
- video_selector: {}
- }
- end
+ def create_log_group(name)
+ result = find_log_group(name)
 
- def make_output_groups(options)
- output_type = options[:output_type] || :hls
- raise ArgumentError, "Unknown output type: #{output_type.inspect}" unless OUTPUT_GROUP_TEMPLATES.keys.include?(output_type)
- output_group_settings_key = "#{output_type}_group_settings".to_sym
- output_group_settings = OUTPUT_GROUP_TEMPLATES[output_type].merge(destination: "s3://#{output_bucket}/#{options[:output_prefix]}")
+ return result unless result.nil?
 
- outputs = options[:outputs].map do |output|
- {
- preset: output[:preset],
- name_modifier: output[:modifier]
+ cloudwatch_logs.create_log_group(log_group_name: name)
+ cloudwatch_logs.put_retention_policy(
+ log_group_name: name,
+ retention_in_days: SETUP_LOG_GROUP_RETENTION_DAYS
+ )
+
+ find_log_group(name)
+ end
+
+ def make_audio_input(input_url)
+ {
+ audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+ audio_selector_groups: {
+ "Audio Selector Group 1" => {
+ audio_selector_names: ["Audio Selector 1"]
  }
- end
+ },
+ file_input: input_url,
+ timecode_source: "ZEROBASED"
+ }
+ end
 
- [{
- output_group_settings: {
- type: output_group_settings_key.upcase,
- output_group_settings_key => output_group_settings
- },
- outputs: outputs
- }]
+ def make_video_input(input_url)
+ {
+ audio_selectors: { "Audio Selector 1" => { default_selection: "DEFAULT" } },
+ file_input: input_url,
+ timecode_source: "ZEROBASED",
+ video_selector: {}
+ }
+ end
+
+ def make_output_groups(options)
+ output_type = options[:output_type] || :hls
+ raise ArgumentError, "Unknown output type: #{output_type.inspect}" unless OUTPUT_GROUP_TEMPLATES.keys.include?(output_type)
+ output_group_settings_key = "#{output_type}_group_settings".to_sym
+
+ destination = options[:destination] || "s3://#{output_bucket}/#{options[:output_prefix]}"
+ output_group_settings = OUTPUT_GROUP_TEMPLATES[output_type].merge(destination: destination)
+
+ if options[:output_group_destination_settings]
+ output_group_settings[:destination_settings] = options[:output_group_destination_settings]
  end
+
+ outputs = options[:outputs].map do |output|
+ {
+ preset: output[:preset],
+ name_modifier: output[:modifier]
+ }
+ end
+
+ [{
+ output_group_settings: {
+ type: output_group_settings_key.upcase,
+ output_group_settings_key => output_group_settings
+ },
+ outputs: outputs
+ }]
+ end
  end
  end
  end
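For reference, the output_groups structure assembled by the new make_output_groups for a default HLS request has roughly the following shape; the destination and preset names are placeholders, and the HLS template defaults come from OUTPUT_GROUP_TEMPLATES and are not repeated here.

    [{
      output_group_settings: {
        type: :HLS_GROUP_SETTINGS,
        hls_group_settings: {
          # ...defaults from OUTPUT_GROUP_TEMPLATES[:hls]...
          destination: "s3://output-bucket/path/to/output/base_name"
        }
      },
      outputs: [
        { preset: "my-hls-preset-high", name_modifier: "_high" }
      ]
    }]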