media_processing_tool 1.0.0
Files added in this release, with line counts:
- checksums.yaml +7 -0
- data/.gitignore +21 -0
- data/README.md +2 -0
- data/bin/catalog +181 -0
- data/bin/catalog_assets +187 -0
- data/bin/fcp_xml_parser +41 -0
- data/bin/mig +44 -0
- data/bin/mig_http +52 -0
- data/bin/xml_processor +51 -0
- data/config/default/xml_processor_config +49 -0
- data/lib/axml.rb +59 -0
- data/lib/cli.rb +88 -0
- data/lib/final_cut_pro.rb +31 -0
- data/lib/final_cut_pro/sequence_processor.rb +135 -0
- data/lib/final_cut_pro/xml_parser.rb +15 -0
- data/lib/final_cut_pro/xml_parser/common.rb +121 -0
- data/lib/final_cut_pro/xml_parser/document.rb +18 -0
- data/lib/final_cut_pro/xml_parser/fcpxml/version_1.rb +28 -0
- data/lib/final_cut_pro/xml_parser/xmeml/version_5.rb +234 -0
- data/lib/itunes/xml_parser.rb +51 -0
- data/lib/media_processing_tool/publisher.rb +52 -0
- data/lib/media_processing_tool/xml_parser.rb +30 -0
- data/lib/media_processing_tool/xml_parser/document.rb +38 -0
- data/lib/media_processing_tool/xml_parser/identifier.rb +43 -0
- data/lib/media_processing_tool/xml_processor.rb +132 -0
- data/lib/mig.rb +158 -0
- data/lib/mig/http.rb +54 -0
- data/lib/mig/modules/common.rb +333 -0
- data/lib/mig/modules/exiftool.rb +26 -0
- data/lib/mig/modules/ffmpeg.rb +225 -0
- data/lib/mig/modules/media_type.rb +23 -0
- data/lib/mig/modules/mediainfo.rb +91 -0
- data/lib/timecode_methods.rb +108 -0
- data/lib/udam_utils/publish_map_processor.rb +710 -0
- metadata +111 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: 297b7fe61e129519bdfe33fb90bf29d7b199ab64
|
4
|
+
data.tar.gz: 6127946e2a8d8865ca4846d183376e8d3fbecf68
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 0f8d9ce57c58f76c19f2336866da0be2f20ba887bc9b1edb1811857fa6aa223169cc5457eb7b5bb30c6f8bfaaaf67cf365299d4e05d2b95d67e4a9dda6c0420b
|
7
|
+
data.tar.gz: 96a77a2e6729efec6cd3dfafaa24b0e3a9313543c78ab967fbfa45bd1bf870a3274392f20d8017376bfc198896322bdb9b40f4496b7ce0f79205707bdaa4558e
|
data/.gitignore
ADDED
data/README.md
ADDED
data/bin/catalog
ADDED
@@ -0,0 +1,181 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
# A utility to create a CSV of the file properties.
|
3
|
+
#
|
4
|
+
# If process-xml-files is true, the file paths inside of supported xml files will be
# parsed and the referenced files will be analyzed and added to the output
|
6
|
+
#
|
7
|
+
# Make the bundled lib directory loadable when running from a source checkout.
lib_path = File.expand_path('../../lib', __FILE__)
# File.exist? replaces File.exists?, which was removed in Ruby 3.2.
$:.unshift(lib_path) if !$:.include?(lib_path) and File.exist?(lib_path)

require 'cgi'
require 'csv'
require 'json'
require 'logger'
require 'open3'
require 'optparse'
require 'shellwords'
require 'time'
require 'uri'

require 'mig/modules/ffmpeg'

# Default CSV column name for the media file path.
DEFAULT_MEDIA_FILE_PATH_FIELD_NAME = 'Media File Path'
|
23
|
+
|
24
|
+
# Local copy of Ruby's Find module, modified so that only regular
# (non-directory) entries are yielded, together with their File::Stat.
module Find

  #
  # Calls the associated block with (path, File::Stat) for every non-directory
  # entry found beneath the given paths, recursing into subdirectories.
  # Directories themselves are traversed but not yielded.
  #
  # Returns an Enumerator when no block is given.
  # Raises Errno::ENOENT if a starting path does not exist.
  #
  def find(*paths) # :yield: path, stat
    block_given? or return enum_for(__method__, *paths)

    paths.collect!{|d| raise Errno::ENOENT unless File.exist?(d); d.dup}
    while file = paths.shift
      catch(:prune) do
        begin
          s = File.lstat(file)
        rescue Errno::ENOENT, Errno::EACCES, Errno::ENOTDIR, Errno::ELOOP, Errno::ENAMETOOLONG
          # Entry vanished or is unreadable; skip it silently.
          next
        end
        if s.directory? then
          begin
            fs = Dir.entries(file)
          rescue Errno::ENOENT, Errno::EACCES, Errno::ENOTDIR, Errno::ELOOP, Errno::ENAMETOOLONG
            next
          end
          fs.sort!
          # unshift in reverse order preserves sorted traversal order.
          fs.reverse_each {|f|
            next if f == "." or f == ".."
            f = File.join(file, f)
            # NOTE: the original called f.untaint here; Object#untaint was
            # removed in Ruby 3.2, so the call is dropped for compatibility.
            paths.unshift f
          }
        else
          yield file.dup, s
        end
      end
    end
  end

  #
  # Skips the current file or directory, restarting the loop with the next
  # entry. If the current file is a directory, that directory will not be
  # recursively entered. Meaningful only within the block associated with
  # Find::find.
  #
  def prune
    throw :prune
  end

  module_function :find, :prune
end
|
76
|
+
|
77
|
+
File::Stat.class_eval do
|
78
|
+
|
79
|
+
def to_hash
|
80
|
+
(self.methods - Object.methods - [__callee__]).each_with_object({}) { |meth, acc| acc[meth] = self.send(meth) if self.method(meth).arity == 0 }
|
81
|
+
end
|
82
|
+
|
83
|
+
end
|
84
|
+
|
85
|
+
# --- Command-line options ---------------------------------------------------
options = { }
options_file_name = nil
op = OptionParser.new
op.on('--path PATH', 'The path to catalog') { |v| options[:path_to_catalog] = v }
op.on('--[no-]recursive', 'Determines if The directory tree will be recursive or not. If not then path can be a glob pattern.') { |v| options[:recursive] = v }
op.on('--[no-]process-xml-files', 'Media File Paths will be pulled from FCP xml files found using the path.') { |v| options[:process_xml_files] = v }
op.on('--destination-path PATH', 'The path and file name of the csv to create.') { |v| options[:destination_path] = v }
op.on('--help', 'Display this message.') { puts op; exit }
op.parse!(command_line_options = ARGV.dup)
# Re-parse so command-line values override anything loaded from an options
# file. NOTE(review): options_file_name is nil here, so OptionParser#load
# falls back to its default options file — confirm this is intentional.
op.parse!(ARGV.dup) if op.load(options_file_name)

@logger = Logger.new(STDOUT)
# Top-level accessor so helper methods defined below can log.
def logger; @logger end

# FFMPEG wrapper used to analyze each media file (from mig/modules/ffmpeg).
@ffmpeg = FFMPEG.new
path_to_catalog = options[:path_to_catalog]
recursive = options[:recursive]
process_xml_files = options[:process_xml_files]
destination_path = options[:destination_path]
media_file_path_field_name = options[:media_file_path_field_name] || DEFAULT_MEDIA_FILE_PATH_FIELD_NAME

# Fallback defaults when not supplied on the command line.
path_to_catalog ||= '/assets/**/*.mov'
destination_path ||= '/tmp/assets.csv'
|
108
|
+
|
109
|
+
|
110
|
+
# Converts a Final Cut Pro file-path URL into a filesystem path, decoding only
# the percent-escapes matched by the pattern below (e.g. %20 for spaces).
def unescape_fcp_file_path_url(file_path_url)
  path = URI(file_path_url).path
  path.gsub(/(%(?:[2-9]|[A-F])(?:\d|[A-F]))/) { |escape| CGI.unescape(escape) }
end
|
113
|
+
|
114
|
+
# Runs ffmpeg-based analysis on each unique media file path and returns a
# Hash of path => metadata. Paths that do not exist are skipped.
#
# options:
#   :xml_file_path            - if set, recorded on each row under the field below
#   :xml_file_path_field_name - field name for the above (default 'XML File Path')
def process_media_file_paths(media_file_paths, options = { })
  xml_file_path = options[:xml_file_path]
  xml_file_path_field_name = options[:xml_file_path_field_name] || 'XML File Path'

  media_file_paths = media_file_paths.uniq
  media_file_path_data = { }
  media_file_path_count = media_file_paths.length
  media_file_path_counter = 0
  media_file_paths.each do |mfp|
    media_file_path_counter += 1
    logger.debug { "Processing Media File Path #{media_file_path_counter} of #{media_file_path_count}. '#{mfp}'" }
    # File.exist? replaces File.exists?, which was removed in Ruby 3.2.
    next unless File.exist?(mfp)
    md = @ffmpeg.run(mfp)
    md[xml_file_path_field_name] = xml_file_path if xml_file_path
    # Drop the raw command/output entries; only analyzed fields go to the CSV.
    md = md.delete_if { |k, v| %w(output command).include? k }
    # Merge in filesystem stat data (see File::Stat#to_hash above).
    md.merge!(File.stat(mfp).to_hash)
    media_file_path_data[mfp] = md
  end
  media_file_path_data
end
|
134
|
+
|
135
|
+
# Extracts every <pathurl> value from an FCP XML file and returns them as
# unescaped filesystem paths.
def get_media_file_paths_from_xml(xml_file_path)
  _media_file_paths = [ ]
  # File.read avoids the unclosed handle leaked by File.open(...).read().
  File.read(xml_file_path).scan(/<pathurl>(.*)<\/pathurl>/) { |file_path_url_ary| _media_file_paths << unescape_fcp_file_path_url(file_path_url_ary.first) }
  _media_file_paths
end
|
140
|
+
|
141
|
+
# Globs for XML files matching path_to_catalog and returns the combined list
# of media file paths referenced by all of them.
def get_media_file_paths_from_xml_files(path_to_catalog)
  media_file_paths = [ ]
  xml_file_paths = Dir.glob(path_to_catalog)
  xml_file_path_count = xml_file_paths.length
  xml_file_path_counter = 0
  xml_file_paths.each do |xml_file_path|
    xml_file_path_counter += 1
    logger.debug { "Processing XML File #{xml_file_path_counter} of #{xml_file_path_count}. #{xml_file_path}" }
    # (An unused per-file index, media_file_paths_by_xml_file, was removed:
    # it was populated but never read or returned.)
    media_file_paths = media_file_paths + get_media_file_paths_from_xml(xml_file_path)
  end
  media_file_paths
end
|
156
|
+
|
157
|
+
# NOTE(review): unimplemented stub — returns nil and is never called in this
# script.
def data_to_table(media_file_paths_with_data)

end
|
160
|
+
|
161
|
+
# --- Main -------------------------------------------------------------------
# Build the list of media file paths either from FCP XML files or directly
# from the filesystem.
if process_xml_files
  media_file_paths = get_media_file_paths_from_xml_files(path_to_catalog)
else
  # Recursive: walk the tree with the customized Find module above.
  # Non-recursive: treat the path as a glob pattern.
  # NOTE(review): with no block, Find.find returns an Enumerator of
  # [path, stat] pairs, while process_media_file_paths appears to expect
  # plain path strings — confirm the recursive branch works as intended.
  media_file_paths = recursive ? Find.find(path_to_catalog) : Dir.glob(path_to_catalog)
end

media_file_paths_with_data = process_media_file_paths(media_file_paths)


# Union of all field names seen across rows; the first column is the path.
fields = { media_file_path_field_name => '' }
media_file_paths_with_data.each { |_, data| fields.merge!(data) }
# Template row so every CSV record has a value (possibly '') for every column.
empty_record = Hash[fields.map { |k,_| [k, ''] }]

CSV.open(destination_path, 'w') do |csv|
  csv << fields.keys
  media_file_paths_with_data.each do |media_file_path, data|
    data[media_file_path_field_name] = media_file_path
    csv << empty_record.merge(data).values
  end
end
puts "Output written to: #{destination_path}"
|
data/bin/catalog_assets
ADDED
@@ -0,0 +1,187 @@
|
|
1
|
+
#!/usr/bin/env ruby
|
2
|
+
# A utility to create a CSV of the file properties.
|
3
|
+
#
|
4
|
+
# If process-xml-files is true, the file paths inside of supported xml files will be
# parsed and the referenced files will be analyzed and added to the output
|
6
|
+
#
|
7
|
+
# Make the bundled lib directory loadable when running from a source checkout.
lib_path = File.expand_path('../../lib', __FILE__)
# File.exist? replaces File.exists?, which was removed in Ruby 3.2.
$:.unshift(lib_path) if !$:.include?(lib_path) and File.exist?(lib_path)
require 'rubygems'
require 'cgi'
require 'csv'
require 'find'
require 'json'
require 'logger'
require 'mig'
require 'open3'
require 'open3_backport' if RUBY_VERSION.start_with?('1.8.')
require 'optparse'
require 'pp'
require 'shellwords'
require 'time'
require 'uri'

#require 'mig/modules/ffmpeg'
|
25
|
+
# Local copy of Ruby's Find module (overriding the stdlib version required
# above), modified to yield (path, File::Stat) for every entry — directories
# included — before descending into them.
module Find

  #
  # Calls the associated block with (path, File::Stat) for every file and
  # directory listed as arguments, then recursively on their subdirectories,
  # and so on.
  #
  # Returns an Enumerator when no block is given.
  # Raises Errno::ENOENT if a starting path does not exist.
  #
  def find(*paths) # :yield: path, stat
    block_given? or return enum_for(__method__, *paths)

    paths.collect!{|d| raise Errno::ENOENT unless File.exist?(d); d.dup}
    while file = paths.shift
      catch(:prune) do

        begin
          s = File.lstat(file)
        rescue Errno::ENOENT, Errno::EACCES, Errno::ENOTDIR, Errno::ELOOP, Errno::ENAMETOOLONG
          # Entry vanished or is unreadable; skip it silently.
          next
        end

        yield file.dup, s

        if s.directory? then
          begin
            fs = Dir.entries(file)
          rescue Errno::ENOENT, Errno::EACCES, Errno::ENOTDIR, Errno::ELOOP, Errno::ENAMETOOLONG
            next
          end
          fs.sort!
          # unshift in reverse order preserves sorted traversal order.
          fs.reverse_each {|f|
            next if f == "." or f == ".."
            f = File.join(file, f)
            # NOTE: the original called f.untaint here; Object#untaint was
            # removed in Ruby 3.2, so the call is dropped for compatibility.
            paths.unshift f
          }
        end
      end
    end
  end

  #
  # Skips the current file or directory, restarting the loop with the next
  # entry. If the current file is a directory, that directory will not be
  # recursively entered. Meaningful only within the block associated with
  # Find::find.
  #
  def prune
    throw :prune
  end

  module_function :find, :prune
end
|
79
|
+
|
80
|
+
# Default CSV column name for the media file path.
DEFAULT_MEDIA_FILE_PATH_FIELD_NAME = 'Media File Path'

# --- Command-line options ---------------------------------------------------
options = { }
options_file_name = nil
op = OptionParser.new
op.on('--path PATH', 'The path to catalog') { |v| options[:path_to_catalog] = v }
op.on('--[no-]recursive', 'Determines if The directory tree will be recursive or not. If not then path can be a glob pattern.') { |v| options[:recursive] = v }
op.on('--destination-path PATH', 'The path and file name of the csv to create.') { |v| options[:destination_path] = v }
op.on('--help', 'Display this message.') { puts op; exit }
op.parse!(ARGV.dup)
# Re-parse so command-line values override anything loaded from an options
# file. NOTE(review): options_file_name is nil here, so OptionParser#load
# falls back to its default options file — confirm this is intentional.
op.parse!(ARGV.dup) if op.load(options_file_name)

@logger = Logger.new(STDOUT)
# Top-level accessors below expose script state to the helper methods that
# follow (top-level defs cannot see local variables).
def logger; @logger end

# Media Information Gatherer instance used to analyze each file.
@mig = MediaInformationGatherer.new(options)
def mig; @mig end

@path_to_catalog = options[:path_to_catalog]
def path_to_catalog; @path_to_catalog end

@recursive = options[:recursive]
def recursive; @recursive end

@destination_path = options[:destination_path]
def destination_path; @destination_path end

@media_file_path_field_name = options[:media_file_path_field_name] || DEFAULT_MEDIA_FILE_PATH_FIELD_NAME
def media_file_path_field_name; @media_file_path_field_name end

# @path_to_catalog ||= '/assets/**/*.mov'
# @destination_path ||= '/tmp/assets.csv'

# Both arguments are mandatory here (unlike data/bin/catalog, which defaults them).
abort("Path is a required argument.\n#{op}") unless path_to_catalog
abort("Destination Path is a required argument.\n#{op}") unless destination_path
|
115
|
+
|
116
|
+
# Converts a Final Cut Pro file-path URL into a filesystem path, decoding only
# the percent-escapes matched by the pattern below (e.g. %20 for spaces).
def unescape_fcp_file_path_url(file_path_url)
  fs_path = URI(file_path_url).path
  fs_path.gsub(/(%(?:[2-9]|[A-F])(?:\d|[A-F]))/) { |escape| CGI.unescape(escape) }
end
|
119
|
+
|
120
|
+
# Analyzes a single media file via the Media Information Gatherer and returns
# only the :common metadata section of the result.
def process_media_file_path(media_file_path)
  mds = mig.run(media_file_path)
  md = mds[:common]
  md
end
|
125
|
+
|
126
|
+
# Analyzes each unique media file path and returns a Hash of path => metadata.
#
# If the gatherer reports a :file_path that differs from the requested path,
# the same entry is retried (via redo, up to 3 retries) before giving up with
# a warning; note that the mismatched metadata is still recorded in the
# give-up case, since control falls through to the storage lines below.
#
# options:
#   :xml_file_path            - if set, recorded on each row under the field below
#   :xml_file_path_field_name - field name for the above (default 'XML File Path')
def process_media_file_paths(media_file_paths, options = { })
  xml_file_path = options[:xml_file_path]
  xml_file_path_field_name = options[:xml_file_path_field_name] || 'XML File Path'

  media_file_paths = media_file_paths.uniq
  media_file_path_data = { }
  media_file_path_count = media_file_paths.length
  media_file_path_counter = 0
  attempt_counter = 0
  media_file_paths.each do |mfp|
    attempt_counter += 1
    media_file_path_counter += 1
    logger.debug { "Processing Media File Path #{media_file_path_counter} of #{media_file_path_count}. '#{mfp}'" }
    #next unless File.exists?(mfp)
    #md = @ffmpeg.run(mfp)
    md = process_media_file_path(mfp)
    unless (mfp == md[:file_path])
      # Path mismatch: undo the progress-counter bump and retry this entry.
      warn("File Path Mismatch A1. '#{mfp}' != '#{md[:file_path]}'")
      media_file_path_counter -= 1
      attempt_counter < 4 ? redo : warn("Failing After #{attempt_counter}")
    end
    md[xml_file_path_field_name] = xml_file_path if xml_file_path
    media_file_path_data[mfp] = md
    # unless (mfp == md[:file_path])
    # warn("File Path Mismatch A2. '#{mfp}' != '#{md[:file_path]}'")
    # media_file_path_counter -= 1
    # redo
    # end
    # Reset the retry counter once an entry is stored.
    attempt_counter = 0
  end

  media_file_path_data
end
|
159
|
+
|
160
|
+
# NOTE(review): unimplemented stub — returns nil and is never called in this
# script.
def data_to_table(media_file_paths_with_data)

end
|
163
|
+
|
164
|
+
# --- Main -------------------------------------------------------------------
if recursive then
  media_file_paths = [ ]
  # Collect regular files only; echo every visited path as progress output.
  Find.find(path_to_catalog) { |fp, stat| media_file_paths << fp if stat.file?; puts fp }
else
  # Non-recursive: treat the path as a glob pattern.
  media_file_paths = Dir.glob(path_to_catalog)
end
# require 'pp'
# abort("MFP Size: #{media_file_paths.length} #{PP.pp(media_file_paths, '')}")
media_file_paths_with_data = process_media_file_paths(media_file_paths)


# Union of all field names seen across rows; the first column is the path.
fields = { media_file_path_field_name => '' }
media_file_paths_with_data.each { |_, data| fields.merge!(data) }
# Template row so every CSV record has a value (possibly '') for every column.
empty_record = Hash[fields.map { |k,_| [k, ''] }]

CSV.open(destination_path, 'w') do |csv|
  csv << fields.keys
  media_file_paths_with_data.each do |media_file_path, data|
    data[media_file_path_field_name] = media_file_path
    csv << empty_record.merge(data).values
  end
end

puts "Output written to: #{destination_path}"
|
data/bin/fcp_xml_parser
ADDED
@@ -0,0 +1,41 @@
|
|
1
|
+
#!/usr/bin/env ruby
# Parses a Final Cut Pro XML file and prints its files and/or sequences as JSON.
$:.unshift(File.expand_path(File.join(File.dirname(__FILE__), '../lib')))
require 'json'
require 'optparse'
require 'final_cut_pro/xml_parser'

options = { }

op = OptionParser.new
op.banner = "Usage: #{File.basename(__FILE__)} [options] filepath"
options[:pretty_print] = true
op.on('--[no-]pretty-print', '') { |v| options[:pretty_print] = v }
options[:output_files] = true
op.on('--[no-]files', '') { |v| options[:output_files] = v }
options[:output_sequences] = true
op.on('--[no-]sequences', '') { |v| options[:output_sequences] = v }
op.parse!

file_path = ARGV.shift
# Leftover developer-machine default paths ('/Users/admin/Dropbox/...') were
# removed: without them the usage banner below is reachable when no filepath
# argument is supplied (previously the ||= made this check dead code).
unless file_path
  puts op
  exit
end

# Output shape: { files: { }, sequences: { } }
doc = FinalCutPro::XMLParser.parse(file_path)#.to_hash

output = { }
output[:files] = doc.files if options[:output_files]
output[:sequences] = doc.sequences if options[:output_sequences]

# Deeply nested FCP XML can exceed JSON's default nesting limit.
json_generate_options = { :max_nesting => 250 }
if options[:pretty_print]
  puts JSON.pretty_generate(output, json_generate_options)
else
  puts JSON.fast_generate(output, json_generate_options)
end
|
41
|
+
|
data/bin/mig
ADDED
@@ -0,0 +1,44 @@
|
|
1
|
+
#!/usr/bin/env ruby
# Runs the Media Information Gatherer on a file and prints the collected
# metadata as JSON on STDOUT.
#lib_path = File.expand_path(File.join(File.dirname(__FILE__), '..', 'lib'))
lib_path = File.expand_path('../../lib', __FILE__)
# File.exist? replaces File.exists?, which was removed in Ruby 3.2.
$:.unshift(lib_path) if !$:.include?(lib_path) and File.exist?(lib_path)

require 'rubygems'
require 'json'
require 'optparse'

require 'mig'

options = { }

op = OptionParser.new
#op.banner = "Usage: #{File.basename(__FILE__)} [options] filepath"
op.on('--ffmpeg-bin-path PATH', '') { |v| options[:ffmpeg_cmd_path] = v }
op.on('--mediainfo-bin-path PATH', '') { |v| options[:mediainfo_cmd_path] = v }
op.on('--exiftool-bin-path PATH', '') { |v| options[:exiftool_cmd_path] = v }
op.on('--[no-]pretty-print', '') { |v| options[:pretty_print] = v }
op.on('--help', 'Displays this message.') { puts op; exit }
# Loads defaults from OptionParser's default options file, if present.
op.load
op.parse!

# Errors-only logging to STDERR keeps STDOUT clean for the JSON output.
logger = Logger.new(STDERR)
logger.level = Logger::ERROR
options[:logger] = logger

file_path = ARGV.shift
unless file_path
  puts op
  exit
end

mig = MediaInformationGatherer.new(options)
abort("File Not Found. #{file_path}") unless File.exist?(file_path)
output = mig.run(file_path)

# allow_nan: media analyzers can legitimately produce NaN/Infinity values.
json_options = { :allow_nan => true }

if options[:pretty_print]
  puts JSON.pretty_generate(output, json_options)
else
  puts JSON.fast_generate(output, json_options)
end
|