media_processing_tool 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.gitignore +21 -0
- data/README.md +2 -0
- data/bin/catalog +181 -0
- data/bin/catalog_assets +187 -0
- data/bin/fcp_xml_parser +41 -0
- data/bin/mig +44 -0
- data/bin/mig_http +52 -0
- data/bin/xml_processor +51 -0
- data/config/default/xml_processor_config +49 -0
- data/lib/axml.rb +59 -0
- data/lib/cli.rb +88 -0
- data/lib/final_cut_pro.rb +31 -0
- data/lib/final_cut_pro/sequence_processor.rb +135 -0
- data/lib/final_cut_pro/xml_parser.rb +15 -0
- data/lib/final_cut_pro/xml_parser/common.rb +121 -0
- data/lib/final_cut_pro/xml_parser/document.rb +18 -0
- data/lib/final_cut_pro/xml_parser/fcpxml/version_1.rb +28 -0
- data/lib/final_cut_pro/xml_parser/xmeml/version_5.rb +234 -0
- data/lib/itunes/xml_parser.rb +51 -0
- data/lib/media_processing_tool/publisher.rb +52 -0
- data/lib/media_processing_tool/xml_parser.rb +30 -0
- data/lib/media_processing_tool/xml_parser/document.rb +38 -0
- data/lib/media_processing_tool/xml_parser/identifier.rb +43 -0
- data/lib/media_processing_tool/xml_processor.rb +132 -0
- data/lib/mig.rb +158 -0
- data/lib/mig/http.rb +54 -0
- data/lib/mig/modules/common.rb +333 -0
- data/lib/mig/modules/exiftool.rb +26 -0
- data/lib/mig/modules/ffmpeg.rb +225 -0
- data/lib/mig/modules/media_type.rb +23 -0
- data/lib/mig/modules/mediainfo.rb +91 -0
- data/lib/timecode_methods.rb +108 -0
- data/lib/udam_utils/publish_map_processor.rb +710 -0
- metadata +111 -0
@@ -0,0 +1,23 @@
|
|
1
|
+
require 'filemagic/ext'
|
2
|
+
class MediaInformationGatherer

  # Determines a file's MIME media type using filemagic's File.mime_type
  # extension (loaded via 'filemagic/ext').
  class MediaType

    # @params [Hash] options Not currently used
    def initialize(options = {}); end # initialize

    # Scans a file and splits its MIME string into its components.
    #
    # @params [String] file_path The path to the file to scan
    # @params [Hash] options Not currently used
    # @return [Hash] Will contain :type, :subtype and any other attributes
    #   output during the call (e.g. a 'charset' entry when present)
    def run(file_path, options = { })
      mime_string = File.mime_type(file_path) || ''
      media_type, charset = mime_string.split(';')

      # e.g. 'video/mp4' -> type 'video', subtype 'mp4'
      if media_type.is_a?(String)
        type, subtype = media_type.split('/')
      end
      result = { :type => type, :subtype => subtype }

      # Any trailing attribute such as ' charset=binary' becomes a
      # string-keyed entry: result['charset'] = 'binary'
      if charset.is_a?(String)
        attribute = charset.strip.split('=')
        result[attribute.first] = attribute.last
      end

      result
    end # run

  end # MediaType
end
|
@@ -0,0 +1,91 @@
|
|
1
|
+
require 'shellwords'

class MediaInformationGatherer

  # Wraps the `mediainfo` CLI tool and parses its plain-text output into a
  # Hash of sections.
  class Mediainfo

    DEFAULT_EXECUTABLE_PATH = 'mediainfo'

    # Convenience: build an instance and run it in one call.
    def self.run(file_path, options = { })
      new(options).run(file_path, options)
    end

    # @params [Hash] options
    # @options options [String] :mediainfo_cmd_path Path to the mediainfo executable
    def initialize(options = { })
      @mediainfo_cmd_path = options.fetch(:mediainfo_cmd_path, DEFAULT_EXECUTABLE_PATH)
    end # initialize

    # Runs mediainfo against a file and parses its output.
    #
    # @params [String] file_path
    # @params [Hash] options Not currently used
    # @return [Hash] see #parse_output_to_hash
    def run(file_path, options = { })
      # BUG FIX: shell-escape the path. The previous "'#{file_path}'" quoting
      # broke (and allowed command injection) for paths containing a single quote.
      command_line = "#{@mediainfo_cmd_path} #{file_path.shellescape}"
      output = `#{command_line}`

      fix_encoding(output)

      parse_output_to_hash(output)
    end # run

    # Forces ISO-8859-1 on strings that are not valid UTF-8 (mutates in place).
    #
    # @param [String] output
    def fix_encoding(output)
      output[/test/] # Running a regexp on the string throws error if it's not UTF-8
    rescue ArgumentError
      output.force_encoding('ISO-8859-1')
    end

    # Takes the output from media info and creates a hash consisting of hashes of each 'section type'
    # Known 'section types' are: General, Video, Audio, and Menu
    #
    # @param [String] output
    # @return [Hash] keyed by section name, plus 'output' (the raw text) and
    #   'section_type_counts' (counts per lower-cased section type)
    def parse_output_to_hash(output)
      # Add a hash that will provide a count of sections by type
      mediainfo_hash = { 'output' => output, 'section_type_counts' => { 'audio' => 0, 'video' => 0 } }

      section_name = nil
      section_data = { }

      output.each_line { |line|
        data = line.chomp.split(':', 2)
        case data.length
        when 0 then next # Nothing parsed on this line, goto the next
        when 1
          # No key:value pair so it looks like we have a new section being defined

          # Add the previously parsed section.
          # BUG FIX: guard on nil only — key:value lines appearing before any
          # section header are discarded instead of being filed under a nil key.
          append_section(mediainfo_hash, section_name, section_data) unless section_name.nil?

          section_name = data[0].strip
          section_data = { }
        when 2
          # We have a key value pair, add it to this section
          section_data[data[0].strip] = data[1].strip
        end
      }
      # Append the last section we processed.
      # BUG FIX: previously an empty output created a bogus nil-keyed section.
      append_section(mediainfo_hash, section_name, section_data) unless section_name.nil?
      mediainfo_hash
    end

    # Appends parsed data to the main hash by section_name
    #
    # @param [Hash] mediainfo_hash
    # @param [String] section_name
    # @param [Hash] section_data
    # @return [Hash] mediainfo_hash
    def append_section(mediainfo_hash, section_name, section_data)
      if mediainfo_hash.has_key? section_name
        # Repeated section names become an Array of section hashes.
        mediainfo_hash[section_name] = [ mediainfo_hash[section_name] ] unless mediainfo_hash[section_name].is_a? Array
        mediainfo_hash[section_name] << section_data
      else
        mediainfo_hash[section_name] = section_data
      end

      # Determine section type by taking the first word of the section name (ex: 'Audio #1' == 'audio', 'Video' == 'video')
      section_type = section_name.split.first.downcase rescue section_name

      # Increment section type count for this section type
      mediainfo_hash['section_type_counts'][section_type] = 0 unless mediainfo_hash['section_type_counts'].has_key? section_type
      mediainfo_hash['section_type_counts'][section_type] += 1

      mediainfo_hash
    end # append_section

  end # Mediainfo

end
|
@@ -0,0 +1,108 @@
|
|
1
|
+
# Frame/timecode conversion helpers.
#
# Every helper is defined both as a module function (TimecodeMethods.foo) and
# as an instance method so the module can be included into other classes.
module TimecodeMethods

  # Converts an edit time base into the effective frame rate.
  #
  # @param [Integer] time_base e.g. 24, 30 or 60
  # @param [Boolean] ntsc When true the NTSC (non-integer) rate is returned
  # @return [Float]
  def self.convert_time_base(time_base, ntsc)
    case time_base.to_f
    when 24 then ntsc ? 23.976 : 24.0
    when 30 then ntsc ? 29.97 : 30.0
    when 60 then ntsc ? 59.94 : 60.0
    else time_base.to_f
    end
  end # convert_time_base
  # BUG FIX: the instance-level wrappers previously called `self.<method>`,
  # which re-entered the same instance method and recursed infinitely.
  def convert_time_base(*args); TimecodeMethods.convert_time_base(*args) end

  # Rescales a frame count from one frame rate to another.
  #
  # @return [Numeric] 0 when either rate is missing or non-positive
  def self.convert_frames_time_base(frames, frame_rate_from, frame_rate_to)
    return 0 unless frame_rate_from and frame_rate_from > 0 and frame_rate_to and frame_rate_to > 0
    frames * (frame_rate_to / frame_rate_from)
  end # convert_frames_time_base
  def convert_frames_time_base(*args); TimecodeMethods.convert_frames_time_base(*args) end

  # Converts an 'HH:MM:SS:FF' timecode string into a frame count.
  #
  # @param [String] timecode
  # @param [Float] fps
  # @param [Boolean] drop_frame Currently unused — TODO: drop-frame timecodes
  #   are converted as if they were non-drop.
  # @return [Numeric] 0 when timecode or fps is missing/invalid
  def self.timecode_to_frames(timecode, fps = 25.0, drop_frame = false)
    return 0 unless timecode and fps and fps > 0
    hours, minutes, seconds, frames = timecode.split(':')
    frames = frames.to_i
    frames += seconds.to_i * fps
    frames += (minutes.to_i * 60) * fps
    frames += (hours.to_i * 3600) * fps

    frames
  end # timecode_to_frames
  def timecode_to_frames(*args); TimecodeMethods.timecode_to_frames(*args) end

  # Converts a frame count into an 'HH:MM:SS:FF' timecode string.
  #
  # @param [Integer] frames
  # @param [Float] frame_rate
  # @param [Boolean] drop_frame Delegates to .frames_to_drop_frame_timecode when true
  # @param [String] drop_code_separator Separator before the frame field in drop-frame timecodes
  def self.frames_to_timecode(frames, frame_rate = 25.0, drop_frame = false, drop_code_separator = ';')
    return '00:00:00:00' unless frames and frames > 0 and frame_rate and frame_rate > 0
    return frames_to_drop_frame_timecode(frames, frame_rate, drop_code_separator) if drop_frame
    fps = frame_rate.to_f
    seconds = frames.to_f / fps
    remaining_frames = frames % fps

    hours = seconds / 3600
    seconds %= 3600

    minutes = seconds / 60
    seconds %= 60

    # sprintf truncates the float quotients to whole fields.
    sprintf('%02d:%02d:%02d:%02d', hours, minutes, seconds, remaining_frames)
  end # frames_to_timecode
  def frames_to_timecode(*args); TimecodeMethods.frames_to_timecode(*args) end

  # Converts a frame count to an SMPTE drop-frame timecode string.
  #
  # BUG FIX: re-implemented with the standard SMPTE drop-frame algorithm; the
  # previous implementation was marked 'FIXME FAILS TESTS' (e.g. it produced
  # 00:03:00;00 for frame 5396 instead of 00:03:00;02 and needed a
  # 'mystery off by 2' correction).
  #
  # Drop-frame timecode skips the first `dropped_per_minute` frame NUMBERS
  # (not actual frames) of every minute, except minutes divisible by 10.
  # dropped_per_minute is 2 at 29.97 fps and 4 at 59.94 fps.
  #
  # Examples (29.97 fps): 1800 -> '00:01:00;02', 17982 -> '00:10:00;00'
  #
  # @param [Integer] frames
  # @param [Float] frame_rate e.g. 29.97 or 59.94
  # @param [String] frame_separator Placed before the frame field
  def self.frames_to_drop_frame_timecode(frames, frame_rate, frame_separator = ';')
    fps_int = frame_rate.round                              # 30 for 29.97
    dropped_per_minute = (frame_rate.to_f * 0.066666).round # 2 for 29.97, 4 for 59.94
    frames_per_10_minutes = (frame_rate.to_f * 600).round   # 17982 for 29.97
    frames_per_minute = (fps_int * 60) - dropped_per_minute

    frames = frames.to_i
    ten_minute_chunks, remainder = frames.divmod(frames_per_10_minutes)

    # Add back the skipped frame numbers so the div/mod below lands on the
    # displayed timecode digits (9 dropping minutes per 10-minute chunk).
    frames += dropped_per_minute * 9 * ten_minute_chunks
    frames += dropped_per_minute * ((remainder - dropped_per_minute) / frames_per_minute) if remainder > dropped_per_minute

    frame  = frames % fps_int
    second = (frames / fps_int) % 60
    minute = (frames / (fps_int * 60)) % 60
    hour   = frames / (fps_int * 3600)

    sprintf('%02d:%02d:%02d%s%02d', hour, minute, second, frame_separator, frame)
  end # frames_to_drop_frame_timecode
  def frames_to_drop_frame_timecode(*args); TimecodeMethods.frames_to_drop_frame_timecode(*args) end

end # TimecodeMethods
|
@@ -0,0 +1,710 @@
|
|
1
|
+
$:.unshift(File.expand_path('../'))
|
2
|
+
require 'logger'
|
3
|
+
require 'open3'
|
4
|
+
require 'pp'
|
5
|
+
require 'shellwords'
|
6
|
+
#require 'udam_utils'
|
7
|
+
|
8
|
+
|
9
|
+
module UDAMUtils
|
10
|
+
|
11
|
+
# Base class for publish-map processors. Loads an optional configuration
# file (plain Ruby, eval'd in instance context) and validates that a
# non-empty Array of publish maps is available.
class BasePublishMapProcessor

  class << self
    # Class-level publish map, typically assigned by an eval'd configuration file.
    def publish_map=(value); @publish_map = value end # publish_map=
    def publish_map; @publish_map end # publish_map

    # Placeholder entry point; intentionally a no-op in the base class.
    def process(params = { })
      #new(params).process
    end # self.process

    # Returns the value of the first of +keys+ present in +hash+.
    #
    # @param [Hash] hash
    # @param [Symbol|String|Array<Symbol, String>] keys
    # @param [Hash] options
    # @option options [Any] :default The default value to return if none of the keys are found
    # @option options [Boolean] :search_keys_as_string Also try each key converted to a String
    def search_hash(hash, keys, options = { })
      search_keys_as_string = options[:search_keys_as_string]
      [*keys].each do |key|
        # BUG FIX: return the stored value even when it is falsy (nil/false);
        # the previous `value = hash[key] and break` only stopped on truthy values.
        return hash[key] if hash.has_key?(key)
        if search_keys_as_string
          key = key.to_s
          return hash[key] if hash.has_key?(key)
        end
      end
      options[:default]
    end # search_hash

  end # self

  attr_accessor :logger

  # @param [Hash] params
  # @option params [Logger] :logger Pre-built logger (takes precedence over :log_to)
  # @option params [IO|String] :log_to Log device for a new Logger (default STDOUT)
  # @option params [String] :config_file_path Configuration file to eval
  # @option params [Array] :publish_maps The publish maps (unless the config file sets them)
  def initialize(params = {})
    @logger = params[:logger] || Logger.new(params[:log_to] || STDOUT)
    load_configuration_from_file(params)

    # The eval'd configuration may already have populated @publish_maps / @publish_map.
    @publish_maps ||= @publish_map || params[:publish_maps] || params[:publish_map]
    # BUG FIX: the error message previously referenced an undefined local `options`,
    # turning the intended RuntimeError into a NameError.
    raise RuntimeError, "Missing or Empty @publish_maps.\n Check your configuration in #{params[:config_file_path]}" unless (@publish_maps.is_a?(Array) and !@publish_maps.empty?)
  end # initialize

  # Loads and evals a configuration file. Accepts a params Hash with
  # :config_file_path / :configuration_file_path, a bare String path, or an
  # Array whose first element is either of those.
  #
  # @param [Hash|String|Array] params
  # @return [false|Object] false when no path was supplied; otherwise the
  #   result of eval'ing the file
  def load_configuration_from_file(params = { })
    params = params.dup

    case params
    when String
      config_file_path = params
      params = { }
    when Hash
      config_file_path = params[:config_file_path] || params[:configuration_file_path]
    when Array
      case params.first
      when String
        config_file_path = params.shift
        params = params.first.is_a?(Hash) ? params.first : { }
      when Hash; params = params.shift
      else params = { }
      end
    end # case params
    return false unless config_file_path

    # BUG FIX: File.exists? was removed in Ruby 3.2; use File.exist?.
    raise "Configuration File Not Found. '#{config_file_path}'" unless File.exist?(config_file_path)
    logger.debug { "Loading Configuration From File. #{config_file_path}"}

    # SECURITY: the configuration file is arbitrary Ruby eval'd in this
    # instance's context — it must come from a trusted source.
    # (File.read also avoids the unclosed handle File.open(...).read left behind.)
    eval(File.read(config_file_path))
  end # load_configuration_from_file

  # Instance-side convenience; delegates to the class-level implementation
  # (previously a byte-for-byte duplicate of it).
  #
  # @param [Hash] hash
  # @param [Symbol|String|Array<Symbol, String>] keys
  # @param [Hash] options see BasePublishMapProcessor.search_hash
  def search_hash(hash, keys, options = { })
    self.class.search_hash(hash, keys, options)
  end # search_hash

end # BasePublishMapProcessor
|
100
|
+
|
101
|
+
|
102
|
+
class WorkflowPublishMapProcessor < BasePublishMapProcessor
|
103
|
+
|
104
|
+
attr_accessor :event
|
105
|
+
|
106
|
+
# @param [Hash] params
# @option params [Object] :logger
# @option params [String] :uu_executable (/usr/bin/uu)
# @option params [Boolean] :confirm_filtered_events NOT CURRENTLY USED
# @option params [Hash] :publish_map
# @option params [Symbol, String] :event_id_field_name (:id)
# @option params [Symbol, String] :event_type_field_name (:type)
# @option params [Symbol, String] :entity_field_name (:entity)
# @option params [Symbol, String] :entity_path_field_name (:path)
def initialize(params = { })
  super(params)

  logger.debug { "Initializing Workflow Publish Map Processor. #{params}" }

  @uu_bin_dir = '/usr/bin' #|| UDAMUtils.get_bin_dir

  # BUG FIX: previously read with `params[:uu_executable] ||= ...`, which
  # silently mutated the caller's hash; read without mutation instead.
  @udam_utils_exec = params[:uu_executable] || File.join(@uu_bin_dir, 'uu')
  raise "UDAM Utils Executable Not Found. File Not Found: '#{@udam_utils_exec}'" unless File.exist?(@udam_utils_exec)
  logger.debug { "UDAM Utils Executable: #{@udam_utils_exec}" }

  # Determines if events that are set to not be published still get confirmed by default.
  # `||=` on each ivar: the configuration file eval'd in super may have set it already.
  @confirm_filtered_events ||= params[:confirm_filtered_events] || false
  logger.debug { "Confirm Filtered Events: #{@confirm_filtered_events}" }

  @event_id_field_name ||= params[:event_id_field_name] || :id
  @event_id_field_name = @event_id_field_name.to_sym if @event_id_field_name.is_a?(String)

  @event_type_field_name ||= params[:event_type_field_name] || :type
  @event_type_field_name = @event_type_field_name.to_sym if @event_type_field_name.is_a?(String)

  @entity_field_name ||= params[:entity_field_name] || :entity
  @entity_field_name = @entity_field_name.to_sym if @entity_field_name.is_a?(String)

  @entity_path_field_name ||= params[:entity_path_field_name] || :path
  @entity_path_field_name = @entity_path_field_name.to_sym if @entity_path_field_name.is_a?(String)
end # initialize
|
150
|
+
|
151
|
+
# Resolves a map of workflow parameter definitions into concrete values.
#
# Each entry may be:
#   * a String — always treated as a Ruby expression and eval'd
#   * a Hash   — { :value => ..., :eval => true/false }; :value is eval'd only
#                when :eval is truthy AND the value is a String
#   * anything else — resolves to nil
#
# `event` is not referenced directly in this method body, but it is in scope
# so that eval'd expressions can reference it (and any instance state).
# SECURITY: eval runs configuration-supplied Ruby — configs must be trusted.
#
# Parameters that raise during evaluation are logged and OMITTED from the
# returned hash (no key is added for them).
#
# @param [Hash] parameters parameter name => String|Hash definition
# @param [Hash] event The event the parameter expressions are evaluated against
# @return [Hash] parameter name => resolved value
def eval_workflow_parameters(parameters, event = @event)
  workflow_parameter_values = { }
  parameters.each { |pname, param|
    logger.debug { "Processing Workflow Parameter: #{pname} -> #{param}" }
    case param
    when String
      # A bare string is always an expression to evaluate.
      pv = param
      eval_pv = true
    when Hash
      pv = param.fetch(:value, nil)
      eval_pv = param.fetch(:eval, false)
    else
      pv = nil
      eval_pv = false
    end
    begin
      pv = eval(pv) if eval_pv && pv.is_a?(String)
      logger.debug { "Processed Workflow Parameter: #{pname} -> #{pv}" }
      workflow_parameter_values[pname] = pv
    rescue => e
      logger.error { "Failed to evaluate parameter. #{e.message}\nName: #{pname}\nValue: #{param}" }
    end
  }
  workflow_parameter_values
end # eval_workflow_parameters
|
179
|
+
|
180
|
+
# Publishes to a workflow by shelling out to the UDAM Utils `uu` CLI.
#
# NOTE(review): near-duplicate of #publish_event_to_workflow; this variant
# additionally supports :mq_connection_uri and returns only the success flag.
# Consider consolidating the two.
#
# @param [Hash] params
# @option params [Hash] :event
# @option params [Hash] :workflow A hash containing the :name and optionally the :parameters key for the workflow to execute
# @option params [String] :mq_connection_uri The connection URI to use when publishing the workflow.
# @return [Boolean] false when no workflow name is given; otherwise the
#   :success flag of the #execute response
def publish_to_workflow(params = { })
  event = params.fetch(:event, @event)
  workflow = params.fetch(:workflow, @publish_params.fetch(:workflow, nil))
  mq_connection_uri = params.fetch(:mq_connection_uri, nil)

  logger.debug { "Publishing To Workflow. Workflow: #{workflow}" }
  unless (workflow_name = workflow.fetch(:name, false))
    logger.error "No Workflow Name Specified. Event: #{event} Workflow: #{workflow}"
    return false
  end

  # Workflow parameters are eval'd against the event before serialization.
  workflow_parameters = workflow.fetch(:parameters, false)
  workflow_parameter_values = eval_workflow_parameters(workflow_parameters, event) if workflow_parameters
  workflow_parameter_values ||= { }

  # NOTE(review): to_json assumes the json library is loaded elsewhere — confirm.
  cmd_line = [ @udam_utils_exec, 'job', '--workflow', workflow_name, '--workflow-parameters', workflow_parameter_values.to_json]
  cmd_line << '--mq-connection-uri' << mq_connection_uri if mq_connection_uri
  cmd_line = cmd_line.shelljoin

  logger.debug { "Publishing event using command line. #{cmd_line}" }
  response = execute(cmd_line)
  logger.debug { "Publish event command response: #{response}" }
  response[:success]
end # publish_event_to_workflow
|
209
|
+
|
210
|
+
# Runs a command line via Open3.capture3 and normalizes the outcome.
#
# Never raises: failures (including spawn errors) are logged and reported
# through the returned hash's :success flag.
#
# @param [String] cmd_line The command line to execute
# @return [Hash] { :stdout => String, :stderr => String, :status => Process::Status|nil, :success => Boolean }
def execute(cmd_line)
  stdout_str = stderr_str = status = nil
  stdout_str, stderr_str, status = Open3.capture3(cmd_line)
  command_succeeded = status.success?
  logger.error { "Error Executing #{cmd_line}. Stdout: #{stdout_str} Stderr: #{stderr_str}" } unless command_succeeded
  { :stdout => stdout_str, :stderr => stderr_str, :status => status, :success => command_succeeded }
rescue
  logger.error { "Error Executing '#{cmd_line}'. Exception: #{$!} @ #{$@} STDOUT: '#{stdout_str}' STDERR: '#{stderr_str}' Status: #{status.inspect} " }
  { :stdout => stdout_str, :stderr => stderr_str, :status => status, :success => false }
end # execute
|
222
|
+
|
223
|
+
# Extracts the fields this processor uses from an event Hash into instance
# variables (@event_id, @event_type, @full_file_path, metadata sources, ...).
#
# Raises (after logging) when the entity has no path field (Hash#fetch KeyError).
#
# @param [Hash] event
def parse_event(event = @event)
  # Since the event came in and was converted from JSON all of the keys are strings instead of symbols

  begin
    @object = @event
    # Field names were symbolized in #initialize; fall back to String keys
    # for JSON-decoded events.
    @event_id = event[@event_id_field_name] || event[@event_id_field_name.to_s]
    @event_type = event[@event_type_field_name] || event[@event_type_field_name.to_s]

    # The entity may be nested under @entity_field_name or be the event itself.
    entity = event.fetch(@entity_field_name, event)
    @full_file_path = entity.fetch(@entity_path_field_name)

    @metadata_sources = entity.fetch(:metadata_sources, { })
    @exiftool = @metadata_sources[:exiftool] ||= { }
    @mediainfo = @metadata_sources[:mediainfo] ||= { }
    @ffmpeg = @metadata_sources[:ffmpeg] ||= { }
    @filemagic = @metadata_sources[:filemagic] ||= { }
    # NOTE(review): @media reads the :filemagic source again — plausibly
    # intentional (filemagic supplies the :type/:subtype pair), but the
    # duplicated key looks suspicious; confirm it shouldn't be :media.
    @media = @metadata_sources[:filemagic] ||= { }
    @common_media_info = @metadata_sources[:common] ||= { }

    #media = entity.fetch('media', { })
    @media_type = @media[:type] || @media['type']
    @media_subtype = @media[:subtype] || @media['subtype']
  rescue => e
    logger.error "Error parsing event.\n\tEvent: #{event.inspect}\n\n\tException #{e.message}\n\tBacktrace #{e.backtrace}"
    raise
  end
end # parse_event
|
251
|
+
|
252
|
+
# Runs a single event through the publish maps, publishing and/or confirming
# it according to the matched map's publish params (@publish_params, set as a
# side effect of #match_found_in_publish_maps?).
#
# @param [Hash] event
# @return [Hash] summary with :to_publish, :to_confirm, :published,
#   :confirmed, :publish_response, :confirm_response and overall :success
def process_event(event = @event)
  @event = event
  logger.debug { "Processing Event: \n\n #{PP.pp(event, '')}" }
  ignore_publish_error = false

  # Populates @event_id, @event_type, @full_file_path, metadata ivars, etc.
  parse_event(event) #rescue return

  if match_found_in_publish_maps?

    # Determines if the event is to be published
    # Defaults to true so that it doesn't have to be defined for every workflow map
    map_publish_event = @publish_params.fetch(:publish_event, true)

    # Determines if the event is to be confirmed
    # Defaults to true so that it doesn't have to be defined for every workflow map
    map_confirm_event = @publish_params.fetch(:confirm_event, true)

    # Determines if the event will still get confirmed if there is an error during publishing
    ignore_publish_error = @publish_params.fetch(:ignore_publish_error, false)
  else
    map_publish_event = nil
    map_confirm_event = nil
  end

  to_publish = map_publish_event
  # Unmatched (filtered) events may still be confirmed when @confirm_filtered_events is set.
  to_confirm = (map_confirm_event or @confirm_filtered_events)

  if to_publish
    publish_response = publish_event
    publish_successful = publish_response[:success]
    # NOTE(review): confirm_event is expected to be provided elsewhere
    # (subclass or mixin) — it is not defined in this file's visible portion.
    confirm_response = confirm_event(@event_id) if (publish_successful or ignore_publish_error) and map_confirm_event
  elsif to_confirm
    logger.debug { "Event is being confirmed but not published. Included Event: #{!map_publish_event.nil?} Map Publish Content: #{map_publish_event} Confirm Filtered Events: #{@confirm_filtered_events} Map Confirm Event: #{map_confirm_event} Event: #{event.inspect}"}
    confirm_response = confirm_event(@event_id)
  else
    publish_successful = publish_response = confirm_successful = confirm_response = nil
  end
  confirm_successful = confirm_response[:success] if confirm_response

  # Result summary; :success is true only when every requested action succeeded.
  {
    :to_publish => map_publish_event,
    :to_confirm => to_confirm,
    :published => (publish_successful or ignore_publish_error),
    :confirmed => confirm_successful,
    :publish_response => publish_response,
    :confirm_response => confirm_response,
    :success => (
      (!to_publish or (to_publish and publish_successful)) and
      (!to_confirm or (to_confirm and confirm_successful))
    )
  }
end # process_event
|
305
|
+
|
306
|
+
# Processes a batch of events sequentially, isolating failures per event.
#
# @param [Array(Hash)] events
def process_events(events)
  @event = nil
  events.each { |event| process_one_event_safely(event) }
end # process_events

# Runs #process_event for a single event, logging (rather than propagating)
# any StandardError or ScriptError it raises.
#
# @param [Hash] event
def process_one_event_safely(event)
  @event = event
  process_event
rescue StandardError, ScriptError => e
  logger.error "Error processing event.\n\tEvent: #{event.inspect}\n\n\tException #{e.inspect}"
end # process_one_event_safely
|
319
|
+
|
320
|
+
# Publishes an event: delegates to #publish_event_to_workflow when the
# publish params name a workflow, otherwise runs an external publish command.
#
# @param [Hash] event
# @param [Hash] params
# @option params [Hash|nil] :workflow Delegates to #publish_event_to_workflow when present
# @option params [String|nil] :publish_event_exec
# @option params [Boolean|nil] :eval_publish_event_exec (false) Eval the exec string first
# @option params [String|nil] :publish_event_arguments
# @option params [Boolean|nil] :eval_publish_event_arguments (true) Eval the arguments string first
# @return [Hash|false] response hash from #execute (or false from the workflow path)
def publish_event(event = @event, params = @publish_params)

  workflow = params.fetch(:workflow, false)
  return publish_event_to_workflow(:workflow => workflow) if workflow

  exec = params.fetch(:publish_event_exec, nil)
  eval_publish_event_exec = params.fetch(:eval_publish_event_exec, false)

  arguments = params.fetch(:publish_event_arguments, nil)
  eval_publish_event_arguments = params.fetch(:eval_publish_event_arguments, true)

  # SECURITY: eval of configuration-supplied strings; configs must be trusted.
  # (Also untangled from the previous `logger.debug { } and x = eval(x) if ...`
  # chains, which relied on Logger#debug's return value.)
  if eval_publish_event_exec and exec
    logger.debug { "Evaluating exec: #{exec}" }
    exec = eval(exec)
  end
  if eval_publish_event_arguments and arguments
    logger.debug { "Evaluating arguments: #{arguments}" }
    arguments = eval(arguments)
  end

  # BUG FIX: cmd_line was previously left nil when no arguments were given,
  # which crashed #execute; fall back to the bare exec.
  cmd_line = arguments ? "#{exec} #{arguments}" : exec.to_s
  logger.debug { "Publishing event using command line. #{cmd_line}" }
  response = execute(cmd_line)
  logger.debug { "Publish event command response: #{response}" }
  response
end # publish_event
|
347
|
+
|
348
|
+
# Publishes an event to a workflow by shelling out to the UDAM Utils `uu` CLI.
#
# NOTE(review): near-duplicate of #publish_to_workflow, which additionally
# supports :mq_connection_uri and returns only the :success flag. Consider
# consolidating the two.
#
# @param [Hash] params
# @option params [Hash] :event (@event)
# @option params [Hash] :workflow (@publish_params[:workflow]) Must contain :name; may contain :parameters
# @return [false|Hash] false when no workflow name is given; otherwise the
#   response Hash from #execute
def publish_event_to_workflow(params = { })
  event = params.fetch(:event, @event)
  workflow = params.fetch(:workflow, @publish_params.fetch(:workflow, nil))


  logger.debug { "Publishing Event To Workflow. Workflow: #{workflow}" }
  unless (workflow_name = workflow.fetch(:name, false))
    logger.error "No Workflow Name Specified. Event: #{event} Workflow: #{workflow}"
    return false
  end

  # Workflow parameters are eval'd against the event before serialization.
  workflow_parameters = workflow.fetch(:parameters, false)
  workflow_parameter_values = eval_workflow_parameters(workflow_parameters, event) if workflow_parameters
  workflow_parameter_values ||= { }

  # NOTE(review): to_json assumes the json library is loaded elsewhere — confirm.
  cmd_line = [ @udam_utils_exec, 'job', '--workflow', workflow_name, '--workflow-parameters', workflow_parameter_values.to_json].shelljoin
  logger.debug { "Publishing event using command line. #{cmd_line}" }
  response = execute(cmd_line)
  logger.debug { "Publish event command response: #{response}" }
  response
end # publish_event_to_workflow
|
372
|
+
|
373
|
+
|
374
|
+
# Tests the current event (as parsed into instance variables) against each
# configured publish map, stopping at the first match. A successful search
# also sets @publish_params as a side effect (via #init_publish_params inside
# the search_* methods).
#
# @params [Hash] publish_maps
# @return [Boolean]
def match_found_in_publish_maps?(publish_maps = @publish_maps)
  matched = false
  publish_maps.each { |current_publish_map|
    # A bare Array is shorthand for a glob map.
    current_publish_map = { :type => :glob, :map => current_publish_map } if current_publish_map.is_a?(Array)

    map_type = current_publish_map.fetch(:type, :unknown)
    map = current_publish_map.fetch(:map, false)
    # Logger#warn returns true, so `and next` always skips map-less entries.
    logger.warn { "Mapping with no map detected. #{current_publish_map}"} and next unless map

    # Dispatch on the map type; unknown types are logged and treated as no match.
    case map_type
    when :eval
      matched = search_eval_publish_map(current_publish_map)
    when :media_type
      matched = search_media_type_publish_map(current_publish_map)
    when :glob
      matched = search_glob_publish_map(current_publish_map)
    when :global
      matched = search_global_publish_map(current_publish_map)
    else
      logger.warn { "Unknown map type '#{map_type}'."}
    end
    break if matched
  }
  matched
end # process_publish_maps
|
400
|
+
|
401
|
+
# Selects the publish parameters for the current @event_type from a map of
# params keyed by event type, falling back to the :any and then :all entries;
# at each step the Symbol key is tried before its String form.
#
# Sets and returns @publish_params (nil when nothing matched).
#
# @param [Hash|nil] params_by_event_type
# @return [Hash|false|nil] false when params_by_event_type is nil
def init_publish_params(params_by_event_type)
  return false unless params_by_event_type

  @publish_params = nil
  candidate_keys = [ @event_type.to_sym, @event_type.to_s, :any, 'any', :all, 'all' ]
  candidate_keys.each { |key| @publish_params ||= params_by_event_type[key] }
  @publish_params
end
|
409
|
+
|
410
|
+
# Searches an :eval-type publish map: each map key is one Ruby expression (or
# an Array of them) evaluated against this instance's state; the first truthy
# expression wins. @publish_params is set (via #init_publish_params) before
# the expressions are tested.
#
# SECURITY: eval runs configuration-supplied Ruby; configs must be trusted.
#
# @params [Hash] params
# @option params [Hash] :map expression(s) => params-by-event-type Hash
# @return [Boolean]
def search_eval_publish_map(params = { })
  logger.debug { 'Starting Eval Search.' }
  map = params.fetch(:map, false)
  return false unless map

  match_found = false
  map.each { |expressions, map_params|
    # CONSISTENCY FIX: was `@logger.debug` — every other call site uses the
    # `logger` accessor.
    logger.debug { "Testing expressions. #{expressions} #{map_params}" }
    # Skip entries that define no params for the current event type.
    next unless init_publish_params(map_params)
    [*expressions].each { |expression|
      if eval(expression)
        logger.debug { "Matched expression: #{expression}" }
        match_found = true
        break
      end
    }
    break if match_found
  }
  match_found
end # search_eval_publish_map
|
426
|
+
|
427
|
+
# Searches a :media_type-type publish map against the asset's @media_type and
# @media_subtype (set by #parse_event). Map keys are type patterns mapping to
# { subtype pattern(s) => params-by-event-type } hashes; '*' is a wildcard.
#
# @params [Hash] params
# @option params [Hash] :map
# @return [Boolean]
def search_media_type_publish_map(params = { })
  logger.debug { 'Starting Media Type Search.' }
  map = params.fetch(:map, false)
  return false unless map

  # Logger#warn returns true, so `and return false` always fires here.
  logger.warn("Asset media type is empty. #{@object}") and return false unless @media_type
  logger.warn("Asset media subtype is empty. #{@object}") and return false unless @media_subtype

  match_found = false
  map.each { |media_types, media_subtypes_with_params|
    [*media_types].each { |media_type|
      # BUG FIX: the wildcard test previously checked the ASSET's type
      # (`@media_type == '*'`) instead of the map pattern, and regex-matched
      # before the equality check — a literal '*' pattern raised RegexpError.
      next unless media_type == '*' || media_type.match(@media_type)
      media_subtypes_with_params.each { |media_subtypes, map_params|
        # Sets @publish_params for this candidate entry.
        next unless init_publish_params(map_params)
        [*media_subtypes].each { |media_subtype|
          if media_subtype == '*' || media_subtype.match(@media_subtype)
            logger.debug { "Matched media type: #{media_type.to_s}/#{media_subtype.to_s} -> #{@media_type}/#{@media_subtype}" }
            match_found = true
            break
          end
        }
        # BUG FIX: stop scanning further subtype entries once matched —
        # previously a later init_publish_params call clobbered the matched
        # entry's @publish_params.
        break if match_found
      }
      break if match_found
    }
    break if match_found
  }
  logger.debug { "Media Type Search Completed. Match Found: #{match_found}" }
  match_found
end # search_media_type_publish_map
|
454
|
+
|
455
|
+
# Searches a :glob-type publish map against the asset's @full_file_path using
# File.fnmatch. Map keys are glob pattern(s) — or a Hash with :globs/:glob
# and per-entry :options — mapping to params-by-event-type hashes.
#
# @params [Hash] params
# @option params [Hash] :map
# @option params [Integer] :options Default File::FNM_* flags for fnmatch
# @return [Boolean]
def search_glob_publish_map(params = { })
  logger.debug { 'Starting Glob Search.' }
  map = params.fetch(:map, false)
  return false unless map

  logger.debug { "MAP: #{map}" }

  full_file_path = @full_file_path
  # Logger#warn returns true, so `and return false` always fires here.
  logger.warn("Full file path is empty. #{@object}") and return false unless full_file_path

  event_type = @event_type

  match_found = false

  default_glob_options = params.fetch(:options, 0)
  default_glob_options = 0 unless default_glob_options.is_a? Integer

  map.each do |patterns, map_params|
    # Sets @publish_params for this candidate entry.
    next unless init_publish_params(map_params)

    # An entry may carry its own fnmatch options in Hash form.
    if patterns.is_a?(Hash)
      glob_list = patterns[:globs] || patterns[:glob] || { }
      glob_options = patterns.fetch(:options, default_glob_options)
      glob_options = 0 unless glob_options.is_a? Integer
    else
      glob_list = patterns
      glob_options = default_glob_options
    end

    matched_pattern = [*glob_list].find do |pattern|
      logger.debug { "Testing #{full_file_path} against #{pattern} with options #{glob_options}" }
      File.fnmatch(pattern, full_file_path, glob_options)
    end

    if matched_pattern
      logger.debug { "Matched pattern: #{full_file_path} -> #{matched_pattern}" }
      match_found = true
      break
    end
  end
  match_found
end # search_glob_publish_map
|
499
|
+
|
500
|
+
# Processes a catch-all publish map.
#
# { type: :global, map: { anything: { all: { publish_event: false, confirm_event: false } } } }
# Where :anything is ignored and :all can be any one of :all, :created,
# :modified, :deleted.
#
# The first entry whose params initialize successfully counts as a match.
#
# @params [Hash] params
# @option params [Hash] :map
# @return [Boolean]
def search_global_publish_map(params = { })
  logger.debug { 'Starting Global Search.' }
  map = params[:map]
  return false unless map

  map.each do |_ignored, params_by_event_type|
    # First entry that yields usable publish params wins.
    return true if init_publish_params(params_by_event_type)
  end
  false
end # search_global_publish_map
|
519
|
+
|
520
|
+
# No-op confirmation; always reports success with empty output streams.
#
# @params [Array] args Ignored.
# @return [Hash] Canned response with :stdout, :stderr, :status, :success.
def confirm_event(*args)
  response = { :stdout => '', :stderr => '', :status => '' }
  response.merge(:success => true)
end # confirm_event
|
523
|
+
|
524
|
+
end # WorkflowPublishMapProcessor
|
525
|
+
|
526
|
+
# Event-based variant of WorkflowPublishMapProcessor.
# NOTE(review): adds no behavior of its own in this file — it appears to
# exist as a named extension point. Confirm nothing elsewhere reopens it
# before considering removal.
class EventBasedPublishMapProcessor < WorkflowPublishMapProcessor

end # EventBasedPublishMapProcessor
|
529
|
+
|
530
|
+
# Publish-map processor for plain objects (no event stream): each object is
# parsed for its file path and media metadata, matched against the publish
# maps, then published and/or confirmed per the matched params.
class GenericPublishMapProcessor < WorkflowPublishMapProcessor

  attr_accessor :object

  # @params [Hash] params
  # @option params [Symbol|String] :file_path_field_name Key used to read the
  #   full file path out of each object in #parse_object.
  def initialize(params = {})
    super(params)
    @full_file_path_field_name = params[:file_path_field_name]
  end # initialize

  # We don't have events so the params are the params; there is not an
  # 'event type' key in between as in the event-based processor.
  #
  # @params [Hash] params Publish params straight from the map entry.
  # @return [Hash] The params, memoized into @publish_params.
  def init_publish_params(params)
    @publish_params = params
    @publish_params
  end # init_publish_params

  # Extracts the file path, metadata sources, and media type/subtype from an
  # object into the instance variables the search_* methods consult.
  #
  # @params [Hash] object
  # @params [Hash] params Unused.
  # @raise Re-raises anything raised while reading the object, after logging.
  def parse_object(object = @object, params = { })
    logger.debug { "Parsing Object: #{PP.pp(object, '')}" }
    begin
      @full_file_path = object[@full_file_path_field_name]

      @metadata_sources = object.fetch(:metadata_sources, { })
      @exiftool = @metadata_sources[:exiftool] ||= { }
      @mediainfo = @metadata_sources[:mediainfo] ||= { }
      @ffmpeg = @metadata_sources[:ffmpeg] ||= { }
      @filemagic = @metadata_sources[:filemagic] ||= { }
      # NOTE(review): @media reads the same :filemagic key as @filemagic —
      # looks like a copy/paste remnant possibly meant to read a :media key.
      # Left as-is to preserve behavior; confirm against callers.
      @media = @metadata_sources[:filemagic] ||= { }
      @common_media_info = @metadata_sources[:common] ||= { }

      @media_type = @media[:type] || @media['type']
      @media_subtype = @media[:subtype] || @media['subtype']
    rescue => e
      logger.error "Error parsing object.\n\tObject: #{object.inspect}\n\n\tException #{e.message}\n\tBacktrace #{e.backtrace}"
      raise
    end
  end # parse_object

  # Processes each object in turn, capturing per-object failures as error
  # result hashes instead of aborting the batch.
  #
  # @param [Array<Hash>] objects
  # @param [Hash] params Merged into each per-object call.
  # @return [Array<Hash>] One result hash per object.
  def process_objects(objects, params = {})
    results = [ ]
    [*objects].each do |_object|
      begin
        results << process_object(params.merge(:object => _object))
      rescue StandardError, ScriptError => e
        logger.error "Error processing event.\n\tObject: #{_object.inspect}\n\n\tException #{e.inspect}"
        results << { :success => false, :error => { :message => e.message }, :exception => { :message => e.message, :backtrace => e.backtrace }, :object => _object }
      end
    end
    results
  end # process_objects

  # Runs one object through the publish maps and, when matched, performs the
  # publish and/or confirm actions dictated by the matched publish params.
  #
  # @params [Hash] params
  # @option params [Hash|Array<Hash>] :object An Array recurses per element.
  # @return [Hash] Outcome summary: :to_publish, :to_confirm, :published,
  #   :confirmed, :publish_response, :confirm_response, :success.
  def process_object(params = { })
    _object = params[:object] # nil when absent; same as the old has_key? ternary
    return _object.map { |o| process_object(params.merge(:object => o)) } if _object.is_a?(Array)
    @object = _object

    logger.debug { "Processing Object: \n\n #{PP.pp(object, '')}" }
    parse_object
    ignore_publish_error = false

    if match_found_in_publish_maps?
      # Publish/confirm default to true so they don't have to be defined for
      # every workflow map entry.
      map_publish = @publish_params.fetch(:publish, true)
      map_confirm = @publish_params.fetch(:confirm, true)
      # Determines if the event still gets confirmed when publishing errors.
      ignore_publish_error = @publish_params.fetch(:ignore_publish_error, false)
    else
      map_publish = nil
      map_confirm = nil
    end

    to_publish = map_publish
    to_confirm = (map_confirm || @confirm_filtered_objects)

    if to_publish
      publish_response = publish
      publish_successful = publish_response[:success]
      confirm_response = confirm(@object_id) if (publish_successful || ignore_publish_error) && map_confirm
    elsif to_confirm
      # BUGFIX: this log previously interpolated @confirm_filtered_events,
      # which this class never sets; @confirm_filtered_objects is the flag
      # actually consulted above.
      logger.debug { "Confirming but not published. Map Publish Content: #{map_publish} Confirm Filtered: #{@confirm_filtered_objects} Map Confirm Event: #{map_confirm} Object: #{object.inspect}"}
      confirm_response = confirm(@object_id)
    else
      publish_successful = publish_response = confirm_successful = confirm_response = nil
    end
    confirm_successful = confirm_response[:success] if confirm_response

    {
      :to_publish => to_publish,
      :to_confirm => to_confirm,
      :published => (publish_successful || ignore_publish_error),
      :confirmed => confirm_successful,
      :publish_response => publish_response,
      :confirm_response => confirm_response,
      :success => (
        (!to_publish || (to_publish && publish_successful)) &&
        (!to_confirm || (to_confirm && confirm_successful))
      )
    }
  rescue StandardError, ScriptError => e
    logger.error "Error processing object.\n\tObject: #{object.inspect}\n\n\tException #{e.inspect}"
    { :success => false, :error => { :message => e.message }, :exception => { :message => e.message, :backtrace => e.backtrace }, :object => object }
  end # process_object

  # Publishes an object to a named workflow via the UDAM utils executable.
  #
  # @param [Hash] params
  # @option params [Hash] :object Defaults to @object.
  # @option params [Hash] :workflow Defaults to @publish_params[:workflow].
  # @return [Hash|Boolean] The execute response, or false when no workflow
  #   :name was specified.
  def publish_to_workflow(params = { })
    object = params.fetch(:object, @object)
    workflow = params[:workflow]
    workflow ||= @publish_params[:workflow] if @publish_params.is_a?(Hash)

    logger.debug { "Publishing To Workflow. Workflow: #{workflow}" }
    unless (workflow_name = workflow.fetch(:name, false))
      logger.error "No Workflow Name Specified. Object: #{object} Workflow: #{workflow}"
      return false
    end

    workflow_parameters = workflow.fetch(:parameters, false)
    workflow_parameter_values = eval_workflow_parameters(workflow_parameters, object) if workflow_parameters
    workflow_parameter_values ||= { }

    cmd_line = [ @udam_utils_exec, 'job', '--workflow', workflow_name, '--workflow-parameters', workflow_parameter_values.to_json].shelljoin
    logger.debug { "Publishing event using command line. #{cmd_line}" }
    response = execute(cmd_line)
    logger.debug { "Publish event command response: #{response}" }
    response
  end # publish_to_workflow

  # Publishes an object either to a workflow (when the publish params name
  # one) or by running an executable/arguments command line from the params.
  #
  # @param [Hash] object
  # @param [Hash] params
  # @option params [Hash] :workflow Short-circuits to #publish_to_workflow.
  # @option params [String|nil] :publish_executable (aliases :publish_exec,
  #   :publish_event_exec)
  # @option params [Boolean|nil] (false) :eval_publish_executable
  # @option params [String|nil] :publish_arguments (alias
  #   :publish_event_arguments)
  # @option params [Boolean|nil] (true) :eval_publish_arguments
  # @return [Hash] The execute response.
  def publish(object = @object, params = @publish_params)

    workflow = params.fetch(:workflow, false)
    return publish_to_workflow(:workflow => workflow) if workflow

    exec = search_hash(params, [:publish_executable, :publish_exec, :publish_event_exec])
    eval_publish_exec = search_hash(params, [:eval_publish_executable, :eval_publish_exec, :eval_publish_event_exec])

    arguments = search_hash(params, [:publish_arguments, :publish_event_arguments])
    eval_publish_arguments = search_hash(params, [:eval_publish_arguments, :eval_publish_event_arguments], :default => true)

    # SECURITY: eval here executes strings coming from the publish map
    # configuration — safe only as long as maps are trusted input.
    logger.debug { "Evaluating exec: #{exec}" } and (exec = eval(exec)) if eval_publish_exec && exec
    logger.debug { "Evaluating arguments: #{arguments}" } and (arguments = eval(arguments)) if eval_publish_arguments && arguments

    if exec
      cmd_line = arguments ? "#{exec} #{arguments}" : exec
    else
      cmd_line = arguments
    end

    logger.debug { "Publishing using command line. #{cmd_line}" }
    response = execute(cmd_line)
    logger.debug { "Publish command response: #{response}" }
    response
  end # publish

  # No-op confirmation hook; returns nil. Subclasses may override.
  #
  # @params [Hash] params Ignored.
  # @return [nil]
  def confirm(params = {})

  end # confirm

end # GenericPublishMapProcessor
|
708
|
+
|
709
|
+
end # UDAMUtils
|
710
|
+
|