logstash-integration-jdbc 5.0.0.alpha1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +8 -0
- data/CONTRIBUTORS +22 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +105 -0
- data/docs/filter-jdbc_static.asciidoc +606 -0
- data/docs/filter-jdbc_streaming.asciidoc +317 -0
- data/docs/index.asciidoc +32 -0
- data/docs/input-jdbc.asciidoc +573 -0
- data/lib/logstash/filters/jdbc/basic_database.rb +125 -0
- data/lib/logstash/filters/jdbc/column.rb +39 -0
- data/lib/logstash/filters/jdbc/db_object.rb +101 -0
- data/lib/logstash/filters/jdbc/loader.rb +119 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +64 -0
- data/lib/logstash/filters/jdbc/lookup.rb +253 -0
- data/lib/logstash/filters/jdbc/lookup_processor.rb +100 -0
- data/lib/logstash/filters/jdbc/lookup_result.rb +40 -0
- data/lib/logstash/filters/jdbc/read_only_database.rb +57 -0
- data/lib/logstash/filters/jdbc/read_write_database.rb +108 -0
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +13 -0
- data/lib/logstash/filters/jdbc/single_load_runner.rb +46 -0
- data/lib/logstash/filters/jdbc/validatable.rb +46 -0
- data/lib/logstash/filters/jdbc_static.rb +240 -0
- data/lib/logstash/filters/jdbc_streaming.rb +196 -0
- data/lib/logstash/inputs/jdbc.rb +341 -0
- data/lib/logstash/inputs/tzinfo_jruby_patch.rb +57 -0
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +43 -0
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +298 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +129 -0
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +140 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb +28 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb +64 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb +143 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming.rb +100 -0
- data/lib/logstash/plugin_mixins/statement_handler.rb +0 -0
- data/lib/logstash-integration-jdbc_jars.rb +5 -0
- data/logstash-integration-jdbc.gemspec +44 -0
- data/spec/filters/env_helper.rb +10 -0
- data/spec/filters/integration/jdbc_static_spec.rb +154 -0
- data/spec/filters/integration/jdbcstreaming_spec.rb +173 -0
- data/spec/filters/jdbc/column_spec.rb +70 -0
- data/spec/filters/jdbc/db_object_spec.rb +81 -0
- data/spec/filters/jdbc/loader_spec.rb +77 -0
- data/spec/filters/jdbc/lookup_processor_spec.rb +132 -0
- data/spec/filters/jdbc/lookup_spec.rb +253 -0
- data/spec/filters/jdbc/read_only_database_spec.rb +67 -0
- data/spec/filters/jdbc/read_write_database_spec.rb +90 -0
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +24 -0
- data/spec/filters/jdbc/single_load_runner_spec.rb +16 -0
- data/spec/filters/jdbc_static_file_local_spec.rb +83 -0
- data/spec/filters/jdbc_static_spec.rb +162 -0
- data/spec/filters/jdbc_streaming_spec.rb +350 -0
- data/spec/filters/remote_server_helper.rb +24 -0
- data/spec/filters/shared_helpers.rb +34 -0
- data/spec/helpers/WHY-THIS-JAR.txt +4 -0
- data/spec/helpers/derbyrun.jar +0 -0
- data/spec/inputs/integration/integ_spec.rb +78 -0
- data/spec/inputs/jdbc_spec.rb +1431 -0
- data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0
- metadata +319 -0
# data/lib/logstash/filters/jdbc/column.rb
# encoding: utf-8
require_relative "validatable"

module LogStash module Filters module Jdbc
  # Value object describing one column of a local lookup table,
  # built from a two-element option array: ["name", "datatype"].
  class Column < Validatable
    attr_reader :name, :datatype

    private

    # Normalizes the validated string options into symbols.
    # NOTE(review): presumably invoked by the Validatable base class
    # after parse_options — confirm against validatable.rb.
    def post_initialize
      return unless valid?
      @name = @name.to_sym
      @datatype = @datatype.to_sym
    end

    # Returns the column as a two-element string array, e.g. ["ip", "text"].
    def to_array
      [@name.to_s, @datatype.to_s]
    end

    # Validates @options and collects any problems into @option_errors;
    # sets @valid to true only when no errors were recorded.
    def parse_options
      @option_errors << "The column options must be an array" unless @options.is_a?(Array)

      # Multiple assignment tolerates non-array input (assigns nil),
      # so the two string checks below catch that case as well.
      @name, @datatype = @options

      @option_errors << "The first column option is the name and must be a string" unless @name.is_a?(String)
      @option_errors << "The second column option is the datatype and must be a string" unless @datatype.is_a?(String)

      @valid = @option_errors.empty?
    end
  end
end end end
# data/lib/logstash/filters/jdbc/db_object.rb
# encoding: utf-8
require_relative "validatable"
require_relative "column"

module LogStash module Filters module Jdbc

  # Describes one local table to create, e.g.
  # {name => "servers", index_columns => ["ip"],
  #  columns => [["ip", "text"], ["name", "text"], ["location", "text"]]}
  class DbObject < Validatable
    attr_reader :name, :columns, :preserve_existing, :index_columns

    # Creates the table (plus optional index) on the given db handle.
    # No-op when the options failed validation.
    def build(db)
      return unless valid?
      raise "DbObject given a database instance that is nil" if db.nil?

      schema_gen = db.create_table_generator()
      @columns.each { |col| schema_gen.column(col.name, col.datatype) }
      schema_gen.index(@index_columns) unless @index_columns.empty?

      create_options = {:generator => schema_gen}
      if @preserve_existing
        # create_table? only creates the table when it does not already exist
        db.create_table?(@name, create_options)
      else
        db.create_table(@name, create_options)
      end
    end

    # Order DbObjects by table name.
    def <=>(other)
      @name <=> other.name
    end

    def to_s
      inspect
    end

    def inspect
      "<LogStash::Filters::Jdbc::DbObject name: #{@name}, columns: #{@columns.inspect}>"
    end

    private

    # Converts the validated name to a symbol for use as a Sequel table name.
    def post_initialize
      @name = @name.to_sym if valid?
    end

    # Validates the options hash: name, preserve_existing flag, columns
    # (uniform two-string arrays) and index_columns (must reference
    # defined columns). Records problems in @option_errors.
    def parse_options
      unless @options.is_a?(Hash)
        @option_errors << "DbObject options must be a Hash"
        @valid = false
        return
      end

      @name = @options["name"]
      unless @name.is_a?(String)
        @option_errors << "DbObject options must include a 'name' string"
        @name = "unnamed" # placeholder so later error messages read sensibly
      end

      @preserve_existing = @options.fetch("preserve_existing", false)
      @preserve_existing = true if @preserve_existing == "true"

      @columns_options = @options["columns"]
      @columns = []
      temp_column_names = []
      if @columns_options.is_a?(Array)
        sizes = @columns_options.map { |option| option.size }.uniq
        if sizes == [2]
          @columns_options.each do |option|
            column = Column.new(option)
            if column.valid?
              @columns << column
              temp_column_names << column.name
            else
              @option_errors << column.formatted_errors
            end
          end
        else
          @option_errors << "The columns array for '#{@name}' is not uniform, it should contain arrays of two strings only"
        end
      else
        @option_errors << "DbObject options for '#{@name}' must include a 'columns' array"
      end

      @index_column_options = @options["index_columns"]
      @index_columns = []
      if @index_column_options.is_a?(Array)
        @index_column_options.each do |option|
          if option.is_a?(String) && temp_column_names.member?(option.to_sym)
            @index_columns << option.to_sym
          else
            @option_errors << "The index_columns element: '#{option}' must be a column defined in the columns array"
          end
        end
      end

      @valid = @option_errors.empty?
    end
  end
end end end
# data/lib/logstash/filters/jdbc/loader.rb
# encoding: utf-8
require_relative "validatable"
require_relative "db_object"
require_relative "read_only_database"
require "logstash/util/loggable"
require "fileutils" # FileUtils.mkdir_p is used in parse_options

module LogStash module Filters module Jdbc
  # Pulls rows from a remote JDBC database into the local lookup db.
  class Loader < Validatable
    include LogStash::Util::Loggable

    CONNECTION_ERROR_MSG = "Remote DB connection error when executing loader Jdbc query"

    attr_reader :id, :table, :query, :max_rows
    attr_reader :connection_string, :driver_library, :driver_class
    attr_reader :user, :password, :staging_directory

    # Lazily builds the remote read-only database handle from the parsed options.
    def build_remote_db
      @remote = ReadOnlyDatabase.create(connection_string, driver_class, driver_library, user, password)
    end

    # Executes the loader query. Returns an empty record set when the
    # query yields no rows or more than max_rows (both are logged);
    # always disconnects afterwards.
    def fetch
      @remote.connect(CONNECTION_ERROR_MSG)
      row_count = @remote.count(query)
      if row_count.zero?
        logger.warn? && logger.warn("Query returned no results", :lookup_id => @id, :query => query)
        return @remote.empty_record_set
      end
      if row_count > max_rows
        logger.warn? && logger.warn("Query returned more than max_rows results", :lookup_id => @id, :query => query, :count => row_count, :max_rows => max_rows)
        return @remote.empty_record_set
      end
      @remote.query(query)
    ensure
      @remote.disconnect(CONNECTION_ERROR_MSG)
    end

    def close
      @remote.disconnect(CONNECTION_ERROR_MSG)
    end

    private

    def pre_initialize(options)
      @table = options["local_table"]
    end

    def post_initialize
      if valid?
        @table = @table.to_sym
      end
    end

    # Validates the loader options (local_table, query, max_rows, jdbc_*
    # settings, staging_directory) and records problems in @option_errors.
    def parse_options
      unless @table && @table.is_a?(String)
        @option_errors << "The options must include a 'local_table' string"
      end

      @id = @options.fetch("id", @table)

      @query = @options["query"]
      unless @query && @query.is_a?(String)
        @option_errors << "The options for '#{@table}' must include a 'query' string"
      end

      @max_rows = @options["max_rows"]
      if @max_rows
        if !@max_rows.respond_to?(:to_i)
          @option_errors << "The 'max_rows' option for '#{@table}' must be an integer"
        else
          @max_rows = @max_rows.to_i
        end
      else
        @max_rows = 1_000_000 # sane default cap on loaded rows
      end

      @driver_library = @options["jdbc_driver_library"]
      if @driver_library
        if !@driver_library.is_a?(String)
          @option_errors << "The 'jdbc_driver_library' option for '#{@table}' must be a string"
        end
        # File.exists? was a deprecated alias, removed in Ruby 3.2 — use File.exist?
        if !::File.exist?(@driver_library)
          @option_errors << "The 'jdbc_driver_library' option for '#{@table}' must be a file that can be opened: #{driver_library}"
        end
      end

      @driver_class = @options["jdbc_driver_class"]
      if @driver_class && !@driver_class.is_a?(String)
        @option_errors << "The 'jdbc_driver_class' option for '#{@table}' must be a string"
      end

      @connection_string = @options["jdbc_connection_string"]
      if @connection_string && !@connection_string.is_a?(String)
        @option_errors << "The 'jdbc_connection_string' option for '#{@table}' must be a string"
      end

      @user = @options["jdbc_user"]
      if @user && !@user.is_a?(String)
        @option_errors << "The 'jdbc_user' option for '#{@table}' must be a string"
      end

      @password = @options["jdbc_password"]
      case @password
      when String
        # wrap so the password is not leaked via inspect/logging
        @password = LogStash::Util::Password.new(@password)
      when LogStash::Util::Password, nil
        # this is OK
      else
        @option_errors << "The 'jdbc_password' option for '#{@table}' must be a string"
      end

      @staging_directory = @options["staging_directory"]
      if @staging_directory
        FileUtils.mkdir_p(@staging_directory)
      end

      @valid = @option_errors.empty?
    end
  end
end end end
# data/lib/logstash/filters/jdbc/loader_schedule.rb
# encoding: utf-8
require_relative "validatable"
require "rufus/scheduler"

module LogStash module Filters module Jdbc
  # Parses and validates the cron-style loader_schedule option and
  # derives a polling frequency for the Rufus scheduler.
  class LoaderSchedule < Validatable
    attr_reader :schedule_frequency, :loader_schedule

    # Builds a human-readable summary of the parsed cron line for logging.
    def to_log_string
      segments = []
      segments << "these months in the year [#{@cronline.months.to_a.join(", ")}];" unless @cronline.months.nil?
      segments << "these days in the month [#{@cronline.days.to_a.join(", ")}];" unless @cronline.days.nil?
      segments << "these hours in the day [#{@cronline.hours.to_a.join(", ")}];" unless @cronline.hours.nil?
      segments << "these minutes in the hour [#{@cronline.minutes.to_a.join(", ")}];" unless @cronline.minutes.nil?
      segments << "these seconds in the minute [#{@cronline.seconds.to_a.join(", ")}]" unless @cronline.seconds.nil?
      message = segments.join
      message.empty? ? message : "Scheduled for: #{message}"
    end

    private

    def post_initialize
      return unless valid?
      # From the Rufus::Scheduler docs:
      # By default, rufus-scheduler sleeps 0.300 second between every step.
      # At each step it checks for jobs to trigger and so on.
      # Set the frequency to 2.5 seconds if we are not reloading in the
      # seconds timeframe; the rufus scheduler thread should still respond
      # to stop quickly enough.
      @schedule_frequency = only_seconds_set? ? 0.3 : 2.5
    end

    # True when the cron line pins seconds only (every other field is open).
    def only_seconds_set?
      @cronline.seconds &&
        @cronline.minutes.nil? &&
        @cronline.hours.nil? &&
        @cronline.days.nil? &&
        @cronline.months.nil?
    end

    # Validates that the option is a string and parses it as a cron line.
    def parse_options
      @loader_schedule = @options

      unless @loader_schedule.is_a?(String)
        @option_errors << "The loader_schedule option must be a string"
      end

      begin
        @cronline = Rufus::Scheduler::CronLine.new(@loader_schedule)
      rescue => e
        @option_errors << "The loader_schedule option is invalid: #{e.message}"
      end

      @valid = @option_errors.empty?
    end
  end
end end end
# data/lib/logstash/filters/jdbc/lookup.rb
# encoding: utf-8
require_relative "lookup_result"
require "logstash/util/loggable"

module LogStash module Filters module Jdbc
  # One configured lookup: runs a query (plain or prepared) against the
  # local db and writes the result payload into the event's target field.
  class Lookup
    include LogStash::Util::Loggable

    # Resolves a "%{field}" reference via event.sprintf; flags the
    # parameter as invalid when no substitution happened.
    class Sprintfier
      def initialize(param)
        @param = param
      end

      def fetch(event, result)
        formatted = event.sprintf(@param)
        if formatted == @param # no field found so no transformation
          result.invalid_parameters_push(@param)
        end
        formatted
      end
    end

    # Resolves a plain field reference via event.get; nil/Hash/Array
    # values are not usable as query parameters and are flagged.
    class Getfier
      def initialize(param)
        @param = param
      end

      def fetch(event, result)
        value = event.get(@param)
        if value.nil? || value.is_a?(Hash) || value.is_a?(Array) # Array or Hash is not suitable
          result.invalid_parameters_push(@param)
        end
        value
      end
    end

    # Validates an array of lookup option hashes; returns nil when all
    # are valid, otherwise a "; "-joined error string.
    def self.find_validation_errors(array_of_options)
      if !array_of_options.is_a?(Array)
        return "The options must be an Array"
      end
      errors = []
      array_of_options.each_with_index do |options, i|
        instance = new(options, {}, "lookup-#{i.next}")
        unless instance.valid?
          errors << instance.formatted_errors
        end
      end
      return nil if errors.empty?
      errors.join("; ")
    end

    attr_reader :id, :target, :query, :parameters

    def initialize(options, globals, default_id)
      @id = options["id"] || default_id
      @target = options["target"]
      @id_used_as_target = @target.nil?
      # fall back to the lookup id as the event target field
      @target = @id if @id_used_as_target
      @options = options
      @globals = globals
      @valid = false
      @option_errors = []
      @default_result = nil
      @prepared_statement = nil
      @symbol_parameters = nil
      parse_options
    end

    def id_used_as_target?
      @id_used_as_target
    end

    def valid?
      @valid
    end

    def formatted_errors
      @option_errors.join(", ")
    end

    # Runs the lookup against the local db and sets the event target.
    # Tags failures/default use; returns true when the result was valid.
    def enhance(local, event)
      result = @prepared_statement ? call_prepared(local, event) : fetch(local, event)

      if result.failed? || result.parameters_invalid?
        tag_failure(event)
      end

      if result.valid?
        if @use_default && result.empty?
          tag_default(event)
          process_event(event, @default_result)
        else
          process_event(event, result)
        end
        true
      else
        false
      end
    end

    def use_prepared_statement?
      @prepared_parameters && !@prepared_parameters.empty?
    end

    # Builds the Sequel prepared statement, mapping each positional
    # parameter to a :$pN placeholder.
    def prepare(local)
      hash = {}
      @prepared_parameters.each_with_index { |v, i| hash[:"$p#{i}"] = v }
      @prepared_param_placeholder_map = hash
      @prepared_statement = local.prepare(query, hash.keys)
    end

    private

    def tag_failure(event)
      @tag_on_failure.each do |tag|
        event.tag(tag)
      end
    end

    def tag_default(event)
      @tag_on_default_use.each do |tag|
        event.tag(tag)
      end
    end

    # Non-prepared variant: interpolates params into the query each call.
    def fetch(local, event)
      execute_lookup(event) { |params| local.fetch(query, params) }
    end

    # Prepared-statement variant: binds params to the statement built in #prepare.
    def call_prepared(local, event)
      execute_lookup(event) { |params| @prepared_statement.call(params) }
    end

    # Shared driver for both query styles (previously duplicated in
    # fetch and call_prepared). Extracts parameters from the event,
    # yields them to the query block, and collects stringified rows
    # into a LookupResult.
    def execute_lookup(event)
      result = LookupResult.new()
      if @parameters_specified
        params = prepare_parameters_from_event(event, result)
        if result.parameters_invalid?
          logger.warn? && logger.warn("Parameter field not found in event", :lookup_id => @id, :invalid_parameters => result.invalid_parameters)
          return result
        end
      else
        params = {}
      end
      begin
        logger.debug? && logger.debug("Executing Jdbc query", :lookup_id => @id, :statement => query, :parameters => params)
        yield(params).each do |row|
          stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
          result.push(stringified)
        end
      rescue ::Sequel::Error => e
        # all sequel errors are a subclass of this, let all other standard or runtime errors bubble up
        result.failed!
        logger.warn? && logger.warn("Exception when executing Jdbc query", :lookup_id => @id, :exception => e.message, :backtrace => e.backtrace.take(8))
      end
      # if either of: no records or a Sequel exception occurs the payload is
      # empty and the default can be substituted later.
      result
    end

    def process_event(event, result)
      # use deep clone here so other filter function don't taint the payload by reference
      event.set(@target, ::LogStash::Util.deep_clone(result.payload))
    end

    # Builds the params hash from the event; Timestamps become Time objects.
    def prepare_parameters_from_event(event, result)
      @symbol_parameters.inject({}) do |hash,(k,v)|
        value = v.fetch(event, result)
        hash[k] = value.is_a?(::LogStash::Timestamp) ? value.time : value
        hash
      end
    end

    # Chooses interpolation style per parameter string.
    def sprintf_or_get(v)
      v.match(/%{([^}]+)}/) ? Sprintfier.new(v) : Getfier.new(v)
    end

    # Validates query/parameters/prepared_parameters/default_hash/tag options.
    def parse_options
      @query = @options["query"]
      unless @query && @query.is_a?(String)
        @option_errors << "The options for '#{@id}' must include a 'query' string"
      end

      if @options["parameters"] && @options["prepared_parameters"]
        @option_errors << "Can't specify 'parameters' and 'prepared_parameters' in the same lookup"
      else
        @parameters = @options["parameters"]
        @prepared_parameters = @options["prepared_parameters"]
        @parameters_specified = false
        if @parameters
          if !@parameters.is_a?(Hash)
            @option_errors << "The 'parameters' option for '#{@id}' must be a Hash"
          else
            # this is done once per lookup at start, i.e. Sprintfier.new et.al is done once.
            @symbol_parameters = @parameters.inject({}) {|hash,(k,v)| hash[k.to_sym] = sprintf_or_get(v) ; hash }
            # the user might specify an empty hash parameters => {}
            # maybe due to an unparameterised query
            @parameters_specified = !@symbol_parameters.empty?
          end
        elsif @prepared_parameters
          if !@prepared_parameters.is_a?(Array)
            @option_errors << "The 'prepared_parameters' option for '#{@id}' must be an Array"
          elsif @query.count("?") != @prepared_parameters.size
            @option_errors << "The 'prepared_parameters' option for '#{@id}' doesn't match count with query's placeholder"
          else
            # prepare the map @symbol_parameters :pN => sprintf_or_get
            hash = {}
            @prepared_parameters.each_with_index {|v,i| hash[:"p#{i}"] = sprintf_or_get(v)}
            @symbol_parameters = hash
            @parameters_specified = !@prepared_parameters.empty?
          end
        end
      end

      default_hash = @options["default_hash"]
      if default_hash && !default_hash.empty?
        @default_result = LookupResult.new()
        @default_result.push(default_hash)
      end

      @use_default = !@default_result.nil?

      @tag_on_failure = @options["tag_on_failure"] || @globals["tag_on_failure"] || []
      @tag_on_default_use = @options["tag_on_default_use"] || @globals["tag_on_default_use"] || []

      @valid = @option_errors.empty?
    end
  end
end end end
# data/lib/logstash/filters/jdbc/lookup_processor.rb
# encoding: utf-8
require_relative "lookup"
require_relative "read_write_database"

module LogStash module Filters module Jdbc
  # Owns the set of configured lookups plus the local read/write db
  # they run against, and fans events out to each lookup.
  class LookupProcessor
    attr_reader :lookups, :local

    CONNECTION_ERROR_MSG = "Connection error when initialising lookup (local) db"
    DISCONNECTION_ERROR_MSG = "Connection error when disconnecting from lookup (local) db"

    # Validates an array of lookup option hashes; returns nil when all
    # are valid, otherwise a "; "-joined error string (per-lookup errors
    # plus cross-lookup id/target uniqueness errors).
    def self.find_validation_errors(array_of_options)
      return "The options must be an Array" unless array_of_options.is_a?(Array)

      errors = []
      instance = new(array_of_options, {})
      instance.lookups.each do |lookup|
        errors << lookup.formatted_errors unless lookup.valid?
      end
      errors << instance.formatted_errors unless instance.valid?
      errors.empty? ? nil : errors.join("; ")
    end

    def initialize(lookups_array, globals)
      @lookups = lookups_array.map.with_index do |options, i|
        Lookup.new(options, globals, "lookup-#{i.next}")
      end
      @lookups_errors = validate_lookups
      # Only stand up the local db when validation passed and real
      # globals were supplied (find_validation_errors passes {}).
      if @lookups_errors.empty? && !globals.empty?
        @local = ReadWriteDatabase.create(*globals.values_at(
          "lookup_jdbc_connection_string",
          "lookup_jdbc_driver_class",
          "lookup_jdbc_driver_library").compact)
        @local.connect(CONNECTION_ERROR_MSG)

        create_prepared_statements_for_lookups
      end
    end

    # Runs every lookup against the event; returns an array of booleans
    # (one success flag per lookup).
    def enhance(event)
      @lookups.map { |lookup| lookup.enhance(@local, event) }
    end

    def close
      @local.disconnect(DISCONNECTION_ERROR_MSG)
      @local = nil
    end

    def formatted_errors
      @lookups_errors.join(", ")
    end

    def valid?
      @lookups_errors.empty?
    end

    private

    def create_prepared_statements_for_lookups()
      @lookups.each do |lookup|
        lookup.prepare(@local) if lookup.use_prepared_statement?
      end
    end

    # Cross-lookup checks: ids must be unique, and (explicit) target
    # fields must be unique. Appends messages to lookups_errors.
    def validate_lookups(lookups_errors = [])
      id_counts = Hash.new(0)
      @lookups.each { |lookup| id_counts[lookup.id] += 1 }
      id_errors = id_counts.select { |_, count| count > 1 }.map do |dup_id, _|
        "'#{dup_id}' is specified multiple times"
      end
      id_errors.unshift("Id setting must be different across all lookups") unless id_errors.empty?
      lookups_errors.concat(id_errors)

      targets = Hash.new { |h, k| h[k] = [] }
      @lookups.each do |lookup|
        # if id was used as target, skip target unique check because id uniqueness is checked already
        next if lookup.id_used_as_target?
        targets[lookup.target] << lookup.id
      end
      target_errors = targets.select { |_, ids| ids.size > 1 }.map do |_, dup_ids|
        "'#{dup_ids.join("', '")}' have the same target field setting"
      end
      target_errors.unshift("Target setting must be different across all lookups") unless target_errors.empty?
      lookups_errors.concat(target_errors)
    end
  end
end end end