logstash-integration-jdbc 5.0.0.alpha1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CHANGELOG.md +8 -0
- data/CONTRIBUTORS +22 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +105 -0
- data/docs/filter-jdbc_static.asciidoc +606 -0
- data/docs/filter-jdbc_streaming.asciidoc +317 -0
- data/docs/index.asciidoc +32 -0
- data/docs/input-jdbc.asciidoc +573 -0
- data/lib/logstash/filters/jdbc/basic_database.rb +125 -0
- data/lib/logstash/filters/jdbc/column.rb +39 -0
- data/lib/logstash/filters/jdbc/db_object.rb +101 -0
- data/lib/logstash/filters/jdbc/loader.rb +119 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +64 -0
- data/lib/logstash/filters/jdbc/lookup.rb +253 -0
- data/lib/logstash/filters/jdbc/lookup_processor.rb +100 -0
- data/lib/logstash/filters/jdbc/lookup_result.rb +40 -0
- data/lib/logstash/filters/jdbc/read_only_database.rb +57 -0
- data/lib/logstash/filters/jdbc/read_write_database.rb +108 -0
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +13 -0
- data/lib/logstash/filters/jdbc/single_load_runner.rb +46 -0
- data/lib/logstash/filters/jdbc/validatable.rb +46 -0
- data/lib/logstash/filters/jdbc_static.rb +240 -0
- data/lib/logstash/filters/jdbc_streaming.rb +196 -0
- data/lib/logstash/inputs/jdbc.rb +341 -0
- data/lib/logstash/inputs/tzinfo_jruby_patch.rb +57 -0
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +43 -0
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +298 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +129 -0
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +140 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb +28 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb +64 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb +143 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming.rb +100 -0
- data/lib/logstash/plugin_mixins/statement_handler.rb +0 -0
- data/lib/logstash-integration-jdbc_jars.rb +5 -0
- data/logstash-integration-jdbc.gemspec +44 -0
- data/spec/filters/env_helper.rb +10 -0
- data/spec/filters/integration/jdbc_static_spec.rb +154 -0
- data/spec/filters/integration/jdbcstreaming_spec.rb +173 -0
- data/spec/filters/jdbc/column_spec.rb +70 -0
- data/spec/filters/jdbc/db_object_spec.rb +81 -0
- data/spec/filters/jdbc/loader_spec.rb +77 -0
- data/spec/filters/jdbc/lookup_processor_spec.rb +132 -0
- data/spec/filters/jdbc/lookup_spec.rb +253 -0
- data/spec/filters/jdbc/read_only_database_spec.rb +67 -0
- data/spec/filters/jdbc/read_write_database_spec.rb +90 -0
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +24 -0
- data/spec/filters/jdbc/single_load_runner_spec.rb +16 -0
- data/spec/filters/jdbc_static_file_local_spec.rb +83 -0
- data/spec/filters/jdbc_static_spec.rb +162 -0
- data/spec/filters/jdbc_streaming_spec.rb +350 -0
- data/spec/filters/remote_server_helper.rb +24 -0
- data/spec/filters/shared_helpers.rb +34 -0
- data/spec/helpers/WHY-THIS-JAR.txt +4 -0
- data/spec/helpers/derbyrun.jar +0 -0
- data/spec/inputs/integration/integ_spec.rb +78 -0
- data/spec/inputs/jdbc_spec.rb +1431 -0
- data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0
- metadata +319 -0
# encoding: utf-8
module LogStash module Filters module Jdbc
  # Accumulates the outcome of one lookup execution: the fetched rows
  # (payload), any parameters that could not be resolved, and a failure
  # flag set when the query itself errored.
  class LookupResult
    attr_reader :payload, :invalid_parameters

    def initialize
      @failure = false
      @payload = []
      @invalid_parameters = []
    end

    # Append one fetched record to the payload.
    def push(data)
      @payload.push(data)
    end

    # Record a parameter that could not be resolved from the event.
    def invalid_parameters_push(data)
      @invalid_parameters.push(data)
    end

    # Mark this result as failed (the query raised or otherwise errored).
    def failed!
      @failure = true
    end

    # Valid only when nothing failed and every parameter resolved.
    def valid?
      !(failed? || parameters_invalid?)
    end

    def failed?
      @failure
    end

    def parameters_invalid?
      !@invalid_parameters.empty?
    end

    def empty?
      @payload.empty?
    end
  end
end end end
# encoding: utf-8
require_relative "basic_database"

module LogStash module Filters module Jdbc
  # Read-only access to the remote (loader) database. The connection
  # lifecycle is managed by the caller (see Loader); these methods only use
  # an existing connection when one is present.
  class ReadOnlyDatabase < BasicDatabase

    # Returns the row count for the given SQL statement, or 0 when there is
    # no usable connection. Raises LookupJdbcException on Sequel errors.
    def count(statement)
      result = 0
      debug_log_messages = ["Lookup query count is zero"]
      begin
        # its the responsibility of the caller to manage the connections see Loader
        if connected?
          result = @db[statement].count
        else
          # BUGFIX: was `debug_log_messages.concat("...")` — Array#concat
          # requires an Array argument, so passing a String raised TypeError
          # on this path. Append with << instead.
          debug_log_messages << "and there is no connection to the remote db at this time"
        end
      rescue ::Sequel::Error => err
        # a fatal issue
        msg = "Exception occurred when executing loader Jdbc query count"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LookupJdbcException, err, msg)
      end
      logger.debug(debug_log_messages.join(' ')) if result.zero?
      result
    end

    # Returns all rows for the given SQL statement, or an empty record set
    # when there is no usable connection. Raises LookupJdbcException on
    # Sequel errors.
    def query(statement)
      result = empty_record_set
      debug_log_messages = ["Lookup query results are empty"]
      begin
        # its the responsibility of the caller to manage the connections see Loader
        if connected?
          result = @db[statement].all
        else
          # BUGFIX: was `debug_log_messages.concat("...")` — Array#concat
          # requires an Array argument, so passing a String raised TypeError
          # on this path. Append with << instead.
          debug_log_messages << "and there is no connection to the remote db at this time"
        end
      rescue ::Sequel::Error => err
        # a fatal issue
        msg = "Exception occurred when executing loader Jdbc query"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LookupJdbcException, err, msg)
      end
      logger.debug(debug_log_messages.join(' ')) if result.empty?
      result
    end

    # A read-only database only needs to verify it can reach the remote end.
    def post_create(connection_string, driver_class, driver_library, user, password)
      verify_connection(connection_string, driver_class, driver_library, user, password)
    end

    private

    def post_initialize()
      super
    end
  end
end end end
# encoding: utf-8
require_relative "basic_database"

module LogStash module Filters module Jdbc
  # Read-write access to the local lookup database. Refreshes (table builds
  # and bulk loads) run under the write lock; lookups run under the read
  # lock, so queries wait while a refresh is in progress.
  class ReadWriteDatabase < BasicDatabase
    # Refill every local table from its corresponding loader. The explicit
    # 1- and 2-loader branches behave the same as the generic `each` branch;
    # presumably a micro-optimisation for the common configs — TODO confirm.
    def repopulate_all(loaders)
      case loaders.size
      when 1
        fill_local_table(loaders.first)
      when 2
        fill_local_table(loaders.first)
        fill_local_table(loaders.last)
      else
        loaders.each do |loader|
          fill_local_table(loader)
        end
      end
    end

    alias populate_all repopulate_all

    # Run a parameterised lookup under the read lock and return all rows.
    def fetch(statement, parameters)
      @rwlock.readLock().lock()
      # any exceptions should bubble up because we need to set failure tags etc.
      @db[statement, parameters].all
    ensure
      @rwlock.readLock().unlock()
    end

    # Build a Sequel prepared select (named after @id) under the read lock.
    def prepare(statement, parameters)
      @rwlock.readLock().lock()
      @db[statement, parameters].prepare(:select, @id)
    ensure
      @rwlock.readLock().unlock()
    end

    # Create a local table/index definition under the write lock.
    # Connection errors are logged but not raised (we retry next time);
    # other Sequel errors are fatal and re-raised as LoaderJdbcException.
    def build_db_object(db_object)
      begin
        @rwlock.writeLock().lock()
        db_object.build(@db)
        if db_object.index_columns.empty?
          logger.warn("local_db_object '#{db_object.name}': `index_columns` is optional but on larger datasets consider adding an index on the lookup column, it will improve performance")
        end
      rescue *CONNECTION_ERRORS => err
        # we do not raise an error when there is a connection error, we hope that the connection works next time
        logger.error("Connection error when initialising lookup db", :db_object => db_object.inspect, :exception => err.message, :backtrace => err.backtrace.take(8))
      rescue ::Sequel::Error => err
        msg = "Exception when initialising lookup db for db object: #{db_object}"
        logger.error(msg, :exception => err.message, :backtrace => err.backtrace.take(8))
        raise wrap_error(LoaderJdbcException, err, msg)
      ensure
        @rwlock.writeLock().unlock()
      end
    end

    # "____" is a placeholder in the configured connection string replaced
    # with a per-instance unique db name before verifying and connecting.
    def post_create(connection_string, driver_class, driver_library, user, password)
      mutated_connection_string = connection_string.sub("____", unique_db_name)
      verify_connection(mutated_connection_string, driver_class, driver_library, user, password)
      connect("Connection error when connecting to lookup db")
    end

    private

    # Fetch all records from the remote via `loader`, write them to a staging
    # file (one literal-quoted, comma-joined row per line), then bulk-import
    # that file into the local table via SYSCS_UTIL.SYSCS_IMPORT_TABLE.
    # Runs entirely under the write lock.
    def fill_local_table(loader)
      begin
        @rwlock.writeLock().lock()
        start = Time.now.to_f
        records = loader.fetch
        records_size = records.size
        return if records_size.zero?
        logger.info("loader #{loader.id}, fetched #{records_size} records in: #{(Time.now.to_f - start).round(3)} seconds")
        start = Time.now.to_f
        import_file = ::File.join(loader.staging_directory, loader.table.to_s)
        ::File.open(import_file, "w") do |fd|
          dataset = @db[loader.table]
          records.each do |hash|
            # dataset.literal quotes/escapes each value for the local SQL dialect
            array = hash.values.map {|val| dataset.literal(val) }
            fd.puts(array.join(", "))
          end
          fd.fsync
        end
        logger.info("loader #{loader.id}, saved fetched records to import file in: #{(Time.now.to_f - start).round(3)} seconds")
        start = Time.now.to_f
        # Derby bulk import; '''' is a single-quote character delimiter and the
        # final argument 1 selects REPLACE mode (per Derby's SYSCS_IMPORT_TABLE docs)
        import_cmd = "CALL SYSCS_UTIL.SYSCS_IMPORT_TABLE (null,'#{loader.table.upcase}','#{import_file}',null,'''',null,1)"
        @db.execute_ddl(import_cmd)
        FileUtils.rm_f(import_file)
        logger.info("loader #{loader.id}, imported all fetched records in: #{(Time.now.to_f - start).round(3)} seconds")
      rescue *CONNECTION_ERRORS => err
        # we do not raise an error when there is a connection error, we hope that the connection works next time
        logger.error("Connection error when filling lookup db from loader #{loader.id}, query results", :exception => err.message, :backtrace => err.backtrace.take(8))
      rescue => err
        # In theory all exceptions in Sequel should be wrapped in Sequel::Error
        # There are cases where exceptions occur in unprotected ensure sections
        msg = "Exception when filling lookup db from loader #{loader.id}, query results, original exception: #{err.class}, original message: #{err.message}"
        logger.error(msg, :backtrace => err.backtrace.take(16))
        raise wrap_error(LoaderJdbcException, err, msg)
      ensure
        @rwlock.writeLock().unlock()
      end
    end

    def post_initialize()
      super
      # get a fair reentrant read write lock
      @rwlock = java.util.concurrent.locks.ReentrantReadWriteLock.new(true)
    end
  end
end end end
# encoding: utf-8
require_relative "single_load_runner"

module LogStash module Filters module Jdbc
  # Scheduled variant of SingleLoadRunner: each scheduled invocation
  # repopulates the local tables instead of being a no-op.
  # Inherited readers: local, loaders, preloaders.
  class RepeatingLoadRunner < SingleLoadRunner
    # Refresh every local table from its loaders, then bump the (inherited)
    # reload counter so reload_count reflects this pass.
    def repeated_load
      local.repopulate_all(loaders)
      @reload_counter.increment
    end
  end
end end end
# encoding: utf-8
require_relative 'db_object'

module LogStash module Filters module Jdbc
  # Runs the initial population of the local lookup db exactly once.
  # `call` (invoked by a scheduler) is a no-op here; the RepeatingLoadRunner
  # subclass overrides repeated_load to refresh on every scheduled tick.
  class SingleLoadRunner

    attr_reader :local, :loaders, :preloaders

    # local      - the local read/write database
    # loaders    - loader instances that fetch remote records
    # preloaders - option hashes turned into sorted DbObject definitions
    def initialize(local, loaders, preloaders)
      @local = local
      @loaders = loaders
      @reload_counter = Concurrent::AtomicFixnum.new(0)
      @preloaders = preloaders.map { |pre| DbObject.new(pre) }.sort
    end

    # Build the local db objects, fill the tables once, count the load.
    def initial_load
      do_preload
      local.populate_all(loaders)
      @reload_counter.increment
    end

    # Intentionally empty; subclasses override to reload on schedule.
    def repeated_load
    end

    def call
      repeated_load
    end

    def reload_count
      @reload_counter.value
    end

    private

    # Ask the local database to create each preloaded db object in order.
    def do_preload
      preloaders.each { |db_object| local.build_db_object(db_object) }
    end
  end

end end end
# encoding: utf-8
module LogStash module Filters module Jdbc
  # Abstract base for self-validating option objects (Loader, DbObject,
  # LookupProcessor, ...). Subclasses implement parse_options, setting
  # @valid and appending messages to @option_errors.
  class Validatable
    # Validate one options hash or an array of them by instantiating the
    # subclass for each. Returns nil when everything is valid, otherwise a
    # "; "-joined string of each invalid instance's formatted errors.
    def self.find_validation_errors(options)
      array_of_options = Array(options)
      errors = []
      # FIX: block parameter renamed from `options`, which shadowed the
      # method argument of the same name.
      array_of_options.each do |option_set|
        instance = new(option_set)
        unless instance.valid?
          errors << instance.formatted_errors
        end
      end
      return nil if errors.empty?
      errors.join("; ")
    end

    def initialize(options)
      pre_initialize(options)
      @options = options
      @valid = false
      @option_errors = []
      parse_options
      post_initialize
    end

    def valid?
      @valid
    end

    def formatted_errors
      @option_errors.join(", ")
    end

    private

    # Hook for subclasses; runs before any state is initialized.
    def pre_initialize(options)
    end

    # Hook for subclasses; runs after options have been parsed.
    def post_initialize
    end

    # Subclasses must set @valid and fill @option_errors here.
    def parse_options
      raise "Subclass must implement 'parse_options'"
    end
  end
end end end
# encoding: utf-8
require "logstash-integration-jdbc_jars"
require "logstash/filters/base"
require "logstash/namespace"
require_relative "jdbc/loader"
require_relative "jdbc/loader_schedule"
require_relative "jdbc/repeating_load_runner"
require_relative "jdbc/lookup_processor"

# This filter can do multiple enhancements to an event in one pass.
# Define multiple loader sources and multiple lookup targets.
# Currently only one remote database connection is supported.
# [source,ruby]
#
module LogStash module Filters class JdbcStatic < LogStash::Filters::Base
  config_name "jdbc_static"

  # Define the loaders, an Array of Hashes, to fetch remote data and create local tables.
  # the fetched data will be inserted into the local tables. Make sure that the
  # local table name, columns and datatypes correspond to the shape of the remote data
  # being fetched. The default for max_rows is 1 million rows. You may provide an `id`
  # For example:
  # loaders => [
  #   {
  #     id => "country_details"
  #     query => "select code, name from WORLD.COUNTRY"
  #     max_rows => 2000
  #     local_table => "country"
  #   },
  #   {
  #     id => "servers_load"
  #     query => "select id, ip, name, location from INTERNAL.SERVERS"
  #     local_table => "servers"
  #   }
  # ]
  # This is optional. You can provide a pre-populated local database server then no initial loaders are needed.
  config :loaders, :required => false, :default => [], :validate => [LogStash::Filters::Jdbc::Loader]

  # Define an array of Database Objects to create when the plugin first starts.
  # These will usually be the definitions to setup the local in-memory tables.
  # For example:
  # local_db_objects => [
  #   {name => "servers", preserve_existing => true, index_columns => ["ip"], columns => [["id", "INTEGER"], ["ip", "varchar(64)"], ["name", "varchar(64)"], ["location", "varchar(64)"]]},
  # ]
  # NOTE: Important! use `preserve_existing => true` to keep a table created and filled in a previous Logstash session. It will default to false and is unneeded if the database is not persistent.
  # NOTE: Important! Tables created here must have the same names as those used in the `loaders` and
  # `local_lookups` configuration options
  config :local_db_objects, :required => false, :default => [], :validate => [LogStash::Filters::Jdbc::DbObject]

  # Define the list (Array) of enhancement local_lookups to be applied to an event
  # Each entry is a hash of the query string, the target field and value and a
  # parameter hash. Target is overwritten if existing. Target is optional,
  # if omitted the lookup results will be written to the root of the event like this:
  # event.set(<column name (or alias)>, <column value>)
  # Use parameters to have this plugin put values from the event into the query.
  # The parameter maps the symbol used in the query string to the field name in the event.
  # NOTE: when using a query string that includes the LIKE keyword make sure that
  # you provide a Logstash Event sprintf pattern with added wildcards.
  # For example:
  # local_lookups => [
  #   {
  #     "query" => "SELECT * FROM country WHERE code = :code",
  #     "parameters" => {"code" => "country_code"}
  #     "target" => "country_details"
  #   },
  #   {
  #     "query" => "SELECT ip, name FROM servers WHERE ip LIKE :ip",
  #     "parameters" => {"ip" => "%{[response][ip]}%"}
  #     "target" => "servers"
  #   },
  #   {
  #     "query" => "SELECT ip, name FROM servers WHERE ip = ?",
  #     "prepared_parameters" => ["from_ip"]
  #     "target" => "servers"
  #   }
  # ]
  config :local_lookups, :required => true, :validate => [LogStash::Filters::Jdbc::LookupProcessor]

  # Schedule of when to periodically run loaders, in Cron format
  # for example: "* * * * *" (execute query every minute, on the minute)
  #
  # There is no schedule by default. If no schedule is given, then the loaders are run
  # exactly once.
  config :loader_schedule, :required => false, :validate => LogStash::Filters::Jdbc::LoaderSchedule

  # Append values to the `tags` field if sql error occured
  # Alternatively, individual `tag_on_failure` arrays can be added to each lookup hash
  config :tag_on_failure, :validate => :array, :default => ["_jdbcstaticfailure"]

  # Append values to the `tags` field if no record was found and default values were used
  config :tag_on_default_use, :validate => :array, :default => ["_jdbcstaticdefaultsused"]

  # Remote Load DB Jdbc driver library path to third party driver library.
  # Use comma separated paths in one string if you need more than one library.
  config :jdbc_driver_library, :validate => :string

  # Remote Load DB Jdbc driver class to load, for example "oracle.jdbc.OracleDriver" or "org.apache.derby.jdbc.ClientDriver"
  config :jdbc_driver_class, :validate => :string, :required => true

  # Remote Load DB Jdbc connection string
  config :jdbc_connection_string, :validate => :string, :required => true

  # Remote Load DB Jdbc user
  config :jdbc_user, :validate => :string

  # Remote Load DB Jdbc password
  config :jdbc_password, :validate => :password

  # directory for temp files created during bulk loader import.
  config :staging_directory, :validate => :string, :default => ::File.join(Dir.tmpdir, "logstash", config_name, "import_data")

  # NOTE: For the initial release, we are not allowing the user to specify their own local lookup JDBC DB settings.
  # In the near future we have to consider identical config running in multiple pipelines stomping over each other
  # when the database names are common across configs because there is only one Derby server in memory per JVM.

  # Local Lookup DB Jdbc driver class to load, for example "org.apache.derby.jdbc.ClientDriver"
  # config :lookup_jdbc_driver_class, :validate => :string, :required => false

  # Local Lookup DB Jdbc driver library path to third party driver library.
  # config :lookup_jdbc_driver_library, :validate => :path, :required => false

  # Local Lookup DB Jdbc connection string
  # config :lookup_jdbc_connection_string, :validate => :string, :required => false

  class << self
    alias_method :old_validate_value, :validate_value

    # Extends the stock option validator: when the validator (or its first
    # element, for array-wrapped validators) responds to
    # :find_validation_errors, delegate to it so the Validatable subclasses
    # (Loader, DbObject, LookupProcessor, LoaderSchedule) validate their own
    # option hashes; otherwise fall back to the original validator.
    def validate_value(value, validator)
      if validator.is_a?(Array) && validator.first.respond_to?(:find_validation_errors)
        validation_errors = validator.first.find_validation_errors(value)
        unless validation_errors.nil?
          return false, validation_errors
        end
      elsif validator.respond_to?(:find_validation_errors)
        validation_errors = validator.find_validation_errors(value)
        unless validation_errors.nil?
          return false, validation_errors
        end
      else
        return old_validate_value(value, validator)
      end
      [true, value]
    end
  end

  public

  def register
    prepare_data_dir
    prepare_runner
  end

  # Apply every configured lookup to the event; the event only counts as
  # matched when all lookups succeeded.
  def filter(event)
    enhancement_states = @processor.enhance(event)
    filter_matched(event) if enhancement_states.all?
  end

  # Stop the scheduler (if any) and release remote and local db resources.
  def close
    @scheduler.stop if @scheduler
    @parsed_loaders.each(&:close)
    @processor.close
  end

  def loader_runner
    # use for test verification
    @loader_runner
  end

  private

  def prepare_data_dir
    # later, when local persistent databases are allowed set this property to LS_HOME/data/jdbc-static/
    # must take multi-pipelines into account and more than one config using the same jdbc-static settings
    java.lang.System.setProperty("derby.system.home", ENV["HOME"])
    logger.info("derby.system.home is: #{java.lang.System.getProperty("derby.system.home")}")
  end

  # Build the loaders and the lookup processor, run the initial load, and
  # (when a loader_schedule is set) start a Rufus scheduler that invokes the
  # runner on the configured cron line.
  def prepare_runner
    @parsed_loaders = @loaders.map do |options|
      add_plugin_configs(options)
      loader = Jdbc::Loader.new(options)
      loader.build_remote_db
      loader
    end
    runner_args = [@parsed_loaders, @local_db_objects]
    @processor = Jdbc::LookupProcessor.new(@local_lookups, global_lookup_options)
    runner_args.unshift(@processor.local)
    if @loader_schedule
      # FIX: removed a dead local (`args = []`) that was assigned here but never read.
      @loader_runner = Jdbc::RepeatingLoadRunner.new(*runner_args)
      @loader_runner.initial_load
      cronline = Jdbc::LoaderSchedule.new(@loader_schedule)
      cronline.to_log_string.tap do |msg|
        logger.info("Scheduler operations: #{msg}") unless msg.empty?
      end
      logger.info("Scheduler scan for work frequency is: #{cronline.schedule_frequency}")
      rufus_args = {:max_work_threads => 1, :frequency => cronline.schedule_frequency}
      @scheduler = Rufus::Scheduler.new(rufus_args)
      @scheduler.cron(cronline.loader_schedule, @loader_runner)
    else
      @loader_runner = Jdbc::SingleLoadRunner.new(*runner_args)
      @loader_runner.initial_load
    end
  end

  # Merge plugin-level tag settings into the per-lookup options unless a
  # lookup already sets its own.
  def global_lookup_options(options = Hash.new)
    if @tag_on_failure && !@tag_on_failure.empty? && !options.key?("tag_on_failure")
      options["tag_on_failure"] = @tag_on_failure
    end
    if @tag_on_default_use && !@tag_on_default_use.empty? && !options.key?("tag_on_default_use")
      options["tag_on_default_use"] = @tag_on_default_use
    end
    # NOTE(review): the lookup_jdbc_* config options are commented out above,
    # so these ivars are nil here; the keys are still passed through so the
    # LookupProcessor can fall back to its defaults — confirm before enabling.
    options["lookup_jdbc_driver_class"] = @lookup_jdbc_driver_class
    options["lookup_jdbc_driver_library"] = @lookup_jdbc_driver_library
    options["lookup_jdbc_connection_string"] = @lookup_jdbc_connection_string
    options
  end

  # Copy the plugin-level remote-db settings into a loader's options hash.
  def add_plugin_configs(options)
    if @jdbc_driver_library
      options["jdbc_driver_library"] = @jdbc_driver_library
    end
    if @jdbc_driver_class
      options["jdbc_driver_class"] = @jdbc_driver_class
    end
    if @jdbc_connection_string
      options["jdbc_connection_string"] = @jdbc_connection_string
    end
    if @jdbc_user
      options["jdbc_user"] = @jdbc_user
    end
    if @jdbc_password
      options["jdbc_password"] = @jdbc_password
    end
    if @staging_directory
      options["staging_directory"] = @staging_directory
    end
  end
end end end