logstash-integration-jdbc 5.0.0.alpha1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/CHANGELOG.md +8 -0
- data/CONTRIBUTORS +22 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +105 -0
- data/docs/filter-jdbc_static.asciidoc +606 -0
- data/docs/filter-jdbc_streaming.asciidoc +317 -0
- data/docs/index.asciidoc +32 -0
- data/docs/input-jdbc.asciidoc +573 -0
- data/lib/logstash/filters/jdbc/basic_database.rb +125 -0
- data/lib/logstash/filters/jdbc/column.rb +39 -0
- data/lib/logstash/filters/jdbc/db_object.rb +101 -0
- data/lib/logstash/filters/jdbc/loader.rb +119 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +64 -0
- data/lib/logstash/filters/jdbc/lookup.rb +253 -0
- data/lib/logstash/filters/jdbc/lookup_processor.rb +100 -0
- data/lib/logstash/filters/jdbc/lookup_result.rb +40 -0
- data/lib/logstash/filters/jdbc/read_only_database.rb +57 -0
- data/lib/logstash/filters/jdbc/read_write_database.rb +108 -0
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +13 -0
- data/lib/logstash/filters/jdbc/single_load_runner.rb +46 -0
- data/lib/logstash/filters/jdbc/validatable.rb +46 -0
- data/lib/logstash/filters/jdbc_static.rb +240 -0
- data/lib/logstash/filters/jdbc_streaming.rb +196 -0
- data/lib/logstash/inputs/jdbc.rb +341 -0
- data/lib/logstash/inputs/tzinfo_jruby_patch.rb +57 -0
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +43 -0
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +298 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +129 -0
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +140 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb +28 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb +64 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb +143 -0
- data/lib/logstash/plugin_mixins/jdbc_streaming.rb +100 -0
- data/lib/logstash/plugin_mixins/statement_handler.rb +0 -0
- data/lib/logstash-integration-jdbc_jars.rb +5 -0
- data/logstash-integration-jdbc.gemspec +44 -0
- data/spec/filters/env_helper.rb +10 -0
- data/spec/filters/integration/jdbc_static_spec.rb +154 -0
- data/spec/filters/integration/jdbcstreaming_spec.rb +173 -0
- data/spec/filters/jdbc/column_spec.rb +70 -0
- data/spec/filters/jdbc/db_object_spec.rb +81 -0
- data/spec/filters/jdbc/loader_spec.rb +77 -0
- data/spec/filters/jdbc/lookup_processor_spec.rb +132 -0
- data/spec/filters/jdbc/lookup_spec.rb +253 -0
- data/spec/filters/jdbc/read_only_database_spec.rb +67 -0
- data/spec/filters/jdbc/read_write_database_spec.rb +90 -0
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +24 -0
- data/spec/filters/jdbc/single_load_runner_spec.rb +16 -0
- data/spec/filters/jdbc_static_file_local_spec.rb +83 -0
- data/spec/filters/jdbc_static_spec.rb +162 -0
- data/spec/filters/jdbc_streaming_spec.rb +350 -0
- data/spec/filters/remote_server_helper.rb +24 -0
- data/spec/filters/shared_helpers.rb +34 -0
- data/spec/helpers/WHY-THIS-JAR.txt +4 -0
- data/spec/helpers/derbyrun.jar +0 -0
- data/spec/inputs/integration/integ_spec.rb +78 -0
- data/spec/inputs/jdbc_spec.rb +1431 -0
- data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0
- metadata +319 -0
@@ -0,0 +1,143 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/util/loggable"
|
3
|
+
|
4
|
+
module LogStash module PluginMixins module JdbcStreaming
|
5
|
+
# so as to not clash with the class of the same name and function in the jdbc input
|
6
|
+
# this is in the `module JdbcStreaming` namespace
|
7
|
+
# this duplication can be removed in a universal plugin
|
8
|
+
|
9
|
+
# Base class for the jdbc_streaming lookup strategies.
# Owns the shared pieces: the configured SQL statement, the result cache
# (RowCache or NoCache) and the per-event parameter resolution. Subclasses
# implement the actual query execution (normal vs. prepared statement).
#
# NOTE: `logger` used below is provided by the subclasses, which include
# LogStash::Util::Loggable; this base class is never instantiated directly.
class StatementHandler
  # Factory: selects the handler implementation from the plugin settings.
  # @param plugin [LogStash::Filters::JdbcStreaming] the configured filter
  # @return [NormalStatementHandler, PreparedStatementHandler]
  def self.build_statement_handler(plugin)
    klass = plugin.use_prepared_statements ? PreparedStatementHandler : NormalStatementHandler
    klass.new(plugin)
  end

  attr_reader :statement, :parameters, :cache

  def initialize(plugin)
    @statement = plugin.statement
    # RowCache memoizes lookups with a TTL; NoCache always yields (cache off).
    klass = plugin.use_cache ? RowCache : NoCache
    @cache = klass.new(plugin.cache_size, plugin.cache_expiration)
    post_init(plugin)
  end

  # Get from cache or performs remote lookup and saves to cache
  # @param db [Sequel::Database]
  # @param event [LogStash::Event]
  # @return [CachePayload]
  def cache_lookup(db, event)
    # override in subclass
  end

  private

  # Shared cache-or-query path used by both subclasses: resolves the event's
  # parameters, then either returns the cached payload for those parameters
  # or executes the query and caches the resulting payload.
  def common_cache_lookup(db, event)
    params = prepare_parameters_from_event(event)
    @cache.get(params) do
      result = CachePayload.new
      begin
        logger.debug? && logger.debug("Executing JDBC query", :statement => statement, :parameters => params)
        execute_extract_records(db, params, result)
      rescue ::Sequel::Error => e
        # all sequel errors are a subclass of this, let all other standard or runtime errors bubble up
        result.failed!
        logger.warn? && logger.warn("Exception when executing JDBC query", :statement => statement, :parameters => params, :exception => e)
      end
      # if either of: no records or a Sequel exception occurs the payload is
      # empty and the default can be substituted later.
      result
    end
  end

  # Runs the query and pushes each row into `result`.
  def execute_extract_records(db, params, result)
    # override in subclass
  end

  # Hook for subclass-specific setup run at the end of #initialize.
  def post_init(plugin)
    # override in subclass, if needed
  end

  # Builds the bind-parameter hash for one event, converting LogStash
  # timestamps to plain Time objects so the JDBC driver can handle them.
  def prepare_parameters_from_event(event)
    @parameters.inject({}) do |hash, (k, parameter_handler)|
      # defer to appropriate parameter handler
      value = parameter_handler.extract_from(event)
      hash[k] = value.is_a?(::LogStash::Timestamp) ? value.time : value
      hash
    end
  end
end
|
69
|
+
|
70
|
+
# Executes the configured statement as a plain (non-prepared) Sequel query,
# binding named parameters resolved from each event.
class NormalStatementHandler < StatementHandler
  include LogStash::Util::Loggable

  # Get from cache or performs remote lookup and saves to cache
  # @param db [Sequel::Database]
  # @param event [LogStash::Event]
  # @return [CachePayload]
  def cache_lookup(db, event)
    common_cache_lookup(db, event)
  end

  private

  # Runs the statement with the resolved named parameters and pushes each
  # row (keys stringified) into the CachePayload.
  def execute_extract_records(db, params, result)
    dataset = db[statement, params] # returns a Sequel dataset
    dataset.all do |row|
      result.push row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} # Stringify row keys
    end
  end

  # Normal mode uses the plugin's `parameters` hash (field => handler).
  def post_init(plugin)
    @parameters = plugin.parameters
  end
end
|
94
|
+
|
95
|
+
# Executes the configured statement as a Sequel prepared statement, created
# lazily on first lookup and registered on the Sequel database instance.
class PreparedStatementHandler < StatementHandler
  include LogStash::Util::Loggable
  attr_reader :name, :bind_values_array, :statement_prepared, :prepared

  # Get from cache or performs remote lookup and saves to cache
  # @param db [Sequel::Database]
  # @param event [LogStash::Event]
  # @return [CachePayload]
  def cache_lookup(db, event)
    build_prepared_statement(db)
    common_cache_lookup(db, event)
  end

  private

  # Calls the named prepared statement with the resolved bind values and
  # pushes each row (keys stringified) into the CachePayload.
  def execute_extract_records(db, params, result)
    records = db.call(name, params) # returns an array of hashes
    records.each do |row|
      result.push row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
    end
  end

  # Prepared mode derives its parameters (:p0, :p1, ...) from the plugin's
  # `prepared_statement_bind_values` rather than a named-parameters hash.
  def post_init(plugin)
    @name = plugin.prepared_statement_name.to_sym
    @bind_values_array = plugin.prepared_statement_bind_values
    @statement_prepared = Concurrent::AtomicBoolean.new(false)
    @parameters = create_bind_values_hash
  end

  # NOTE(review): the false?/make_true pair is a check-then-act, not an
  # atomic compare-and-set, so two threads could both prepare on first use;
  # presumably harmless because the same statement is prepared — confirm.
  def build_prepared_statement(db)
    # create prepared statement on first use
    if statement_prepared.false?
      prepended = parameters.keys.map{|v| v.to_s.prepend("$").to_sym}
      @prepared = db[statement, *prepended].prepare(:select, name)
      statement_prepared.make_true
    end
    # make sure the Sequel database instance has the prepared statement
    if db.prepared_statement(name).nil?
      db.set_prepared_statement(name, prepared)
    end
  end

  # Maps positional bind values to the :p0, :p1, ... placeholder names used
  # in the prepared statement.
  def create_bind_values_hash
    hash = {}
    bind_values_array.each_with_index {|v,i| hash[:"p#{i}"] = v}
    hash
  end
end
|
143
|
+
end end end
|
@@ -0,0 +1,100 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/config/mixin"
|
3
|
+
|
4
|
+
# Tentative of abstracting JDBC logic to a mixin
|
5
|
+
# for potential reuse in other plugins (input/output)
|
6
|
+
module LogStash module PluginMixins module JdbcStreaming
|
7
|
+
# TTL-bounded, thread-safe memoization of lookup results, keyed by the
# resolved query parameters. Backed by LruRedux::TTL::ThreadSafeCache.
class RowCache
  def initialize(max_entries, ttl_seconds)
    @store = ::LruRedux::TTL::ThreadSafeCache.new(max_entries, ttl_seconds)
  end

  # Returns the cached value for `parameters`; on a miss, evaluates the
  # given block, stores its result and returns it.
  def get(parameters)
    @store.getset(parameters) { yield }
  end
end
|
16
|
+
|
17
|
+
# Null-object stand-in for RowCache used when caching is disabled.
# It accepts the same constructor arguments (and ignores them), and every
# `get` is a miss: the caller's block is always evaluated.
class NoCache
  def initialize(_size, _ttl)
  end

  # Always executes the supplied block and returns its result.
  def get(_key)
    yield
  end
end
|
24
|
+
|
25
|
+
# This method is called when someone includes this module
|
26
|
+
# Ruby inclusion hook: fires when a plugin class does `include JdbcStreaming`.
# Extends the including class with this module so `setup_jdbc_config` is
# available as a class method, then immediately declares the shared JDBC
# config options on it.
def self.included(base)
  # Add these methods to the 'base' given.
  base.extend(self)
  base.setup_jdbc_config
end
|
31
|
+
|
32
|
+
public
|
33
|
+
# Declares the JDBC connection settings shared by the including plugin.
# Runs in the context of the plugin class (via the `included` hook above),
# using LogStash's `config` DSL from logstash/config/mixin.
def setup_jdbc_config
  # JDBC driver library path to third party driver library.
  config :jdbc_driver_library, :validate => :path

  # JDBC driver class to load, for example "oracle.jdbc.OracleDriver" or "org.apache.derby.jdbc.ClientDriver"
  config :jdbc_driver_class, :validate => :string, :required => true

  # JDBC connection string
  config :jdbc_connection_string, :validate => :string, :required => true

  # JDBC user
  config :jdbc_user, :validate => :string

  # JDBC password (wrapped in a LogStash::Util::Password by the :password validator)
  config :jdbc_password, :validate => :password

  # Connection pool configuration.
  # Validate connection before use.
  config :jdbc_validate_connection, :validate => :boolean, :default => false

  # Connection pool configuration.
  # How often to validate a connection (in seconds)
  config :jdbc_validation_timeout, :validate => :number, :default => 3600
end
|
57
|
+
|
58
|
+
private
|
59
|
+
|
60
|
+
# Requires each jar listed (comma-separated) in `jdbc_driver_library` so its
# classes become visible to JRuby. No-op when no library path is configured.
# Raises LogStash::PluginLoadingError when a jar cannot be loaded.
def load_driver_jars
  return if @jdbc_driver_library.nil? || @jdbc_driver_library.empty?

  @jdbc_driver_library.split(",").each do |driver_jar|
    begin
      @logger.debug("loading #{driver_jar}")
      # Use https://github.com/jruby/jruby/wiki/CallingJavaFromJRuby#from-jar-files to make classes from jar
      # available
      require driver_jar
    rescue LoadError => e
      raise LogStash::PluginLoadingError, "unable to load #{driver_jar} from :jdbc_driver_library, #{e.message}"
    end
  end
end
|
74
|
+
|
75
|
+
public
|
76
|
+
# Establishes the Sequel JDBC connection for the plugin: loads Sequel and the
# configured driver jars, builds the connection options, connects, optionally
# enables pool connection validation, then verifies the connection once.
# Sets @sequel_opts_symbols and @database as side effects.
def prepare_jdbc_connection
  require "sequel"
  require "sequel/adapters/jdbc"
  require "java"

  load_driver_jars

  # NOTE(review): @sequel_opts is presumably a string-keyed hash declared by
  # the including plugin's config — confirm; keys are symbolized for Sequel.
  @sequel_opts_symbols = @sequel_opts.inject({}) {|hash, (k,v)| hash[k.to_sym] = v; hash}
  @sequel_opts_symbols[:user] = @jdbc_user unless @jdbc_user.nil? || @jdbc_user.empty?
  # @jdbc_password is a Password object (see :validate => :password); unwrap it.
  @sequel_opts_symbols[:password] = @jdbc_password.value unless @jdbc_password.nil?

  Sequel::JDBC.load_driver(@jdbc_driver_class)
  @database = Sequel.connect(@jdbc_connection_string, @sequel_opts_symbols)
  if @jdbc_validate_connection
    # Validate pooled connections that have been idle longer than the timeout.
    @database.extension(:connection_validator)
    @database.pool.connection_validation_timeout = @jdbc_validation_timeout
  end
  begin
    # Fail fast at register time rather than on the first event.
    @database.test_connection
  rescue Sequel::DatabaseConnectionError => e
    #TODO return false and let the plugin raise a LogStash::ConfigurationError
    raise e
  end
end # def prepare_jdbc_connection
|
100
|
+
end end end
|
File without changes
|
@@ -0,0 +1,44 @@
|
|
1
|
+
# Packaging manifest for the logstash-integration-jdbc integration plugin,
# which bundles the jdbc input and the jdbc_static / jdbc_streaming filters.
Gem::Specification.new do |spec|
  spec.name          = "logstash-integration-jdbc"
  spec.version       = "5.0.0.alpha1"
  spec.licenses      = ["Apache License (2.0)"]
  spec.summary       = "Integration with JDBC - input and filter plugins"
  spec.description   = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  spec.authors       = ["Elastic"]
  spec.email         = "info@elastic.co"
  spec.homepage      = "http://www.elastic.co/guide/en/logstash/current/index.html"
  spec.require_paths = ["lib", "vendor/jar-dependencies"]

  # Files shipped in the gem, including the vendored Derby jars.
  spec.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  spec.test_files = spec.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin;
  # integration_plugins lists the stand-alone plugins this gem replaces.
  spec.metadata = {
    "logstash_plugin" => "true",
    "logstash_group" => "integration",
    "integration_plugins" => "logstash-input-jdbc,logstash-filter-jdbc_streaming,logstash-filter-jdbc_static"
  }

  # Gem dependencies
  spec.add_development_dependency "jar-dependencies", "~> 0.3"

  spec.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  # Restrict use of this plugin to versions of Logstash where support for integration plugins is present.
  spec.add_runtime_dependency "logstash-core", ">= 6.5.0"
  spec.add_runtime_dependency "logstash-codec-plain"
  spec.add_runtime_dependency "sequel"
  spec.add_runtime_dependency "lru_redux" # lru cache with ttl

  spec.add_runtime_dependency "tzinfo"
  spec.add_runtime_dependency "tzinfo-data"
  # 3.5 limitation is required for jdbc-static loading schedule
  spec.add_runtime_dependency "rufus-scheduler", "< 3.5"

  spec.add_development_dependency "childprocess"
  spec.add_development_dependency "logstash-devutils"
  spec.add_development_dependency "timecop"
  spec.add_development_dependency "jdbc-derby"
end
|
@@ -0,0 +1,10 @@
|
|
1
|
+
# encoding: utf-8

# use the rspec --require command line option to have this file evaluated before rspec runs
# it is used to prepare the test environment (paths, ENV and logging) before the specs load

# Root of the gem checkout, derived from this file's location (spec/filters/..).
GEM_BASE_DIR = ::File.expand_path("../../..", __FILE__)
# Directory holding the Derby test helpers (see spec/helpers/WHY-THIS-JAR.txt).
BASE_DERBY_DIR = ::File.join(GEM_BASE_DIR, "spec", "helpers")
# Point HOME at the gem dir so anything writing under ~ stays inside the checkout.
ENV["HOME"] = GEM_BASE_DIR
ENV["TEST_DEBUG"] = "true"
# JRuby-only: route Logstash logging to the console during tests.
java.lang.System.setProperty("ls.logs", "console")
|
@@ -0,0 +1,154 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/devutils/rspec/spec_helper"
|
3
|
+
require "logstash/filters/jdbc_static"
|
4
|
+
require "sequel"
|
5
|
+
require "sequel/adapters/jdbc"
|
6
|
+
require "stud/temporary"
|
7
|
+
require "timecop"
|
8
|
+
|
9
|
+
module LogStash module Filters
|
10
|
+
# Integration specs for the jdbc_static filter against a real PostgreSQL
# instance (connection sourced from PG_CONNECTION_STRING for CI/docker).
describe JdbcStatic, :integration => true do

  before(:all) do
    # Make thread failures (e.g. in the scheduled loader) crash the spec run.
    @thread_abort = Thread.abort_on_exception
    Thread.abort_on_exception = true
  end

  # FIX: restore the global flag so this suite does not leak its setting
  # into other spec files (the saved value was previously never used).
  after(:all) do
    Thread.abort_on_exception = @thread_abort
  end

  let(:loader_statement) { "SELECT ip, name, location FROM reference_table" }
  let(:lookup_statement) { "SELECT * FROM servers WHERE ip LIKE :ip" }
  let(:parameters_rhs) { "%%{[ip]}" }
  let(:temp_import_path_plugin) { Stud::Temporary.pathname }
  let(:temp_import_path_rspec) { Stud::Temporary.pathname }

  ENV["TZ"] = "Etc/UTC"

  # For Travis and CI based on docker, we source from ENV
  jdbc_connection_string = ENV.fetch("PG_CONNECTION_STRING",
                                     "jdbc:postgresql://postgresql:5432") + "/jdbc_static_db?user=postgres"

  # Local (in-memory Derby) table definition the loader fills from PostgreSQL.
  let(:local_db_objects) do
    [
      {"name" => "servers", "index_columns" => ["ip"], "columns" => [["ip", "varchar(64)"], ["name", "varchar(64)"], ["location", "varchar(64)"]]},
    ]
  end

  let(:settings) do
    {
      "jdbc_user" => ENV['USER'],
      "jdbc_driver_class" => "org.postgresql.Driver",
      "jdbc_driver_library" => "/usr/share/logstash/postgresql.jar",
      "staging_directory" => temp_import_path_plugin,
      "jdbc_connection_string" => jdbc_connection_string,
      "loaders" => [
        {
          "id" =>"servers",
          "query" => loader_statement,
          "local_table" => "servers"
        }
      ],
      "local_db_objects" => local_db_objects,
      "local_lookups" => [
        {
          "query" => lookup_statement,
          "parameters" => {"ip" => parameters_rhs},
          "target" => "server"
        }
      ]
    }
  end

  let(:plugin) { JdbcStatic.new(settings) }

  let(:event) { ::LogStash::Event.new("message" => "some text", "ip" => ipaddr) }

  # Partial address; the LIKE parameter prefixes it with "%".
  let(:ipaddr) { ".3.1.1" }

  describe "non scheduled operation" do
    after { plugin.close }

    context "under normal conditions" do
      it "enhances an event" do
        plugin.register
        plugin.filter(event)
        expect(event.get("server")).to eq([{"ip"=>"10.3.1.1", "name"=>"mv-server-1", "location"=>"MV-9-6-4"}])
      end
    end

    context "when the loader query returns no results" do
      let(:loader_statement) { "SELECT ip, name, location FROM reference_table WHERE ip LIKE '20%'" }
      it "add an empty array to the target field" do
        plugin.register
        plugin.filter(event)
        expect(event.get("server")).to eq([])
      end
    end

    context "under normal conditions with prepared statement" do
      let(:lookup_statement) { "SELECT * FROM servers WHERE ip LIKE ?" }
      let(:settings) do
        {
          "jdbc_user" => ENV['USER'],
          "jdbc_driver_class" => "org.postgresql.Driver",
          "jdbc_driver_library" => "/usr/share/logstash/postgresql.jar",
          "staging_directory" => temp_import_path_plugin,
          "jdbc_connection_string" => jdbc_connection_string,
          "loaders" => [
            {
              "id" =>"servers",
              "query" => loader_statement,
              "local_table" => "servers"
            }
          ],
          "local_db_objects" => local_db_objects,
          "local_lookups" => [
            {
              "query" => lookup_statement,
              "prepared_parameters" => [parameters_rhs],
              "target" => "server"
            }
          ]
        }
      end

      it "enhances an event" do
        plugin.register
        plugin.filter(event)
        expect(event.get("server")).to eq([{"ip"=>"10.3.1.1", "name"=>"mv-server-1", "location"=>"MV-9-6-4"}])
      end
    end

    context "under normal conditions when index_columns is not specified" do
      let(:local_db_objects) do
        [
          {"name" => "servers", "columns" => [["ip", "varchar(64)"], ["name", "varchar(64)"], ["location", "varchar(64)"]]},
        ]
      end
      it "enhances an event" do
        plugin.register
        plugin.filter(event)
        expect(event.get("server")).to eq([{"ip"=>"10.3.1.1", "name"=>"mv-server-1", "location"=>"MV-9-6-4"}])
      end
    end
  end

  describe "scheduled operation" do
    context "given a loader_schedule" do
      it "should properly schedule" do
        settings["loader_schedule"] = "*/10 * * * * * UTC"
        # Accelerate wall-clock time so several 10-second reloads fire
        # within the 3 real seconds of sleep below.
        Timecop.travel(Time.now.utc - 3600)
        Timecop.scale(60)
        static_filter = JdbcStatic.new(settings)
        # register in a background thread; Thread.abort_on_exception (set in
        # before(:all)) surfaces any loader failure.
        Thread.new(static_filter) do |filter|
          filter.register
        end
        sleep 3
        static_filter.filter(event)
        expect(static_filter.loader_runner.reload_count).to be > 1
        static_filter.close
        Timecop.return
        expect(event.get("server")).to eq([{"ip"=>"10.3.1.1", "name"=>"mv-server-1", "location"=>"MV-9-6-4"}])
      end
    end
  end
end
|
154
|
+
end end
|
@@ -0,0 +1,173 @@
|
|
1
|
+
require "logstash/devutils/rspec/spec_helper"
|
2
|
+
require "logstash/filters/jdbc_streaming"
|
3
|
+
require "sequel"
|
4
|
+
require "sequel/adapters/jdbc"
|
5
|
+
|
6
|
+
module LogStash module Filters
|
7
|
+
# Test-only subclass exposing the plugin's @database (Sequel connection)
# so specs can place message expectations on it.
class TestJdbcStreaming < JdbcStreaming
  attr_reader :database
end
|
10
|
+
|
11
|
+
# Integration specs for the jdbc_streaming filter against a real PostgreSQL
# instance (connection sourced from PG_CONNECTION_STRING for CI/docker).
# The caching contexts stub Sequel's Database#[] to count how many times the
# database is actually hit versus served from the row cache.
describe JdbcStreaming, :integration => true do
  ENV["TZ"] = "Etc/UTC"

  # For Travis and CI based on docker, we source from ENV
  jdbc_connection_string = ENV.fetch("PG_CONNECTION_STRING",
                                     "jdbc:postgresql://postgresql:5432") + "/jdbc_streaming_db?user=postgres"

  let(:mixin_settings) do
    { "jdbc_driver_class" => "org.postgresql.Driver",
      "jdbc_driver_library" => "/usr/share/logstash/postgresql.jar",
      "jdbc_connection_string" => jdbc_connection_string
    }
  end
  let(:plugin) { JdbcStreaming.new(mixin_settings.merge(settings)) }
  let(:db) do
    ::Sequel.connect(mixin_settings['jdbc_connection_string'])
  end
  let(:event) { ::LogStash::Event.new("message" => "some text", "ip" => ipaddr) }
  let(:cache_expiration) { 3.0 }
  let(:use_cache) { true }
  let(:cache_size) { 10 }
  let(:statement) { "SELECT name, location FROM reference_table WHERE ip = :ip" }
  let(:settings) do
    {
      "statement" => statement,
      "parameters" => {"ip" => "ip"},
      "target" => "server",
      "use_cache" => use_cache,
      "cache_expiration" => cache_expiration,
      "cache_size" => cache_size,
      "tag_on_failure" => ["lookup_failed"],
      "tag_on_default_use" => ["default_used_instead"],
      "default_hash" => {"name" => "unknown", "location" => "unknown"},
      "sequel_opts" => {"pool_timeout" => 600}
    }
  end
  # `idx` is supplied by each example group and selects the fixture row.
  let(:ipaddr) { "10.#{idx}.1.1" }

  before :each do
    plugin.register
  end

  describe "found record - uses row" do
    let(:idx) { 200 }

    it "fills in the target" do
      plugin.filter(event)
      expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
      expect((event.get("tags") || []) & ["lookup_failed", "default_used_instead"]).to be_empty
    end
  end

  describe "In Prepared Statement mode, found record - uses row" do
    let(:idx) { 200 }
    let(:statement) { "SELECT name, location FROM reference_table WHERE ip = ?" }
    let(:settings) do
      {
        "statement" => statement,
        "use_prepared_statements" => true,
        "prepared_statement_name" => "lookup_ip",
        "prepared_statement_bind_values" => ["[ip]"],
        "target" => "server",
        "use_cache" => use_cache,
        "cache_expiration" => cache_expiration,
        "cache_size" => cache_size,
        "tag_on_failure" => ["lookup_failed"],
        "tag_on_default_use" => ["default_used_instead"],
        "default_hash" => {"name" => "unknown", "location" => "unknown"},
        "sequel_opts" => {"pool_timeout" => 600}
      }
    end
    it "fills in the target" do
      plugin.filter(event)
      expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
      expect((event.get("tags") || []) & ["lookup_failed", "default_used_instead"]).to be_empty
    end
  end

  context "when fetching from cache" do
    # TestJdbcStreaming exposes #database so the before hook below can count
    # how many times Database#[] is invoked (i.e. real queries executed).
    let(:plugin) { TestJdbcStreaming.new(mixin_settings.merge(settings)) }
    let(:events) do
      5.times.map{|i| ::LogStash::Event.new("message" => "some other text #{i}", "ip" => ipaddr) }
    end
    let(:call_count) { 1 }
    before(:each) do
      expect(plugin.database).to receive(:[]).exactly(call_count).times.and_call_original
      plugin.filter(event)
    end

    describe "found record - caches row" do
      let(:idx) { "42" }
      it "calls the database once then uses the cache" do
        expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
        expect(event.get("tags") || []).not_to include("lookup_failed")
        expect(event.get("tags") || []).not_to include("default_used_instead")
        events.each do |evt|
          plugin.filter(evt)
          expect(evt.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
        end
      end
    end

    describe "missing record - uses default" do
      let(:idx) { "252" }
      it "calls the database once then uses the cache" do
        expect(event.get("server")).to eq([{"name" => "unknown", "location" => "unknown"}])
        expect(event.get("tags") & ["lookup_failed", "default_used_instead"]).to eq(["default_used_instead"])
        events.each do |evt|
          plugin.filter(evt)
          expect(evt.get("server")).to eq([{"name" => "unknown", "location" => "unknown"}])
        end
      end
    end

    context "extremely small cache expiration" do
      describe "found record - cache always expires" do
        let(:idx) { "10" }
        # 1 initial filter + 5 more events, each a cache miss => 6 queries.
        let(:call_count) { 6 }
        let(:cache_expiration) { 0.0000001 }
        it "calls the database each time because cache entry expired" do
          expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
          expect(event.get("tags") || []).not_to include("lookup_failed")
          expect(event.get("tags") || []).not_to include("default_used_instead")
          events.each do |evt|
            plugin.filter(evt)
            expect(evt.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
          end
        end
      end
    end

    context "when cache is disabled" do
      let(:call_count) { 6 }
      let(:use_cache) { false }
      describe "database is always called" do
        let(:idx) { "1" }
        it "calls the database each time" do
          expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
          expect(event.get("tags") || []).not_to include("lookup_failed")
          expect(event.get("tags") || []).not_to include("default_used_instead")
          events.each do |evt|
            plugin.filter(evt)
            expect(evt.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
          end
        end
      end

      describe "database is always called but record is missing and default is used" do
        let(:idx) { "251" }
        it "calls the database each time" do
          expect(event.get("server")).to eq([{"name" => "unknown", "location" => "unknown"}])
          expect(event.get("tags") & ["lookup_failed", "default_used_instead"]).to eq(["default_used_instead"])
          events.each do |evt|
            plugin.filter(evt)
            expect(evt.get("server")).to eq([{"name" => "unknown", "location" => "unknown"}])
          end
        end
      end
    end
  end
end
|
172
|
+
|
173
|
+
end end
|