logstash-integration-jdbc 5.0.6 → 5.1.3
- checksums.yaml +4 -4
- data/CHANGELOG.md +26 -1
- data/README.md +1 -1
- data/docs/filter-jdbc_static.asciidoc +2 -2
- data/docs/input-jdbc.asciidoc +12 -0
- data/lib/logstash/filters/jdbc/lookup.rb +28 -35
- data/lib/logstash/filters/jdbc_static.rb +5 -0
- data/lib/logstash/inputs/jdbc.rb +23 -1
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +19 -4
- data/lib/logstash/plugin_mixins/jdbc_streaming/parameter_handler.rb +1 -1
- data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb +5 -2
- data/logstash-integration-jdbc.gemspec +3 -1
- data/spec/filters/integration/jdbc_static_spec.rb +1 -1
- data/spec/filters/jdbc/lookup_spec.rb +66 -0
- data/spec/inputs/integration/integ_spec.rb +18 -2
- data/spec/inputs/jdbc_spec.rb +42 -0
- data/spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb +23 -0
- metadata +32 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 629c09f028a534cb970327b8d98eb85d8ae0371f4e9c7ccc8c5c9194d2942293
+  data.tar.gz: 9fddcb34b38b9be0bdc80cfc9c9385dacbcb821530be8c78c21024414b70f0ba
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 06f23b86b0f5f15bb5c0b93cca5068199df24e9867e891d72d930dd87c7f9faf891a2d0937f55d92e74a3769b432f87c5559e07cc028bbc64652631d927f4ebb
+  data.tar.gz: 969df6533a1c1afcd01ff89283195cde0865ee72bae75ed77e9ef7abfb129af9eb62b485d5a0d293189d5348df71b546dd911146d9e46ea0b481369b938959ca
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,28 @@
+## 5.1.3
+  - Improve robustness when handling errors from `sequel` library in jdbc static and streaming
+    filters [#78](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/78)
+
+## 5.1.2
+  - Fix `prepared_statement_bind_values` in streaming filter to resolve nested event's fields [#76](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/76)
+
+## 5.1.1
+  - [DOC] Changed docs to indicate that logstash-jdbc-static requires local_table [#56](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/56). Fixes [#55](https://github.com/logstash-plugins/logstash-integration-jdbc/issues/55).
+
+## 5.1.0
+  - Added `target` option to JDBC input, allowing the row columns to target a specific field instead of being expanded
+    at the root of the event. This allows the input to play nicer with the Elastic Common Schema when
+    the input does not follow the schema. [#69](https://github.com/logstash-plugins/logstash-integration-jdbc/issues/69)
+
+  - Added `target` to JDBC filter static `local_lookups` to verify it's properly valued when ECS is enabled.
+    [#71](https://github.com/logstash-plugins/logstash-integration-jdbc/issues/71)
+
+## 5.0.7
+  - Feat: try hard to log Java cause (chain) [#62](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/62)
+
+    This allows seeing a full trace from the JDBC driver in case of connection errors.
+
+  - Refactored Lookup used in jdbc_streaming and jdbc_static to avoid code duplication. [#59](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/59)
+
 ## 5.0.6
   - DOC:Replaced plugin_header file with plugin_header-integration file. [#40](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/40)
 
@@ -25,4 +50,4 @@
   - [JBDC Input version 4.3.19](https://github.com/logstash-plugins/logstash-input-jdbc/blob/v4.3.19/CHANGELOG.md)
   - [JDBC Static filter version 1.1.0](https://github.com/logstash-plugins/logstash-filter-jdbc_static/blob/v1.1.0/CHANGELOG.md)
   - [JDBC Streaming filter version 1.0.10](https://github.com/logstash-plugins/logstash-filter-jdbc_streaming/blob/v1.0.10/CHANGELOG.md)
-
+
data/README.md
CHANGED
@@ -2,7 +2,7 @@
 Logstash Integration Plugin for JDBC, including Logstash Input and Filter Plugins
 # Logstash Plugin
 
-[![Travis Build Status](https://travis-ci.
+[![Travis Build Status](https://travis-ci.com/logstash-plugins/logstash-integration-jdbc.svg)](https://travis-ci.com/logstash-plugins/logstash-integration-jdbc)
 
 This is a plugin for [Logstash](https://github.com/elastic/logstash).
 
data/docs/filter-jdbc_static.asciidoc
CHANGED
@@ -422,7 +422,7 @@ according to the table below.
 |=======================================================================
 |Setting |Input type|Required
 | id|string|No
-|
+| local_table|string|Yes
 | query|string|Yes
 | max_rows|number|No
 | jdbc_connection_string|string|No
@@ -438,7 +438,7 @@ id::
 An optional identifier. This is used to identify the loader that is
 generating error messages and log lines.
 
-
+local_table::
 The destination table in the local lookup database that the loader will fill.
 
 query::
data/docs/input-jdbc.asciidoc
CHANGED
@@ -211,6 +211,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
 | <<plugins-{type}s-{plugin}-sql_log_level>> |<<string,string>>, one of `["fatal", "error", "warn", "info", "debug"]`|No
 | <<plugins-{type}s-{plugin}-statement>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-statement_filepath>> |a valid filesystem path|No
+| <<plugins-{type}s-{plugin}-target>> | {logstash-ref}/field-references-deepdive.html[field reference] | No
 | <<plugins-{type}s-{plugin}-tracking_column>> |<<string,string>>|No
 | <<plugins-{type}s-{plugin}-tracking_column_type>> |<<string,string>>, one of `["numeric", "timestamp"]`|No
 | <<plugins-{type}s-{plugin}-use_column_value>> |<<boolean,boolean>>|No
@@ -535,6 +536,17 @@ with the `parameters` setting.
 
 Path of file containing statement to execute
 
+[id="plugins-{type}s-{plugin}-target"]
+===== `target`
+
+* Value type is {logstash-ref}/field-references-deepdive.html[field reference]
+* There is no default value for this setting.
+
+Without a `target`, events are created from each row column at the root level.
+When the `target` is set to a field reference, the column of each row is placed in the target field instead.
+
+This option can be useful to avoid populating unknown fields when a downstream schema such as ECS is enforced.
+
 [id="plugins-{type}s-{plugin}-tracking_column"]
 ===== `tracking_column`
 
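To picture the difference the new `target` option makes, here is a rough, plain-Ruby sketch of the resulting event shape (no Logstash runtime involved; the `sql` field name is only an illustration, not something defined by the plugin):

```ruby
# Rough sketch: plain Ruby hashes standing in for LogStash::Event, showing where the
# row columns end up with and without `target`. The "sql" target name is hypothetical.
row = { "num" => 1, "string" => "Test target option" }

without_target = row.dup
# => {"num"=>1, "string"=>"Test target option"}            columns land at the event root

with_target = { "sql" => row.dup }                         # e.g. target => "[sql]"
# => {"sql"=>{"num"=>1, "string"=>"Test target option"}}   columns nested under the target field

p without_target
p with_target
```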
data/lib/logstash/filters/jdbc/lookup.rb
CHANGED
@@ -56,6 +56,12 @@ module LogStash module Filters module Jdbc
       @target = options["target"]
       @id_used_as_target = @target.nil?
       if @id_used_as_target
+        # target shouldn't be nil if ecs_compatibility is not :disabled
+        if globals[:ecs_compatibility] != :disabled
+          logger.info('ECS compatibility is enabled but no ``target`` option was specified, it is recommended'\
+                      ' to set the option to avoid potential schema conflicts (if your data is ECS compliant or'\
+                      ' non-conflicting feel free to ignore this message)')
+        end
         @target = @id
       end
       @options = options
@@ -66,6 +72,7 @@ module LogStash module Filters module Jdbc
       @prepared_statement = nil
       @symbol_parameters = nil
       parse_options
+      @load_method_ref = method(:load_data_from_local)
     end
 
     def id_used_as_target?
@@ -81,11 +88,7 @@ module LogStash module Filters module Jdbc
     end
 
     def enhance(local, event)
-
-        result = call_prepared(local, event)
-      else
-        result = fetch(local, event) # should return a LookupResult
-      end
+      result = retrieve_local_data(local, event, &@load_method_ref) # return a LookupResult
       if result.failed? || result.parameters_invalid?
         tag_failure(event)
       end
@@ -112,6 +115,7 @@ module LogStash module Filters module Jdbc
       @prepared_parameters.each_with_index { |v, i| hash[:"$p#{i}"] = v }
       @prepared_param_placeholder_map = hash
       @prepared_statement = local.prepare(query, hash.keys)
+      @load_method_ref = method(:load_data_from_prepared)
     end
 
     private
@@ -128,34 +132,23 @@ module LogStash module Filters module Jdbc
       end
     end
 
-    def
-
-
-
-      if result.parameters_invalid?
-        logger.warn? && logger.warn("Parameter field not found in event", :lookup_id => @id, :invalid_parameters => result.invalid_parameters)
-        return result
-      end
-      else
-        params = {}
+    def load_data_from_local(local, query, params, result)
+      local.fetch(query, params).each do |row|
+        stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
+        result.push(stringified)
       end
-
-
-
-
-
-
-    rescue ::Sequel::Error => e
-      # all sequel errors are a subclass of this, let all other standard or runtime errors bubble up
-      result.failed!
-      logger.warn? && logger.warn("Exception when executing Jdbc query", :lookup_id => @id, :exception => e.message, :backtrace => e.backtrace.take(8))
+    end
+
+    def load_data_from_prepared(_local, _query, params, result)
+      @prepared_statement.call(params).each do |row|
+        stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
+        result.push(stringified)
       end
-      # if either of: no records or a Sequel exception occurs the payload is
-      # empty and the default can be substituted later.
-      result
     end
 
-
+    # the &block is invoked with 4 arguments: local, query[String], params[Hash], result[LookupResult]
+    # the result is used as accumulator return variable
+    def retrieve_local_data(local, event, &proc)
       result = LookupResult.new()
       if @parameters_specified
         params = prepare_parameters_from_event(event, result)
@@ -168,12 +161,12 @@ module LogStash module Filters module Jdbc
       end
       begin
         logger.debug? && logger.debug("Executing Jdbc query", :lookup_id => @id, :statement => query, :parameters => params)
-
-
-
-
-
-        #
+        proc.call(local, query, params, result)
+      rescue => e
+        # In theory all exceptions in Sequel should be wrapped in Sequel::Error
+        # However, there are cases where other errors can occur - a `SQLTransactionRollbackException`
+        # may be thrown during `prepareStatement`. Let's handle these cases here, where we can tag and warn
+        # appropriately rather than bubble up and potentially crash the plugin.
        result.failed!
        logger.warn? && logger.warn("Exception when executing Jdbc query", :lookup_id => @id, :exception => e.message, :backtrace => e.backtrace.take(8))
      end
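The refactor above replaces a per-call `if`/`else` between `fetch` and `call_prepared` with a `Method` object chosen once and handed to `retrieve_local_data` as a block. A minimal, standalone Ruby sketch of that dispatch pattern (the loader bodies are stand-ins, not the Sequel-backed originals):

```ruby
# Minimal sketch of the method-reference dispatch used by Lookup: the constructor binds a
# default loader, prepare() rebinds it once, and the retrieval path keeps a single call site
# wrapped by one rescue. The loaders below only fake results; they are not Sequel code.
class TinyLookup
  def initialize
    @load_method_ref = method(:load_data_from_local)        # default strategy
  end

  def prepare
    @load_method_ref = method(:load_data_from_prepared)     # switch strategy up front
  end

  def retrieve(query, params)
    result = []                                             # accumulator, like LookupResult
    @load_method_ref.call(query, params, result)
    result
  rescue => e
    warn "lookup failed: #{e.class}: #{e.message}"          # tag-and-warn instead of crashing
    result
  end

  private

  def load_data_from_local(query, params, result)
    result << { "loader" => "plain fetch", "query" => query, "params" => params }
  end

  def load_data_from_prepared(_query, params, result)
    result << { "loader" => "prepared statement", "params" => params }
  end
end

p TinyLookup.new.retrieve("SELECT 1", {})                   # uses load_data_from_local
p TinyLookup.new.tap(&:prepare).retrieve("SELECT 1", {})    # uses load_data_from_prepared
```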
data/lib/logstash/filters/jdbc_static.rb
CHANGED
@@ -2,6 +2,7 @@
 require "logstash-integration-jdbc_jars"
 require "logstash/filters/base"
 require "logstash/namespace"
+require "logstash/plugin_mixins/ecs_compatibility_support"
 require_relative "jdbc/loader"
 require_relative "jdbc/loader_schedule"
 require_relative "jdbc/repeating_load_runner"
@@ -14,6 +15,9 @@ require_relative "jdbc/lookup_processor"
 
 #
 module LogStash module Filters class JdbcStatic < LogStash::Filters::Base
+  # adds ecs_compatibility config which could be :disabled or :v1
+  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
+
   config_name "jdbc_static"
 
   # Define the loaders, an Array of Hashes, to fetch remote data and create local tables.
@@ -214,6 +218,7 @@ module LogStash module Filters class JdbcStatic < LogStash::Filters::Base
     options["lookup_jdbc_driver_class"] = @lookup_jdbc_driver_class
     options["lookup_jdbc_driver_library"] = @lookup_jdbc_driver_library
     options["lookup_jdbc_connection_string"] = @lookup_jdbc_connection_string
+    options["ecs_compatibility"] = ecs_compatibility
     options
   end
 
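The arguments passed to the mixin above, `(:disabled, :v1, :v8 => :v1)`, declare which ECS modes the plugin accepts and route v8 to the v1 behavior. A hedged, pure-Ruby stand-in for that alias resolution (an assumption about the mixin's intent, not its implementation):

```ruby
# Stand-in for the ecs_compatibility alias resolution declared above: :disabled and :v1 are
# handled directly, while :v8 falls back to the :v1 code path. This only mirrors the intent
# of ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1); it is not the mixin's internals.
ECS_MODES = { :disabled => :disabled, :v1 => :v1, :v8 => :v1 }.freeze

def resolve_ecs_compatibility(setting)
  ECS_MODES.fetch(setting.to_sym) { raise ArgumentError, "unsupported ecs_compatibility: #{setting}" }
end

p resolve_ecs_compatibility("v8")       # => :v1  (v8 pipelines reuse the v1 behavior)
p resolve_ecs_compatibility("disabled") # => :disabled
```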
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -3,6 +3,8 @@ require "logstash/inputs/base"
 require "logstash/namespace"
 require "logstash/plugin_mixins/jdbc/common"
 require "logstash/plugin_mixins/jdbc/jdbc"
+require "logstash/plugin_mixins/ecs_compatibility_support"
+require "logstash/plugin_mixins/validator_support/field_reference_validation_adapter"
 
 # this require_relative returns early unless the JRuby version is between 9.2.0.0 and 9.2.8.0
 require_relative "tzinfo_jruby_patch"
@@ -129,6 +131,11 @@ require_relative "tzinfo_jruby_patch"
 module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   include LogStash::PluginMixins::Jdbc::Common
   include LogStash::PluginMixins::Jdbc::Jdbc
+  # adds ecs_compatibility config which could be :disabled or :v1
+  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled,:v1,:v8 => :v1)
+  # adds :field_reference validator adapter
+  extend LogStash::PluginMixins::ValidatorSupport::FieldReferenceValidationAdapter
+
   config_name "jdbc"
 
   # If undefined, Logstash will complain, even if codec is unused.
@@ -209,6 +216,9 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
 
   config :prepared_statement_bind_values, :validate => :array, :default => []
 
+  # Define the target field to store the loaded columns
+  config :target, :validate => :field_reference, :required => false
+
   attr_reader :database # for test mocking/stubbing
 
   public
@@ -260,6 +270,13 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
         converters[encoding] = converter
       end
     end
+
+    # target must be populated if ecs_compatibility is not :disabled
+    if @target.nil? && ecs_compatibility != :disabled
+      logger.info('ECS compatibility is enabled but no ``target`` option was specified, it is recommended'\
+                  ' to set the option to avoid potential schema conflicts (if your data is ECS compliant or'\
+                  ' non-conflicting feel free to ignore this message)')
+    end
   end # def register
 
   # test injection points
@@ -318,7 +335,12 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
         ## do the necessary conversions to string elements
         row = Hash[row.map { |k, v| [k.to_s, convert(k, v)] }]
       end
-
+      if @target
+        event = LogStash::Event.new
+        event.set(@target, row)
+      else
+        event = LogStash::Event.new(row)
+      end
       decorate(event)
       queue << event
     end
data/lib/logstash/plugin_mixins/jdbc/jdbc.rb
CHANGED
@@ -119,19 +119,34 @@ module LogStash module PluginMixins module Jdbc
         else
           @logger.error("Failed to connect to database. #{@jdbc_pool_timeout} second timeout exceeded. Trying again.")
         end
-
-      rescue ::Sequel::Error => e
+      rescue Java::JavaSql::SQLException, ::Sequel::Error => e
         if retry_attempts <= 0
-
+          log_java_exception(e.cause)
+          @logger.error("Unable to connect to database. Tried #{@connection_retry_attempts} times", error_details(e, trace: true))
           raise e
         else
-          @logger.error("Unable to connect to database. Trying again", :
+          @logger.error("Unable to connect to database. Trying again", error_details(e, trace: false))
         end
       end
       sleep(@connection_retry_attempts_wait_time)
     end
   end
 
+  def error_details(e, trace: false)
+    details = { :message => e.message, :exception => e.class }
+    details[:cause] = e.cause if e.cause
+    details[:backtrace] = e.backtrace if trace || @logger.debug?
+    details
+  end
+
+  def log_java_exception(e)
+    return unless e.is_a?(java.lang.Exception)
+    # @logger.name using the same convention as LS does
+    logger = self.class.name.gsub('::', '.').downcase
+    logger = org.apache.logging.log4j.LogManager.getLogger(logger)
+    logger.error('', e) # prints nested causes
+  end
+
   def open_jdbc_connection
     # at this point driver is already loaded
     Sequel.application_timezone = @plugin_timezone.to_sym
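The new `error_details` helper builds the structured hash attached to the connection-failure log entries. A quick pure-Ruby illustration of the shape it produces, using ordinary Ruby exceptions instead of a real JDBC failure:

```ruby
# Illustration only: ordinary Ruby exceptions stand in for the JDBC/Sequel errors. Ruby sets
# Exception#cause automatically when an error is raised from inside a rescue block, which is
# what lets the cause chain surface in the logged details.
def error_details(e, trace: false, debug: false)
  details = { :message => e.message, :exception => e.class }
  details[:cause] = e.cause if e.cause
  details[:backtrace] = e.backtrace if trace || debug
  details
end

begin
  begin
    raise IOError, "Connection refused"        # stand-in for the driver-level failure
  rescue IOError
    raise "Unable to connect to database"      # wrapping error automatically carries the cause
  end
rescue => e
  p error_details(e)
  # => {:message=>"Unable to connect to database", :exception=>RuntimeError,
  #     :cause=>#<IOError: Connection refused>}
  p error_details(e, trace: true)              # additionally includes :backtrace
end
```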
data/lib/logstash/plugin_mixins/jdbc_streaming/statement_handler.rb
CHANGED
@@ -38,8 +38,11 @@ module LogStash module PluginMixins module JdbcStreaming
       begin
         logger.debug? && logger.debug("Executing JDBC query", :statement => statement, :parameters => params)
         execute_extract_records(db, params, result)
-      rescue
-        #
+      rescue => e
+        # In theory all exceptions in Sequel should be wrapped in Sequel::Error
+        # However, there are cases where other errors can occur - a `SQLException` may be thrown
+        # during `prepareStatement`. Let's handle these cases here, where we can tag and warn
+        # appropriately rather than bubble up and potentially crash the plugin.
         result.failed!
         logger.warn? && logger.warn("Exception when executing JDBC query", :statement => statement, :parameters => params, :exception => e)
       end
data/logstash-integration-jdbc.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-jdbc'
-  s.version = '5.0.6'
+  s.version = '5.1.3'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Integration with JDBC - input and filter plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -36,6 +36,8 @@ Gem::Specification.new do |s|
   s.add_runtime_dependency 'tzinfo-data'
   # 3.5 limitation is required for jdbc-static loading schedule
   s.add_runtime_dependency 'rufus-scheduler', '< 3.5'
+  s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.2'
+  s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'
 
   s.add_development_dependency "childprocess"
   s.add_development_dependency 'logstash-devutils'
data/spec/filters/integration/jdbc_static_spec.rb
CHANGED
@@ -59,7 +59,7 @@ module LogStash module Filters
 
     let(:plugin) { JdbcStatic.new(settings) }
 
-    let(:event)
+    let(:event) { ::LogStash::Event.new("message" => "some text", "ip" => ipaddr) }
 
     let(:ipaddr) { ".3.1.1" }
 
data/spec/filters/jdbc/lookup_spec.rb
CHANGED
@@ -248,6 +248,72 @@ module LogStash module Filters module Jdbc
         expect(subject.valid?).to be_falsey
       end
     end
+
+    describe "lookup operations when prepareStatement throws" do
+      let(:local_db) { double("local_db") }
+      let(:lookup_hash) do
+        {
+          "query" => "select * from servers WHERE ip LIKE ? AND os LIKE ?",
+          "prepared_parameters" => ["%%{[ip]}"],
+          "target" => "server",
+          "tag_on_failure" => ["_jdbcstaticfailure_server"]
+        }
+      end
+      let(:event) { LogStash::Event.new()}
+      let(:records) { [{"name" => "ldn-1-23", "rack" => "2:1:6"}] }
+      let(:prepared_statement) { double("prepared_statement")}
+
+      subject(:lookup) { described_class.new(lookup_hash, {}, "lookup-1") }
+
+      before(:each) do
+        allow(local_db).to receive(:prepare).once.and_return(prepared_statement)
+        allow(prepared_statement).to receive(:call).once.and_raise(Java::JavaSql::SQLTransactionRollbackException.new)
+      end
+
+      it "must not be valid" do
+        expect(subject.valid?).to be_falsey
+      end
+
+      it "should tag event as failed" do
+        event.set("ip", "20.20")
+        event.set("os", "MacOS")
+        subject.prepare(local_db)
+        subject.enhance(local_db, event)
+        expect(event.get("tags")).to eq(["_jdbcstaticfailure_server"])
+        expect(event.get("server")).to be_nil
+      end
+    end
+
+    describe "validation of target option" do
+      let(:lookup_hash) do
+        {
+          "query" => "select * from servers WHERE ip LIKE ? AND os LIKE ?",
+          "prepared_parameters" => ["%%{[ip]}"],
+        }
+      end
+
+      it "should log a warn when ECS is enabled and target not defined" do
+
+        class LoggableLookup < Lookup
+
+          @@TEST_LOGGER = nil
+
+          def self.logger=(log)
+            @@TEST_LOGGER = log
+          end
+
+          def self.logger
+            @@TEST_LOGGER
+          end
+        end
+
+        spy_logger = double("logger")
+        expect(spy_logger).to receive(:info).once.with(/ECS compatibility is enabled but no .*?target.*? was specified/)
+        LoggableLookup.logger = spy_logger
+
+        LoggableLookup.new(lookup_hash, {:ecs_compatibility => 'v1'}, "lookup-1")
+      end
+    end
   end
 end end end
 
data/spec/inputs/integration/integ_spec.rb
CHANGED
@@ -45,7 +45,7 @@ describe LogStash::Inputs::Jdbc, :integration => true do
 
   context "when supplying a non-existent library" do
     let(:settings) do
-      super.merge(
+      super().merge(
         "jdbc_driver_library" => "/no/path/to/postgresql.jar"
       )
     end
@@ -61,13 +61,29 @@ describe LogStash::Inputs::Jdbc, :integration => true do
 
   context "when connecting to a non-existent server" do
     let(:settings) do
-      super.merge(
+      super().merge(
        "jdbc_connection_string" => "jdbc:postgresql://localhost:65000/somedb"
       )
     end
 
     it "should not register correctly" do
       plugin.register
+      allow( plugin ).to receive(:log_java_exception)
+      q = Queue.new
+      expect do
+        plugin.run(q)
+      end.to raise_error(::Sequel::DatabaseConnectionError)
+    end
+
+    it "should log (native) Java driver error" do
+      plugin.register
+      expect( org.apache.logging.log4j.LogManager ).to receive(:getLogger).and_wrap_original do |m, *args|
+        logger = m.call(*args)
+        expect( logger ).to receive(:error) do |_, e|
+          expect( e ).to be_a org.postgresql.util.PSQLException
+        end.and_call_original
+        logger
+      end
       q = Queue.new
       expect do
         plugin.run(q)
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -328,6 +328,48 @@ describe LogStash::Inputs::Jdbc do
 
   end
 
+  context "when using target option" do
+    let(:settings) do
+      {
+        "statement" => "SELECT * from test_table FETCH FIRST 1 ROWS ONLY",
+        "target" => "sub_field"
+      }
+    end
+
+    before do
+      plugin.register
+    end
+
+    after do
+      plugin.stop
+    end
+
+    it "should put all columns under sub-field" do
+      db[:test_table].insert(:num => 1, :custom_time => Time.now.utc, :created_at => Time.now.utc, :string => "Test target option")
+
+      plugin.run(queue)
+
+      expect(queue.size).to eq(1)
+      event = queue.pop
+      expect(event.get("[sub_field][string]")).to eq("Test target option")
+    end
+  end
+
+  context "when using target option is not set and ecs_compatibility is enabled" do
+    let(:settings) do
+      {
+        "statement" => "SELECT * from test_table FETCH FIRST 1 ROWS ONLY",
+        "ecs_compatibility" => :v1
+      }
+    end
+
+    it "should log a warn of missed target usage" do
+      expect(plugin.logger).to receive(:info).once.with(/ECS compatibility is enabled but no .*?target.*? was specified/)
+
+      plugin.register
+    end
+  end
+
   context "when fetching time data" do
 
     let(:settings) do
data/spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb
ADDED
@@ -0,0 +1,23 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/plugin_mixins/jdbc_streaming/parameter_handler"
+
+
+describe LogStash::PluginMixins::JdbcStreaming::ParameterHandler do
+  context "resolve field reference" do
+    let(:event) { ::LogStash::Event.new("field" => "field_value") }
+
+    it "should resolve root field" do
+      handler = LogStash::PluginMixins::JdbcStreaming::ParameterHandler.build_bind_value_handler "[field]"
+      handler.extract_from(event)
+      expect(handler.extract_from(event)).to eq "field_value"
+    end
+
+    it "should resolve nested field" do
+      event = ::LogStash::Event.from_json("{\"field\": {\"nested\": \"nested_field\"}}").first
+      handler = LogStash::PluginMixins::JdbcStreaming::ParameterHandler.build_bind_value_handler "[field][nested]"
+      handler.extract_from(event)
+      expect(handler.extract_from(event)).to eq "nested_field"
+    end
+  end
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-jdbc
 version: !ruby/object:Gem::Version
-  version: 5.0.6
+  version: 5.1.3
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2021-07-09 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -142,6 +142,34 @@ dependencies:
     - - "<"
       - !ruby/object:Gem::Version
         version: '3.5'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.2'
+  name: logstash-mixin-ecs_compatibility_support
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.2'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
+  name: logstash-mixin-validator_support
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '1.0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -264,6 +292,7 @@ files:
 - spec/helpers/derbyrun.jar
 - spec/inputs/integration/integ_spec.rb
 - spec/inputs/jdbc_spec.rb
+- spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb
 - vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar
 - vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
@@ -316,3 +345,4 @@ test_files:
 - spec/helpers/derbyrun.jar
 - spec/inputs/integration/integ_spec.rb
 - spec/inputs/jdbc_spec.rb
+- spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb