logstash-integration-jdbc 5.4.1 → 5.4.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +7 -0
- data/docs/filter-jdbc_static.asciidoc +14 -2
- data/docs/input-jdbc.asciidoc +3 -1
- data/lib/logstash/inputs/jdbc.rb +1 -1
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +18 -4
- data/logstash-integration-jdbc.gemspec +1 -1
- data/spec/inputs/jdbc_spec.rb +46 -6
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0427bfc9149fd1b4751f91bda69dcbc67fe50bbb59ba331142c52b0ac79b92df
+  data.tar.gz: 9b89e07e05f01b5f4226cbbb452a36e0edb116ca116d843d8f578508206086b8
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7bcc88da6e2d4de082a5547f0f273063913f2fe8e87578d7177eb4929604a66ec8be48818ba7b32a88a7bf978aad42f07c14f69f60349d82531f531c0cd77480
+  data.tar.gz: 76bc8bb2c010a6f6087e91be3280c1b83ae165449889cc425cb91b4e6fe0f9092b229f7ed37df5fb6ae7be3acbaa8c69541c610bbfff07c374a32be014ccdcbf
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,10 @@
+## 5.4.3
+- Fix crash when metadata file can't be deleted after moving under path.data [#136](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/136)
+
+## 5.4.2
+- Doc: described the `default_hash` and `tag_on_default_use` interaction in the filter plugin [#122](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/122)
+- Added new settings `statement_retry_attempts` and `statement_retry_attempts_wait_time` for retry of failed SQL statement execution [#123](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/123)
+
 ## 5.4.1
 - Bugfix: leak which happened in creating a new Database pool for every query. The pool is now created on registration and closed on the plugin's `stop` [#119](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/119)
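The two settings introduced in 5.4.2 are ordinary options on the `jdbc` input (and the other mixin-based plugins); per the mixin diff below, the defaults are one attempt (`statement_retry_attempts => 1`, i.e. no retry) and a 0.5 s pause between tries. A minimal pipeline sketch — the driver, connection string, user, query, and schedule are placeholder values, not taken from this diff:

-----
input {
  jdbc {
    # placeholder connection details
    jdbc_driver_class => "org.postgresql.Driver"
    jdbc_connection_string => "jdbc:postgresql://localhost:5432/mydb"
    jdbc_user => "postgres"
    statement => "SELECT * FROM users"
    schedule => "*/5 * * * *"
    # new in 5.4.2: try a failing statement up to 3 times,
    # sleeping 1 second between attempts
    statement_retry_attempts => 3
    statement_retry_attempts_wait_time => 1
  }
}
-----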
data/docs/filter-jdbc_static.asciidoc
CHANGED
@@ -111,10 +111,14 @@ filter {
       query => "SELECT firstname, lastname FROM users WHERE userid = ? AND country = ?"
       prepared_parameters => ["[loggedin_userid]", "[user_nation]"] <4>
       target => "user" <5>
+      default_hash => { <6>
+        firstname => nil
+        lastname => nil
+      }
     }
   ]
   # using add_field here to add & rename values to the event root
-  add_field => { server_name => "%{[server][0][description]}" } <6>
+  add_field => { server_name => "%{[server][0][description]}" } <7>
   add_field => { user_firstname => "%{[user][0][firstname]}" }
   add_field => { user_lastname => "%{[user][0][lastname]}" }
   remove_field => ["server", "user"]
@@ -127,6 +131,13 @@ filter {
     jdbc_connection_string => "jdbc:postgresql://remotedb:5432/ls_test_2"
   }
 }
+
+output {
+  if "_jdbcstaticdefaultsused" in [tags] {
+    # Print all the not found users
+    stdout { }
+  }
+}
 -----
 <1> Queries an external database to fetch the dataset that will be cached
 locally.
@@ -139,7 +150,8 @@ See <<plugins-{type}s-{plugin}-object_order>>.
 follow the positional ordering.
 <5> Specifies the event field that will store the looked-up data. If the lookup
 returns multiple columns, the data is stored as a JSON object within the field.
-<6> Takes data from the JSON object and stores it in top-level event fields for
+<6> When the user is not found in the database, an event is created using data from the <<plugins-{type}s-{plugin}-local_lookups>> `default_hash` setting, and the event is tagged with the list set in <<plugins-{type}s-{plugin}-tag_on_default_use>>.
+<7> Takes data from the JSON object and stores it in top-level event fields for
 easier analysis in Kibana.

 Here's a full example:
data/docs/input-jdbc.asciidoc
CHANGED
@@ -491,10 +491,12 @@ How often to validate a connection (in seconds)
 ===== `last_run_metadata_path`

 * Value type is <<string,string>>
-* Default value is `"$HOME/.logstash_jdbc_last_run"`
+* Default value is `"<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run"`

 Path to file with last run time

+In versions prior to 5.2.6 the metadata file was written to `$HOME/.logstash_jdbc_last_run`. If, during a Logstash upgrade, the file is found in `$HOME`, it will be moved to the default location under `path.data`. If the path is defined by the user, no automatic move is performed.
+
 [id="plugins-{type}s-{plugin}-lowercase_column_names"]
 ===== `lowercase_column_names`
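Since the automatic `$HOME` → `path.data` migration only runs when the default location is in use, a pipeline that pins the path keeps full control of the file. A small sketch — the file path and query are placeholders:

-----
input {
  jdbc {
    # placeholder connection details omitted
    statement => "SELECT * FROM orders WHERE id > :sql_last_value"
    use_column_value => true
    tracking_column => "id"
    # user-defined path: no automatic move from $HOME is attempted
    last_run_metadata_path => "/var/lib/logstash/jdbc_orders_last_run"
  }
}
-----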
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -406,7 +406,7 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
         # the correct access rights
         ::File.delete(old_default_path.to_path)
         @logger.info("Successfully moved the #{old_default_path.to_path} into #{metadata_new_path.to_path}")
-      rescue e
+      rescue => e
         @logger.warn("Using new metadata file at #{metadata_new_path.to_path} but #{old_default_path} can't be removed.")
       end
     end
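This one-token fix is the crash from CHANGELOG entry #136. In Ruby, the expression after `rescue` names the exception class to match, so `rescue e` makes Ruby look up a constant or method called `e` at the moment an exception propagates — and that lookup raises `NameError`, replacing the original error with a crash. A standalone sketch of both forms (not plugin code):

-----
# BROKEN form: `e` is resolved as the class to rescue, not a binding.
begin
  begin
    File.delete("/nonexistent/file")   # raises Errno::ENOENT
  rescue e                             # evaluating `e` raises NameError
    puts "never reached"
  end
rescue NameError => outer
  puts "crashed: #{outer.message}"     # undefined local variable or method `e'
end

# FIXED form: `rescue => e` matches StandardError and binds the exception.
begin
  File.delete("/nonexistent/file")
rescue => e
  puts "handled: #{e.class}"           # => Errno::ENOENT
end
-----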
data/lib/logstash/plugin_mixins/jdbc/jdbc.rb
CHANGED
@@ -104,6 +104,11 @@ module LogStash module PluginMixins module Jdbc
       # Number of seconds to sleep between connection attempts
       config :connection_retry_attempts_wait_time, :validate => :number, :default => 0.5

+      # Maximum number of times to try running statement
+      config :statement_retry_attempts, :validate => :number, :default => 1
+      # Number of seconds to sleep between statement execution
+      config :statement_retry_attempts_wait_time, :validate => :number, :default => 0.5
+
       # give users the ability to force Sequel application side into using local timezone
       config :plugin_timezone, :validate => ["local", "utc"], :default => "utc"
     end
@@ -202,7 +207,10 @@ module LogStash module PluginMixins module Jdbc
     public
     def execute_statement
       success = false
+      retry_attempts = @statement_retry_attempts
+
       begin
+        retry_attempts -= 1
         sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
         @tracking_column_warning_sent = false
         @statement_handler.perform_query(@database, @value_tracker.value) do |row|
@@ -210,17 +218,23 @@ module LogStash module PluginMixins module Jdbc
           yield extract_values_from(row)
         end
         success = true
-      rescue Sequel::DatabaseConnectionError,
-             Sequel::DatabaseError,
-             Sequel::InvalidValue,
-             Java::JavaSql::SQLException => e
+      rescue Sequel::Error, Java::JavaSql::SQLException => e
         details = { exception: e.class, message: e.message }
         details[:cause] = e.cause.inspect if e.cause
         details[:backtrace] = e.backtrace if @logger.debug?
         @logger.warn("Exception when executing JDBC query", details)
+
+        if retry_attempts == 0
+          @logger.error("Unable to execute statement. Tried #{@statement_retry_attempts} times.")
+        else
+          @logger.error("Unable to execute statement. Trying again.")
+          sleep(@statement_retry_attempts_wait_time)
+          retry
+        end
       else
         @value_tracker.set_value(sql_last_value)
       end
+
       return success
     end
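The retry shape used here — decrement the budget inside `begin`, then `retry` from `rescue` until it reaches zero — is easy to lift out. A standalone sketch under illustrative names (`with_retries` is not part of the plugin's API); note that, like `statement_retry_attempts`, `attempts` counts total tries, so `1` means no retry:

-----
# Generic retry skeleton mirroring execute_statement's control flow.
def with_retries(attempts:, wait:)
  remaining = attempts
  begin
    remaining -= 1           # consume one try on every pass, incl. the first
    yield
  rescue StandardError => e
    if remaining == 0
      puts "Unable to execute. Tried #{attempts} times. (#{e.class})"
      false
    else
      puts "Unable to execute. Trying again."
      sleep(wait)
      retry                  # re-runs the begin block with the decremented counter
    end
  end
end

# Example: fails twice, succeeds on the third and final try.
calls = 0
with_retries(attempts: 3, wait: 0.1) do
  calls += 1
  raise "boom" if calls < 3
  puts "succeeded on call #{calls}"
end
-----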
data/logstash-integration-jdbc.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-jdbc'
-  s.version = '5.4.1'
+  s.version = '5.4.3'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Integration with JDBC - input and filter plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -1169,19 +1169,31 @@ describe LogStash::Inputs::Jdbc do
     context "when a file exists" do
       before do
         # in a faked HOME folder save a valid previous last_run metadata file
+        allow(ENV).to receive(:[]).and_call_original
         allow(ENV).to receive(:[]).with('HOME').and_return(fake_home)
-
+
+
         File.open("#{fake_home}/.logstash_jdbc_last_run", 'w') do |file|
           file.write("--- !ruby/object:DateTime '2022-03-08 08:10:00.486889000 Z'")
         end
       end
+      let(:old_path) { "#{fake_home}/.logstash_jdbc_last_run" }
+      let(:path_data) { LogStash::SETTINGS.get_value("path.data") }
+      let(:new_path) { "#{path_data}/plugins/inputs/jdbc/logstash_jdbc_last_run" }

       it "should be moved" do
         plugin.register
-
-        expect(::File.exist
-
-
-
+        expect(::File).to_not exist(old_path)
+        expect(::File).to exist(new_path)
+      end
+      context "if the delete fails" do
+        before(:each) do
+          allow(File).to receive(:delete).and_raise ArgumentError
+        end
+        it "should still be moved" do
+          plugin.register
+          expect(::File).to exist(old_path) # old still exists
+          expect(::File).to exist(new_path)
+        end
       end
     end
   end
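The added `allow(ENV).to receive(:[]).and_call_original` line is the standard RSpec idiom for overriding a single hash-style lookup without breaking every other `ENV[...]` read in the process: unmatched keys fall through to the real implementation. A minimal self-contained illustration (outside the plugin's suite; the fake path is a placeholder):

-----
require 'rspec/autorun'

RSpec.describe 'partial ENV stubbing' do
  it 'overrides only HOME' do
    # Without and_call_original, stubbing ENV#[] would make every
    # ENV[...] call return nil; with it, unmatched keys pass through.
    allow(ENV).to receive(:[]).and_call_original
    allow(ENV).to receive(:[]).with('HOME').and_return('/tmp/fake_home')

    expect(ENV['HOME']).to eq('/tmp/fake_home')
    expect(ENV['PATH']).to eq(ENV.to_h['PATH'])  # real value still visible
  end
end
-----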
@@ -1352,6 +1364,34 @@ describe LogStash::Inputs::Jdbc do
       expect { plugin.register }.to_not raise_error
       plugin.stop
     end
+
+    it "does retry when query execution fails" do
+      mixin_settings['statement_retry_attempts'] = 2
+      mixin_settings['statement_retry_attempts_wait_time'] = 0.5
+      queue = Queue.new
+      plugin.register
+
+      handler = plugin.instance_variable_get(:@statement_handler)
+      allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_raise(Sequel::PoolTimeout)
+      expect(plugin.logger).to receive(:error).with("Unable to execute statement. Trying again.")
+      expect(plugin.logger).to receive(:error).with("Unable to execute statement. Tried 2 times.")
+
+      plugin.run(queue)
+      plugin.stop
+    end
+
+    it "does not retry when query execution succeeds" do
+      mixin_settings['connection_retry_attempts'] = 2
+      queue = Queue.new
+      plugin.register
+
+      handler = plugin.instance_variable_get(:@statement_handler)
+      allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_call_original
+      expect(plugin.logger).not_to receive(:error)
+
+      plugin.run(queue)
+      plugin.stop
+    end
   end

   context "when encoding of some columns need to be changed" do
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-jdbc
 version: !ruby/object:Gem::Version
-  version: 5.4.1
+  version: 5.4.3
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2023-05-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
 requirement: !ruby/object:Gem::Requirement
@@ -333,7 +333,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.2.33
 signing_key:
 specification_version: 4
 summary: Integration with JDBC - input and filter plugins