logstash-integration-jdbc 5.4.0 → 5.4.2

This diff compares publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 28cba197157488c1839fc9a948c3f27916b26af07f28c592f537434bd84722a6
- data.tar.gz: fe2263ea7ed36abdeb9fd3d9597958724be77a8653950e9d554a212c1e3dd106
+ metadata.gz: c54f1c22363c2d0637b44a2b68a98e22a1b2f07bc68927a4cc9bd6788a6fc009
+ data.tar.gz: 3ea76ee4fcdde152e43af6c0aab53be50dc022a8c31fce8ebac36669370a4875
  SHA512:
- metadata.gz: 76543ead6834631efaca25d154abe7ee7594943dff0c27d90e7588ed3aa651de427fc3372cffac348f6f810f11cfe13864d8f26af7d09fd34470491a6a4c66b3
- data.tar.gz: de71ae5f8c54dfa08d67e1e848e59fc1779477c41246cee120debf633301829e931b7103546de0bef482002eb7b5296b748d6ad6caf0bb5457abe7205db79b6e
+ metadata.gz: d31cef0d20064897337825bcf7ff491d3b64295c47b644d527974f458ac2c8a180d930b84b55fac6601a5f76faa10754b8975af681c4e0718ec2711e08a0d571
+ data.tar.gz: 6434512f2b958b976a12a7c4afe3d584066cb505c4818d8e340e363a7db4327a433785fc9ddfddc6c67eb2d719fc3f518346974760003873250895cb396d36e1
data/CHANGELOG.md CHANGED
@@ -1,3 +1,10 @@
+ ## 5.4.2
+ - Doc: described the `default_hash` and `tag_on_default_use` interaction in the filter plugin [#122](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/122)
+ - Added new settings `statement_retry_attempts` and `statement_retry_attempts_wait_time` for retrying failed SQL statement execution [#123](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/123)
+
+ ## 5.4.1
+ - Bugfix: a leak caused by creating a new database pool for every query. The pool is now created on registration and closed on the plugin's `stop` [#119](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/119)
+
  ## 5.4.0
  - Ambiguous Timestamp Support [#92](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/92)
  - FIX: when encountering an ambiguous timestamp, the JDBC Input no longer crashes
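
The 5.4.2 entry above adds two statement-retry settings. As a rough illustration only, not taken from the package, here is a hypothetical Ruby sketch of a jdbc input configured with them, in the spec-style setup used later in this diff; the connection string, driver class, user, and statement are placeholders:

```ruby
# Hypothetical usage sketch; every connection detail below is a placeholder.
require "logstash/inputs/jdbc"

settings = {
  "jdbc_connection_string" => "jdbc:postgresql://localhost:5432/mydb", # placeholder
  "jdbc_user"              => "ls_user",                               # placeholder
  "jdbc_driver_class"      => "org.postgresql.Driver",                 # placeholder
  "statement"              => "SELECT * FROM events",                  # placeholder
  # New in 5.4.2: total number of tries for a statement (default 1 = no retry).
  "statement_retry_attempts"           => 3,
  # New in 5.4.2: seconds slept between tries (default 0.5).
  "statement_retry_attempts_wait_time" => 2
}

plugin = LogStash::Inputs::Jdbc.new(settings)
plugin.register        # since 5.4.1 this also creates the connection pool
plugin.run(Queue.new)  # failed statements are retried per the settings above
plugin.stop
```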
@@ -111,10 +111,14 @@ filter {
  query => "SELECT firstname, lastname FROM users WHERE userid = ? AND country = ?"
  prepared_parameters => ["[loggedin_userid]", "[user_nation]"] <4>
  target => "user" <5>
+ default_hash => { <6>
+   firstname => nil
+   lastname => nil
+ }
  }
  ]
  # using add_field here to add & rename values to the event root
- add_field => { server_name => "%{[server][0][description]}" } <6>
+ add_field => { server_name => "%{[server][0][description]}" } <7>
  add_field => { user_firstname => "%{[user][0][firstname]}" }
  add_field => { user_lastname => "%{[user][0][lastname]}" }
  remove_field => ["server", "user"]
@@ -127,6 +131,13 @@ filter {
  jdbc_connection_string => "jdbc:postgresql://remotedb:5432/ls_test_2"
  }
  }
+
+ output {
+   if "_jdbcstaticdefaultsused" in [tags] {
+     # Print all the not found users
+     stdout { }
+   }
+ }
  -----
  <1> Queries an external database to fetch the dataset that will be cached
  locally.
@@ -139,7 +150,8 @@ See <<plugins-{type}s-{plugin}-object_order>>.
  follow the positional ordering.
  <5> Specifies the event field that will store the looked-up data. If the lookup
  returns multiple columns, the data is stored as a JSON object within the field.
- <6> Takes data from the JSON object and stores it in top-level event fields for
+ <6> When the user is not found in the database, an event is created using data from the <<plugins-{type}s-{plugin}-local_lookups>> `default_hash` setting, and the event is tagged with the list set in <<plugins-{type}s-{plugin}-tag_on_default_use>>.
+ <7> Takes data from the JSON object and stores it in top-level event fields for
  easier analysis in Kibana.

  Here's a full example:
@@ -261,8 +261,6 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
      end
    end

-   prepare_jdbc_connection
-
    if @use_column_value
      # Raise an error if @use_column_value is true, but no @tracking_column is set
      if @tracking_column.nil?
@@ -305,6 +303,20 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
        converters[encoding] = converter
      end
    end
+
+   load_driver
+   begin
+     open_jdbc_connection
+   rescue Sequel::DatabaseConnectionError,
+          Sequel::DatabaseError,
+          Sequel::InvalidValue,
+          Java::JavaSql::SQLException => e
+     details = { exception: e.class, message: e.message }
+     details[:cause] = e.cause.inspect if e.cause
+     details[:backtrace] = e.backtrace if @logger.debug?
+     @logger.warn("Exception when executing JDBC query", details)
+     raise(LogStash::ConfigurationError, "Can't create a connection pool to the database")
+   end
  end # def register

  # test injection points
@@ -317,7 +329,6 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
  end

  def run(queue)
-   load_driver
    if @schedule
      # scheduler input thread name example: "[my-oracle]|input|jdbc|scheduler"
      scheduler.cron(@schedule) { execute_query(queue) }
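
Taken together, the two hunks above move driver loading and connection-pool creation out of `run` and into `register`, so a bad driver path or an unreachable database now fails at plugin startup. A minimal RSpec-style sketch of that behavior, under the assumption of an unreachable placeholder connection string (the package's real specs, further down in this diff, exercise the same path):

```ruby
# Hypothetical spec sketch; the connection string and driver class are placeholders.
require "logstash/devutils/rspec/spec_helper"
require "logstash/inputs/jdbc"

describe LogStash::Inputs::Jdbc do
  let(:settings) do
    {
      "jdbc_connection_string" => "jdbc:postgresql://unreachable-host:5432/nodb", # placeholder
      "jdbc_driver_class"      => "org.postgresql.Driver",                        # placeholder
      "statement"              => "SELECT 1"
    }
  end

  it "fails fast in register when the database cannot be reached" do
    plugin = described_class.new(settings)
    # The connection pool is now created here, so the failure surfaces as a
    # configuration error instead of waiting for the first query in run.
    expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
  end
end
```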
@@ -104,6 +104,11 @@ module LogStash module PluginMixins module Jdbc
    # Number of seconds to sleep between connection attempts
    config :connection_retry_attempts_wait_time, :validate => :number, :default => 0.5

+   # Maximum number of times to try running statement
+   config :statement_retry_attempts, :validate => :number, :default => 1
+   # Number of seconds to sleep between statement execution
+   config :statement_retry_attempts_wait_time, :validate => :number, :default => 0.5
+
    # give users the ability to force Sequel application side into using local timezone
    config :plugin_timezone, :validate => ["local", "utc"], :default => "utc"
  end
@@ -188,31 +193,24 @@ module LogStash module PluginMixins module Jdbc
      end
    end

-   public
-   def prepare_jdbc_connection
-     @connection_lock = ReentrantLock.new
-   end
-
    public
    def close_jdbc_connection
      begin
        # pipeline restarts can also close the jdbc connection, block until the current executing statement is finished to avoid leaking connections
        # connections in use won't really get closed
-       @connection_lock.lock
        @database.disconnect if @database
      rescue => e
        @logger.warn("Failed to close connection", :exception => e)
-     ensure
-       @connection_lock.unlock
      end
    end

    public
    def execute_statement
      success = false
+     retry_attempts = @statement_retry_attempts
+
      begin
-       @connection_lock.lock
-       open_jdbc_connection
+       retry_attempts -= 1
        sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
        @tracking_column_warning_sent = false
        @statement_handler.perform_query(@database, @value_tracker.value) do |row|
@@ -220,20 +218,23 @@ module LogStash module PluginMixins module Jdbc
          yield extract_values_from(row)
        end
        success = true
-     rescue Sequel::DatabaseConnectionError,
-            Sequel::DatabaseError,
-            Sequel::InvalidValue,
-            Java::JavaSql::SQLException => e
+     rescue Sequel::Error, Java::JavaSql::SQLException => e
        details = { exception: e.class, message: e.message }
        details[:cause] = e.cause.inspect if e.cause
        details[:backtrace] = e.backtrace if @logger.debug?
        @logger.warn("Exception when executing JDBC query", details)
+
+       if retry_attempts == 0
+         @logger.error("Unable to execute statement. Tried #{@statement_retry_attempts} times.")
+       else
+         @logger.error("Unable to execute statement. Trying again.")
+         sleep(@statement_retry_attempts_wait_time)
+         retry
+       end
      else
        @value_tracker.set_value(sql_last_value)
-     ensure
-       close_jdbc_connection
-       @connection_lock.unlock
      end
+
      return success
    end
 
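For readers skimming the hunk above, the retry control flow added to `execute_statement` boils down to a decrementing counter inside the `begin` block plus Ruby's `retry`. A simplified, standalone sketch follows; the names and logging are illustrative, not the plugin's code:

```ruby
# Simplified illustration of the retry pattern above: `attempts` is the total
# number of tries (so 1 means "no retry"); `wait_time` is slept between tries.
def run_with_retries(attempts:, wait_time:)
  remaining = attempts
  begin
    remaining -= 1
    yield              # execute the statement
    true
  rescue StandardError => e
    if remaining == 0
      warn "Unable to execute statement. Tried #{attempts} times. (#{e.message})"
      false
    else
      warn "Unable to execute statement. Trying again."
      sleep(wait_time)
      retry            # re-enters the begin block, decrementing the counter again
    end
  end
end

# Example: fail twice, then succeed on the third and final try.
tries = 0
run_with_retries(attempts: 3, wait_time: 0.1) do
  tries += 1
  raise "transient failure" if tries < 3
end
```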
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
    s.name = 'logstash-integration-jdbc'
-   s.version = '5.4.0'
+   s.version = '5.4.2'
    s.licenses = ['Apache License (2.0)']
    s.summary = "Integration with JDBC - input and filter plugins"
    s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -76,10 +76,8 @@ describe LogStash::Inputs::Jdbc, :integration => true do
  end

  it "should not register correctly" do
-   plugin.register
-   q = Queue.new
    expect do
-     plugin.run(q)
+     plugin.register
    end.to raise_error(::LogStash::PluginLoadingError)
  end
  end
@@ -92,16 +90,13 @@ describe LogStash::Inputs::Jdbc, :integration => true do
  end

  it "log warning msg when plugin run" do
-   plugin.register
    expect( plugin ).to receive(:log_java_exception)
    expect(plugin.logger).to receive(:warn).once.with("Exception when executing JDBC query",
                                                      hash_including(:message => instance_of(String)))
-   q = Queue.new
-   expect{ plugin.run(q) }.not_to raise_error
+   expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
  end

  it "should log (native) Java driver error" do
-   plugin.register
    expect( org.apache.logging.log4j.LogManager ).to receive(:getLogger).and_wrap_original do |m, *args|
      logger = m.call(*args)
      expect( logger ).to receive(:error) do |_, e|
@@ -109,8 +104,7 @@ describe LogStash::Inputs::Jdbc, :integration => true do
      end.and_call_original
      logger
    end
-   q = Queue.new
-   expect{ plugin.run(q) }.not_to raise_error
+   expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
  end
  end
  end
@@ -1352,6 +1352,34 @@ describe LogStash::Inputs::Jdbc do
    expect { plugin.register }.to_not raise_error
    plugin.stop
  end
+
+ it "does retry when query execution fails" do
+   mixin_settings['statement_retry_attempts'] = 2
+   mixin_settings['statement_retry_attempts_wait_time'] = 0.5
+   queue = Queue.new
+   plugin.register
+
+   handler = plugin.instance_variable_get(:@statement_handler)
+   allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_raise(Sequel::PoolTimeout)
+   expect(plugin.logger).to receive(:error).with("Unable to execute statement. Trying again.")
+   expect(plugin.logger).to receive(:error).with("Unable to execute statement. Tried 2 times.")
+
+   plugin.run(queue)
+   plugin.stop
+ end
+
+ it "does not retry when query execution succeeds" do
+   mixin_settings['connection_retry_attempts'] = 2
+   queue = Queue.new
+   plugin.register
+
+   handler = plugin.instance_variable_get(:@statement_handler)
+   allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_call_original
+   expect(plugin.logger).not_to receive(:error)
+
+   plugin.run(queue)
+   plugin.stop
+ end
  end

  context "when encoding of some columns need to be changed" do
@@ -1565,16 +1593,12 @@ describe LogStash::Inputs::Jdbc do
    { "statement" => "SELECT * from types_table", "jdbc_driver_library" => invalid_driver_jar_path }
  end

- before do
-   plugin.register
- end
-
  after do
    plugin.stop
  end

  it "raise a loading error" do
-   expect { plugin.run(queue) }.
+   expect { plugin.register }.
      to raise_error(LogStash::PluginLoadingError, /unable to load .*? from :jdbc_driver_library, file not readable/)
  end
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-jdbc
  version: !ruby/object:Gem::Version
-   version: 5.4.0
+   version: 5.4.2
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-10-11 00:00:00.000000000 Z
+ date: 2023-05-10 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    requirement: !ruby/object:Gem::Requirement