logstash-integration-jdbc 5.5.1 → 5.5.3

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 4ba7e1b0f0a043e0b58ff727a16decd9f5fb63ac5cc68a1c935af2d59c0235a3
- data.tar.gz: fa9178574ca2acce0b1f8f0bab0823649e202af2d3911985fb9368a8c3f60c11
+ metadata.gz: a28e2a3a37342bc631ed300da7fa687e99fc879d8cc599669d64e2cfc6418177
+ data.tar.gz: 38b675462f29af81148dbdcc58144a2e3df932b6c6a44825b580a3468ac90acb
  SHA512:
- metadata.gz: 681d44e0f888cd9c7e87cd33551aa62c5aba9524e714af9db334286b41947c0683dcda15b3fb0e8a8b903976599e261dd0c805666db977ed16f6a145f966c507
- data.tar.gz: d7fb56739d0e2db3751b2956f0e47dfc585173ff7703cb5b9c0d52b438b10358a6678c72267fa3103cd92ef6137da5376c7398bbe89289a665b7a224a2e9a2d8
+ metadata.gz: b3af9d1d0c7ddc88b8102d7fafb96a9c379a734266698a3136e9e9522f640f64f4f019ea5820c9bc382a460df5f47f2fc62bb6cf8c3ad47ad84f5069710831cd
+ data.tar.gz: 41a6e399b350068d1ed337c158b94f9457f0ace2f31588888db8c99b9e20a39f7fe0f06ab4e809cd01c7a635c9ede2a00629aa1ecf04b9cd86951fdda5c9a828
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 5.5.3
+ - [DOC] Rework inline comment to a callout in preparation for upcoming MD conversion [#181](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/181)
+
+ ## 5.5.2
+ - FIX: the input plugin's prior behaviour of opening a new database connection for each scheduled run (removed in `v5.4.1`) is restored, ensuring that infrequently-run schedules do not hold open connections to their databases indefinitely, _without_ reintroducing the leak [#130](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/130)
+
  ## 5.5.1
  - Document `statement_retry_attempts` and `statement_retry_attempts_wait_time` options

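The 5.5.2 fix above maps to the mixin changes further down in this diff: each scheduled run now acquires a reentrant lock, opens its own database connection, and closes it in an `ensure` block. A minimal sketch of that connect-per-run pattern (hypothetical `ScheduledQuery`/`run_once` names, not the plugin's actual code):

```ruby
require "monitor"
require "sequel"

# Sketch only: a hypothetical class showing the "connect per scheduled run"
# behaviour restored in 5.5.2, not the plugin's implementation.
class ScheduledQuery
  def initialize(connection_string)
    @connection_string = connection_string
    @lock = Monitor.new # reentrant lock, mirroring prepare_jdbc_connection below
  end

  # Opens a fresh connection for this run and always releases it, so an
  # infrequent schedule never holds a connection open between runs.
  def run_once(sql)
    @lock.synchronize do
      db = Sequel.connect(@connection_string)
      begin
        db[sql].each { |row| yield row }
      ensure
        db.disconnect
      end
    end
  end
end

# e.g. ScheduledQuery.new("sqlite://test.db").run_once("SELECT 1 AS ok") { |row| p row }
```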
@@ -123,7 +123,7 @@ filter {
  add_field => { user_lastname => "%{[user][0][lastname]}" }
  remove_field => ["server", "user"]
  staging_directory => "/tmp/logstash/jdbc_static/import_data"
- loader_schedule => "* */2 * * *" # run loaders every 2 hours
+ loader_schedule => "* */2 * * *" <8>
  jdbc_user => "logstash"
  jdbc_password => "example"
  jdbc_driver_class => "org.postgresql.Driver"
@@ -153,6 +153,7 @@ returns multiple columns, the data is stored as a JSON object within the field.
  <6> When the user is not found in the database, an event is created using data from the <<plugins-{type}s-{plugin}-local_lookups>> `default hash` setting, and the event is tagged with the list set in <<plugins-{type}s-{plugin}-tag_on_default_use>>.
  <7> Takes data from the JSON object and stores it in top-level event fields for
  easier analysis in Kibana.
+ <8> Runs loaders every 2 hours.

  Here's a full example:

@@ -261,6 +261,8 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
  end
  end

+ prepare_jdbc_connection
+
  if @use_column_value
  # Raise an error if @use_column_value is true, but no @tracking_column is set
  if @tracking_column.nil?
@@ -303,20 +305,7 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
  converters[encoding] = converter
  end
  end
-
  load_driver
- begin
- open_jdbc_connection
- rescue Sequel::DatabaseConnectionError,
- Sequel::DatabaseError,
- Sequel::InvalidValue,
- Java::JavaSql::SQLException => e
- details = { exception: e.class, message: e.message }
- details[:cause] = e.cause.inspect if e.cause
- details[:backtrace] = e.backtrace if @logger.debug?
- @logger.warn("Exception when executing JDBC query", details)
- raise(LogStash::ConfigurationError, "Can't create a connection pool to the database")
- end
  end # def register

  # test injection points
@@ -3,15 +3,17 @@
  require "logstash/config/mixin"
  require "time"
  require "date"
+ require "thread" # Monitor
  require_relative "value_tracking"
  require_relative "timezone_proxy"
  require_relative "statement_handler"
  require_relative "value_handler"

- java_import java.util.concurrent.locks.ReentrantLock
-
  # Tentative of abstracting JDBC logic to a mixin
  # for potential reuse in other plugins (input/output)
+ #
+ # CAUTION: implementation of this "potential reuse" module is
+ # VERY tightly-coupled with the JDBC Input's implementation.
  module LogStash module PluginMixins module Jdbc
  module Jdbc
  include LogStash::PluginMixins::Jdbc::ValueHandler
@@ -159,82 +161,94 @@ module LogStash module PluginMixins module Jdbc
  end

  def open_jdbc_connection
- # at this point driver is already loaded
- Sequel.application_timezone = @plugin_timezone.to_sym
-
- @database = jdbc_connect()
- @database.extension(:pagination)
- if @jdbc_default_timezone
- @database.extension(:named_timezones)
- @database.timezone = TimezoneProxy.load(@jdbc_default_timezone)
- end
- if @jdbc_validate_connection
- @database.extension(:connection_validator)
- @database.pool.connection_validation_timeout = @jdbc_validation_timeout
- end
- @database.fetch_size = @jdbc_fetch_size unless @jdbc_fetch_size.nil?
- begin
- @database.test_connection
- rescue Java::JavaSql::SQLException => e
- @logger.warn("Failed test_connection with java.sql.SQLException.", :exception => e)
- rescue Sequel::DatabaseConnectionError => e
- @logger.warn("Failed test_connection.", :exception => e)
- #TODO return false and let the plugin raise a LogStash::ConfigurationError
- raise e
- end
+ @connection_lock.synchronize do
+ # at this point driver is already loaded
+ Sequel.application_timezone = @plugin_timezone.to_sym
+
+ @database = jdbc_connect()
+ @database.extension(:pagination)
+ if @jdbc_default_timezone
+ @database.extension(:named_timezones)
+ @database.timezone = TimezoneProxy.load(@jdbc_default_timezone)
+ end
+ if @jdbc_validate_connection
+ @database.extension(:connection_validator)
+ @database.pool.connection_validation_timeout = @jdbc_validation_timeout
+ end
+ @database.fetch_size = @jdbc_fetch_size unless @jdbc_fetch_size.nil?
+ begin
+ @database.test_connection
+ rescue Java::JavaSql::SQLException => e
+ @logger.warn("Failed test_connection with java.sql.SQLException.", :exception => e)
+ rescue Sequel::DatabaseConnectionError => e
+ @logger.warn("Failed test_connection.", :exception => e)
+ #TODO return false and let the plugin raise a LogStash::ConfigurationError
+ raise e
+ end

- @database.sql_log_level = @sql_log_level.to_sym
- @database.logger = @logger
+ @database.sql_log_level = @sql_log_level.to_sym
+ @database.logger = @logger

- @database.extension :identifier_mangling
+ @database.extension :identifier_mangling

- if @lowercase_column_names
- @database.identifier_output_method = :downcase
- else
- @database.identifier_output_method = :to_s
+ if @lowercase_column_names
+ @database.identifier_output_method = :downcase
+ else
+ @database.identifier_output_method = :to_s
+ end
  end
  end

+ public
+ def prepare_jdbc_connection
+ @connection_lock = Monitor.new # aka ReentrantLock
+ end
+
  public
  def close_jdbc_connection
- begin
+ @connection_lock.synchronize do
  # pipeline restarts can also close the jdbc connection, block until the current executing statement is finished to avoid leaking connections
  # connections in use won't really get closed
  @database.disconnect if @database
- rescue => e
- @logger.warn("Failed to close connection", :exception => e)
  end
+ rescue => e
+ @logger.warn("Failed to close connection", :exception => e)
  end

  public
- def execute_statement
+ def execute_statement(&result_handler)
  success = false
  retry_attempts = @statement_retry_attempts

- begin
- retry_attempts -= 1
- sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
- @tracking_column_warning_sent = false
- @statement_handler.perform_query(@database, @value_tracker.value) do |row|
- sql_last_value = get_column_value(row) if @use_column_value
- yield extract_values_from(row)
- end
- success = true
- rescue Sequel::Error, Java::JavaSql::SQLException => e
- details = { exception: e.class, message: e.message }
- details[:cause] = e.cause.inspect if e.cause
- details[:backtrace] = e.backtrace if @logger.debug?
- @logger.warn("Exception when executing JDBC query", details)
-
- if retry_attempts == 0
- @logger.error("Unable to execute statement. Tried #{@statement_retry_attempts} times.")
+ @connection_lock.synchronize do
+ begin
+ retry_attempts -= 1
+ open_jdbc_connection
+ sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
+ @tracking_column_warning_sent = false
+ @statement_handler.perform_query(@database, @value_tracker.value) do |row|
+ sql_last_value = get_column_value(row) if @use_column_value
+ yield extract_values_from(row)
+ end
+ success = true
+ rescue Sequel::Error, Java::JavaSql::SQLException => e
+ details = { exception: e.class, message: e.message }
+ details[:cause] = e.cause.inspect if e.cause
+ details[:backtrace] = e.backtrace if @logger.debug?
+ @logger.warn("Exception when executing JDBC query", details)
+
+ if retry_attempts == 0
+ @logger.error("Unable to execute statement. Tried #{@statement_retry_attempts} times.")
+ else
+ @logger.error("Unable to execute statement. Trying again.")
+ sleep(@statement_retry_attempts_wait_time)
+ retry
+ end
  else
- @logger.error("Unable to execute statement. Trying again.")
- sleep(@statement_retry_attempts_wait_time)
- retry
+ @value_tracker.set_value(sql_last_value)
+ ensure
+ close_jdbc_connection
  end
- else
- @value_tracker.set_value(sql_last_value)
  end

  return success
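`prepare_jdbc_connection` replaces the previous `ReentrantLock` with Ruby's `Monitor`, which is also reentrant. That matters because `execute_statement` holds the lock while calling `open_jdbc_connection` and `close_jdbc_connection`, which synchronize on the same lock. A standalone illustration of why a plain `Mutex` would not work here (not part of the plugin):

```ruby
require "monitor"

lock = Monitor.new

# Monitor#synchronize is reentrant: the owning thread may re-acquire the lock
# it already holds, as execute_statement -> open_jdbc_connection does above.
lock.synchronize do
  lock.synchronize do
    puts "nested synchronize on a Monitor works"
  end
end

# Nesting Mutex#synchronize in the same thread typically raises ThreadError.
mutex = Mutex.new
begin
  mutex.synchronize { mutex.synchronize { } }
rescue ThreadError => e
  puts "Mutex is not reentrant: #{e.message}"
end
```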
@@ -124,14 +124,14 @@ module LogStash module PluginMixins module Jdbc
  end

  class PreparedStatementHandler < StatementHandler
- attr_reader :name, :bind_values_array, :statement_prepared, :prepared, :parameters
+ attr_reader :name

  def initialize(plugin)
  super(plugin)
  @name = plugin.prepared_statement_name.to_sym
- @bind_values_array = plugin.prepared_statement_bind_values
- @parameters = plugin.parameters
- @statement_prepared = Concurrent::AtomicBoolean.new(false)
+
+ @positional_bind_mapping = create_positional_bind_mapping(plugin.prepared_statement_bind_values).freeze
+ @positional_bind_placeholders = @positional_bind_mapping.keys.map { |v| :"$#{v}" }.freeze
  end

  # Performs the query, ignoring our pagination settings, yielding once per row of data
@@ -148,41 +148,28 @@ module LogStash module PluginMixins module Jdbc
  private

  def build_query(db, sql_last_value)
- @parameters = create_bind_values_hash
- if statement_prepared.false?
- prepended = parameters.keys.map{|v| v.to_s.prepend("$").to_sym}
- @prepared = db[statement, *prepended].prepare(:select, name)
- statement_prepared.make_true
- end
  # under the scheduler the Sequel database instance is recreated each time
  # so the previous prepared statements are lost, add back
- if db.prepared_statement(name).nil?
- db.set_prepared_statement(name, prepared)
- end
- bind_value_sql_last_value(sql_last_value)
- begin
- db.call(name, parameters)
- rescue => e
- # clear the statement prepared flag - the statement may be closed by this
- # time.
- statement_prepared.make_false
- raise e
- end
+ prepared = db.prepared_statement(name)
+ prepared ||= db[statement, *positional_bind_placeholders].prepare(:select, name)
+
+ prepared.call(positional_bind_mapping(sql_last_value))
  end

- def create_bind_values_hash
+ def create_positional_bind_mapping(bind_values_array)
  hash = {}
  bind_values_array.each_with_index {|v,i| hash[:"p#{i}"] = v}
  hash
  end

- def bind_value_sql_last_value(sql_last_value)
- parameters.keys.each do |key|
- value = parameters[key]
- if value == ":sql_last_value"
- parameters[key] = sql_last_value
- end
+ def positional_bind_mapping(sql_last_value)
+ @positional_bind_mapping.transform_values do |value|
+ value == ":sql_last_value" ? sql_last_value : value
  end
  end
+
+ def positional_bind_placeholders
+ @positional_bind_mapping.keys.map { |v| :"$#{v}" }.freeze
+ end
  end
  end end end
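The reworked `PreparedStatementHandler` leans on Sequel's named-placeholder prepared statements: `?` markers in the configured statement are bound to `:$p0`, `:$p1`, ... and the values are supplied as a hash at call time. A self-contained sketch of that Sequel API, assuming the `sequel` and `sqlite3` gems and an illustrative `events` table (none of which come from the plugin):

```ruby
require "sequel" # assumes the sequel and sqlite3 gems are installed

# Illustrative in-memory database standing in for the plugin's JDBC connection.
db = Sequel.sqlite
db.create_table(:events) do
  Integer :id
  String :name
end
db[:events].multi_insert([{ id: 1, name: "a" }, { id: 2, name: "b" }, { id: 3, name: "c" }])

# Placeholders :$p0, :$p1 mirror the keys built by create_positional_bind_mapping.
prepared = db["SELECT * FROM events WHERE id > ? AND name != ?", :$p0, :$p1]
             .prepare(:select, :recent_events)

# After #prepare, the statement is registered under its name and can be looked
# up again, which is what the reworked build_query does before re-preparing.
prepared = db.prepared_statement(:recent_events) || prepared

# Bind values are supplied as a hash keyed by placeholder name at call time.
prepared.call(p0: 1, p1: "c").each { |row| p row }
```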
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-jdbc'
- s.version = '5.5.1'
+ s.version = '5.5.3'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Integration with JDBC - input and filter plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -113,13 +113,16 @@ describe LogStash::Inputs::Jdbc, :integration => true do
  end

  it "log warning msg when plugin run" do
+ plugin.register
  expect( plugin ).to receive(:log_java_exception)
  expect(plugin.logger).to receive(:warn).once.with("Exception when executing JDBC query",
  hash_including(:message => instance_of(String)))
- expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
+ q = Queue.new
+ expect{ plugin.run(q) }.not_to raise_error
  end

  it "should log (native) Java driver error" do
+ plugin.register
  expect( org.apache.logging.log4j.LogManager ).to receive(:getLogger).and_wrap_original do |m, *args|
  logger = m.call(*args)
  expect( logger ).to receive(:error) do |_, e|
@@ -127,7 +130,8 @@ describe LogStash::Inputs::Jdbc, :integration => true do
  end.and_call_original
  logger
  end
- expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
+ q = Queue.new
+ expect{ plugin.run(q) }.not_to raise_error
  end
  end
  end
@@ -1283,6 +1283,7 @@ describe LogStash::Inputs::Jdbc do
  plugin.register
  plugin.run(queue)
  db = plugin.instance_variable_get(:@database)
+ expect(db.pool).to be_a_kind_of(::Sequel::ThreadedConnectionPool) # pries into internal details
  expect(db.pool.instance_variable_get(:@timeout)).to eq(0)
  expect(db.pool.instance_variable_get(:@max_size)).to eq(1)

@@ -1296,11 +1297,12 @@ describe LogStash::Inputs::Jdbc do

  it "should log error message" do
  allow(Sequel).to receive(:connect).and_raise(Sequel::PoolTimeout)
- expect(plugin.logger).to receive(:error).with("Failed to connect to database. 0 second timeout exceeded. Tried 1 times.")
- expect do
- plugin.register
- plugin.run(queue)
- end.to raise_error(Sequel::PoolTimeout)
+ allow(plugin.logger).to receive(:error)
+
+ plugin.register
+ plugin.run(queue)
+
+ expect(plugin.logger).to have_received(:error).with("Failed to connect to database. 0 second timeout exceeded. Tried 1 times.")
  end
  end

@@ -1376,12 +1378,13 @@ describe LogStash::Inputs::Jdbc do
  mixin_settings['connection_retry_attempts'] = 2
  mixin_settings['jdbc_pool_timeout'] = 0
  allow(Sequel).to receive(:connect).and_raise(Sequel::PoolTimeout)
- expect(plugin.logger).to receive(:error).with("Failed to connect to database. 0 second timeout exceeded. Trying again.")
- expect(plugin.logger).to receive(:error).with("Failed to connect to database. 0 second timeout exceeded. Tried 2 times.")
- expect do
- plugin.register
- plugin.run(queue)
- end.to raise_error(Sequel::PoolTimeout)
+ allow(plugin.logger).to receive(:error)
+
+ plugin.register
+ plugin.run(queue)
+
+ expect(plugin.logger).to have_received(:error).with("Failed to connect to database. 0 second timeout exceeded. Trying again.")
+ expect(plugin.logger).to have_received(:error).with("Failed to connect to database. 0 second timeout exceeded. Tried 2 times.")
  end

  it "should not fail when passed a non-positive value" do
@@ -1642,16 +1645,12 @@ describe LogStash::Inputs::Jdbc do
  { "statement" => "SELECT * from types_table", "jdbc_driver_library" => invalid_driver_jar_path }
  end

- after do
- plugin.stop
- end
-
- it "raise a loading error" do
+ it "raise a loading error during #register" do
  expect(File.exists?(invalid_driver_jar_path)).to be true
  expect(FileTest.readable?(invalid_driver_jar_path)).to be false

- expect { plugin.register }.
- to raise_error(LogStash::PluginLoadingError, /unable to load .*? from :jdbc_driver_library, file not readable/)
+ expect { plugin.register }
+ .to raise_error(LogStash::PluginLoadingError, /unable to load .*? from :jdbc_driver_library, file not readable/)
  end
  end

metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-jdbc
  version: !ruby/object:Gem::Version
- version: 5.5.1
+ version: 5.5.3
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-12-03 00:00:00.000000000 Z
+ date: 2025-03-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement