logstash-integration-jdbc 5.2.3 → 5.2.6
This diff shows the content of publicly released package versions as published to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +9 -0
- data/Gemfile +2 -0
- data/lib/logstash/filters/jdbc/loader_schedule.rb +11 -45
- data/lib/logstash/filters/jdbc/repeating_load_runner.rb +0 -2
- data/lib/logstash/filters/jdbc/single_load_runner.rb +0 -4
- data/lib/logstash/filters/jdbc_static.rb +22 -9
- data/lib/logstash/inputs/jdbc.rb +52 -19
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +0 -1
- data/lib/logstash/plugin_mixins/jdbc/scheduler.rb +28 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +51 -45
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +1 -4
- data/logstash-integration-jdbc.gemspec +4 -2
- data/spec/filters/jdbc/repeating_load_runner_spec.rb +1 -1
- data/spec/filters/jdbc_static_spec.rb +10 -0
- data/spec/inputs/integration/integ_spec.rb +25 -0
- data/spec/inputs/jdbc_spec.rb +84 -48
- metadata +6 -7
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +0 -43
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c9cc41a07a8cb2f7e4b9a6993dd4d0cd69761b3fa95ba81238d0dd6267367311
+  data.tar.gz: 32866a340d87d91734586b3324a1349437abc624deb6ff9883ab998b0a4524a6
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: b3513ba2d9ade38a829ec0b0c47ad6c0978d0256e03fb4ac26e136a064db65b08a19b7ee8dd7dabe26851739877b82f0f9a550b2328473b8ffd62f194d569ebb
+  data.tar.gz: bf71a208f07bc00c213724cbcf175a9ede2f87bf4eaf85a926ee8e9f87b322e2dd79164db1db84ec60633a0b1cfa961e8fc6d3889c8ca25ea220fa437285af5d
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,12 @@
+## 5.2.6
+- Fix: change default path of 'last_run_metadata_path' to be rooted in the LS data.path folder and not in $HOME [#106](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/106)
+
+## 5.2.5
+- Fix: do not execute more queries with debug logging [#109](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/109)
+
+## 5.2.4
+- Fix: compatibility with all (>= 3.0) rufus-scheduler versions [#97](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/97)
+
 ## 5.2.3
 - Performance: avoid contention on scheduler execution [#103](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/103)
data/Gemfile
CHANGED
@@ -9,3 +9,5 @@ if Dir.exist?(logstash_path) && use_logstash_source
   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
 end
+
+gem 'rufus-scheduler', ENV['RUFUS_SCHEDULER_VERSION'] if ENV['RUFUS_SCHEDULER_VERSION']
data/lib/logstash/filters/jdbc/loader_schedule.rb
CHANGED

@@ -4,60 +4,26 @@ require "rufus/scheduler"
 
 module LogStash module Filters module Jdbc
   class LoaderSchedule < Validatable
-    attr_reader :schedule_frequency, :loader_schedule
-
-    def to_log_string
-      message = ""
-      message.concat "these months in the year [#{@cronline.months.to_a.join(", ")}];" unless @cronline.months.nil?
-      message.concat "these days in the month [#{@cronline.days.to_a.join(", ")}];" unless @cronline.days.nil?
-      message.concat "these hours in the day [#{@cronline.hours.to_a.join(", ")}];" unless @cronline.hours.nil?
-      message.concat "these minutes in the hour [#{@cronline.minutes.to_a.join(", ")}];" unless @cronline.minutes.nil?
-      message.concat "these seconds in the minute [#{@cronline.seconds.to_a.join(", ")}]" unless @cronline.seconds.nil?
-      if !message.empty?
-        message.prepend "Scheduled for: "
-      end
-      message
-    end
+    attr_reader :loader_schedule
 
     private
 
-
-      if valid?
-        # From the Rufus::Scheduler docs:
-        # By default, rufus-scheduler sleeps 0.300 second between every step.
-        # At each step it checks for jobs to trigger and so on.
-        # set the frequency to 2.5 seconds if we are not reloading in the seconds timeframe
-        # rufus scheduler thread should respond to stop quickly enough.
-        if only_seconds_set?
-          @schedule_frequency = 0.3
-        else
-          @schedule_frequency = 2.5
-        end
-      end
-    end
-
-
-    def only_seconds_set?
-      @cronline.seconds &&
-        @cronline.minutes.nil? &&
-        @cronline.hours.nil? &&
-        @cronline.days.nil? &&
-        @cronline.months.nil?
-    end
-
+    # @overload
     def parse_options
      @loader_schedule = @options
 
-
+      if @loader_schedule.is_a?(String)
+        begin
+          # Rufus::Scheduler 3.0 - 3.6 methods signature: parse_cron(o, opts)
+          # since Rufus::Scheduler 3.7 methods signature: parse_cron(o, opts={})
+          @cronline = Rufus::Scheduler.parse_cron(@loader_schedule, {})
+        rescue => e
+          @option_errors << "The loader_schedule option is invalid: #{e.message}"
+        end
+      else
        @option_errors << "The loader_schedule option must be a string"
      end
 
-      begin
-        @cronline = Rufus::Scheduler::CronLine.new(@loader_schedule)
-      rescue => e
-        @option_errors << "The loader_schedule option is invalid: #{e.message}"
-      end
-
      @valid = @option_errors.empty?
    end
  end
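The parse_options rewrite above is the core of the rufus-scheduler compatibility fix (#97): `Rufus::Scheduler.parse_cron` is available across all 3.x releases, while `Rufus::Scheduler::CronLine` went away in 3.7 when cron parsing moved to fugit. A minimal standalone sketch of the version-agnostic call (not part of the diff):

require "rufus/scheduler"

# Passing the opts hash positionally keeps the call valid on rufus-scheduler
# 3.0-3.6 (second argument required) and on >= 3.7 (second argument optional).
cronline = Rufus::Scheduler.parse_cron("*/10 * * * * *", {})

# 3.0-3.6 return a Rufus::Scheduler::CronLine, >= 3.7 a Fugit::Cron, so a
# caller probes for the API it needs, as the jdbc_static diff below does:
frequency = cronline.respond_to?(:rough_frequency) ? cronline.rough_frequency : cronline.frequency
puts frequency # => 10 (seconds between triggers for this cron line)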
data/lib/logstash/filters/jdbc_static.rb
CHANGED

@@ -3,6 +3,7 @@ require "logstash-integration-jdbc_jars"
 require "logstash/filters/base"
 require "logstash/namespace"
 require "logstash/plugin_mixins/ecs_compatibility_support"
+require "logstash/plugin_mixins/jdbc/scheduler"
 require_relative "jdbc/loader"
 require_relative "jdbc/loader_schedule"
 require_relative "jdbc/repeating_load_runner"
@@ -174,9 +175,23 @@ module LogStash module Filters class JdbcStatic < LogStash::Filters::Base
   private
 
   def prepare_data_dir
+    # cleanup existing Derby file left behind in $HOME
+    derby_log = "#{ENV['HOME']}/derby.log"
+    if ::File.exist?(derby_log)
+      begin
+        ::File.delete(derby_log)
+      rescue Errno::EPERM => e
+        @logger.warn("Can't delete temporary file #{derby_log} due to access permissions")
+      rescue e
+        @logger.warn("Can't delete temporary file #{derby_log}", {message => e.message})
+      end
+    end
+
     # later, when local persistent databases are allowed set this property to LS_HOME/data/jdbc-static/
     # must take multi-pipelines into account and more than one config using the same jdbc-static settings
-
+    path_data = Pathname.new(LogStash::SETTINGS.get_value("path.data")).join("plugins", "shared", "derby_home")
+    path_data.mkpath
+    java.lang.System.setProperty("derby.system.home", path_data.to_path)
     logger.info("derby.system.home is: #{java.lang.System.getProperty("derby.system.home")}")
   end
@@ -191,17 +206,15 @@ module LogStash module Filters class JdbcStatic < LogStash::Filters::Base
     @processor = Jdbc::LookupProcessor.new(@local_lookups, global_lookup_options)
     runner_args.unshift(@processor.local)
     if @loader_schedule
-      args = []
       @loader_runner = Jdbc::RepeatingLoadRunner.new(*runner_args)
       @loader_runner.initial_load
-
-
-
+      @scheduler = LogStash::PluginMixins::Jdbc::Scheduler.
+          start_cron_scheduler(@loader_schedule, thread_name: "[#{id}]-jdbc_static__scheduler") { @loader_runner.repeated_load }
+      cron_job = @scheduler.cron_jobs.first
+      if cron_job
+        frequency = cron_job.respond_to?(:rough_frequency) ? cron_job.rough_frequency : cron_job.frequency
+        logger.info("Loaders will execute every #{frequency} seconds", loader_schedule: @loader_schedule)
       end
-      logger.info("Scheduler scan for work frequency is: #{cronline.schedule_frequency}")
-      rufus_args = {:max_work_threads => 1, :frequency => cronline.schedule_frequency}
-      @scheduler = Rufus::Scheduler.new(rufus_args)
-      @scheduler.cron(cronline.loader_schedule, @loader_runner)
     else
       @loader_runner = Jdbc::SingleLoadRunner.new(*runner_args)
       @loader_runner.initial_load
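For context, `derby.system.home` is the JVM property embedded Derby uses as the base directory for its databases and for `derby.log`; when unset, Derby falls back to the working directory, which is how stale `derby.log` files ended up in `$HOME`. A standalone JRuby sketch of the relocation (the `/tmp/ls-data` path is illustrative only; the plugin reads LogStash's `path.data` setting):

require "pathname"

# Illustrative base directory standing in for LogStash's path.data.
derby_home = Pathname.new("/tmp/ls-data").join("plugins", "shared", "derby_home")
derby_home.mkpath

# Must be set before the Derby engine boots; afterwards Derby keeps
# derby.log and its database folders under this directory.
java.lang.System.setProperty("derby.system.home", derby_home.to_path)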
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -9,6 +9,7 @@ require "logstash/plugin_mixins/ecs_compatibility_support/target_check"
 require "logstash/plugin_mixins/validator_support/field_reference_validation_adapter"
 
 require "logstash/plugin_mixins/event_support/event_factory_adapter"
+require "fileutils"
 
 # this require_relative returns early unless the JRuby version is between 9.2.0.0 and 9.2.8.0
 require_relative "tzinfo_jruby_patch"
@@ -178,8 +179,10 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   # exactly once.
   config :schedule, :validate => :string
 
-  # Path to file with last run time
-
+  # Path to file with last run time.
+  # The default will write file to `<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run`
+  # NOTE: it must be a file path and not a directory path
+  config :last_run_metadata_path, :validate => :string
 
   # Use an incremental column value rather than a timestamp
   config :use_column_value, :validate => :boolean, :default => false
@@ -230,11 +233,33 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   config :target, :validate => :field_reference, :required => false
 
   attr_reader :database # for test mocking/stubbing
+  attr_reader :last_run_metadata_file_path # path to the file used as last run storage
 
   public
 
   def register
     @logger = self.logger
+
+    if @record_last_run
+      if @last_run_metadata_path.nil?
+        logstash_data_path = LogStash::SETTINGS.get_value("path.data")
+        logstash_data_path = Pathname.new(logstash_data_path).join("plugins", "inputs", "jdbc")
+        # Ensure that the filepath exists before writing, since it's deeply nested.
+        logstash_data_path.mkpath
+        logstash_data_file_path = logstash_data_path.join("logstash_jdbc_last_run")
+
+        ensure_default_metadatafile_location(logstash_data_file_path)
+
+        @last_run_metadata_file_path = logstash_data_file_path.to_path
+      else
+        # validate the path is a file and not a directory
+        if Pathname.new(@last_run_metadata_path).directory?
+          raise LogStash::ConfigurationError.new("The \"last_run_metadata_path\" argument must point to a file, received a directory: \"#{last_run_metadata_path}\"")
+        end
+        @last_run_metadata_file_path = @last_run_metadata_path
+      end
+    end
+
     require "rufus/scheduler"
     prepare_jdbc_connection
@@ -259,8 +284,8 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
       end
     end
 
-    set_value_tracker
-
+    set_value_tracker LogStash::PluginMixins::Jdbc::ValueTracking.build_last_value_tracker(self)
+    set_statement_handler LogStash::PluginMixins::Jdbc::StatementHandler.build_statement_handler(self)
 
     @enable_encoding = !@charset.nil? || !@columns_charset.empty?
@@ -283,8 +308,8 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   end # def register
 
   # test injection points
-  def
-  @statement_handler =
+  def set_statement_handler(handler)
+    @statement_handler = handler
   end
 
   def set_value_tracker(instance)
@@ -295,19 +320,8 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
     load_driver
     if @schedule
       # input thread (Java) name example "[my-oracle]<jdbc"
-      @scheduler = LogStash::PluginMixins::Jdbc::Scheduler.new(
-          :thread_name => "[#{id}]<jdbc__scheduler",
-          # amount the scheduler thread sleeps between checking whether jobs
-          # should trigger, default is 0.3 which is a bit too often ...
-          # in theory the cron expression '* * * * * *' supports running jobs
-          # every second but this is very rare, we could potentially go higher
-          :frequency => 1.0,
-      )
-      @scheduler.schedule_cron @schedule do
-        execute_query(queue)
-      end
-
+      @scheduler = LogStash::PluginMixins::Jdbc::Scheduler.
+          start_cron_scheduler(@schedule, thread_name: "[#{id}]<jdbc__scheduler") { execute_query(queue) }
       @scheduler.join
     else
       execute_query(queue)
@@ -374,4 +388,23 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
       value
     end
   end
+
+  def ensure_default_metadatafile_location(metadata_new_path)
+    old_default_path = Pathname.new("#{ENV['HOME']}/.logstash_jdbc_last_run")
+
+    if old_default_path.exist? && !metadata_new_path.exist?
+      # Previous versions of the plugin hosted the state file into $HOME/.logstash_jdbc_last_run.
+      # Copy in the new location
+      FileUtils.cp(old_default_path.to_path, metadata_new_path.to_path)
+      begin
+        # If there is a permission error in the delete of the old file inform the user to give
+        # the correct access rights
+        ::File.delete(old_default_path.to_path)
+        @logger.info("Successfully moved the #{old_default_path.to_path} into #{metadata_new_path.to_path}")
+      rescue e
+        @logger.warn("Using new metadata file at #{metadata_new_path.to_path} but #{old_default_path} can't be removed.")
+      end
+    end
+  end
+
 end end end # class LogStash::Inputs::Jdbc
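The net effect of the register and ensure_default_metadatafile_location changes: the last-run state file now defaults to `<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run`, a configured path must name a file rather than a directory, and a pre-existing `$HOME/.logstash_jdbc_last_run` is migrated over once. A simplified sketch of the resolution logic (`resolve_last_run_file` is a hypothetical helper, not part of the plugin; the real code copies then deletes so a permission failure still leaves the new file in place):

require "fileutils"
require "pathname"

# Hypothetical stand-in: path_data mimics LogStash's path.data setting,
# configured_path the plugin's last_run_metadata_path option.
def resolve_last_run_file(path_data, configured_path)
  if configured_path.nil?
    dir = Pathname.new(path_data).join("plugins", "inputs", "jdbc")
    dir.mkpath # the default location is deeply nested, so create it eagerly
    new_path = dir.join("logstash_jdbc_last_run")

    # one-time migration from the pre-5.2.6 default under $HOME
    old_path = Pathname.new("#{ENV['HOME']}/.logstash_jdbc_last_run")
    FileUtils.mv(old_path.to_path, new_path.to_path) if old_path.exist? && !new_path.exist?

    new_path.to_path
  elsif Pathname.new(configured_path).directory?
    raise ArgumentError, "last_run_metadata_path must point to a file, not a directory"
  else
    configured_path
  end
end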
data/lib/logstash/plugin_mixins/jdbc/scheduler.rb
CHANGED

@@ -12,6 +12,34 @@ module LogStash module PluginMixins module Jdbc
   TimeImpl = defined?(Rufus::Scheduler::EoTime) ? Rufus::Scheduler::EoTime :
     (defined?(Rufus::Scheduler::ZoTime) ? Rufus::Scheduler::ZoTime : ::Time)
 
+  # @param cron [String] cron-line
+  # @param opts [Hash] scheduler options
+  # @return scheduler instance
+  def self.start_cron_scheduler(cron, opts = {}, &block)
+    unless block_given?
+      raise ArgumentError, 'missing (cron scheduler) block - worker task to execute'
+    end
+    scheduler = new_scheduler(opts)
+    scheduler.schedule_cron(cron, &block)
+    scheduler
+  end
+
+  # @param opts [Hash] scheduler options
+  # @return scheduler instance
+  def self.new_scheduler(opts)
+    unless opts.key?(:thread_name)
+      raise ArgumentError, 'thread_name: option is required to be able to distinguish multiple scheduler threads'
+    end
+    opts[:max_work_threads] ||= 1
+    # amount the scheduler thread sleeps between checking whether jobs
+    # should trigger, default is 0.3 which is a bit too often ...
+    # in theory the cron expression '* * * * * *' supports running jobs
+    # every second but this is very rare, we could potentially go higher
+    opts[:frequency] ||= 1.0
+
+    new(opts)
+  end
+
   # @overload
   def timeout_jobs
     # Rufus relies on `Thread.list` which is a blocking operation and with many schedulers
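Both the jdbc input and the jdbc_static filter now funnel through these two helpers, so the mandatory thread naming and the 1.0s polling frequency are enforced in one place. A usage sketch under those assumptions (the pipeline name and cron line are illustrative):

require "logstash/plugin_mixins/jdbc/scheduler"

# thread_name: is mandatory (new_scheduler raises without it), keeping each
# pipeline's scheduler thread identifiable in thread dumps.
scheduler = LogStash::PluginMixins::Jdbc::Scheduler.start_cron_scheduler(
    "*/30 * * * * *", thread_name: "[my-pipeline]<jdbc__scheduler") do
  puts "tick" # worker task; the plugins run their query / loader here
end

scheduler.join # block the calling thread, as the jdbc input does
# scheduler.shutdown when the plugin stops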
data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb
CHANGED

@@ -2,7 +2,7 @@
 
 module LogStash module PluginMixins module Jdbc
   class StatementHandler
-    def self.build_statement_handler(plugin
+    def self.build_statement_handler(plugin)
       if plugin.use_prepared_statements
         klass = PreparedStatementHandler
       else
@@ -16,27 +16,39 @@ module LogStash module PluginMixins module Jdbc
          klass = NormalStatementHandler
        end
      end
-      klass.new(plugin
+      klass.new(plugin)
    end
 
-    attr_reader :statement, :parameters
+    attr_reader :statement, :parameters
 
-    def initialize(plugin
+    def initialize(plugin)
      @statement = plugin.statement
-      @statement_logger = statement_logger
-      post_init(plugin)
    end
 
    def build_query(db, sql_last_value)
-      # override in subclass
+      fail NotImplementedError # override in subclass
    end
-
-    def post_init(plugin)
-      # override in subclass, if needed
-    end
  end
 
  class NormalStatementHandler < StatementHandler
+
+    attr_reader :parameters
+
+    def initialize(plugin)
+      super(plugin)
+      @parameter_keys = ["sql_last_value"] + plugin.parameters.keys
+      @parameters = plugin.parameters.inject({}) do |hash,(k,v)|
+        case v
+        when LogStash::Timestamp
+          hash[k.to_sym] = v.time
+        else
+          hash[k.to_sym] = v
+        end
+        hash
+      end
+    end
+
    # Performs the query, yielding once per row of data
    # @param db [Sequel::Database]
    # @param sql_last_value [Integer|DateTime|Time]
@@ -52,27 +64,18 @@ module LogStash module PluginMixins module Jdbc
 
    def build_query(db, sql_last_value)
      parameters[:sql_last_value] = sql_last_value
-      query = db[statement, parameters]
-      statement_logger.log_statement_parameters(statement, parameters, query)
-      query
+      db[statement, parameters]
    end
 
-    def post_init(plugin)
-      @parameter_keys = ["sql_last_value"] + plugin.parameters.keys
-      @parameters = plugin.parameters.inject({}) do |hash,(k,v)|
-        case v
-        when LogStash::Timestamp
-          hash[k.to_sym] = v.time
-        else
-          hash[k.to_sym] = v
-        end
-        hash
-      end
-    end
  end
 
  class PagedNormalStatementHandler < NormalStatementHandler
-
+
+    def initialize(plugin)
+      super(plugin)
+      @jdbc_page_size = plugin.jdbc_page_size
+      @logger = plugin.logger
+    end
 
    # Performs the query, respecting our pagination settings, yielding once per row of data
    # @param db [Sequel::Database]
@@ -81,16 +84,22 @@ module LogStash module PluginMixins module Jdbc
    def perform_query(db, sql_last_value)
      query = build_query(db, sql_last_value)
      query.each_page(@jdbc_page_size) do |paged_dataset|
+        log_dataset_page(paged_dataset) if @logger.debug?
        paged_dataset.each do |row|
          yield row
        end
      end
    end
 
-
-
-
+    private
+
+    # @param paged_dataset [Sequel::Dataset::Pagination] like object
+    def log_dataset_page(paged_dataset)
+      @logger.debug "fetching paged dataset", current_page: paged_dataset.current_page,
+                    record_count: paged_dataset.current_page_record_count,
+                    total_record_count: paged_dataset.pagination_record_count
    end
+
  end
 
  class ExplicitPagingModeStatementHandler < PagedNormalStatementHandler
@@ -101,20 +110,29 @@ module LogStash module PluginMixins module Jdbc
    def perform_query(db, sql_last_value)
      query = build_query(db, sql_last_value)
      offset = 0
+      page_size = @jdbc_page_size
      loop do
        rows_in_page = 0
-        query.with_sql(query.sql, offset: offset, size: @jdbc_page_size).each do |row|
+        query.with_sql(query.sql, offset: offset, size: page_size).each do |row|
          yield row
          rows_in_page += 1
        end
-        break unless rows_in_page == @jdbc_page_size
-        offset += @jdbc_page_size
+        break unless rows_in_page == page_size
+        offset += page_size
      end
    end
  end
 
  class PreparedStatementHandler < StatementHandler
-    attr_reader :name, :bind_values_array, :statement_prepared, :prepared
+    attr_reader :name, :bind_values_array, :statement_prepared, :prepared, :parameters
+
+    def initialize(plugin)
+      super(plugin)
+      @name = plugin.prepared_statement_name.to_sym
+      @bind_values_array = plugin.prepared_statement_bind_values
+      @parameters = plugin.parameters
+      @statement_prepared = Concurrent::AtomicBoolean.new(false)
+    end
 
    # Performs the query, ignoring our pagination settings, yielding once per row of data
    # @param db [Sequel::Database]
@@ -142,7 +160,6 @@ module LogStash module PluginMixins module Jdbc
        db.set_prepared_statement(name, prepared)
      end
      bind_value_sql_last_value(sql_last_value)
-      statement_logger.log_statement_parameters(statement, parameters, nil)
      begin
        db.call(name, parameters)
      rescue => e
@@ -153,17 +170,6 @@ module LogStash module PluginMixins module Jdbc
      end
    end
 
-    def post_init(plugin)
-      # don't log statement count when using prepared statements for now...
-      # needs enhancement to allow user to supply a bindable count prepared statement in settings.
-      @statement_logger.disable_count
-
-      @name = plugin.prepared_statement_name.to_sym
-      @bind_values_array = plugin.prepared_statement_bind_values
-      @parameters = plugin.parameters
-      @statement_prepared = Concurrent::AtomicBoolean.new(false)
-    end
-
    def create_bind_values_hash
      hash = {}
      bind_values_array.each_with_index {|v,i| hash[:"p#{i}"] = v}
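The explicit paging mode above paginates by hand: it keeps reissuing the same statement with a sliding offset until a page comes back short. The same pattern in isolation with Sequel (in-memory SQLite and the events table are illustrative; the plugin instead substitutes :offset/:size placeholders into the user's own SQL via with_sql):

require "sequel"

DB = Sequel.sqlite # in-memory database, for illustration only
DB.create_table(:events) { Integer :n }
DB[:events].import([:n], (1..10).map { |i| [i] })

page_size = 4
offset = 0
loop do
  rows_in_page = 0
  # approximate the plugin's with_sql substitution with limit/offset
  DB[:events].order(:n).limit(page_size, offset).each do |row|
    rows_in_page += 1
  end
  break unless rows_in_page == page_size # a short page means the last page
  offset += page_size
end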
data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb
CHANGED

@@ -5,10 +5,7 @@ module LogStash module PluginMixins module Jdbc
   class ValueTracking
 
     def self.build_last_value_tracker(plugin)
-      handler = plugin.record_last_run ? FileHandler.new(plugin.
-      if plugin.record_last_run
-        handler = FileHandler.new(plugin.last_run_metadata_path)
-      end
+      handler = plugin.record_last_run ? FileHandler.new(plugin.last_run_metadata_file_path) : NullFileHandler.new(plugin.last_run_metadata_file_path)
       if plugin.clean_run
         handler.clean
       end
data/logstash-integration-jdbc.gemspec
CHANGED

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-integration-jdbc'
-  s.version = '5.2.3'
+  s.version = '5.2.6'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Integration with JDBC - input and filter plugins"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -34,7 +34,9 @@ Gem::Specification.new do |s|
 
   s.add_runtime_dependency 'tzinfo'
   s.add_runtime_dependency 'tzinfo-data'
-
+  # plugin maintains compatibility with < 3.5 (3.0.9)
+  # but works with newer rufus-scheduler >= 3.5 as well
+  s.add_runtime_dependency 'rufus-scheduler'
   s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.3'
   s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'
   s.add_runtime_dependency "logstash-mixin-event_support", '~> 1.0'
data/spec/filters/jdbc_static_spec.rb
CHANGED

@@ -5,6 +5,7 @@ require "sequel"
 require "sequel/adapters/jdbc"
 require "stud/temporary"
 require "timecop"
+require "pathname"
 
 # LogStash::Logging::Logger::configure_logging("WARN")
 
@@ -85,6 +86,15 @@ module LogStash module Filters
 
   let(:ipaddr) { ".3.1.1" }
 
+  describe "verify derby path property" do
+    it "should be set into Logstash data path" do
+      plugin.register
+
+      expected = Pathname.new(LogStash::SETTINGS.get_value("path.data")).join("plugins", "shared", "derby_home").to_path
+      expect(java.lang.System.getProperty("derby.system.home")).to eq(expected)
+    end
+  end
+
   describe "non scheduled operation" do
     after { plugin.close }
data/spec/inputs/integration/integ_spec.rb
CHANGED

@@ -41,6 +41,31 @@ describe LogStash::Inputs::Jdbc, :integration => true do
     expect(event.get('first_name')).to eq("Mark")
     expect(event.get('last_name')).to eq("Guckenheimer")
   end
+
+  context 'with paging' do
+    let(:settings) do
+      super().merge 'jdbc_paging_enabled' => true, 'jdbc_page_size' => 1,
+                    "statement" => 'SELECT * FROM "employee" WHERE EMP_NO >= :p1 ORDER BY EMP_NO',
+                    'parameters' => { 'p1' => 0 }
+    end
+
+    before do # change plugin logger level to debug - to exercise logging
+      logger = plugin.class.name.gsub('::', '.').downcase
+      logger = org.apache.logging.log4j.LogManager.getLogger(logger)
+      @prev_logger_level = [ logger.getName, logger.getLevel ]
+      org.apache.logging.log4j.core.config.Configurator.setLevel logger.getName, org.apache.logging.log4j.Level::DEBUG
+    end
+
+    after do
+      org.apache.logging.log4j.core.config.Configurator.setLevel *@prev_logger_level
+    end
+
+    it "should populate the event with database entries" do
+      plugin.run(queue)
+      event = queue.pop
+      expect(event.get('first_name')).to eq('David')
+    end
+  end
 end
 
 context "when supplying a non-existent library" do
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -9,6 +9,7 @@ require "timecop"
 require "stud/temporary"
 require "time"
 require "date"
+require "pathname"
 
 # We do not need to set TZ env var anymore because we can have 'Sequel.application_timezone' set to utc by default now.
 
@@ -51,6 +52,9 @@ describe LogStash::Inputs::Jdbc do
       db.drop_table(:types_table)
       db.drop_table(:test1_table)
     end
+
+    last_run_default_path = LogStash::SETTINGS.get_value("path.data")
+    FileUtils.rm_f("#{last_run_default_path}/plugins/inputs/jdbc/logstash_jdbc_last_run")
   end
 
   context "when registering and tearing down" do
@@ -1114,6 +1118,86 @@ describe LogStash::Inputs::Jdbc do
     end
   end
 
+  context "when state is persisted" do
+    context "to file" do
+      let(:settings) do
+        {
+          "statement" => "SELECT * FROM test_table",
+          "record_last_run" => true
+        }
+      end
+
+      before do
+        plugin.register
+      end
+
+      after do
+        plugin.stop
+      end
+
+      context "with default last_run_metadata_path" do
+        it "should save state in data.data subpath" do
+          path = LogStash::SETTINGS.get_value("path.data")
+          expect(plugin.last_run_metadata_file_path).to start_with(path)
+        end
+      end
+
+      context "with customized last_run_metadata_path" do
+        let(:settings) { super().merge({ "last_run_metadata_path" => Stud::Temporary.pathname })}
+
+        it "should save state in data.data subpath" do
+          expect(plugin.last_run_metadata_file_path).to start_with(settings["last_run_metadata_path"])
+        end
+      end
+    end
+
+    context "with customized last_run_metadata_path point to directory" do
+      let(:settings) do
+        path = Stud::Temporary.pathname
+        Pathname.new(path).tap {|path| path.mkpath}
+        super().merge({ "last_run_metadata_path" => path})
+      end
+
+      it "raise configuration error" do
+        expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
+  end
+
+  context "update the previous default last_run_metadata_path" do
+    let(:settings) do
+      {
+        "statement" => "SELECT * FROM test_table",
+        "record_last_run" => true
+      }
+    end
+
+    let(:fake_home) do
+      path = Stud::Temporary.pathname
+      Pathname.new(path).tap {|path| path.mkpath}
+      path
+    end
+
+    context "when a file exists" do
+      before do
+        # in a faked HOME folder save a valid previous last_run metadata file
+        allow(ENV).to receive(:[]).with('HOME').and_return(fake_home)
+        File.open("#{ENV['HOME']}/.logstash_jdbc_last_run", 'w') do |file|
+          file.write("--- !ruby/object:DateTime '2022-03-08 08:10:00.486889000 Z'")
+        end
+      end
+
+      it "should be moved" do
+        plugin.register
+
+        expect(::File.exist?("#{ENV['HOME']}/.logstash_jdbc_last_run")).to be false
+        path = LogStash::SETTINGS.get_value("path.data")
+        full_path = "#{path}/plugins/inputs/jdbc/logstash_jdbc_last_run"
+        expect(::File.exist?(full_path)).to be true
+      end
+    end
+  end
+
   context "when setting fetch size" do
 
     let(:settings) do
@@ -1432,54 +1516,6 @@ describe LogStash::Inputs::Jdbc do
     end
   end
 
-  context "when debug logging and a count query raises a count related error" do
-    let(:settings) do
-      { "statement" => "SELECT * from types_table" }
-    end
-    let(:logger) { double("logger", :debug? => true) }
-    let(:statement_logger) { LogStash::PluginMixins::Jdbc::CheckedCountLogger.new(logger) }
-    let(:value_tracker) { double("value tracker", :set_value => nil, :write => nil) }
-    let(:msg) { 'Java::JavaSql::SQLSyntaxErrorException: Dynamic SQL Error; SQL error code = -104; Token unknown - line 1, column 105; LIMIT [SQLState:42000, ISC error code:335544634]' }
-    let(:error_args) do
-      {"exception" => msg}
-    end
-
-    before do
-      db << "INSERT INTO types_table (num, string, started_at, custom_time, ranking) VALUES (1, 'A test', '1999-12-31', '1999-12-31 23:59:59', 95.67)"
-      plugin.register
-      plugin.set_statement_logger(statement_logger)
-      plugin.set_value_tracker(value_tracker)
-      allow(value_tracker).to receive(:value).and_return("bar")
-      allow(statement_logger).to receive(:execute_count).once.and_raise(StandardError.new(msg))
-    end
-
-    after do
-      plugin.stop
-    end
-
-    context "if the count query raises an error" do
-      it "should log a debug line without a count key as its unknown whether a count works at this stage" do
-        expect(logger).to receive(:warn).once.with("Attempting a count query raised an error, the generated count statement is most likely incorrect but check networking, authentication or your statement syntax", error_args)
-        expect(logger).to receive(:warn).once.with("Ongoing count statement generation is being prevented")
-        expect(logger).to receive(:debug).once.with("Executing JDBC query", :statement => settings["statement"], :parameters => {:sql_last_value=>"bar"})
-        plugin.run(queue)
-        queue.pop
-      end
-
-      it "should create an event normally" do
-        allow(logger).to receive(:warn)
-        allow(logger).to receive(:debug)
-        plugin.run(queue)
-        event = queue.pop
-        expect(event.get("num")).to eq(1)
-        expect(event.get("string")).to eq("A test")
-        expect(event.get("started_at")).to be_a_logstash_timestamp_equivalent_to("1999-12-31T00:00:00.000Z")
-        expect(event.get("custom_time")).to be_a_logstash_timestamp_equivalent_to("1999-12-31T23:59:59.000Z")
-        expect(event.get("ranking").to_f).to eq(95.67)
-      end
-    end
-  end
-
   context "when an unreadable jdbc_driver_path entry is present" do
     let(:driver_jar_path) do
       jar_file = $CLASSPATH.find { |name| name.index(Jdbc::Derby.driver_jar) }
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-integration-jdbc
 version: !ruby/object:Gem::Version
-  version: 5.2.3
+  version: 5.2.6
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-05-19 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -131,17 +131,17 @@ dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
     - !ruby/object:Gem::Version
-      version:
+      version: '0'
   name: rufus-scheduler
   prerelease: false
   type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "
+    - - ">="
     - !ruby/object:Gem::Version
-      version:
+      version: '0'
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -275,7 +275,6 @@ files:
 - lib/logstash/filters/jdbc_streaming.rb
 - lib/logstash/inputs/jdbc.rb
 - lib/logstash/inputs/tzinfo_jruby_patch.rb
-- lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb
 - lib/logstash/plugin_mixins/jdbc/common.rb
 - lib/logstash/plugin_mixins/jdbc/jdbc.rb
 - lib/logstash/plugin_mixins/jdbc/scheduler.rb
data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb
DELETED

@@ -1,43 +0,0 @@
-# encoding: utf-8
-
-module LogStash module PluginMixins module Jdbc
-  class CheckedCountLogger
-    def initialize(logger)
-      @logger = logger
-      @needs_check = true
-      @count_is_supported = false
-      @in_debug = @logger.debug?
-    end
-
-    def disable_count
-      @needs_check = false
-      @count_is_supported = false
-    end
-
-    def log_statement_parameters(statement, parameters, query)
-      return unless @in_debug
-      check_count_query(query) if @needs_check && query
-      if @count_is_supported
-        @logger.debug("Executing JDBC query", :statement => statement, :parameters => parameters, :count => execute_count(query))
-      else
-        @logger.debug("Executing JDBC query", :statement => statement, :parameters => parameters)
-      end
-    end
-
-    def check_count_query(query)
-      @needs_check = false
-      begin
-        execute_count(query)
-        @count_is_supported = true
-      rescue Exception => e
-        @logger.warn("Attempting a count query raised an error, the generated count statement is most likely incorrect but check networking, authentication or your statement syntax", "exception" => e.message)
-        @logger.warn("Ongoing count statement generation is being prevented")
-        @count_is_supported = false
-      end
-    end
-
-    def execute_count(query)
-      query.count
-    end
-  end
-end end end
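Removing CheckedCountLogger is what closes #109: with debug logging enabled it ran a COUNT round-trip (`query.count`) before the real query, adding database load just to decorate a log line. The extra statement is easy to observe in isolation with Sequel (in-memory SQLite and table name are illustrative):

require "logger"
require "sequel"

DB = Sequel.sqlite # in-memory database, for illustration only
DB.create_table(:t) { Integer :n }
dataset = DB[:t]

DB.loggers << Logger.new($stdout) # print every statement Sequel issues

dataset.count # issues SELECT count(*) ... -- the extra per-cycle query
dataset.all   # the query the plugin actually needs to run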