logstash-input-jdbc 4.3.13 → 4.3.14
- checksums.yaml +4 -4
- data/docs/index.asciidoc +25 -2
- data/lib/logstash/inputs/jdbc.rb +16 -12
- data/lib/logstash/inputs/tzinfo_jruby_patch.rb +57 -0
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +46 -22
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +21 -8
- data/lib/logstash/plugin_mixins/jdbc/wrapped_driver.rb +46 -0
- data/logstash-input-jdbc.gemspec +1 -1
- data/spec/inputs/jdbc_spec.rb +23 -30
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: da0726e9534ffb2d10744efb977233487a21fb9e05e047294d5d2349e5a789fc
+  data.tar.gz: 1ea1948f5846b04ef7ecff9c9088f6d1d8c823ac38eceb07b0d83641be01666d
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 1c5ea4dbfba07fb58625b7163740769c36b0288b8dae3a8877dc5feb2af089c71c8fc39982f17f48268bf30b290adf3f5ee942bfe7cba5bbc8f25157c538052b
+  data.tar.gz: a2d8a17d3f33a9c60fff84376566d909d2b29506a7b257c65ec739e9d1fcf953e832d039f9806698dcc34362281264c4f64b460cef41966ca95567996991cc39
data/docs/index.asciidoc
CHANGED
@@ -149,6 +149,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
 [cols="<,<,<",options="header",]
 |=======================================================================
 |Setting |Input type|Required
+| <<plugins-{type}s-{plugin}-plugin_timezone>> |<<string,string>>, one of `["local", "utc"]`|No
 | <<plugins-{type}s-{plugin}-clean_run>> |<<boolean,boolean>>|No
 | <<plugins-{type}s-{plugin}-columns_charset>> |<<hash,hash>>|No
 | <<plugins-{type}s-{plugin}-connection_retry_attempts>> |<<number,number>>|No
@@ -247,13 +248,35 @@ JDBC connection string
 * There is no default value for this setting.
 
 Timezone conversion.
-
-
+Logstash (and Elasticsearch) expects that timestamps are expressed in UTC terms.
+If your database has recorded timestamps that are relative to another timezone,
+the database timezone if you will, then set this setting to be the timezone that
+the database is using. However, as SQL does not allow for timezone data in
+timestamp fields we can't figure this out on a record by record basis. This plugin
+will automatically convert your SQL timestamp fields to Logstash timestamps,
+in relative UTC time in ISO8601 format.
 
 Using this setting will manually assign a specified timezone offset, instead
 of using the timezone setting of the local machine. You must use a canonical
 timezone, *America/Denver*, for example.
 
+[id="plugins-{type}s-{plugin}-plugin_timezone"]
+===== `plugin_timezone`
+
+* Value can be any of: `utc`, `local`
+* Default value is `"utc"`
+
+If you want this plugin to offset timestamps to a timezone other than UTC, you
+can set this setting to `local` and the plugin will use the OS timezone for offset
+adjustments.
+
+Note: when specifying `plugin_timezone` and/or `jdbc_default_timezone`, offset
+adjustments are made in two places, if `sql_last_value` is a timestamp and it
+is used as a parameter in the statement then offset adjustment is done from the
+plugin timezone into the data timezone and while records are processed, timestamps
+are offset adjusted from the database timezone to the plugin timezone. If your
+database timezone is UTC then you do not need to set either of these settings.
+
 [id="plugins-{type}s-{plugin}-jdbc_driver_class"]
 ===== `jdbc_driver_class`
 
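An aside for readers following the timezone changes above: the diff only shows the plugin setting `Sequel.application_timezone` from `plugin_timezone`. The sketch below additionally treats `jdbc_default_timezone` as Sequel's `database_timezone` (via Sequel's `named_timezones` extension), which is an assumption made purely for illustration; it is not plugin code.

    require "sequel"
    require "tzinfo"

    # Sketch only: how the two settings read in Sequel's terms.
    Sequel.extension :named_timezones      # lets timezone settings be tzinfo identifiers
    Sequel.datetime_class = DateTime

    Sequel.application_timezone = :utc               # plugin_timezone => "utc" (the default)
    Sequel.database_timezone    = "America/Denver"   # assumed analogue of jdbc_default_timezone

    # With both set, a naive timestamp read from the database is interpreted as
    # America/Denver wall-clock time and handed to the application converted to UTC.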
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -3,6 +3,8 @@ require "logstash/inputs/base"
 require "logstash/namespace"
 require "logstash/plugin_mixins/jdbc/jdbc"
 
+# this require_relative returns early unless the JRuby version is between 9.2.0.0 and 9.2.8.0
+require_relative "tzinfo_jruby_patch"
 
 # This plugin was created as a way to ingest data from any database
 # with a JDBC interface into Logstash. You can periodically schedule ingestion
@@ -84,19 +86,19 @@ require "logstash/plugin_mixins/jdbc/jdbc"
 # The file option only supports one SQL statement. The plugin will only accept one of the options.
 # It cannot read a statement from a file as well as from the `statement` configuration parameter.
 #
-# ==== Configuring multiple SQL statements
+# ==== Configuring multiple SQL statements
 #
-# Configuring multiple SQL statements is useful when there is a need to query and ingest data
-# from different database tables or views. It is possible to define separate Logstash
-# configuration files for each statement or to define multiple statements in a single configuration
-# file. When using multiple statements in a single Logstash configuration file, each statement
-# has to be defined as a separate jdbc input (including jdbc driver, connection string and other
-# required parameters).
+# Configuring multiple SQL statements is useful when there is a need to query and ingest data
+# from different database tables or views. It is possible to define separate Logstash
+# configuration files for each statement or to define multiple statements in a single configuration
+# file. When using multiple statements in a single Logstash configuration file, each statement
+# has to be defined as a separate jdbc input (including jdbc driver, connection string and other
+# required parameters).
 #
-# Please note that if any of the statements use the `sql_last_value` parameter (e.g. for
-# ingesting only data changed since last run), each input should define its own
+# Please note that if any of the statements use the `sql_last_value` parameter (e.g. for
+# ingesting only data changed since last run), each input should define its own
 # `last_run_metadata_path` parameter. Failure to do so will result in undesired behaviour, as
-# all inputs will store their state to the same (default) metadata file, effectively
+# all inputs will store their state to the same (default) metadata file, effectively
 # overwriting each other's `sql_last_value`.
 #
 # ==== Predefined Parameters
@@ -178,11 +180,11 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   # Whether to force the lowercasing of identifier fields
   config :lowercase_column_names, :validate => :boolean, :default => true
 
-  # The character encoding of all columns, leave empty if the columns are already properly UTF-8
+  # The character encoding of all columns, leave empty if the columns are already properly UTF-8
   # encoded. Specific columns charsets using :columns_charset can override this setting.
   config :charset, :validate => :string
 
-  # The character encoding for specific columns. This option will override the `:charset` option
+  # The character encoding for specific columns. This option will override the `:charset` option
   # for the specified columns.
   #
   # Example:
@@ -199,6 +201,8 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
   # this will only convert column0 that has ISO-8859-1 as an original encoding.
   config :columns_charset, :validate => :hash, :default => {}
 
+  attr_reader :database # for test mocking/stubbing
+
   public
 
   def register
data/lib/logstash/inputs/tzinfo_jruby_patch.rb
ADDED
@@ -0,0 +1,57 @@
+# encoding: UTF-8
+# frozen_string_literal: true
+
+# There is a bug in JRuby versions between 9.2.0.0 and 9.2.8.0
+# the TZinfo::Timestamp `new_datetime` can build Rational numbers having
+# numerators and denominators that are too large for Java longs.
+#
+# This patch reopens the TZinfo::Timestamp class and redefines
+# the `new_datetime method.
+# It scales down the numerator and denominator if they are larger than
+# Java Long. There is no appreciable precision loss at the microsecond level
+
+tzinfo_jruby_bugfixed_version = "9.2.8.0"
+tzinfo_jruby_bugadded_version = "9.2.0.0"
+
+current_jruby_version = Gem::Version.new(JRUBY_VERSION)
+broken_jruby_version = Gem::Version.new(tzinfo_jruby_bugadded_version)
+patched_jruby_version = Gem::Version.new(tzinfo_jruby_bugfixed_version)
+
+return unless current_jruby_version >= broken_jruby_version && current_jruby_version < patched_jruby_version
+
+require 'tzinfo'
+
+if defined?(TZInfo::VERSION) && TZInfo::VERSION > '2'
+  module TZInfo
+    # A time represented as an `Integer` number of seconds since 1970-01-01
+    # 00:00:00 UTC (ignoring leap seconds), the fraction through the second
+    # (sub_second as a `Rational`) and an optional UTC offset. Like Ruby's `Time`
+    # class, {Timestamp} can distinguish between a local time with a zero offset
+    # and a time specified explicitly as UTC.
+    class Timestamp
+
+      protected
+
+      def new_datetime(klass = DateTime)
+        val = JD_EPOCH + ((@value.to_r + @sub_second) / 86400)
+        datetime = klass.jd(jruby_scale_down_rational(val))
+        @utc_offset && @utc_offset != 0 ? datetime.new_offset(Rational(@utc_offset, 86400)) : datetime
+      end
+
+      private
+
+      # while this JRuby bug exists in 9.2.X.X https://github.com/jruby/jruby/issues/5791
+      # we must scale down the numerator and denominator to fit Java Long values.
+
+      def jruby_scale_down_rational(rat)
+        return rat if rat.numerator <= java.lang.Long::MAX_VALUE
+        [10, 100, 1000].each do |scale_by|
+          new_numerator = rat.numerator / scale_by
+          if new_numerator <= java.lang.Long::MAX_VALUE
+            return Rational(new_numerator, rat.denominator / scale_by)
+          end
+        end
+      end
+    end
+  end
+end
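To make the patched arithmetic concrete, here is a small plain-Ruby illustration (hypothetical values, not part of the patch) of the kind of Rational that `new_datetime` builds and why scaling it down keeps it inside Java's Long range with negligible error:

    JD_EPOCH      = 2_440_588                             # Julian day number of the Unix epoch, as TZInfo uses it
    JAVA_LONG_MAX = 9_223_372_036_854_775_807             # java.lang.Long::MAX_VALUE

    epoch_seconds = 1_567_000_000                         # an arbitrary 2019 instant (hypothetical)
    sub_second    = Rational(123_456_789, 1_000_000_000)  # nanosecond-precision fraction

    val = JD_EPOCH + ((epoch_seconds + sub_second) / 86_400)
    puts val.numerator > JAVA_LONG_MAX                    # => true; DateTime.jd(val) overflowed on JRuby 9.2.0.0-9.2.7.x

    scaled = Rational(val.numerator / 100, val.denominator / 100)
    puts scaled.numerator > JAVA_LONG_MAX                 # => false
    puts ((val - scaled) * 86_400 * 1_000_000).to_f       # error in microseconds: ~0.09, i.e. negligible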
data/lib/logstash/plugin_mixins/jdbc/jdbc.rb
CHANGED
@@ -5,6 +5,7 @@ require "time"
 require "date"
 require_relative "value_tracking"
 require_relative "checked_count_logger"
+require_relative "wrapped_driver"
 
 java_import java.util.concurrent.locks.ReentrantLock
 
@@ -98,6 +99,9 @@ module LogStash module PluginMixins module Jdbc
       config :connection_retry_attempts, :validate => :number, :default => 1
       # Number of seconds to sleep between connection attempts
       config :connection_retry_attempts_wait_time, :validate => :number, :default => 0.5
+
+      # give users the ability to force Sequel application side into using local timezone
+      config :plugin_timezone, :validate => ["local", "utc"], :default => "utc"
     end
 
     private
@@ -120,7 +124,8 @@ module LogStash module PluginMixins module Jdbc
           else
             @logger.error("Failed to connect to database. #{@jdbc_pool_timeout} second timeout exceeded. Trying again.")
           end
-        rescue Sequel::Error => e
+        # rescue Java::JavaSql::SQLException, ::Sequel::Error => e
+        rescue ::Sequel::Error => e
           if retry_attempts <= 0
             @logger.error("Unable to connect to database. Tried #{@connection_retry_attempts} times", :error_message => e.message, )
             raise e
@@ -133,14 +138,29 @@ module LogStash module PluginMixins module Jdbc
     end
 
     private
-
-
-
-
-
-
-
-
+
+    def load_drivers
+      return if @jdbc_driver_library.nil? || @jdbc_driver_library.empty?
+
+      driver_jars = @jdbc_driver_library.split(",")
+
+      # Needed for JDK 11 as the DriverManager has a different ClassLoader than Logstash
+      urls = java.net.URL[driver_jars.length].new
+
+      driver_jars.each_with_index do |driver, idx|
+        urls[idx] = java.io.File.new(driver).toURI().toURL()
+      end
+      ucl = java.net.URLClassLoader.new_instance(urls)
+      begin
+        klass = java.lang.Class.forName(@jdbc_driver_class.to_java(:string), true, ucl);
+      rescue Java::JavaLang::ClassNotFoundException => e
+        raise LogStash::Error, "Unable to find driver class via URLClassLoader in given driver jars: #{@jdbc_driver_class}"
+      end
+      begin
+        driver = klass.getConstructor().newInstance();
+        java.sql.DriverManager.register_driver(WrappedDriver.new(driver.to_java(java.sql.Driver)).to_java(java.sql.Driver))
+      rescue Java::JavaSql::SQLException => e
+        raise LogStash::Error, "Unable to register driver with java.sql.DriverManager using WrappedDriver: #{@jdbc_driver_class}"
       end
     end
 
@@ -149,18 +169,24 @@ module LogStash module PluginMixins module Jdbc
       require "java"
       require "sequel"
       require "sequel/adapters/jdbc"
-
+
+      Sequel.application_timezone = @plugin_timezone.to_sym
 
       begin
+        load_drivers
        Sequel::JDBC.load_driver(@jdbc_driver_class)
+      rescue LogStash::Error => e
+        # raised in load_drivers, e.cause should be the caught Java exceptions
+        raise LogStash::PluginLoadingError, "#{e.message} and #{e.cause.message}"
       rescue Sequel::AdapterNotFound => e
+        # fix this !!!
        message = if @jdbc_driver_library.nil?
                    ":jdbc_driver_library is not set, are you sure you included
                     the proper driver client libraries in your classpath?"
                  else
                    "Are you sure you've included the correct jdbc driver in :jdbc_driver_library?"
                  end
-        raise LogStash::
+        raise LogStash::PluginLoadingError, "#{e}. #{message}"
       end
       @database = jdbc_connect()
       @database.extension(:pagination)
@@ -175,6 +201,8 @@ module LogStash module PluginMixins module Jdbc
       @database.fetch_size = @jdbc_fetch_size unless @jdbc_fetch_size.nil?
       begin
         @database.test_connection
+      rescue Java::JavaSql::SQLException => e
+        @logger.warn("Failed test_connection with java.sql.SQLException.", :exception => e)
       rescue Sequel::DatabaseConnectionError => e
         @logger.warn("Failed test_connection.", :exception => e)
         close_jdbc_connection
@@ -216,13 +244,13 @@ module LogStash module PluginMixins module Jdbc
 
     public
     def execute_statement(statement, parameters)
+      # sql_last_value has been set in params by caller
       success = false
       @connection_lock.lock
       open_jdbc_connection
       begin
         params = symbolized_params(parameters)
         query = @database[statement, params]
-
         sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
         @tracking_column_warning_sent = false
         @statement_logger.log_statement_parameters(query, statement, params)
@@ -231,7 +259,7 @@ module LogStash module PluginMixins module Jdbc
           yield extract_values_from(row)
         end
         success = true
-      rescue Sequel::DatabaseConnectionError, Sequel::DatabaseError => e
+      rescue Sequel::DatabaseConnectionError, Sequel::DatabaseError, Java::JavaSql::SQLException => e
         @logger.warn("Exception when executing JDBC query", :exception => e)
       else
         @value_tracker.set_value(sql_last_value)
@@ -267,8 +295,8 @@ module LogStash module PluginMixins module Jdbc
           @logger.warn("tracking_column not found in dataset.", :tracking_column => @tracking_column)
           @tracking_column_warning_sent = true
         end
-        # If we can't find the tracking column, return the current value
-        @
+        # If we can't find the tracking column, return the current value_tracker value
+        @value_tracker.value
       else
         # Otherwise send the updated tracking column
         row[@tracking_column.to_sym]
@@ -297,20 +325,16 @@ module LogStash module PluginMixins module Jdbc
 
     private
     def decorate_value(value)
-
+      case value
+      when Time
        # transform it to LogStash::Timestamp as required by LS
        LogStash::Timestamp.new(value)
-
+      when Date, DateTime
        LogStash::Timestamp.new(value.to_time)
-      elsif value.is_a?(DateTime)
-        # Manual timezone conversion detected.
-        # This is slower, so we put it in as a conditional case.
-        LogStash::Timestamp.new(Time.parse(value.to_s))
       else
        value
      end
    end
-
 end
 end end end
 
data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb
CHANGED
@@ -33,10 +33,10 @@ module LogStash module PluginMixins module Jdbc
 
     def initialize(handler)
       @file_handler = handler
-
+      set_initial
     end
 
-    def
+    def set_initial
       # override in subclass
     end
 
@@ -47,12 +47,23 @@ module LogStash module PluginMixins module Jdbc
     def write
       @file_handler.write(@value)
     end
+
+    private
+    def common_set_initial(method_symbol, default)
+      persisted = @file_handler.read
+      if persisted && persisted.respond_to?(method_symbol)
+        @value = persisted
+      else
+        @file_handler.clean
+        @value = default
+      end
+    end
   end
 
 
   class NumericValueTracker < ValueTracking
-    def
-
+    def set_initial
+      common_set_initial(:gcd, 0)
     end
 
     def set_value(value)
@@ -62,8 +73,8 @@ module LogStash module PluginMixins module Jdbc
   end
 
   class DateTimeValueTracker < ValueTracking
-    def
-
+    def set_initial
+      common_set_initial(:to_datetime, DateTime.new(1970))
     end
 
     def set_value(value)
@@ -76,8 +87,8 @@ module LogStash module PluginMixins module Jdbc
   end
 
   class TimeValueTracker < ValueTracking
-    def
-
+    def set_initial
+      common_set_initial(:to_time, Time.at(0).utc)
     end
 
     def set_value(value)
@@ -90,6 +101,8 @@ module LogStash module PluginMixins module Jdbc
   end
 
   class FileHandler
+    attr_reader :path
+
     def initialize(path)
       @path = path
       @exists = ::File.exist?(@path)
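A tiny plain-Ruby illustration (hypothetical values, not plugin code) of the duck-type guard that `common_set_initial` introduces above: a persisted `sql_last_value` is only kept if it responds to the method the concrete tracker needs; otherwise the default is used and the metadata file is discarded.

    default   = 0
    persisted = "not-a-number"   # e.g. a stale or corrupted last_run_metadata_path payload

    value =
      if persisted && persisted.respond_to?(:gcd)   # NumericValueTracker expects an Integer-like value
        persisted
      else
        default                                     # the plugin also calls @file_handler.clean here
      end

    puts value   # => 0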
data/lib/logstash/plugin_mixins/jdbc/wrapped_driver.rb
ADDED
@@ -0,0 +1,46 @@
+# encoding: utf-8
+
+module LogStash module PluginMixins module Jdbc
+  class WrappedDriver
+    java_implements java.sql.Driver
+
+    def initialize(drv)
+      @driver = drv
+    end
+
+    java_signature 'boolean acceptsURL(String u) throws SQLException'
+    def accepts_url(u)
+      @driver.accepts_url(u)
+    end
+
+    java_signature 'Connection connect(String u, Properties p)'
+    def connect(url, props)
+      @driver.connect(url, props)
+    end
+
+    java_signature 'int getMajorVersion()'
+    def get_major_version()
+      @driver.get_major_version()
+    end
+
+    java_signature 'int getMinorVersion()'
+    def get_minor_version()
+      @driver.get_minor_version()
+    end
+
+    java_signature 'DriverPropertyInfo[] getPropertyInfo(String u, Properties p)'
+    def get_property_info(url, props)
+      @driver.get_property_info(url, props)
+    end
+
+    java_signature 'boolean jdbcCompliant()'
+    def jdbc_compliant()
+      @driver.jdbc_compliant
+    end
+
+    java_signature 'Logger getParentLogger() throws SQLFeatureNotSupportedException'
+    def get_parent_logger()
+      @driver.get_parent_logger
+    end
+  end
+end end end
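For context on why the wrapper exists: java.sql.DriverManager only hands out connections through drivers whose class is visible to the calling classloader, so a driver loaded from a separate URLClassLoader has to be re-exposed through a delegating class defined on the Logstash side, which is what WrappedDriver does. A short JRuby sketch of the situation, using a hypothetical jar path and driver class name:

    require "java"

    driver_jar = "/path/to/driver.jar"        # hypothetical
    driver_cls = "com.example.jdbc.Driver"    # hypothetical

    urls   = [java.io.File.new(driver_jar).to_uri.to_url].to_java(java.net.URL)
    loader = java.net.URLClassLoader.new_instance(urls)

    # The class resolves fine through the child classloader and can be instantiated...
    klass  = java.lang.Class.for_name(driver_cls, true, loader)
    driver = klass.get_constructor.new_instance

    # ...but DriverManager.getConnection would still skip it, because the JDBC URL
    # lookup checks the caller's classloader. Registering a delegating wrapper that
    # is defined in Logstash's own runtime (WrappedDriver above) works around that.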
data/logstash-input-jdbc.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-jdbc'
-  s.version = '4.3.13'
+  s.version = '4.3.14'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Creates events from JDBC data"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -11,11 +11,9 @@ require "stud/temporary"
 require "time"
 require "date"
 
+# We do not need to set TZ env var anymore because we can have 'Sequel.application_timezone' set to utc by default now.
+
 describe LogStash::Inputs::Jdbc do
-  # This is a necessary change test-wide to guarantee that no local timezone
-  # is picked up. It could be arbitrarily set to any timezone, but then the test
-  # would have to compensate differently. That's why UTC is chosen.
-  ENV["TZ"] = "Etc/UTC"
   let(:mixin_settings) do
     { "jdbc_user" => ENV['USER'], "jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver",
       "jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true"}
@@ -64,22 +62,6 @@ describe LogStash::Inputs::Jdbc do
     plugin.stop
   end
 
-  it "should load all drivers when passing an array" do
-    mixin_settings['jdbc_driver_library'] = '/foo/bar,/bar/foo'
-    expect(plugin).to receive(:load_drivers).with(['/foo/bar', '/bar/foo'])
-    plugin.register
-    plugin.run(queue) # load when first run
-    plugin.stop
-  end
-
-  it "should load all drivers when using a single value" do
-    mixin_settings['jdbc_driver_library'] = '/foo/bar'
-    expect(plugin).to receive(:load_drivers).with(['/foo/bar'])
-    plugin.register
-    plugin.run(queue) # load when first run
-    plugin.stop
-  end
-
   it "should stop without raising exception" do
     plugin.register
     expect { plugin.stop }.to_not raise_error
@@ -371,17 +353,18 @@ describe LogStash::Inputs::Jdbc do
       }
     end
 
-
+    before do
       last_run_value = DateTime.iso8601("2000-01-01T12:00:00.000Z")
-      File.write(settings["last_run_metadata_path"],
-
-      Timecop.travel(DateTime.iso8601("2015-01-01T15:50:00.000Z")) do
+      File.write(settings["last_run_metadata_path"], last_run_value)
+      Timecop.travel(DateTime.iso8601("2015-01-01T15:50:01.000Z")) do
        # simulate earlier records written
        hours.each do |i|
          db[:test_table].insert(:num => i, :custom_time => "2015-01-01 #{i}:00:00", :created_at => Time.now.utc)
        end
      end
+    end
 
+    it "should convert the time to reflect the timezone " do
      Timecop.travel(DateTime.iso8601("2015-01-02T02:10:00.000Z")) do
        # simulate the first plugin run after the custom time of the last record
        plugin.register
@@ -393,6 +376,7 @@ describe LogStash::Inputs::Jdbc do
      end
      Timecop.travel(DateTime.iso8601("2015-01-02T02:20:00.000Z")) do
        # simulate a run 10 minutes later
+        plugin.register
        plugin.run(queue)
        expect(queue.size).to eq(0) # no new records
        plugin.stop
@@ -402,6 +386,7 @@ describe LogStash::Inputs::Jdbc do
      end
      Timecop.travel(DateTime.iso8601("2015-01-02T03:30:00.000Z")) do
        # simulate another run later than the custom time of the last record
+        plugin.register
        plugin.run(queue)
        expect(queue.size).to eq(2)
        plugin.stop
@@ -432,9 +417,9 @@ describe LogStash::Inputs::Jdbc do
      # Sequel only does the *correct* timezone calc on a DateTime instance
      last_run_value = DateTime.iso8601("2000-01-01T00:00:00.987Z")
      File.write(settings["last_run_metadata_path"], YAML.dump(last_run_value))
-
      hours.each_with_index do |i, j|
-
+        time_value = Time.utc(2015, 1, 1, i, 0, 0, msecs[j] * 1000)
+        db[:test1_table].insert(:num => i, :custom_time => time_value, :created_at => Time.now.utc)
      end
 
      plugin.register
@@ -444,7 +429,8 @@ describe LogStash::Inputs::Jdbc do
      actual = queue.size.times.map { queue.pop.get("custom_time").time }
      expect(actual).to eq(expected)
      plugin.stop
-
+      raw_last_run_value = File.read(settings["last_run_metadata_path"])
+      last_run_value = YAML.load(raw_last_run_value)
      expect(last_run_value).to be_a(DateTime)
      expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 02:00:00.722000000 +00:00")
 
@@ -995,7 +981,7 @@ describe LogStash::Inputs::Jdbc do
      expect do
        plugin.register
        plugin.run(queue) # load when first run
-      end.to raise_error(LogStash::
+      end.to raise_error(LogStash::PluginLoadingError)
    end
  end
 
@@ -1136,7 +1122,14 @@ describe LogStash::Inputs::Jdbc do
    end
 
    before(:each) do
-
+      dataset = double("Dataset")
+      allow(dataset).to receive(:each).and_yield(row)
+      allow(plugin).to receive(:jdbc_connect).and_wrap_original do |m, *args|
+        _db = m.call(*args)
+        allow(_db).to receive(:[]).and_return(dataset)
+        _db
+      end
+      # allow_any_instance_of(Sequel::JDBC::Derby::Dataset).to receive(:each).and_yield(row)
      plugin.register
    end
 
@@ -1191,7 +1184,7 @@ describe LogStash::Inputs::Jdbc do
      next unless v.is_a?(String)
      expect(row[k].encoding).to eq(encoded_row[k].encoding)
    end
-
+
    event
  end
  plugin.run(events)
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-jdbc
 version: !ruby/object:Gem::Version
-  version: 4.3.13
+  version: 4.3.14
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2019-08-29 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -171,9 +171,11 @@ files:
 - README.md
 - docs/index.asciidoc
 - lib/logstash/inputs/jdbc.rb
+- lib/logstash/inputs/tzinfo_jruby_patch.rb
 - lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb
 - lib/logstash/plugin_mixins/jdbc/jdbc.rb
 - lib/logstash/plugin_mixins/jdbc/value_tracking.rb
+- lib/logstash/plugin_mixins/jdbc/wrapped_driver.rb
 - logstash-input-jdbc.gemspec
 - spec/inputs/integ_spec.rb
 - spec/inputs/jdbc_spec.rb