logstash-input-jdbc 4.3.4 → 4.3.5
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +8 -5
- data/lib/logstash/inputs/jdbc.rb +6 -18
- data/lib/logstash/plugin_mixins/jdbc.rb +5 -16
- data/lib/logstash/plugin_mixins/value_tracking.rb +124 -0
- data/logstash-input-jdbc.gemspec +1 -1
- data/spec/inputs/jdbc_spec.rb +144 -59
- metadata +3 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f382c4428bc37a74fdfae0aac1a97eca218555266c01cb76a4c6f6de5e7e3ad1
+  data.tar.gz: 70d0027fac72146ee098c78f46c7bac8f3d8feb2139f9e6238eb44da1ba3de8f
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 471770e1b701221c9e0b5897c00313276a28f138a5831c9f1a027ca0655951115542a19edc32a4f4bbe634d9a96f969cc45e9d48d4269f72a1780ad6063b468b
+  data.tar.gz: af300cbd6b078c2d9efcfefc5482480a21e020708fd8e71036c234183dd81e734ab1775b69c9810f0f8e1d13d04dae485979b46857d8d7d773a533b9413b550b
data/CHANGELOG.md
CHANGED
@@ -1,17 +1,20 @@
+## 4.3.5
+  - [#140](https://github.com/logstash-plugins/logstash-input-jdbc/issues/140) Fix long standing bug where setting jdbc_default_timezone loses milliseconds. Force all usage of sql_last_value to be typed according to the settings.
+
 ## 4.3.4
   - [#261](https://github.com/logstash-plugins/logstash-input-jdbc/issues/261) Fix memory leak.
-
+
 ## 4.3.3
   - [#255](https://github.com/logstash-plugins/logstash-input-jdbc/issues/255) Fix thread and memory leak.
-
+
 ## 4.3.2
   - [#251](https://github.com/logstash-plugins/logstash-input-jdbc/issues/251) Fix connection and memory leak.
-
+
 ## 4.3.1
   - Update gemspec summary

-## 4.3.0
-  - [#147](https://github.com/logstash-plugins/logstash-input-jdbc/issues/147) Open and close connection for each query
+## 4.3.0
+  - [#147](https://github.com/logstash-plugins/logstash-input-jdbc/issues/147) Open and close connection for each query

 ## 4.2.4
   - [#220](https://github.com/logstash-plugins/logstash-input-jdbc/issues/220) Log exception when database connection test fails
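The 4.3.5 entry above is implemented by the new value_tracking.rb mixin added later in this diff: the type that sql_last_value carries now follows the plugin settings instead of being coerced ad hoc. A rough Ruby paraphrase of that selection logic (illustrative only — the tracker names come from the diff below, while this helper method and the example timezone value are hypothetical):

def tracker_class_name(use_column_value:, tracking_column_type:, jdbc_default_timezone:)
  # Mirrors ValueTracking.build_last_value_tracker in value_tracking.rb (below).
  if use_column_value && tracking_column_type == "numeric"
    "NumericValueTracker"   # sql_last_value stays Numeric, starting at 0
  elsif jdbc_default_timezone.to_s.empty?
    "TimeValueTracker"      # no timezone conversion needed, plain Time
  else
    "DateTimeValueTracker"  # Sequel applies timezone handling to DateTime only
  end
end

tracker_class_name(:use_column_value => false, :tracking_column_type => "numeric",
                   :jdbc_default_timezone => "Europe/Berlin") # => "DateTimeValueTracker"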
data/lib/logstash/inputs/jdbc.rb
CHANGED
@@ -2,7 +2,7 @@
 require "logstash/inputs/base"
 require "logstash/namespace"
 require "logstash/plugin_mixins/jdbc"
-
+

 # This plugin was created as a way to ingest data from any database
 # with a JDBC interface into Logstash. You can periodically schedule ingestion
@@ -206,21 +206,16 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
     require "rufus/scheduler"
     prepare_jdbc_connection

-    # Raise an error if @use_column_value is true, but no @tracking_column is set
     if @use_column_value
+      # Raise an error if @use_column_value is true, but no @tracking_column is set
       if @tracking_column.nil?
         raise(LogStash::ConfigurationError, "Must set :tracking_column if :use_column_value is true.")
       end
     end

-    @enable_encoding = !@charset.nil? || !@columns_charset.empty?
+    @value_tracker = LogStash::PluginMixins::ValueTracking.build_last_value_tracker(self)

-
-    if @clean_run && File.exist?(@last_run_metadata_path)
-      File.delete(@last_run_metadata_path)
-    elsif File.exist?(@last_run_metadata_path)
-      @sql_last_value = YAML.load(File.read(@last_run_metadata_path))
-    end
+    @enable_encoding = !@charset.nil? || !@columns_charset.empty?

     unless @statement.nil? ^ @statement_filepath.nil?
       raise(LogStash::ConfigurationError, "Must set either :statement or :statement_filepath. Only one may be set at a time.")
@@ -248,13 +243,11 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
       @scheduler = Rufus::Scheduler.new(:max_work_threads => 1)
       @scheduler.cron @schedule do
         execute_query(queue)
-        update_state_file
       end

       @scheduler.join
     else
       execute_query(queue)
-      update_state_file
     end
   end # def run

@@ -267,7 +260,7 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base

   def execute_query(queue)
     # update default parameters
-    @parameters['sql_last_value'] = @sql_last_value
+    @parameters['sql_last_value'] = @value_tracker.value
     execute_statement(@statement, @parameters) do |row|
       if enable_encoding?
         ## do the necessary conversions to string elements
@@ -277,12 +270,7 @@ class LogStash::Inputs::Jdbc < LogStash::Inputs::Base
       decorate(event)
       queue << event
     end
-  end
-
-  def update_state_file
-    if @record_last_run
-      File.write(@last_run_metadata_path, YAML.dump(@sql_last_value))
-    end
+    @value_tracker.write
   end

   private
data/lib/logstash/plugin_mixins/jdbc.rb
CHANGED
@@ -3,6 +3,7 @@
 require "logstash/config/mixin"
 require "time"
 require "date"
+require "logstash/plugin_mixins/value_tracking"

 java_import java.util.concurrent.locks.ReentrantLock

@@ -195,17 +196,7 @@ module LogStash::PluginMixins::Jdbc
   public
   def prepare_jdbc_connection
     @connection_lock = ReentrantLock.new
-    if @use_column_value
-      case @tracking_column_type
-      when "numeric"
-        @sql_last_value = 0
-      when "timestamp"
-        @sql_last_value = Time.at(0).utc
-      end
-    else
-      @sql_last_value = Time.at(0).utc
-    end
-  end # def prepare_jdbc_connection
+  end

   public
   def close_jdbc_connection
@@ -229,22 +220,20 @@ module LogStash::PluginMixins::Jdbc
     begin
       parameters = symbolized_params(parameters)
       query = @database[statement, parameters]
-      sql_last_value = @use_column_value ? @sql_last_value : Time.now.utc
+
+      sql_last_value = @use_column_value ? @value_tracker.value : Time.now.utc
       @tracking_column_warning_sent = false
       @logger.debug? and @logger.debug("Executing JDBC query", :statement => statement, :parameters => parameters, :count => query.count)

       perform_query(query) do |row|
         sql_last_value = get_column_value(row) if @use_column_value
-        if @tracking_column_type=="timestamp" and @use_column_value and sql_last_value.is_a?(DateTime)
-          sql_last_value = sql_last_value.to_time # Coerce the timestamp to a `Time`
-        end
         yield extract_values_from(row)
       end
       success = true
     rescue Sequel::DatabaseConnectionError, Sequel::DatabaseError => e
       @logger.warn("Exception when executing JDBC query", :exception => e)
     else
-      @sql_last_value = sql_last_value
+      @value_tracker.set_value(sql_last_value)
     ensure
       close_jdbc_connection
       @connection_lock.unlock
data/lib/logstash/plugin_mixins/value_tracking.rb
ADDED
@@ -0,0 +1,124 @@
+# encoding: utf-8
+require "yaml" # persistence
+
+module LogStash module PluginMixins
+class ValueTracking
+
+  def self.build_last_value_tracker(plugin)
+    if plugin.use_column_value && plugin.tracking_column_type == "numeric"
+      # use this irrespective of the jdbc_default_timezone setting
+      klass = NumericValueTracker
+    else
+      if plugin.jdbc_default_timezone.nil? || plugin.jdbc_default_timezone.empty?
+        # no TZ stuff for Sequel, use Time
+        klass = TimeValueTracker
+      else
+        # Sequel does timezone handling on DateTime only
+        klass = DateTimeValueTracker
+      end
+    end
+
+    handler = NullFileHandler.new(plugin.last_run_metadata_path)
+    if plugin.record_last_run
+      handler = FileHandler.new(plugin.last_run_metadata_path)
+    end
+    if plugin.clean_run
+      handler.clean
+    end
+
+    instance = klass.new(handler)
+  end
+
+  attr_reader :value
+
+  def initialize(handler)
+    @file_handler = handler
+    set_value(get_initial)
+  end
+
+  def get_initial
+    # override in subclass
+  end
+
+  def set_value(value)
+    # override in subclass
+  end
+
+  def write
+    @file_handler.write(@value)
+  end
+end
+
+
+class NumericValueTracker < ValueTracking
+  def get_initial
+    @file_handler.read || 0
+  end
+
+  def set_value(value)
+    return unless value.is_a?(Numeric)
+    @value = value
+  end
+end
+
+class DateTimeValueTracker < ValueTracking
+  def get_initial
+    @file_handler.read || DateTime.new(1970)
+  end
+
+  def set_value(value)
+    if value.respond_to?(:to_datetime)
+      @value = value.to_datetime
+    end
+  end
+end
+
+class TimeValueTracker < ValueTracking
+  def get_initial
+    @file_handler.read || Time.at(0).utc
+  end
+
+  def set_value(value)
+    if value.respond_to?(:to_time)
+      @value = value.to_time
+    end
+  end
+end
+
+class FileHandler
+  def initialize(path)
+    @path = path
+    @exists = ::File.exist?(@path)
+  end
+
+  def clean
+    return unless @exists
+    ::File.delete(@path)
+    @exists = false
+  end
+
+  def read
+    return unless @exists
+    YAML.load(::File.read(@path))
+  end
+
+  def write(value)
+    ::File.write(@path, YAML.dump(value))
+    @exists = true
+  end
+end
+
+class NullFileHandler
+  def initialize(path)
+  end
+
+  def clean
+  end
+
+  def read
+  end
+
+  def write(value)
+  end
+end
+end end
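For reference, here is a minimal usage sketch of the classes added above (not part of the gem itself; it assumes the plugin's lib directory is on the load path, and the metadata path is just an illustrative temp file):

require "date"
require "logstash/plugin_mixins/value_tracking"

# FileHandler persists the tracked value as YAML at the given path.
handler = LogStash::PluginMixins::FileHandler.new("/tmp/jdbc_last_run_example.yml")
tracker = LogStash::PluginMixins::DateTimeValueTracker.new(handler)

tracker.value                    # => DateTime 1970-01-01 while no metadata file exists
tracker.set_value(DateTime.now)  # coerced via #to_datetime, keeps sub-second precision
tracker.write                    # dumps the DateTime to the path above as YAML
tracker.value                    # => the DateTime that was just set

In the plugin itself these classes are not used directly: ValueTracking.build_last_value_tracker(self), called from register in jdbc.rb above, picks the tracker class and file handler from the plugin settings (record_last_run, clean_run, use_column_value, tracking_column_type, jdbc_default_timezone).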
data/logstash-input-jdbc.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-input-jdbc'
-  s.version = '4.3.4'
+  s.version = '4.3.5'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Creates events from JDBC data"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -38,6 +38,7 @@ describe LogStash::Inputs::Jdbc do
         DateTime :custom_time
       end
       db << "CREATE TABLE types_table (num INTEGER, string VARCHAR(255), started_at DATE, custom_time TIMESTAMP, ranking DECIMAL(16,6))"
+      db << "CREATE TABLE test1_table (num INTEGER, string VARCHAR(255), custom_time TIMESTAMP, created_at TIMESTAMP)"
     end
   end

@@ -45,6 +46,7 @@ describe LogStash::Inputs::Jdbc do
     if !RSpec.current_example.metadata[:no_connection]
       db.drop_table(:test_table)
       db.drop_table(:types_table)
+      db.drop_table(:test1_table)
     end
   end

@@ -322,7 +324,7 @@ describe LogStash::Inputs::Jdbc do
     end
   end

-
+  describe "when jdbc_default_timezone is set" do
     let(:mixin_settings) do
       { "jdbc_user" => ENV['USER'], "jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver",
         "jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true",
@@ -330,53 +332,136 @@ describe LogStash::Inputs::Jdbc do
       }
     end

-    let(:
-
-
-
-
-
-
+    let(:hours) { [10, 11, 12, 13, 14, 15, 16, 17, 18, 19, 20] }
+
+    context "when fetching time data and the tracking column is set and tracking column type defaults to 'numeric'" do
+      let(:settings) do
+        {
+          "statement" => "SELECT * from test_table WHERE num > :sql_last_value",
+          "last_run_metadata_path" => Stud::Temporary.pathname,
+          "tracking_column" => "num",
+          "use_column_value" => true
+        }
+      end
+
+      it "should convert the time to reflect the timezone " do
+        File.write(settings["last_run_metadata_path"], YAML.dump(42))
+
+        db[:test_table].insert(:num => 42, :custom_time => "2015-01-01 10:10:10", :created_at => Time.now.utc)
+        db[:test_table].insert(:num => 43, :custom_time => "2015-01-01 11:11:11", :created_at => Time.now.utc)
+
+        plugin.register
+        plugin.run(queue)
+        plugin.stop
+        expect(queue.size).to eq(1)
+        event = queue.pop
+        expect(event.get("num")).to eq(43)
+        expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-01T17:11:11.000Z"))
+      end
     end

-
+    context "when fetching time data and the tracking column is NOT set, sql_last_value is time of run" do

-
-
-
+      let(:settings) do
+        {
+          "statement" => "SELECT * from test_table WHERE custom_time > :sql_last_value",
+          "last_run_metadata_path" => Stud::Temporary.pathname
+        }
+      end
+
+      it "should convert the time to reflect the timezone " do
+        last_run_value = DateTime.iso8601("2000-01-01T12:00:00.000Z")
+        File.write(settings["last_run_metadata_path"], YAML.dump(last_run_value))
+
+        Timecop.travel(DateTime.iso8601("2015-01-01T15:50:00.000Z")) do
+          # simulate earlier records written
+          hours.each do |i|
+            db[:test_table].insert(:num => i, :custom_time => "2015-01-01 #{i}:00:00", :created_at => Time.now.utc)
+          end
+        end

-
-
+        Timecop.travel(DateTime.iso8601("2015-01-02T02:10:00.000Z")) do
+          # simulate the first plugin run after the custom time of the last record
+          plugin.register
+          plugin.run(queue)
+          expected = hours.map{|hour| Time.iso8601("2015-01-01T06:00:00.000Z") + (hour * 3600) }# because Sequel converts the column values to Time instances.
+          actual = queue.size.times.map { queue.pop.get("custom_time").time }
+          expect(actual).to eq(expected)
+          plugin.stop
+        end
+        Timecop.travel(DateTime.iso8601("2015-01-02T02:20:00.000Z")) do
+          # simulate a run 10 minutes later
+          plugin.run(queue)
+          expect(queue.size).to eq(0) # no new records
+          plugin.stop
+          # now add records
+          db[:test_table].insert(:num => 11, :custom_time => "2015-01-01 20:20:20", :created_at => Time.now.utc)
+          db[:test_table].insert(:num => 12, :custom_time => "2015-01-01 21:21:21", :created_at => Time.now.utc)
+        end
+        Timecop.travel(DateTime.iso8601("2015-01-02T03:30:00.000Z")) do
+          # simulate another run later than the custom time of the last record
+          plugin.run(queue)
+          expect(queue.size).to eq(2)
+          plugin.stop
+        end
+        event = queue.pop
+        expect(event.get("num")).to eq(11)
+        expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-02T02:20:20.000Z"))
+        event = queue.pop
+        expect(event.get("num")).to eq(12)
+        expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-02T03:21:21.000Z"))
       end
+    end

-
+    context "when fetching time data and the tracking column is set, sql_last_value is sourced from a column, sub-second precision is maintained" do
+      let(:settings) do
+        {
+          "statement" => "SELECT * from test1_table WHERE custom_time > :sql_last_value ORDER BY custom_time",
+          "use_column_value" => true,
+          "tracking_column" => "custom_time",
+          "tracking_column_type" => "timestamp",
+          "last_run_metadata_path" => Stud::Temporary.pathname
+        }
+      end

-
-      expected = ["2015-01-01T16:00:00.000Z",
-                  "2015-01-01T17:00:00.000Z",
-                  "2015-01-01T18:00:00.000Z",
-                  "2015-01-01T19:00:00.000Z",
-                  "2015-01-01T20:00:00.000Z",
-                  "2015-01-01T21:00:00.000Z",
-                  "2015-01-01T22:00:00.000Z",
-                  "2015-01-01T23:00:00.000Z",
-                  "2015-01-02T00:00:00.000Z",
-                  "2015-01-02T01:00:00.000Z",
-                  "2015-01-02T02:00:00.000Z"].map { |i| Time.iso8601(i) }
-      actual = queue.size.times.map { queue.pop.get("custom_time").time }
-      expect(actual).to eq(expected)
-      plugin.stop
+      let(:msecs) { [111, 122, 233, 244, 355, 366, 477, 488, 599, 611, 722] }

-
-
-
-
-
-
-
-
-
-
+      it "should convert the time to reflect the timezone " do
+        # Sequel only does the *correct* timezone calc on a DateTime instance
+        last_run_value = DateTime.iso8601("2000-01-01T00:00:00.987Z")
+        File.write(settings["last_run_metadata_path"], YAML.dump(last_run_value))
+
+        hours.each_with_index do |i, j|
+          db[:test1_table].insert(:num => i, :custom_time => "2015-01-01 #{i}:00:00.#{msecs[j]}", :created_at => Time.now.utc)
+        end
+
+        plugin.register
+
+        plugin.run(queue)
+        expected = hours.map.with_index {|hour, i| Time.iso8601("2015-01-01T06:00:00.000Z") + (hour * 3600 + (msecs[i] / 1000.0)) }
+        actual = queue.size.times.map { queue.pop.get("custom_time").time }
+        expect(actual).to eq(expected)
+        plugin.stop
+        last_run_value = YAML.load(File.read(settings["last_run_metadata_path"]))
+        expect(last_run_value).to be_a(DateTime)
+        expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 02:00:00.722000000 +00:00")
+
+        plugin.run(queue)
+        plugin.stop
+        db[:test1_table].insert(:num => 11, :custom_time => "2015-01-01 11:00:00.099", :created_at => Time.now.utc)
+        db[:test1_table].insert(:num => 12, :custom_time => "2015-01-01 21:00:00.811", :created_at => Time.now.utc)
+        expect(queue.size).to eq(0)
+        plugin.run(queue)
+        expect(queue.size).to eq(1)
+        event = queue.pop
+        plugin.stop
+        expect(event.get("num")).to eq(12)
+        expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-02T03:00:00.811Z"))
+        last_run_value = YAML.load(File.read(settings["last_run_metadata_path"]))
+        expect(last_run_value).to be_a(DateTime)
+        # verify that sub-seconds are recorded to the file
+        expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 03:00:00.811000000 +00:00")
+      end
     end
   end

@@ -479,16 +564,16 @@ describe LogStash::Inputs::Jdbc do
       test_table = db[:test_table]

      plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(0)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(0)
       test_table.insert(:num => nums[0], :created_at => Time.now.utc)
       test_table.insert(:num => nums[1], :created_at => Time.now.utc)
       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       test_table.insert(:num => nums[2], :created_at => Time.now.utc)
       test_table.insert(:num => nums[3], :created_at => Time.now.utc)
       test_table.insert(:num => nums[4], :created_at => Time.now.utc)
       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(50)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(50)
     end
   end

@@ -522,17 +607,17 @@ describe LogStash::Inputs::Jdbc do
       test_table = db[:test_table]

       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(Time.parse("1970-01-01 00:00:00.000000000 +0000"))
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(Time.parse("1970-01-01 00:00:00.000000000 +0000"))
       test_table.insert(:num => nums[0], :created_at => Time.now.utc, :custom_time => times[0])
       test_table.insert(:num => nums[1], :created_at => Time.now.utc, :custom_time => times[1])
       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value").class).to eq(Time.parse(times[0]).class)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(Time.parse(times[1]))
+      expect(plugin.instance_variable_get("@value_tracker").value.class).to eq(Time.parse(times[0]).class)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(Time.parse(times[1]))
       test_table.insert(:num => nums[2], :created_at => Time.now.utc, :custom_time => times[2])
       test_table.insert(:num => nums[3], :created_at => Time.now.utc, :custom_time => times[3])
       test_table.insert(:num => nums[4], :created_at => Time.now.utc, :custom_time => times[4])
       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(Time.parse(times[4]))
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(Time.parse(times[4]))
     end
   end

@@ -566,19 +651,19 @@ describe LogStash::Inputs::Jdbc do
       test_table = db[:test_table]

       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       expect(queue.length).to eq(0) # Shouldn't grab anything here.
       test_table.insert(:num => nums[0], :created_at => Time.now.utc)
       test_table.insert(:num => nums[1], :created_at => Time.now.utc)
       plugin.run(queue)
       expect(queue.length).to eq(0) # Shouldn't grab anything here either.
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       test_table.insert(:num => nums[2], :created_at => Time.now.utc)
       test_table.insert(:num => nums[3], :created_at => Time.now.utc)
       test_table.insert(:num => nums[4], :created_at => Time.now.utc)
       plugin.run(queue)
       expect(queue.length).to eq(3) # Only values greater than 20 should be grabbed.
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(50)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(50)
     end
   end

@@ -612,19 +697,19 @@ describe LogStash::Inputs::Jdbc do
       test_table = db[:test_table]

       plugin.run(queue)
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       expect(queue.length).to eq(0) # Shouldn't grab anything here.
       test_table.insert(:num => nums[0], :created_at => Time.now.utc)
       test_table.insert(:num => nums[1], :created_at => Time.now.utc)
       plugin.run(queue)
       expect(queue.length).to eq(0) # Shouldn't grab anything here either.
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       test_table.insert(:num => nums[2], :created_at => Time.now.utc)
       test_table.insert(:num => nums[3], :created_at => Time.now.utc)
       test_table.insert(:num => nums[4], :created_at => Time.now.utc)
       plugin.run(queue)
       expect(queue.length).to eq(3) # Only values greater than 20 should be grabbed.
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(20)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(20)
       expect(plugin.instance_variable_get("@tracking_column_warning_sent")).to eq(true)
     end
   end
@@ -650,7 +735,7 @@ describe LogStash::Inputs::Jdbc do
     it "should respect last run metadata" do
       plugin.run(queue)

-      expect(plugin.instance_variable_get("@sql_last_value")).to be > last_run_time
+      expect(plugin.instance_variable_get("@value_tracker").value).to be > last_run_time
     end
   end

@@ -677,7 +762,7 @@ describe LogStash::Inputs::Jdbc do
     it "metadata should equal last_run_value" do
       plugin.run(queue)

-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(last_run_value)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(last_run_value)
     end
   end

@@ -701,7 +786,7 @@ describe LogStash::Inputs::Jdbc do
     it "should not respect last run metadata" do
       plugin.run(queue)

-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(last_run_time)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(last_run_time)
     end
   end

@@ -728,7 +813,7 @@ describe LogStash::Inputs::Jdbc do
     it "metadata should still reflect last value" do
       plugin.run(queue)

-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(last_run_value)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(last_run_value)
     end
   end

@@ -754,7 +839,7 @@ describe LogStash::Inputs::Jdbc do
     end

     it "should ignore last run metadata if :clean_run set to true" do
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(Time.at(0).utc)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(Time.at(0).utc)
     end
   end

@@ -782,7 +867,7 @@ describe LogStash::Inputs::Jdbc do
     end

     it "should ignore last run metadata if :clean_run set to true" do
-      expect(plugin.instance_variable_get("@sql_last_value")).to eq(0)
+      expect(plugin.instance_variable_get("@value_tracker").value).to eq(0)
     end
   end

metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-jdbc
 version: !ruby/object:Gem::Version
-  version: 4.3.4
+  version: 4.3.5
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2018-02-
+date: 2018-02-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -172,6 +172,7 @@ files:
 - docs/index.asciidoc
 - lib/logstash/inputs/jdbc.rb
 - lib/logstash/plugin_mixins/jdbc.rb
+- lib/logstash/plugin_mixins/value_tracking.rb
 - logstash-input-jdbc.gemspec
 - spec/inputs/jdbc_spec.rb
 homepage: http://www.elastic.co/guide/en/logstash/current/index.html
|