logstash-integration-jdbc 5.2.4 → 5.4.11
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between the two versions as published in their respective public registries.
- checksums.yaml +4 -4
- data/CHANGELOG.md +50 -0
- data/docs/filter-jdbc_static.asciidoc +14 -2
- data/docs/filter-jdbc_streaming.asciidoc +1 -1
- data/docs/input-jdbc.asciidoc +41 -4
- data/lib/logstash/filters/jdbc/basic_database.rb +1 -1
- data/lib/logstash/filters/jdbc/read_only_database.rb +2 -2
- data/lib/logstash/filters/jdbc_static.rb +19 -10
- data/lib/logstash/inputs/jdbc.rb +69 -20
- data/lib/logstash/plugin_mixins/jdbc/common.rb +2 -1
- data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +22 -17
- data/lib/logstash/plugin_mixins/jdbc/sequel_bootstrap.rb +21 -0
- data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +51 -45
- data/lib/logstash/plugin_mixins/jdbc/timezone_proxy.rb +61 -0
- data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +16 -3
- data/lib/logstash-integration-jdbc_jars.rb +4 -2
- data/logstash-integration-jdbc.gemspec +6 -6
- data/spec/filters/jdbc_static_spec.rb +10 -0
- data/spec/filters/jdbc_streaming_spec.rb +7 -10
- data/spec/inputs/integration/integ_spec.rb +28 -9
- data/spec/inputs/jdbc_spec.rb +202 -59
- data/spec/plugin_mixins/jdbc/timezone_proxy_spec.rb +68 -0
- data/spec/plugin_mixins/jdbc/value_tracking_spec.rb +113 -0
- data/vendor/jar-dependencies/org/apache/derby/derby/10.15.2.1/derby-10.15.2.1.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.15.2.1/derbyclient-10.15.2.1.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyshared/10.15.2.1/derbyshared-10.15.2.1.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbytools/10.15.2.1/derbytools-10.15.2.1.jar +0 -0
- metadata +39 -49
- data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +0 -43
- data/lib/logstash/plugin_mixins/jdbc/scheduler.rb +0 -175
- data/spec/plugin_mixins/jdbc/scheduler_spec.rb +0 -78
- data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
- data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0
data/spec/inputs/jdbc_spec.rb
CHANGED
@@ -2,13 +2,13 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/devutils/rspec/shared_examples"
 require "logstash/inputs/jdbc"
-require "jdbc/derby"
 require "sequel"
 require "sequel/adapters/jdbc"
 require "timecop"
 require "stud/temporary"
 require "time"
 require "date"
+require "pathname"
 
 # We do not need to set TZ env var anymore because we can have 'Sequel.application_timezone' set to utc by default now.
 
@@ -32,7 +32,6 @@ describe LogStash::Inputs::Jdbc do
   before :each do
     if !RSpec.current_example.metadata[:no_connection]
       # before body
-      Jdbc::Derby.load_driver
       db.create_table :test_table do
         DateTime :created_at
         BigDecimal :big_num
@@ -51,6 +50,9 @@ describe LogStash::Inputs::Jdbc do
       db.drop_table(:types_table)
       db.drop_table(:test1_table)
     end
+
+    last_run_default_path = LogStash::SETTINGS.get_value("path.data")
+    FileUtils.rm_f("#{last_run_default_path}/plugins/inputs/jdbc/logstash_jdbc_last_run")
   end
 
   context "when registering and tearing down" do
@@ -247,18 +249,6 @@ describe LogStash::Inputs::Jdbc do
       Timecop.return
     end
 
-    it "cleans up scheduler resources on close" do
-      runner = Thread.new do
-        plugin.run(queue)
-      end
-      sleep 1
-      plugin.do_close
-
-      scheduler = plugin.instance_variable_get(:@scheduler)
-      expect(scheduler).to_not be_nil
-      expect(scheduler.down?).to be_truthy
-    end
-
   end
 
   context "when scheduling and previous runs are to be preserved" do
@@ -285,7 +275,7 @@ describe LogStash::Inputs::Jdbc do
       sleep 1
       for i in 0..1
         sleep 1
-        updated_last_run =
+        updated_last_run = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))
         expect(updated_last_run).to be > last_run_time
         last_run_time = updated_last_run
       end
@@ -555,7 +545,7 @@ describe LogStash::Inputs::Jdbc do
       expect(actual).to eq(expected)
       plugin.stop
       raw_last_run_value = File.read(settings["last_run_metadata_path"])
-      last_run_value =
+      last_run_value = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(raw_last_run_value)
       expect(last_run_value).to be_a(DateTime)
       expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 02:00:00.722000000 +00:00")
 
@@ -570,7 +560,7 @@ describe LogStash::Inputs::Jdbc do
       plugin.stop
       expect(event.get("num")).to eq(12)
       expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-02T03:00:00.811Z"))
-      last_run_value =
+      last_run_value = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))
       expect(last_run_value).to be_a(DateTime)
       # verify that sub-seconds are recorded to the file
       expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 03:00:00.811000000 +00:00")
@@ -611,6 +601,33 @@ describe LogStash::Inputs::Jdbc do
       # With no timezone set, no change should occur
       expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-01T12:00:00Z"))
     end
+
+    %w(
+      Etc/UTC
+      America/Los_Angeles
+      Europe/Berlin
+      Asia/Tokyo
+    ).each do |local_timezone|
+      context "when host machine has timezone `#{local_timezone}`" do
+        around(:each) do |example|
+          begin
+            previous_tz = ENV['TZ']
+            ENV['TZ'] = local_timezone
+            example.call
+          ensure
+            ENV['TZ'] = previous_tz
+          end
+        end
+
+        let(:tz) { TZInfo::Timezone.get(local_timezone) }
+
+        it "converts the time using the machine's local timezone" do
+          plugin.run(queue)
+          event = queue.pop
+          expect(event.get("custom_time").time).to eq(Time.new(2015,1,1,12,0,0,tz))
+        end
+      end
+    end
   end
 
   context "when iteratively running plugin#run" do
@@ -704,7 +721,7 @@ describe LogStash::Inputs::Jdbc do
         "last_run_metadata_path" => Stud::Temporary.pathname }
     end
 
-    let(:nums) { [BigDecimal
+    let(:nums) { [BigDecimal(10), BigDecimal(20), BigDecimal(30), BigDecimal(40), BigDecimal(50)] }
 
     before do
       plugin.register
@@ -1114,6 +1131,98 @@ describe LogStash::Inputs::Jdbc do
     end
   end
 
+  context "when state is persisted" do
+    context "to file" do
+      let(:settings) do
+        {
+          "statement" => "SELECT * FROM test_table",
+          "record_last_run" => true
+        }
+      end
+
+      before do
+        plugin.register
+      end
+
+      after do
+        plugin.stop
+      end
+
+      context "with default last_run_metadata_path" do
+        it "should save state in data.data subpath" do
+          path = LogStash::SETTINGS.get_value("path.data")
+          expect(plugin.last_run_metadata_file_path).to start_with(path)
+        end
+      end
+
+      context "with customized last_run_metadata_path" do
+        let(:settings) { super().merge({ "last_run_metadata_path" => Stud::Temporary.pathname })}
+
+        it "should save state in data.data subpath" do
+          expect(plugin.last_run_metadata_file_path).to start_with(settings["last_run_metadata_path"])
+        end
+      end
+    end
+
+    context "with customized last_run_metadata_path point to directory" do
+      let(:settings) do
+        path = Stud::Temporary.pathname
+        Pathname.new(path).tap {|path| path.mkpath}
+        super().merge({ "last_run_metadata_path" => path})
+      end
+
+      it "raise configuration error" do
+        expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
+  end
+
+  context "update the previous default last_run_metadata_path" do
+    let(:settings) do
+      {
+        "statement" => "SELECT * FROM test_table",
+        "record_last_run" => true
+      }
+    end
+
+    let(:fake_home) do
+      path = Stud::Temporary.pathname
+      Pathname.new(path).tap {|path| path.mkpath}
+      path
+    end
+
+    context "when a file exists" do
+      before do
+        # in a faked HOME folder save a valid previous last_run metadata file
+        allow(ENV).to receive(:[]).with(anything).and_call_original
+        allow(ENV).to receive(:[]).with('HOME').and_return(fake_home)
+
+        File.open("#{fake_home}/.logstash_jdbc_last_run", 'w') do |file|
+          file.write("--- !ruby/object:DateTime '2022-03-08 08:10:00.486889000 Z'")
+        end
+      end
+      let(:old_path) { "#{fake_home}/.logstash_jdbc_last_run" }
+      let(:path_data) { LogStash::SETTINGS.get_value("path.data") }
+      let(:new_path) { "#{path_data}/plugins/inputs/jdbc/logstash_jdbc_last_run" }
+
+      it "should be moved" do
+        plugin.register
+        expect(::File).to_not exist(old_path)
+        expect(::File).to exist(new_path)
+      end
+      context "if the delete fails" do
+        before(:each) do
+          allow(File).to receive(:delete).and_raise ArgumentError
+        end
+        it "should be still be moved" do
+          plugin.register
+          expect(::File).to exist(old_path) # old still exists
+          expect(::File).to exist(new_path)
+        end
+      end
+    end
+  end
+
   context "when setting fetch size" do
 
     let(:settings) do
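The new "when state is persisted" and "update the previous default last_run_metadata_path" contexts pin down a one-time migration of the last-run sentinel file: the default location moves from the per-user `$HOME/.logstash_jdbc_last_run` to `<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run`. A minimal sketch of the behavior these specs describe (an illustration only, not the plugin's actual implementation):

    require "fileutils"

    # Sketch: copy the legacy sentinel into path.data, then best-effort delete
    # the original. Per the "if the delete fails" spec, a failed delete still
    # counts as a successful move; both files may then exist.
    old_path = ::File.join(ENV['HOME'], ".logstash_jdbc_last_run")
    new_path = ::File.join(LogStash::SETTINGS.get_value("path.data"),
                           "plugins", "inputs", "jdbc", "logstash_jdbc_last_run")

    if ::File.exist?(old_path)
      FileUtils.mkdir_p(::File.dirname(new_path))
      FileUtils.cp(old_path, new_path)
      begin
        ::File.delete(old_path)
      rescue StandardError
        # keep going: the new location is now authoritative
      end
    end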
@@ -1280,6 +1389,34 @@ describe LogStash::Inputs::Jdbc do
       expect { plugin.register }.to_not raise_error
       plugin.stop
     end
+
+    it "does retry when query execution fails" do
+      mixin_settings['statement_retry_attempts'] = 2
+      mixin_settings['statement_retry_attempts_wait_time'] = 0.5
+      queue = Queue.new
+      plugin.register
+
+      handler = plugin.instance_variable_get(:@statement_handler)
+      allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_raise(Sequel::PoolTimeout)
+      expect(plugin.logger).to receive(:error).with("Unable to execute statement. Trying again.")
+      expect(plugin.logger).to receive(:error).with("Unable to execute statement. Tried 2 times.")
+
+      plugin.run(queue)
+      plugin.stop
+    end
+
+    it "does not retry when query execution succeeds" do
+      mixin_settings['connection_retry_attempts'] = 2
+      queue = Queue.new
+      plugin.register
+
+      handler = plugin.instance_variable_get(:@statement_handler)
+      allow(handler).to receive(:perform_query).with(instance_of(Sequel::JDBC::Database), instance_of(Time)).and_call_original
+      expect(plugin.logger).not_to receive(:error)
+
+      plugin.run(queue)
+      plugin.stop
+    end
   end
 
   context "when encoding of some columns need to be changed" do
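The two new retry examples introduce the `statement_retry_attempts` and `statement_retry_attempts_wait_time` settings. A hedged sketch of the control flow they imply (the handler and setting names come from the specs; the loop itself is an assumption, not the plugin's verbatim code):

    # Retry a failing query up to statement_retry_attempts times, sleeping
    # statement_retry_attempts_wait_time seconds between attempts, emitting
    # the exact error messages the specs assert on.
    attempts_left = @statement_retry_attempts               # e.g. 2
    begin
      @statement_handler.perform_query(db, sql_last_value)
    rescue Sequel::Error                                    # e.g. Sequel::PoolTimeout
      attempts_left -= 1
      if attempts_left > 0
        logger.error("Unable to execute statement. Trying again.")
        sleep(@statement_retry_attempts_wait_time)          # e.g. 0.5
        retry
      else
        logger.error("Unable to execute statement. Tried #{@statement_retry_attempts} times.")
      end
    end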
@@ -1432,57 +1569,64 @@ describe LogStash::Inputs::Jdbc do
     end
   end
 
-  context "when
+  context "when retrieving records with ambiguous timestamps" do
+
     let(:settings) do
-      {
-
-
-
-    let(:value_tracker) { double("value tracker", :set_value => nil, :write => nil) }
-    let(:msg) { 'Java::JavaSql::SQLSyntaxErrorException: Dynamic SQL Error; SQL error code = -104; Token unknown - line 1, column 105; LIMIT [SQLState:42000, ISC error code:335544634]' }
-    let(:error_args) do
-      {"exception" => msg}
+      {
+        "statement" => "SELECT * from types_table",
+        "jdbc_default_timezone" => jdbc_default_timezone
+      }
     end
 
-    before do
-      db << "INSERT INTO types_table (num, string, started_at, custom_time, ranking) VALUES (1, 'A test', '1999-12-31', '
+    before(:each) do
+      db << "INSERT INTO types_table (num, string, started_at, custom_time, ranking) VALUES (1, 'A test', '1999-12-31', '2021-11-07 01:23:45', 95.67)"
       plugin.register
-      plugin.set_statement_logger(statement_logger)
-      plugin.set_value_tracker(value_tracker)
-      allow(value_tracker).to receive(:value).and_return("bar")
-      allow(statement_logger).to receive(:execute_count).once.and_raise(StandardError.new(msg))
     end
 
-
-
-    end
+    context "when initialized with a preference for DST being enabled" do
+      let(:jdbc_default_timezone) { 'America/Chicago[dst_enabled_on_overlap:true]' }
 
-
-    it "should log a debug line without a count key as its unknown whether a count works at this stage" do
-      expect(logger).to receive(:warn).once.with("Attempting a count query raised an error, the generated count statement is most likely incorrect but check networking, authentication or your statement syntax", error_args)
-      expect(logger).to receive(:warn).once.with("Ongoing count statement generation is being prevented")
-      expect(logger).to receive(:debug).once.with("Executing JDBC query", :statement => settings["statement"], :parameters => {:sql_last_value=>"bar"})
+      it 'treats the timestamp column as if DST was enabled' do
         plugin.run(queue)
-      queue.pop
+        event = queue.pop
+        expect(event.get("custom_time")).to be_a_logstash_timestamp_equivalent_to("2021-11-07T06:23:45Z")
       end
+    end
+    context "when initialized with a preference for DST being disabled" do
+      let(:jdbc_default_timezone) { 'America/Chicago[dst_enabled_on_overlap:false]' }
 
-    it
-      allow(logger).to receive(:warn)
-      allow(logger).to receive(:debug)
+      it 'treats the timestamp column as if DST was disabled' do
         plugin.run(queue)
         event = queue.pop
-      expect(event.get("
-
-
-
+        expect(event.get("custom_time")).to be_a_logstash_timestamp_equivalent_to("2021-11-07T07:23:45Z")
+      end
+    end
+    context "when initialized without a preference for DST being enabled or disabled" do
+      before(:each) { allow(plugin.logger).to receive(:warn) }
+      let(:jdbc_default_timezone) { 'America/Chicago' }
+
+      it 'the error results in helpful log warning' do
+        plugin.run(queue)
+        expect(plugin.logger).to have_received(:warn).with(a_string_including("Exception when executing JDBC query"), a_hash_including(:message => a_string_including("2021-11-07 01:23:45 is an ambiguous local time")))
       end
     end
   end
 
+  def load_derby_version
+    version = {}
+    derby_version = File.join(Dir.pwd, 'derby_version.txt')
+    File.readlines(derby_version, chomp: true).each do |line|
+      key = line.split('=')[0]
+      value = line.split('=')[1]
+      version[key] = value
+    end
+    version
+  end
+
   context "when an unreadable jdbc_driver_path entry is present" do
     let(:driver_jar_path) do
-
+      derby_version = load_derby_version()['DERBY_VERSION']
+      jar_file = $CLASSPATH.find { |name| name.index("derby-#{derby_version}.jar") }
       raise "derby jar not found on class-path" unless jar_file
       jar_file.sub('file:', '')
     end
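These contexts document the extended `jdbc_default_timezone` syntax introduced in this release: an optional `[dst_enabled_on_overlap:true|false]` suffix that disambiguates wall-clock times that occur twice when daylight-saving time ends. A short usage sketch against the `TimezoneProxy` API exercised by the new spec file further below (the expected UTC values mirror the assertions above):

    require "logstash/plugin_mixins/jdbc/timezone_proxy"

    # 2021-11-07 01:23:45 happens twice in America/Chicago (fall-back overlap).
    dst = LogStash::PluginMixins::Jdbc::TimezoneProxy.load("America/Chicago[dst_enabled_on_overlap:true]")
    std = LogStash::PluginMixins::Jdbc::TimezoneProxy.load("America/Chicago[dst_enabled_on_overlap:false]")

    dst.local_time(2021, 11, 7, 1, 23, 45).getutc  # => 2021-11-07 06:23:45 UTC (CDT, -05:00)
    std.local_time(2021, 11, 7, 1, 23, 45).getutc  # => 2021-11-07 07:23:45 UTC (CST, -06:00)
    # Without a suffix, the ambiguous time raises, and the plugin logs the
    # "ambiguous local time" warning asserted above.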
@@ -1498,16 +1642,15 @@ describe LogStash::Inputs::Jdbc do
       { "statement" => "SELECT * from types_table", "jdbc_driver_library" => invalid_driver_jar_path }
     end
 
-    before do
-      plugin.register
-    end
-
     after do
       plugin.stop
     end
 
     it "raise a loading error" do
-      expect
+      expect(File.exists?(invalid_driver_jar_path)).to be true
+      expect(FileTest.readable?(invalid_driver_jar_path)).to be false
+
+      expect { plugin.register }.
         to raise_error(LogStash::PluginLoadingError, /unable to load .*? from :jdbc_driver_library, file not readable/)
     end
   end
@@ -1619,7 +1762,7 @@ describe LogStash::Inputs::Jdbc do
       plugin.run(queue)
 
       expect(queue.size).to eq(expected_queue_size)
-      expect(
+      expect(LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))).to eq(expected_queue_size)
     end
   end
 
@@ -1644,7 +1787,7 @@ describe LogStash::Inputs::Jdbc do
       plugin.run(queue)
 
       expect(queue.size).to eq(expected_queue_size)
-      expect(
+      expect(LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))).to eq(last_run_value + expected_queue_size)
     end
   end
 end
@@ -1681,7 +1824,7 @@ describe LogStash::Inputs::Jdbc do
     let(:jdbc_driver_class) { "org.apache.NonExistentDriver" }
     it "raises a loading error" do
       expect { plugin.send(:load_driver) }.to raise_error LogStash::PluginLoadingError,
-        /
+        /ClassNotFoundException: org.apache.NonExistentDriver/
     end
   end
 end
data/spec/plugin_mixins/jdbc/timezone_proxy_spec.rb
ADDED
@@ -0,0 +1,68 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/plugin_mixins/jdbc/timezone_proxy"
+
+describe LogStash::PluginMixins::Jdbc::TimezoneProxy do
+  subject(:timezone) { described_class.load(timezone_spec) }
+
+  context 'when handling a daylight-savings ambiguous time' do
+    context 'without extensions' do
+      let(:timezone_spec) { 'America/Los_Angeles[]' }
+      it 'raises an AmbiguousTime error' do
+        expect { timezone.local_time(2021,11,7,1,17) }.to raise_error(::TZInfo::AmbiguousTime)
+      end
+    end
+    context 'with extension `dst_enabled_on_overlap:true`' do
+      let(:timezone_spec) { 'America/Los_Angeles[dst_enabled_on_overlap:true]' }
+      it 'resolves as if DST were enabled' do
+        timestamp = timezone.local_time(2021,11,7,1,17)
+        aggregate_failures do
+          expect(timestamp.dst?).to be true
+          expect(timestamp.zone).to eq('PDT') # Pacific Daylight Time
+          expect(timestamp.getutc).to eq(Time.utc(2021,11,7,8,17))
+          expect(timestamp.utc_offset).to eq( -7 * 3600 )
+        end
+      end
+    end
+    context 'with extension `dst_enabled_on_overlap:false`' do
+      let(:timezone_spec) { 'America/Los_Angeles[dst_enabled_on_overlap:false]' }
+      it 'resolves as if DST were disabled' do
+        timestamp = timezone.local_time(2021,11,7,1,17)
+        aggregate_failures do
+          expect(timestamp.dst?).to be false
+          expect(timestamp.zone).to eq('PST') # Pacific Standard Time
+          expect(timestamp.getutc).to eq(Time.utc(2021,11,7,9,17))
+          expect(timestamp.utc_offset).to eq( -8 * 3600 )
+        end
+      end
+    end
+  end
+
+  context '#load' do
+    context 'when spec is a normal timezone instance' do
+      let(:timezone_spec) { ::TZInfo::Timezone.get('America/Los_Angeles') }
+      it 'returns that instance' do
+        expect(timezone).to be(timezone_spec)
+      end
+    end
+    context 'when spec is a valid unextended timezone spec' do
+      let(:timezone_spec) { 'America/Los_Angeles' }
+      it 'returns the canonical timezone' do
+        expect(timezone).to eq(::TZInfo::Timezone.get('America/Los_Angeles'))
+      end
+    end
+    context 'when spec is an invalid timezone spec' do
+      let(:timezone_spec) { 'NotAValidTimezoneIdentifier' }
+
+      it 'propagates the TZInfo exception' do
+        expect { timezone }.to raise_exception(::TZInfo::InvalidTimezoneIdentifier)
+      end
+    end
+    context 'with invalid extension' do
+      let(:timezone_spec) { 'America/Los_Angeles[dst_enabled_on_overlap:false;nope:wrong]' }
+      it 'raises an exception with a helpful message' do
+        expect { timezone }.to raise_exception(ArgumentError, a_string_including("Invalid timezone extension `nope:wrong`"))
+      end
+    end
+  end
+end
data/spec/plugin_mixins/jdbc/value_tracking_spec.rb
ADDED
@@ -0,0 +1,113 @@
+# encoding: utf-8
+require "logstash/plugin_mixins/jdbc/value_tracking"
+require "tempfile"
+
+module LogStash module PluginMixins module Jdbc
+  describe ValueTracking do
+    context "#load_yaml" do
+
+      context "with date string" do
+        let(:yaml_date_source) { "--- !ruby/object:DateTime '2023-06-15 09:59:30.558000000 +02:00'\n" }
+
+        it "should be loaded" do
+          parsed_date = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_date_source)
+          expect(parsed_date.class).to eq DateTime
+          expect(parsed_date.year).to eq 2023
+          expect(parsed_date.month).to eq 6
+          expect(parsed_date.day).to eq 15
+        end
+      end
+
+      context "with time string" do
+        let(:yaml_time_source) { "--- 2023-06-15 15:28:15.227874000 +02:00\n" }
+
+        it "should be loaded" do
+          parsed_time = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_time_source)
+          expect(parsed_time.class).to eq Time
+          expect(parsed_time.year).to eq 2023
+          expect(parsed_time.month).to eq 6
+          expect(parsed_time.day).to eq 15
+          expect(parsed_time.hour).to eq 15
+          expect(parsed_time.min).to eq 28
+          expect(parsed_time.sec).to eq 15
+        end
+      end
+
+      context "with date string" do
+        let(:yaml_bigdecimal_source) { "--- !ruby/object:BigDecimal '0:0.1e1'\n" }
+
+        it "should be loaded" do
+          parsed_bigdecimal = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_bigdecimal_source)
+          expect(parsed_bigdecimal.class).to eq BigDecimal
+          expect(parsed_bigdecimal.to_i).to eq 1
+        end
+      end
+    end
+
+    context "#build_last_value_tracker" do
+
+      let(:plugin) { double("fake plugin") }
+      let(:temp_file) { Tempfile.new('last_run_tracker') }
+
+      before(:each) do
+        allow(plugin).to receive(:record_last_run).and_return(true)
+        allow(plugin).to receive(:clean_run).and_return(false)
+        allow(plugin).to receive(:last_run_metadata_file_path).and_return(temp_file.path)
+      end
+
+      context "create numerical tracker" do
+        before(:each) do
+          allow(plugin).to receive(:use_column_value).and_return(true)
+          allow(plugin).to receive(:tracking_column_type).and_return("numeric")
+        end
+
+        it "should write correctly" do
+          tracker = ValueTracking.build_last_value_tracker(plugin)
+          tracker.set_value(1)
+          tracker.write
+
+          temp_file.rewind
+          v = ValueTracking.load_yaml(::File.read(temp_file.path))
+          expect(v).to eq 1
+        end
+      end
+
+      context "create date time tracker" do
+        before(:each) do
+          allow(plugin).to receive(:use_column_value).and_return(false)
+          allow(plugin).to receive(:jdbc_default_timezone).and_return(:something_not_nil)
+        end
+
+        it "should write correctly" do
+          tracker = ValueTracking.build_last_value_tracker(plugin)
+          tracker.set_value("2023-06-15T15:28:15+02:00")
+          tracker.write
+
+          temp_file.rewind
+          v = ValueTracking.load_yaml(::File.read(temp_file.path))
+          expect(v.class).to eq DateTime
+          expect(v.year).to eq 2023
+        end
+      end
+
+      context "create time tracker" do
+        before(:each) do
+          allow(plugin).to receive(:use_column_value).and_return(false)
+          allow(plugin).to receive(:jdbc_default_timezone).and_return(nil)
+        end
+
+        it "should write correctly" do
+          tracker = ValueTracking.build_last_value_tracker(plugin)
+          tracker.set_value("2023-06-15T15:28:15+02:00")
+          tracker.write
+
+          temp_file.rewind
+          v = ValueTracking.load_yaml(::File.read(temp_file.path))
+          expect(v.class).to eq Time
+          expect(v.min).to eq 28
+        end
+      end
+
+    end
+  end
+end end end
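Throughout the suite, raw YAML reads of the last-run file are replaced by `ValueTracking.load_yaml`. Judging by the types exercised above (Time, DateTime, BigDecimal), this is presumably a safe-load wrapper that permits exactly those classes, keeping sentinel deserialization working now that Psych 4 makes `YAML.load` safe by default. A hypothetical equivalent (not the gem's actual implementation):

    require "yaml"
    require "date"
    require "bigdecimal"

    # Permit only the types a last-run sentinel can legitimately contain.
    def load_yaml(source)
      Psych.safe_load(source, permitted_classes: [Time, Date, DateTime, BigDecimal])
    end

    load_yaml("--- !ruby/object:BigDecimal '0:0.1e1'\n")  # => BigDecimal("1")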