logstash-integration-jdbc 5.4.2 → 5.4.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: c54f1c22363c2d0637b44a2b68a98e22a1b2f07bc68927a4cc9bd6788a6fc009
- data.tar.gz: 3ea76ee4fcdde152e43af6c0aab53be50dc022a8c31fce8ebac36669370a4875
+ metadata.gz: 8aae1525f173288cfa38a488fdf35425705463d6b04b44ff34f08be8abce5806
+ data.tar.gz: 21d44df47ae618e51f8f6f96812da9b1d0d9ece07c55d45fcd299a51c3e0108d
  SHA512:
- metadata.gz: d31cef0d20064897337825bcf7ff491d3b64295c47b644d527974f458ac2c8a180d930b84b55fac6601a5f76faa10754b8975af681c4e0718ec2711e08a0d571
- data.tar.gz: 6434512f2b958b976a12a7c4afe3d584066cb505c4818d8e340e363a7db4327a433785fc9ddfddc6c67eb2d719fc3f518346974760003873250895cb396d36e1
+ metadata.gz: 44396ddac2afadbd4221f24204f2d657c8ad7185a430367c9ddbfc4afd18e7ef26334aa96c0b9a48ca6d95f162a9d98f778d78f4495f2143fee028fc30a50e7e
+ data.tar.gz: 24ffc8ea5863a823ab6b631051a193295f651133d04fa9ab31e9dd76a77ad44fa25b470064710c07f10c262a8a101bc900a60f57e7a4d8a46f575930fc9cdbb9
data/CHANGELOG.md CHANGED
@@ -1,3 +1,9 @@
+ ## 5.4.4
+ - Fix: adaptations for JRuby 9.4 [#125](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/125)
+
+ ## 5.4.3
+ - Fix crash when metadata file can't be deleted after moving under path.data [#136](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/136)
+
  ## 5.4.2
  - Doc: described default_hash and tag_on_default_use interaction filter plugin [#122](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/122)
  - Added new settings `statement_retry_attempts` and `statement_retry_attempts_wait_time` for retry of failed sql statement execution [#123](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/123)
@@ -491,10 +491,12 @@ How often to validate a connection (in seconds)
  ===== `last_run_metadata_path`

  * Value type is <<string,string>>
- * Default value is `"$HOME/.logstash_jdbc_last_run"`
+ * Default value is `"<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run"`

  Path to file with last run time

+ In versions prior to 5.2.6 the metadata file was written to `$HOME/.logstash_jdbc_last_run`. If during a Logstash upgrade the file is found in "$HOME" it will be moved to the default location under "path.data". If the path is defined by the user then no automatic move is performed.
+
  [id="plugins-{type}s-{plugin}-lowercase_column_names"]
  ===== `lowercase_column_names`

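Note on the documentation change above: supplying `last_run_metadata_path` yourself is what opts a pipeline out of the automatic move. A minimal spec-style settings hash as a sketch; the connection string, driver class, and paths are illustrative assumptions, not values taken from this package:

    settings = {
      "jdbc_connection_string" => "jdbc:derby:memory:testdb;create=true",  # assumed example DSN
      "jdbc_driver_class" => "org.apache.derby.jdbc.EmbeddedDriver",       # assumed example driver
      "statement" => "SELECT * FROM test WHERE id > :sql_last_value",
      # Explicit, user-defined path: the plugin keeps using it and performs no $HOME -> path.data migration.
      "last_run_metadata_path" => "/var/lib/logstash/jdbc_last_run"
    }
    # Omitting last_run_metadata_path falls back to
    # "<path.data>/plugins/inputs/jdbc/logstash_jdbc_last_run", as documented above.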
@@ -406,7 +406,7 @@ module LogStash module Inputs class Jdbc < LogStash::Inputs::Base
  # the correct access rights
  ::File.delete(old_default_path.to_path)
  @logger.info("Successfully moved the #{old_default_path.to_path} into #{metadata_new_path.to_path}")
- rescue e
+ rescue => e
  @logger.warn("Using new metadata file at #{metadata_new_path.to_path} but #{old_default_path} can't be removed.")
  end
  end
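The one-character change above is the 5.4.3 crash fix: a bare `rescue e` asks Ruby to match against the exception class produced by evaluating `e`, and since no such local variable or method exists, the handler itself raised `NameError` instead of logging the warning. A minimal sketch of the two forms, using a hypothetical helper rather than the plugin's method:

    # Illustrative sketch, not the plugin's code.
    def delete_with_bare_rescue(path)
      File.delete(path)
    rescue e                      # evaluated as "rescue <result of e>"; `e` is undefined, so NameError
      warn "never reached"
    end

    def delete_with_fixed_rescue(path)
      File.delete(path)
    rescue => e                   # shorthand for "rescue StandardError => e"
      warn "could not remove #{path}: #{e.class}"
    end

    delete_with_fixed_rescue("/tmp/does_not_exist")  # warns instead of raising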
@@ -1,5 +1,7 @@
  # encoding: utf-8
  require "yaml" # persistence
+ require "date"
+ require "bigdecimal"

  module LogStash module PluginMixins module Jdbc
  class ValueTracking
@@ -31,6 +33,17 @@ module LogStash module PluginMixins module Jdbc
  set_initial
  end

+ if Psych::VERSION&.split('.')&.first.to_i >= 4
+ YAML_PERMITTED_CLASSES = [::DateTime, ::Time, ::BigDecimal].freeze
+ def self.load_yaml(source)
+ Psych::safe_load(source, permitted_classes: YAML_PERMITTED_CLASSES)
+ end
+ else
+ def self.load_yaml(source)
+ YAML::load(source)
+ end
+ end
+
  def set_initial
  # override in subclass
  end
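The version branch above exists because Psych 4 (bundled with newer JRuby/Ruby) turns `YAML.load` into a safe loader that rejects non-basic Ruby objects such as the `DateTime` values persisted in the last-run file. A standalone sketch of that behavior, assuming Psych 3.1 or newer so that `safe_load` accepts the `permitted_classes` keyword:

    require "yaml"
    require "date"
    require "bigdecimal"

    payload = "--- !ruby/object:DateTime '2022-03-08 08:10:00.486889000 Z'\n"

    begin
      YAML.safe_load(payload)          # Psych >= 4: raises, DateTime is not permitted by default
    rescue Psych::DisallowedClass => e
      warn "rejected without an allow-list: #{e.message}"
    end

    # Allow-listing the classes the tracker actually writes restores round-tripping.
    value = YAML.safe_load(payload, permitted_classes: [DateTime, Time, BigDecimal])
    puts value.class                   # => DateTime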
@@ -112,7 +125,7 @@ module LogStash module PluginMixins module Jdbc

  def read
  return unless @exists
- YAML.load(::File.read(@path))
+ ValueTracking.load_yaml(::File.read(@path))
  end

  def write(value)
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-jdbc'
- s.version = '5.4.2'
+ s.version = '5.4.4'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Integration with JDBC - input and filter plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -259,7 +259,7 @@ module LogStash module Filters
  CONFIG
  end

- sample("message" => "some text") do
+ sample({"message" => "some text"}) do
  expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
  end
  end
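This hunk, and the four similar ones that follow, wrap the hash passed to `sample` in explicit braces. Under JRuby 9.4, which follows Ruby 3.x keyword-argument separation, a bare trailing hash can be split off as keyword arguments instead of arriving as a single positional hash. A hedged sketch using a hypothetical `capture` helper, not the real `sample` helper from the spec suite:

    # `capture` stands in for any helper with a splat + kwargs signature.
    def capture(*args, **kwargs)
      { args: args, kwargs: kwargs }
    end

    capture("message" => "some text")
    #=> { args: [], kwargs: { "message" => "some text" } }    bare hash promoted to kwargs (Ruby 3.x)

    capture({"message" => "some text"})
    #=> { args: [{ "message" => "some text" }], kwargs: {} }  explicit literal stays positional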
@@ -277,7 +277,7 @@ module LogStash module Filters
  CONFIG
  end

- sample("message" => "some text") do
+ sample({"message" => "some text"}) do
  expect(subject.get('new_field')).to eq([{"col_1" => 'from_database'}])
  end
  end
@@ -296,11 +296,11 @@ module LogStash module Filters
  CONFIG
  end

- sample("message" => "some text", "param_field" => "1") do
+ sample({"message" => "some text", "param_field" => "1"}) do
  expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
  end

- sample("message" => "some text", "param_field" => "2") do
+ sample({"message" => "some text", "param_field" => "2"}) do
  expect(subject.get('new_field').nil?)
  end
  end
@@ -319,11 +319,11 @@ module LogStash module Filters
  CONFIG
  end

- sample("message" => "some text", "param_field" => 1) do
+ sample({"message" => "some text", "param_field" => 1}) do
  expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
  end

- sample("message" => "some text", "param_field" => "1") do
+ sample({"message" => "some text", "param_field" => "1"}) do
  expect(subject.get('new_field').nil?)
  end
  end
@@ -342,7 +342,7 @@ module LogStash module Filters
  CONFIG
  end

- sample("message" => "some text") do
+ sample({"message" => "some text"}) do
  expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
  end
  end
@@ -277,7 +277,7 @@ describe LogStash::Inputs::Jdbc do
  sleep 1
  for i in 0..1
  sleep 1
- updated_last_run = YAML.load(File.read(settings["last_run_metadata_path"]))
+ updated_last_run = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))
  expect(updated_last_run).to be > last_run_time
  last_run_time = updated_last_run
  end
@@ -547,7 +547,7 @@ describe LogStash::Inputs::Jdbc do
  expect(actual).to eq(expected)
  plugin.stop
  raw_last_run_value = File.read(settings["last_run_metadata_path"])
- last_run_value = YAML.load(raw_last_run_value)
+ last_run_value = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(raw_last_run_value)
  expect(last_run_value).to be_a(DateTime)
  expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 02:00:00.722000000 +00:00")

@@ -562,7 +562,7 @@ describe LogStash::Inputs::Jdbc do
  plugin.stop
  expect(event.get("num")).to eq(12)
  expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-02T03:00:00.811Z"))
- last_run_value = YAML.load(File.read(settings["last_run_metadata_path"]))
+ last_run_value = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))
  expect(last_run_value).to be_a(DateTime)
  # verify that sub-seconds are recorded to the file
  expect(last_run_value.strftime("%F %T.%N %Z")).to eq("2015-01-02 03:00:00.811000000 +00:00")
@@ -1169,19 +1169,31 @@ describe LogStash::Inputs::Jdbc do
  context "when a file exists" do
  before do
  # in a faked HOME folder save a valid previous last_run metadata file
+ allow(ENV).to receive(:[]).with(anything).and_call_original
  allow(ENV).to receive(:[]).with('HOME').and_return(fake_home)
- File.open("#{ENV['HOME']}/.logstash_jdbc_last_run", 'w') do |file|
+
+ File.open("#{fake_home}/.logstash_jdbc_last_run", 'w') do |file|
  file.write("--- !ruby/object:DateTime '2022-03-08 08:10:00.486889000 Z'")
  end
  end
+ let(:old_path) { "#{fake_home}/.logstash_jdbc_last_run" }
+ let(:path_data) { LogStash::SETTINGS.get_value("path.data") }
+ let(:new_path) { "#{path_data}/plugins/inputs/jdbc/logstash_jdbc_last_run" }

  it "should be moved" do
  plugin.register
-
- expect(::File.exist?("#{ENV['HOME']}/.logstash_jdbc_last_run")).to be false
- path = LogStash::SETTINGS.get_value("path.data")
- full_path = "#{path}/plugins/inputs/jdbc/logstash_jdbc_last_run"
- expect(::File.exist?(full_path)).to be true
+ expect(::File).to_not exist(old_path)
+ expect(::File).to exist(new_path)
+ end
+ context "if the delete fails" do
+ before(:each) do
+ allow(File).to receive(:delete).and_raise ArgumentError
+ end
+ it "should be still be moved" do
+ plugin.register
+ expect(::File).to exist(old_path) # old still exists
+ expect(::File).to exist(new_path)
+ end
  end
  end
  end
@@ -1710,7 +1722,7 @@ describe LogStash::Inputs::Jdbc do
  plugin.run(queue)

  expect(queue.size).to eq(expected_queue_size)
- expect(YAML.load(File.read(settings["last_run_metadata_path"]))).to eq(expected_queue_size)
+ expect(LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))).to eq(expected_queue_size)
  end
  end

@@ -1735,7 +1747,7 @@ describe LogStash::Inputs::Jdbc do
  plugin.run(queue)

  expect(queue.size).to eq(expected_queue_size)
- expect(YAML.load(File.read(settings["last_run_metadata_path"]))).to eq(last_run_value + expected_queue_size)
+ expect(LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(File.read(settings["last_run_metadata_path"]))).to eq(last_run_value + expected_queue_size)
  end
  end
  end
data/spec/plugin_mixins/jdbc/value_tracking_spec.rb ADDED
@@ -0,0 +1,113 @@
+ # encoding: utf-8
+ require "logstash/plugin_mixins/jdbc/value_tracking"
+ require "tempfile"
+
+ module LogStash module PluginMixins module Jdbc
+ describe ValueTracking do
+ context "#load_yaml" do
+
+ context "with date string" do
+ let(:yaml_date_source) { "--- !ruby/object:DateTime '2023-06-15 09:59:30.558000000 +02:00'\n" }
+
+ it "should be loaded" do
+ parsed_date = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_date_source)
+ expect(parsed_date.class).to eq DateTime
+ expect(parsed_date.year).to eq 2023
+ expect(parsed_date.month).to eq 6
+ expect(parsed_date.day).to eq 15
+ end
+ end
+
+ context "with time string" do
+ let(:yaml_time_source) { "--- 2023-06-15 15:28:15.227874000 +02:00\n" }
+
+ it "should be loaded" do
+ parsed_time = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_time_source)
+ expect(parsed_time.class).to eq Time
+ expect(parsed_time.year).to eq 2023
+ expect(parsed_time.month).to eq 6
+ expect(parsed_time.day).to eq 15
+ expect(parsed_time.hour).to eq 15
+ expect(parsed_time.min).to eq 28
+ expect(parsed_time.sec).to eq 15
+ end
+ end
+
+ context "with date string" do
+ let(:yaml_bigdecimal_source) { "--- !ruby/object:BigDecimal '0:0.1e1'\n" }
+
+ it "should be loaded" do
+ parsed_bigdecimal = LogStash::PluginMixins::Jdbc::ValueTracking.load_yaml(yaml_bigdecimal_source)
+ expect(parsed_bigdecimal.class).to eq BigDecimal
+ expect(parsed_bigdecimal.to_i).to eq 1
+ end
+ end
+ end
+
+ context "#build_last_value_tracker" do
+
+ let(:plugin) { double("fake plugin") }
+ let(:temp_file) { Tempfile.new('last_run_tracker') }
+
+ before(:each) do
+ allow(plugin).to receive(:record_last_run).and_return(true)
+ allow(plugin).to receive(:clean_run).and_return(false)
+ allow(plugin).to receive(:last_run_metadata_file_path).and_return(temp_file.path)
+ end
+
+ context "create numerical tracker" do
+ before(:each) do
+ allow(plugin).to receive(:use_column_value).and_return(true)
+ allow(plugin).to receive(:tracking_column_type).and_return("numeric")
+ end
+
+ it "should write correctly" do
+ tracker = ValueTracking.build_last_value_tracker(plugin)
+ tracker.set_value(1)
+ tracker.write
+
+ temp_file.rewind
+ v = ValueTracking.load_yaml(::File.read(temp_file.path))
+ expect(v).to eq 1
+ end
+ end
+
+ context "create date time tracker" do
+ before(:each) do
+ allow(plugin).to receive(:use_column_value).and_return(false)
+ allow(plugin).to receive(:jdbc_default_timezone).and_return(:something_not_nil)
+ end
+
+ it "should write correctly" do
+ tracker = ValueTracking.build_last_value_tracker(plugin)
+ tracker.set_value("2023-06-15T15:28:15+02:00")
+ tracker.write
+
+ temp_file.rewind
+ v = ValueTracking.load_yaml(::File.read(temp_file.path))
+ expect(v.class).to eq DateTime
+ expect(v.year).to eq 2023
+ end
+ end
+
+ context "create time tracker" do
+ before(:each) do
+ allow(plugin).to receive(:use_column_value).and_return(false)
+ allow(plugin).to receive(:jdbc_default_timezone).and_return(nil)
+ end
+
+ it "should write correctly" do
+ tracker = ValueTracking.build_last_value_tracker(plugin)
+ tracker.set_value("2023-06-15T15:28:15+02:00")
+ tracker.write
+
+ temp_file.rewind
+ v = ValueTracking.load_yaml(::File.read(temp_file.path))
+ expect(v.class).to eq Time
+ expect(v.min).to eq 28
+ end
+ end
+
+ end
+ end
+ end end end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-jdbc
  version: !ruby/object:Gem::Version
- version: 5.4.2
+ version: 5.4.4
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2023-05-10 00:00:00.000000000 Z
+ date: 2023-06-15 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -307,6 +307,7 @@ files:
  - spec/inputs/integration/integ_spec.rb
  - spec/inputs/jdbc_spec.rb
  - spec/plugin_mixins/jdbc/timezone_proxy_spec.rb
+ - spec/plugin_mixins/jdbc/value_tracking_spec.rb
  - spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb
  - vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar
  - vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar
@@ -333,7 +334,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.1.6
+ rubygems_version: 3.2.33
  signing_key:
  specification_version: 4
  summary: Integration with JDBC - input and filter plugins
@@ -360,4 +361,5 @@ test_files:
  - spec/inputs/integration/integ_spec.rb
  - spec/inputs/jdbc_spec.rb
  - spec/plugin_mixins/jdbc/timezone_proxy_spec.rb
+ - spec/plugin_mixins/jdbc/value_tracking_spec.rb
  - spec/plugin_mixins/jdbc_streaming/parameter_handler_spec.rb