logstash-integration-jdbc 5.4.10 → 5.5.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 8b3c2ba844927de3fe8fd1ea8a23fe09af8da74835eb8c87a83e4de9f67b3d21
- data.tar.gz: 401acb665aacbf8e9f75b6d5092985998d766217619fcc2d26c7d078f0d4f82f
+ metadata.gz: e155ca5542f41d749481a67cf65cf28ad194f019defe6368f765cd1a0ccf3860
+ data.tar.gz: 81cbc828cbbdecb5d3265c07f5777f68c3103252edbc772868c1aa8f1323b566
  SHA512:
- metadata.gz: c68b79a3bdb9d0529dbff7b1c0e80dcd0e840d57f6c760c056fbbd5525be295bfedb8ddf7b2450ab125eee14c039cf61a7ea38aca81e597a98ea815b51955bab
- data.tar.gz: eb872090eb0dfc7bb073aa8c3f992679ecfcb66979c2de6f6838ff17f6acc297d82140cb60ef71a55dc910ad17c9ecab921e62e9d899a4943949ba3003b38a7a
+ metadata.gz: 0d3f9f07bd92ff1c21f417795e20edbd5f78542181a00f4bfd770dd7f9fb1066df8bbec9af242126b13148049cc70f41190b5246104a23c549af6a62338807c1
+ data.tar.gz: 71c92b8dba18479be7d69f64b580d2bd99902d7d330fd1633997e8134c69403f640e1ba555a1b1640d6737de8b53c68d51e4cab75b4da82eeda7fbf549f72c0c
data/CHANGELOG.md CHANGED
@@ -1,6 +1,13 @@
+ ## 5.5.0
+ - Feat: add support for SQL `DATE` columns to jdbc static and streaming filters [#171](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/171)
+
+ ## 5.4.11
+ - Fixes an issue in which any one instance of a JDBC input plugin using `jdbc_default_timezone` changes the behaviour of plugin instances that do _not_ use `jdbc_default_timezone`, ensuring that timezone offsets remain consistent for each instance of the plugin _as configured_ [#151](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/151)
+ - Fixes an exception that could occur while reloading `jdbc_static` databases when the underlying connection to the remote has been broken [#165](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/165)
+
  ## 5.4.10
- - Adds retry mechanism when checkout Derby from SVN repository [#158](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/158)
- - [DOC] add known limitations and settings for connection issue [#167](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/167)
+ - Adds retry mechanism when checkout Derby from SVN repository [#158](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/158)
+ - [DOC] add known limitations and settings for connection issue [#167](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/167)

  ## 5.4.9
  - Fix Derby missed driver classes when built locally for version 10.15 [#160](https://github.com/logstash-plugins/logstash-integration-jdbc/pull/160)
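
The headline 5.5.0 change is that the static and streaming filters now route each looked-up row through a shared value-decoration path instead of only stringifying its keys. A minimal before/after sketch in plain Ruby (no Logstash classes assumed), mirroring the code moved into the new `ValueHandler` module further down this diff:

```ruby
require "date"

row = { ip: "10.3.1.1", entry_date: Date.new(2003, 2, 1) }

# 5.4.x filters: keys were stringified, values passed through untouched,
# so a SQL DATE surfaced as a plain Ruby Date in the lookup result.
old = row.inject({}) { |hash, (k, v)| hash[k.to_s] = v; hash }
old["entry_date"].class # => Date

# 5.5.0 filters: Date/DateTime values are additionally converted to Time
# (DateTime is a subclass of Date, so one check covers both), which the
# specs below show ending up as a LogStash::Timestamp on the event.
new = row.map { |k, v| [k.to_s, v.is_a?(Date) ? v.to_time : v] }.to_h
new["entry_date"].class # => Time
```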
@@ -1,6 +1,6 @@
  # encoding: utf-8
  require "fileutils"
- require "sequel"
+ require "logstash/plugin_mixins/jdbc/sequel_bootstrap"
  require "sequel/adapters/jdbc"
  require "java"
  require "logstash/util/loggable"
@@ -1,10 +1,12 @@
  # encoding: utf-8
  require_relative "lookup_result"
  require "logstash/util/loggable"
+ require "logstash/plugin_mixins/jdbc/value_handler"

  module LogStash module Filters module Jdbc
  class Lookup
  include LogStash::Util::Loggable
+ include LogStash::PluginMixins::Jdbc::ValueHandler

  class Sprintfier
  def initialize(param)
@@ -134,15 +136,13 @@ module LogStash module Filters module Jdbc

  def load_data_from_local(local, query, params, result)
  local.fetch(query, params).each do |row|
- stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
- result.push(stringified)
+ result.push(extract_values_from(row))
  end
  end

  def load_data_from_prepared(_local, _query, params, result)
  @prepared_statement.call(params).each do |row|
- stringified = row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
- result.push(stringified)
+ result.push(extract_values_from(row))
  end
  end

@@ -12,7 +12,7 @@ module LogStash module Filters module Jdbc
  if connected?
  result = @db[statement].count
  else
- debug_log_messages.concat("and there is no connection to the remote db at this time")
+ debug_log_messages << "and there is no connection to the remote db at this time"
  end
  rescue ::Sequel::Error => err
  # a fatal issue
@@ -32,7 +32,7 @@ module LogStash module Filters module Jdbc
  if connected?
  result = @db[statement].all
  else
- debug_log_messages.concat("and there is no connection to the remote db at this time")
+ debug_log_messages << "and there is no connection to the remote db at this time"
  end
  rescue ::Sequel::Error => err
  # a fatal issue
@@ -24,7 +24,8 @@ module LogStash module PluginMixins module Jdbc
  return @driver_impl if @driver_impl ||= nil

  require "java"
- require "sequel"
+
+ require_relative "sequel_bootstrap"
  require "sequel/adapters/jdbc"

  # execute all the driver loading related duties in a serial fashion to avoid
@@ -6,6 +6,7 @@ require "date"
  require_relative "value_tracking"
  require_relative "timezone_proxy"
  require_relative "statement_handler"
+ require_relative "value_handler"

  java_import java.util.concurrent.locks.ReentrantLock

@@ -13,6 +14,7 @@ java_import java.util.concurrent.locks.ReentrantLock
  # for potential reuse in other plugins (input/output)
  module LogStash module PluginMixins module Jdbc
  module Jdbc
+ include LogStash::PluginMixins::Jdbc::ValueHandler
  # This method is called when someone includes this module
  def self.included(base)
  # Add these methods to the 'base' given.
@@ -252,25 +254,6 @@ module LogStash module PluginMixins module Jdbc
  row[@tracking_column.to_sym]
  end
  end
-
- private
- #Stringify row keys and decorate values when necessary
- def extract_values_from(row)
- Hash[row.map { |k, v| [k.to_s, decorate_value(v)] }]
- end
-
- private
- def decorate_value(value)
- case value
- when Time
- # transform it to LogStash::Timestamp as required by LS
- LogStash::Timestamp.new(value)
- when Date, DateTime
- LogStash::Timestamp.new(value.to_time)
- else
- value
- end
- end
  end
  end end end

@@ -0,0 +1,21 @@
+ # encoding: utf-8
+
+ require "sequel"
+
+ # prevent Sequel's datetime_class from being modified,
+ # and ensure behaviour is restored to the library's default
+ # if something else in the Ruby VM has already changed it.
+ Sequel.synchronize do
+ def Sequel.datetime_class=(klass)
+ # noop
+ end
+ def Sequel.datetime_class
+ ::Time
+ end
+ end
+
+ # load the named_timezones extension, which will attempt to
+ # override the global Sequel::datetime_class; for safety,
+ # we reset it once more.
+ Sequel.extension(:named_timezones)
+ Sequel.datetime_class = ::Time
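
For context, a hedged sketch using only the Sequel gem (nothing from this plugin): the `named_timezones` extension attempts to swap Sequel's process-global `datetime_class` away from `Time`, which is exactly the cross-instance mutation the new `lib/logstash/plugin_mixins/jdbc/sequel_bootstrap.rb` pins back to `Time` for every plugin sharing the Ruby VM. The exact post-extension class depends on the Sequel version in use.

```ruby
require "sequel"

Sequel.datetime_class              # => Time (Sequel's default)
Sequel.extension(:named_timezones)
# Without the bootstrap's no-op setter, the extension may have switched the
# global class (historically to DateTime), changing how every JDBC plugin
# instance in the process materializes timestamps.
Sequel.datetime_class
```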
@@ -0,0 +1,24 @@
+ # encoding: utf-8
+ require "time"
+ require "date"
+
+ module LogStash module PluginMixins module Jdbc
+ # Provides functions to extract the row's values, ensuring column types
+ # are properly decorated to become coercible to a LogStash::Event.
+ module ValueHandler
+ # Stringify the row keys and decorate values when necessary
+ def extract_values_from(row)
+ Hash[row.map { |k, v| [k.to_s, decorate_value(v)] }]
+ end
+
+ # Decorate the value so it can be used as a LogStash::Event field
+ def decorate_value(value)
+ case value
+ when Date, DateTime
+ value.to_time
+ else
+ value
+ end
+ end
+ end
+ end end end
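
A hypothetical usage sketch of the new mixin (the `RowMapper` class is invented for illustration; in this diff the module is actually mixed into `Lookup`, the streaming `StatementHandler`, and the `Jdbc` input mixin):

```ruby
require "date"
require "logstash/plugin_mixins/jdbc/value_handler"

# Illustration-only host class for the mixin.
class RowMapper
  include LogStash::PluginMixins::Jdbc::ValueHandler
end

row = { name: "mv-server-1", entry_date: Date.new(2003, 2, 1) }

RowMapper.new.extract_values_from(row)
# => { "name" => "mv-server-1", "entry_date" => 2003-02-01 00:00:00 ... }
#    entry_date is now a Time, which becomes a LogStash::Timestamp once
#    it is written onto an event (as the specs below verify).
```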
@@ -1,5 +1,6 @@
  # encoding: utf-8
  require "logstash/util/loggable"
+ require "logstash/plugin_mixins/jdbc/value_handler"

  module LogStash module PluginMixins module JdbcStreaming
  # so as to not clash with the class of the same name and function in the jdbc input
@@ -7,6 +8,8 @@ module LogStash module PluginMixins module JdbcStreaming
  # this duplication can be removed in a universal plugin

  class StatementHandler
+ include LogStash::PluginMixins::Jdbc::ValueHandler
+
  def self.build_statement_handler(plugin)
  klass = plugin.use_prepared_statements ? PreparedStatementHandler : NormalStatementHandler
  klass.new(plugin)
@@ -86,7 +89,7 @@ module LogStash module PluginMixins module JdbcStreaming
  def execute_extract_records(db, params, result)
  dataset = db[statement, params] # returns a Sequel dataset
  dataset.all do |row|
- result.push row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} # Stringify row keys
+ result.push extract_values_from(row)
  end
  end

@@ -113,7 +116,7 @@ module LogStash module PluginMixins module JdbcStreaming
  def execute_extract_records(db, params, result)
  records = db.call(name, params) # returns an array of hashes
  records.each do |row|
- result.push row.inject({}){|hash,(k,v)| hash[k.to_s] = v; hash} #Stringify row keys
+ result.push extract_values_from(row)
  end
  end

@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-integration-jdbc'
- s.version = '5.4.10'
+ s.version = '5.5.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Integration with JDBC - input and filter plugins"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -116,6 +116,45 @@ module LogStash module Filters
  plugin.filter(event)
  expect(event.get("server")).to eq([{"ip"=>"10.3.1.1", "name"=>"mv-server-1", "location"=>"MV-9-6-4"}])
  end
+
+ context 'and record with temporal columns' do
+ let(:loader_statement) { "SELECT ip, name, location, entry_date, entry_time, timestamp FROM reference_table" }
+ let(:local_db_objects) do
+ [
+ {
+ "name" => "servers",
+ "columns" => [
+ %w[ip varchar(64)],
+ %w[name varchar(64)],
+ %w[location varchar(64)],
+ %w[entry_date date],
+ %w[entry_time time],
+ %w[timestamp timestamp]
+ ]
+ },
+ ]
+ end
+
+ before(:each) { plugin.register }
+
+ subject { event.get("server").first }
+
+ it "maps the DATE to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['entry_date']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1)))
+ end
+
+ it "maps the TIME field to a Logstash Timestamp" do
+ plugin.filter(event)
+ now = DateTime.now
+ expect(subject['entry_time']).to eq(LogStash::Timestamp.new(Time.new(now.year, now.month, now.day, 10, 5, 0)))
+ end
+
+ it "maps the TIMESTAMP to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['timestamp']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1, 1, 2, 3)))
+ end
+ end
  end

  context "under normal conditions when index_columns is not specified" do
@@ -63,7 +63,32 @@ module LogStash module Filters
  end
  end

- describe "In Prepared Statement mode, found record - uses row" do
+ describe 'found record with temporal columns' do
+ let(:idx) { 200 }
+ let(:statement) { "SELECT entry_date, entry_time, timestamp FROM reference_table WHERE ip = :ip" }
+
+ before(:each) { plugin.register }
+
+ subject { event.get("server").first }
+
+ it "maps the DATE to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['entry_date']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1)))
+ end
+
+ it "maps the TIME field to a Logstash Timestamp" do
+ plugin.filter(event)
+ now = DateTime.now
+ expect(subject['entry_time']).to eq(LogStash::Timestamp.new(Time.new(now.year, now.month, now.day, 10, 5, 0)))
+ end
+
+ it "maps the TIMESTAMP to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['timestamp']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1, 1, 2, 3)))
+ end
+ end
+
+ context 'prepared statement mode' do
  let(:idx) { 200 }
  let(:statement) { "SELECT name, location FROM reference_table WHERE ip = ?" }
  let(:settings) do
@@ -82,10 +107,37 @@ module LogStash module Filters
  "sequel_opts" => {"pool_timeout" => 600}
  }
  end
- it "fills in the target" do
- plugin.filter(event)
- expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
- expect((event.get("tags") || []) & ["lookup_failed", "default_used_instead"]).to be_empty
+
+ describe "found record - uses row" do
+ it "fills in the target" do
+ plugin.filter(event)
+ expect(event.get("server")).to eq([{"name" => "ldn-server-#{idx}", "location" => "LDN-#{idx}-2-3"}])
+ expect((event.get("tags") || []) & ["lookup_failed", "default_used_instead"]).to be_empty
+ end
+ end
+
+ describe 'found record with temporal columns' do
+ let(:statement) { "SELECT entry_date, entry_time, timestamp FROM reference_table WHERE ip = ?" }
+
+ before(:each) { plugin.register }
+
+ subject { event.get("server").first }
+
+ it "maps the DATE to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['entry_date']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1)))
+ end
+
+ it "maps the TIME field to a Logstash Timestamp" do
+ plugin.filter(event)
+ now = DateTime.now
+ expect(subject['entry_time']).to eq(LogStash::Timestamp.new(Time.new(now.year, now.month, now.day, 10, 5, 0)))
+ end
+
+ it "maps the TIMESTAMP to a Logstash Timestamp" do
+ plugin.filter(event)
+ expect(subject['timestamp']).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1, 1, 2, 3)))
+ end
  end
  end

@@ -66,6 +66,29 @@ describe LogStash::Inputs::Jdbc, :integration => true do
  expect(event.get('first_name')).to eq('David')
  end
  end
+
+ context 'with temporal columns' do
+ let(:settings) do
+ super().merge("statement" => 'SELECT ENTRY_DATE, ENTRY_TIME, TIMESTAMP FROM "employee" WHERE EMP_NO = 2')
+ end
+
+ before(:each) { plugin.run(queue) }
+
+ subject(:event) { queue.pop }
+
+ it "maps the DATE to a Logstash Timestamp" do
+ expect(event.get('entry_date')).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1)))
+ end
+
+ it "maps the TIME field to a Logstash Timestamp" do
+ now = DateTime.now
+ expect(event.get('entry_time')).to eq(LogStash::Timestamp.new(Time.new(now.year, now.month, now.day, 10, 5, 0)))
+ end
+
+ it "maps the TIMESTAMP to a Logstash Timestamp" do
+ expect(event.get('timestamp')).to eq(LogStash::Timestamp.new(Time.new(2003, 2, 1, 1, 2, 3)))
+ end
+ end
  end

  context "when supplying a non-existent library" do
@@ -601,6 +601,33 @@ describe LogStash::Inputs::Jdbc do
  # With no timezone set, no change should occur
  expect(event.get("custom_time").time).to eq(Time.iso8601("2015-01-01T12:00:00Z"))
  end
+
+ %w(
+ Etc/UTC
+ America/Los_Angeles
+ Europe/Berlin
+ Asia/Tokyo
+ ).each do |local_timezone|
+ context "when host machine has timezone `#{local_timezone}`" do
+ around(:each) do |example|
+ begin
+ previous_tz = ENV['TZ']
+ ENV['TZ'] = local_timezone
+ example.call
+ ensure
+ ENV['TZ'] = previous_tz
+ end
+ end
+
+ let(:tz) { TZInfo::Timezone.get(local_timezone) }
+
+ it "converts the time using the machine's local timezone" do
+ plugin.run(queue)
+ event = queue.pop
+ expect(event.get("custom_time").time).to eq(Time.new(2015,1,1,12,0,0,tz))
+ end
+ end
+ end
  end

  context "when iteratively running plugin#run" do
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-integration-jdbc
  version: !ruby/object:Gem::Version
- version: 5.4.10
+ version: 5.5.0
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-05-08 00:00:00.000000000 Z
+ date: 2024-08-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -263,8 +263,10 @@ files:
  - lib/logstash/inputs/tzinfo_jruby_patch.rb
  - lib/logstash/plugin_mixins/jdbc/common.rb
  - lib/logstash/plugin_mixins/jdbc/jdbc.rb
+ - lib/logstash/plugin_mixins/jdbc/sequel_bootstrap.rb
  - lib/logstash/plugin_mixins/jdbc/statement_handler.rb
  - lib/logstash/plugin_mixins/jdbc/timezone_proxy.rb
+ - lib/logstash/plugin_mixins/jdbc/value_handler.rb
  - lib/logstash/plugin_mixins/jdbc/value_tracking.rb
  - lib/logstash/plugin_mixins/jdbc_streaming.rb
  - lib/logstash/plugin_mixins/jdbc_streaming/cache_payload.rb