logstash-integration-jdbc 5.2.4 → 5.4.11

Files changed (33)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +50 -0
  3. data/docs/filter-jdbc_static.asciidoc +14 -2
  4. data/docs/filter-jdbc_streaming.asciidoc +1 -1
  5. data/docs/input-jdbc.asciidoc +41 -4
  6. data/lib/logstash/filters/jdbc/basic_database.rb +1 -1
  7. data/lib/logstash/filters/jdbc/read_only_database.rb +2 -2
  8. data/lib/logstash/filters/jdbc_static.rb +19 -10
  9. data/lib/logstash/inputs/jdbc.rb +69 -20
  10. data/lib/logstash/plugin_mixins/jdbc/common.rb +2 -1
  11. data/lib/logstash/plugin_mixins/jdbc/jdbc.rb +22 -17
  12. data/lib/logstash/plugin_mixins/jdbc/sequel_bootstrap.rb +21 -0
  13. data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb +51 -45
  14. data/lib/logstash/plugin_mixins/jdbc/timezone_proxy.rb +61 -0
  15. data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb +16 -3
  16. data/lib/logstash-integration-jdbc_jars.rb +4 -2
  17. data/logstash-integration-jdbc.gemspec +6 -6
  18. data/spec/filters/jdbc_static_spec.rb +10 -0
  19. data/spec/filters/jdbc_streaming_spec.rb +7 -10
  20. data/spec/inputs/integration/integ_spec.rb +28 -9
  21. data/spec/inputs/jdbc_spec.rb +202 -59
  22. data/spec/plugin_mixins/jdbc/timezone_proxy_spec.rb +68 -0
  23. data/spec/plugin_mixins/jdbc/value_tracking_spec.rb +113 -0
  24. data/vendor/jar-dependencies/org/apache/derby/derby/10.15.2.1/derby-10.15.2.1.jar +0 -0
  25. data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.15.2.1/derbyclient-10.15.2.1.jar +0 -0
  26. data/vendor/jar-dependencies/org/apache/derby/derbyshared/10.15.2.1/derbyshared-10.15.2.1.jar +0 -0
  27. data/vendor/jar-dependencies/org/apache/derby/derbytools/10.15.2.1/derbytools-10.15.2.1.jar +0 -0
  28. metadata +39 -49
  29. data/lib/logstash/plugin_mixins/jdbc/checked_count_logger.rb +0 -43
  30. data/lib/logstash/plugin_mixins/jdbc/scheduler.rb +0 -175
  31. data/spec/plugin_mixins/jdbc/scheduler_spec.rb +0 -78
  32. data/vendor/jar-dependencies/org/apache/derby/derby/10.14.1.0/derby-10.14.1.0.jar +0 -0
  33. data/vendor/jar-dependencies/org/apache/derby/derbyclient/10.14.1.0/derbyclient-10.14.1.0.jar +0 -0

data/lib/logstash/plugin_mixins/jdbc/statement_handler.rb
@@ -2,7 +2,7 @@
 
 module LogStash module PluginMixins module Jdbc
   class StatementHandler
-    def self.build_statement_handler(plugin, logger)
+    def self.build_statement_handler(plugin)
       if plugin.use_prepared_statements
         klass = PreparedStatementHandler
       else
@@ -16,27 +16,39 @@ module LogStash module PluginMixins module Jdbc
           klass = NormalStatementHandler
         end
       end
-      klass.new(plugin, logger)
+      klass.new(plugin)
     end
 
-    attr_reader :statement, :parameters, :statement_logger
+    attr_reader :statement, :parameters
 
-    def initialize(plugin, statement_logger)
+    def initialize(plugin)
       @statement = plugin.statement
-      @statement_logger = statement_logger
-      post_init(plugin)
     end
 
     def build_query(db, sql_last_value)
-      # override in subclass
+      fail NotImplementedError # override in subclass
     end
 
-    def post_init(plugin)
-      # override in subclass, if needed
-    end
   end
 
   class NormalStatementHandler < StatementHandler
+
+    attr_reader :parameters
+
+    def initialize(plugin)
+      super(plugin)
+      @parameter_keys = ["sql_last_value"] + plugin.parameters.keys
+      @parameters = plugin.parameters.inject({}) do |hash,(k,v)|
+        case v
+        when LogStash::Timestamp
+          hash[k.to_sym] = v.time
+        else
+          hash[k.to_sym] = v
+        end
+        hash
+      end
+    end
+
     # Performs the query, yielding once per row of data
     # @param db [Sequel::Database]
     # @param sql_last_value [Integer|DateTime|Time]
@@ -52,27 +64,18 @@ module LogStash module PluginMixins module Jdbc
 
     def build_query(db, sql_last_value)
       parameters[:sql_last_value] = sql_last_value
-      query = db[statement, parameters]
-      statement_logger.log_statement_parameters(statement, parameters, query)
-      query
+      db[statement, parameters]
     end
 
-    def post_init(plugin)
-      @parameter_keys = ["sql_last_value"] + plugin.parameters.keys
-      @parameters = plugin.parameters.inject({}) do |hash,(k,v)|
-        case v
-        when LogStash::Timestamp
-          hash[k.to_sym] = v.time
-        else
-          hash[k.to_sym] = v
-        end
-        hash
-      end
-    end
   end
 
   class PagedNormalStatementHandler < NormalStatementHandler
-    attr_reader :jdbc_page_size
+
+    def initialize(plugin)
+      super(plugin)
+      @jdbc_page_size = plugin.jdbc_page_size
+      @logger = plugin.logger
+    end
 
     # Performs the query, respecting our pagination settings, yielding once per row of data
     # @param db [Sequel::Database]
@@ -81,16 +84,22 @@ module LogStash module PluginMixins module Jdbc
     def perform_query(db, sql_last_value)
       query = build_query(db, sql_last_value)
       query.each_page(@jdbc_page_size) do |paged_dataset|
+        log_dataset_page(paged_dataset) if @logger.debug?
         paged_dataset.each do |row|
           yield row
         end
       end
     end
 
-    def post_init(plugin)
-      super(plugin)
-      @jdbc_page_size = plugin.jdbc_page_size
+    private
+
+    # @param paged_dataset [Sequel::Dataset::Pagination] like object
+    def log_dataset_page(paged_dataset)
+      @logger.debug "fetching paged dataset", current_page: paged_dataset.current_page,
+                    record_count: paged_dataset.current_page_record_count,
+                    total_record_count: paged_dataset.pagination_record_count
     end
+
   end
 
   class ExplicitPagingModeStatementHandler < PagedNormalStatementHandler
@@ -101,20 +110,29 @@ module LogStash module PluginMixins module Jdbc
     def perform_query(db, sql_last_value)
       query = build_query(db, sql_last_value)
       offset = 0
+      page_size = @jdbc_page_size
       loop do
        rows_in_page = 0
-        query.with_sql(query.sql, offset: offset, size: jdbc_page_size).each do |row|
+        query.with_sql(query.sql, offset: offset, size: page_size).each do |row|
          yield row
          rows_in_page += 1
        end
-        break unless rows_in_page == jdbc_page_size
-        offset += jdbc_page_size
+        break unless rows_in_page == page_size
+        offset += page_size
      end
    end
  end
 
   class PreparedStatementHandler < StatementHandler
-    attr_reader :name, :bind_values_array, :statement_prepared, :prepared
+    attr_reader :name, :bind_values_array, :statement_prepared, :prepared, :parameters
+
+    def initialize(plugin)
+      super(plugin)
+      @name = plugin.prepared_statement_name.to_sym
+      @bind_values_array = plugin.prepared_statement_bind_values
+      @parameters = plugin.parameters
+      @statement_prepared = Concurrent::AtomicBoolean.new(false)
+    end
 
     # Performs the query, ignoring our pagination settings, yielding once per row of data
     # @param db [Sequel::Database]
@@ -142,7 +160,6 @@ module LogStash module PluginMixins module Jdbc
         db.set_prepared_statement(name, prepared)
       end
       bind_value_sql_last_value(sql_last_value)
-      statement_logger.log_statement_parameters(statement, parameters, nil)
       begin
         db.call(name, parameters)
       rescue => e
@@ -153,17 +170,6 @@ module LogStash module PluginMixins module Jdbc
       end
     end
 
-    def post_init(plugin)
-      # don't log statement count when using prepared statements for now...
-      # needs enhancement to allow user to supply a bindable count prepared statement in settings.
-      @statement_logger.disable_count
-
-      @name = plugin.prepared_statement_name.to_sym
-      @bind_values_array = plugin.prepared_statement_bind_values
-      @parameters = plugin.parameters
-      @statement_prepared = Concurrent::AtomicBoolean.new(false)
-    end
-
     def create_bind_values_hash
       hash = {}
       bind_values_array.each_with_index {|v,i| hash[:"p#{i}"] = v}
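
The refactor above drops the `post_init`/`statement_logger` plumbing: each handler subclass now receives the plugin in `initialize` and pulls what it needs from it, and page logging happens directly in `PagedNormalStatementHandler` when debug logging is enabled. The following is a minimal sketch of how a handler is selected from plugin settings; `FakePlugin` is a hypothetical stand-in used only for illustration (the real object is the jdbc input plugin itself), and it assumes the gem and its dependencies are on the load path and that, as in the released source, inputs without paging get the `NormalStatementHandler`:

    require "logger"
    require "logstash/plugin_mixins/jdbc/statement_handler"

    # Hypothetical stand-in exposing just the readers the handlers call.
    FakePlugin = Struct.new(:use_prepared_statements, :jdbc_paging_enabled, :jdbc_paging_mode,
                            :statement, :parameters, :jdbc_page_size, :logger, keyword_init: true)

    plugin = FakePlugin.new(
      use_prepared_statements: false,
      jdbc_paging_enabled:     false,
      statement:               "SELECT * FROM logs WHERE id > :sql_last_value ORDER BY id",
      parameters:              { "max_rows" => 1000 },
      jdbc_page_size:          100_000,
      logger:                  Logger.new($stdout)   # stdlib logger; responds to #debug?
    )

    handler = LogStash::PluginMixins::Jdbc::StatementHandler.build_statement_handler(plugin)
    handler.class      # => LogStash::PluginMixins::Jdbc::NormalStatementHandler
    handler.parameters # => {:max_rows=>1000}; :sql_last_value is merged in by build_query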

data/lib/logstash/plugin_mixins/jdbc/timezone_proxy.rb (new file)
@@ -0,0 +1,61 @@
+# encoding: utf-8
+
+require 'tzinfo'
+
+module LogStash module PluginMixins module Jdbc
+  ##
+  # This `TimezoneProxy` allows timezone specs to include extensions indicating preference for ambiguous handling.
+  # @see TimezoneProxy::parse
+  module TimezoneProxy
+    ##
+    # @param timezone_spec [String]: a timezone spec, consisting of any valid timezone identifier
+    #                                followed by square-bracketed extensions. Currently-supported
+    #                                extensions are:
+    #                                `dst_enabled_on_overlap:(true|false)`: when encountering an ambiguous time
+    #                                                                       due to daylight-savings transition,
+    #                                                                       assume DST to be either enabled or
+    #                                                                       disabled instead of raising an
+    #                                                                       AmbiguousTime exception
+    # @return [TZInfo::Timezone]
+    def self.load(timezone_spec)
+      # re-load pass-through
+      return timezone_spec if timezone_spec.kind_of?(::TZInfo::Timezone)
+
+      parsed_spec = /\A(?<name>[^\[]+)(\[(?<extensions>[^\]]*)\])?\z/.match(timezone_spec)
+
+      timezone = ::TZInfo::Timezone.get(parsed_spec[:name])
+      return timezone unless parsed_spec[:extensions]
+
+      parsed_spec[:extensions].split(';').each do |extension_spec|
+        timezone = case extension_spec
+                   when 'dst_enabled_on_overlap:true'  then timezone.dup.extend(PeriodForLocalWithDSTPreference::ON)
+                   when 'dst_enabled_on_overlap:false' then timezone.dup.extend(PeriodForLocalWithDSTPreference::OFF)
+                   else fail(ArgumentError, "Invalid timezone extension `#{extension_spec}`")
+                   end
+      end
+
+      timezone
+    end
+
+    module JDBCTimezoneSpecValidator
+      def validate_value(value, validator_name)
+        return super(value, validator_name) unless validator_name == :jdbc_timezone_spec
+
+        [true, TimezoneProxy.load(value)] rescue [false, $!.message]
+      end
+    end
+
+    ##
+    # @api private
+    class PeriodForLocalWithDSTPreference < Module
+      def initialize(default_dst_enabled_on_overlap)
+        define_method(:period_for_local) do |localtime, dst_enabled_on_overlap=nil, &dismabiguation_block|
+          super(localtime, dst_enabled_on_overlap.nil? ? default_dst_enabled_on_overlap : dst_enabled_on_overlap, &dismabiguation_block)
+        end
+      end
+
+      ON = new(true)
+      OFF = new(false)
+    end
+  end
+end; end; end
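
The new `TimezoneProxy.load` accepts a plain TZInfo identifier, optionally followed by a bracketed `dst_enabled_on_overlap` extension that resolves ambiguous local times during the DST fall-back instead of raising. A brief usage sketch (the timestamps are arbitrary examples; 2021-11-07 01:30 occurs twice in America/Chicago):

    require 'tzinfo'
    require 'logstash/plugin_mixins/jdbc/timezone_proxy'

    Proxy = LogStash::PluginMixins::Jdbc::TimezoneProxy

    plain  = Proxy.load('America/Chicago')                                # plain TZInfo::Timezone
    no_dst = Proxy.load('America/Chicago[dst_enabled_on_overlap:false]')  # ambiguity resolved to standard time

    ambiguous_local = Time.utc(2021, 11, 7, 1, 30)  # treated as wall-clock time; the offset is ignored

    # plain.period_for_local(ambiguous_local)   # => raises TZInfo::AmbiguousTime
    no_dst.period_for_local(ambiguous_local)    # => the CST (non-DST) period, no exception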

data/lib/logstash/plugin_mixins/jdbc/value_tracking.rb
@@ -1,11 +1,13 @@
 # encoding: utf-8
 require "yaml" # persistence
+require "date"
+require "bigdecimal"
 
 module LogStash module PluginMixins module Jdbc
   class ValueTracking
 
     def self.build_last_value_tracker(plugin)
-      handler = plugin.record_last_run ? FileHandler.new(plugin.last_run_metadata_path) : NullFileHandler.new(plugin.last_run_metadata_path)
+      handler = plugin.record_last_run ? FileHandler.new(plugin.last_run_metadata_file_path) : NullFileHandler.new(plugin.last_run_metadata_file_path)
       if plugin.clean_run
         handler.clean
       end
@@ -14,7 +16,7 @@ module LogStash module PluginMixins module Jdbc
         # use this irrespective of the jdbc_default_timezone setting
         NumericValueTracker.new(handler)
       else
-        if plugin.jdbc_default_timezone.nil? || plugin.jdbc_default_timezone.empty?
+        if plugin.jdbc_default_timezone.nil?
           # no TZ stuff for Sequel, use Time
           TimeValueTracker.new(handler)
         else
@@ -31,6 +33,17 @@ module LogStash module PluginMixins module Jdbc
       set_initial
     end
 
+    if Psych::VERSION&.split('.')&.first.to_i >= 4
+      YAML_PERMITTED_CLASSES = [::DateTime, ::Time, ::BigDecimal].freeze
+      def self.load_yaml(source)
+        Psych::safe_load(source, permitted_classes: YAML_PERMITTED_CLASSES)
+      end
+    else
+      def self.load_yaml(source)
+        YAML::load(source)
+      end
+    end
+
     def set_initial
       # override in subclass
     end
@@ -112,7 +125,7 @@ module LogStash module PluginMixins module Jdbc
 
     def read
       return unless @exists
-      YAML.load(::File.read(@path))
+      ValueTracking.load_yaml(::File.read(@path))
     end
 
     def write(value)
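
The `load_yaml` shim above exists because Psych 4 (the YAML engine bundled with newer Rubies) restricts which classes may be deserialized, and the tracker persists `DateTime`/`Time`/`BigDecimal` values in its last-run metadata file. A minimal round-trip sketch of the same idea:

    require 'yaml'
    require 'date'
    require 'bigdecimal'

    last_value = DateTime.now
    serialized = YAML.dump(last_value)   # roughly what gets written to the last-run metadata file

    # Under Psych >= 4 a plain safe load rejects the timestamp class:
    #   Psych.safe_load(serialized)      # raises Psych::DisallowedClass
    # so the tracker explicitly permits the classes it writes:
    restored = Psych.safe_load(serialized, permitted_classes: [::DateTime, ::Time, ::BigDecimal])
    puts restored.inspect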

data/lib/logstash-integration-jdbc_jars.rb
@@ -1,5 +1,7 @@
 # AUTOGENERATED BY THE GRADLE SCRIPT. DO NOT EDIT.
 
 require 'jar_dependencies'
-require_jar('org.apache.derby', 'derby', '10.14.1.0')
-require_jar('org.apache.derby', 'derbyclient', '10.14.1.0')
+require_jar('org.apache.derby', 'derby', '10.15.2.1')
+require_jar('org.apache.derby', 'derbyclient', '10.15.2.1')
+require_jar('org.apache.derby', 'derbytools', '10.15.2.1')
+require_jar('org.apache.derby', 'derbyshared', '10.15.2.1')
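
Derby 10.15 requires Java 9+ and splits formerly-bundled code into separate artifacts, which is why derbyshared and derbytools are now vendored alongside derby and derbyclient. A hedged JRuby sketch of loading the embedded driver from these jars; the class name is assumed to be unchanged from earlier Derby releases:

    require 'java'
    # Adding the vendored jars to the classpath via the gem's autogenerated require file:
    require 'logstash-integration-jdbc_jars'

    # In Derby 10.15 the JDBC driver classes ship in derbytools.jar, with common
    # code in derbyshared.jar; the driver class itself keeps its historical name.
    java_import 'org.apache.derby.jdbc.EmbeddedDriver'
    driver = EmbeddedDriver.new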

data/logstash-integration-jdbc.gemspec
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name            = 'logstash-integration-jdbc'
-  s.version         = '5.2.4'
+  s.version         = '5.4.11'
   s.licenses        = ['Apache License (2.0)']
   s.summary         = "Integration with JDBC - input and filter plugins"
   s.description     = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -29,20 +29,20 @@ Gem::Specification.new do |s|
   # Restrict use of this plugin to versions of Logstash where support for integration plugins is present.
   s.add_runtime_dependency "logstash-core", ">= 6.5.0"
   s.add_runtime_dependency 'logstash-codec-plain'
-  s.add_runtime_dependency 'sequel'
+  # Pinned sequel version to >= 5.74.0 as it fixes the generic jdbc adapter to properly
+  # handle disconnection errors, and avoid stale connections in the pool.
+  s.add_runtime_dependency 'sequel', '>= 5.74.0'
   s.add_runtime_dependency 'lru_redux' # lru cache with ttl
 
   s.add_runtime_dependency 'tzinfo'
   s.add_runtime_dependency 'tzinfo-data'
-  # plugin maintains compatibility with < 3.5 (3.0.9)
-  # but works with newer rufus-scheduler >= 3.5 as well
-  s.add_runtime_dependency 'rufus-scheduler'
+
   s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~>1.3'
   s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'
   s.add_runtime_dependency "logstash-mixin-event_support", '~> 1.0'
+  s.add_runtime_dependency "logstash-mixin-scheduler", '~> 1.0'
 
   s.add_development_dependency "childprocess"
   s.add_development_dependency 'logstash-devutils', '>= 2.3'
   s.add_development_dependency 'timecop'
-  s.add_development_dependency 'jdbc-derby'
 end

data/spec/filters/jdbc_static_spec.rb
@@ -5,6 +5,7 @@ require "sequel"
 require "sequel/adapters/jdbc"
 require "stud/temporary"
 require "timecop"
+require "pathname"
 
 # LogStash::Logging::Logger::configure_logging("WARN")
 
@@ -85,6 +86,15 @@ module LogStash module Filters
 
   let(:ipaddr) { ".3.1.1" }
 
+  describe "verify derby path property" do
+    it "should be set into Logstash data path" do
+      plugin.register
+
+      expected = Pathname.new(LogStash::SETTINGS.get_value("path.data")).join("plugins", "shared", "derby_home").to_path
+      expect(java.lang.System.getProperty("derby.system.home")).to eq(expected)
+    end
+  end
+
   describe "non scheduled operation" do
     after { plugin.close }
 

data/spec/filters/jdbc_streaming_spec.rb
@@ -1,7 +1,6 @@
 require "logstash/devutils/rspec/spec_helper"
 require "logstash/devutils/rspec/shared_examples"
 require "logstash/filters/jdbc_streaming"
-require 'jdbc/derby'
 require "sequel"
 require "sequel/adapters/jdbc"
 
@@ -13,8 +12,6 @@ module LogStash module Filters
   describe JdbcStreaming do
     let!(:jdbc_connection_string) { "jdbc:derby:memory:jdbc_streaming_testdb;create=true"}
     #Use embedded Derby for tests
-    ::Jdbc::Derby.load_driver
-
     ENV["TZ"] = "Etc/UTC"
     describe "plugin level execution" do
       let(:mixin_settings) do
@@ -259,7 +256,7 @@ module LogStash module Filters
        CONFIG
      end
 
-      sample("message" => "some text") do
+      sample({"message" => "some text"}) do
        expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
      end
    end
@@ -277,7 +274,7 @@ module LogStash module Filters
        CONFIG
      end
 
-      sample("message" => "some text") do
+      sample({"message" => "some text"}) do
        expect(subject.get('new_field')).to eq([{"col_1" => 'from_database'}])
      end
    end
@@ -296,11 +293,11 @@ module LogStash module Filters
        CONFIG
      end
 
-      sample("message" => "some text", "param_field" => "1") do
+      sample({"message" => "some text", "param_field" => "1"}) do
        expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
      end
 
-      sample("message" => "some text", "param_field" => "2") do
+      sample({"message" => "some text", "param_field" => "2"}) do
        expect(subject.get('new_field').nil?)
      end
    end
@@ -319,11 +316,11 @@ module LogStash module Filters
        CONFIG
      end
 
-      sample("message" => "some text", "param_field" => 1) do
+      sample({"message" => "some text", "param_field" => 1}) do
        expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
      end
 
-      sample("message" => "some text", "param_field" => "1") do
+      sample({"message" => "some text", "param_field" => "1"}) do
        expect(subject.get('new_field').nil?)
      end
    end
@@ -342,7 +339,7 @@ module LogStash module Filters
        CONFIG
      end
 
-      sample("message" => "some text") do
+      sample({"message" => "some text"}) do
        expect(subject.get('new_field')).to eq([{"1" => 'from_database'}])
      end
    end

data/spec/inputs/integration/integ_spec.rb
@@ -41,6 +41,31 @@ describe LogStash::Inputs::Jdbc, :integration => true do
       expect(event.get('first_name')).to eq("Mark")
       expect(event.get('last_name')).to eq("Guckenheimer")
     end
+
+    context 'with paging' do
+      let(:settings) do
+        super().merge 'jdbc_paging_enabled' => true, 'jdbc_page_size' => 1,
+                      "statement" => 'SELECT * FROM "employee" WHERE EMP_NO >= :p1 ORDER BY EMP_NO',
+                      'parameters' => { 'p1' => 0 }
+      end
+
+      before do # change plugin logger level to debug - to exercise logging
+        logger = plugin.class.name.gsub('::', '.').downcase
+        logger = org.apache.logging.log4j.LogManager.getLogger(logger)
+        @prev_logger_level = [ logger.getName, logger.getLevel ]
+        org.apache.logging.log4j.core.config.Configurator.setLevel logger.getName, org.apache.logging.log4j.Level::DEBUG
+      end
+
+      after do
+        org.apache.logging.log4j.core.config.Configurator.setLevel *@prev_logger_level
+      end
+
+      it "should populate the event with database entries" do
+        plugin.run(queue)
+        event = queue.pop
+        expect(event.get('first_name')).to eq('David')
+      end
+    end
   end
 
   context "when supplying a non-existent library" do
@@ -51,10 +76,8 @@ describe LogStash::Inputs::Jdbc, :integration => true do
     end
 
     it "should not register correctly" do
-      plugin.register
-      q = Queue.new
       expect do
-        plugin.run(q)
+        plugin.register
       end.to raise_error(::LogStash::PluginLoadingError)
     end
   end
@@ -67,16 +90,13 @@ describe LogStash::Inputs::Jdbc, :integration => true do
     end
 
     it "log warning msg when plugin run" do
-      plugin.register
       expect( plugin ).to receive(:log_java_exception)
      expect(plugin.logger).to receive(:warn).once.with("Exception when executing JDBC query",
                                                        hash_including(:message => instance_of(String)))
-      q = Queue.new
-      expect{ plugin.run(q) }.not_to raise_error
+      expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
     end
 
     it "should log (native) Java driver error" do
-      plugin.register
       expect( org.apache.logging.log4j.LogManager ).to receive(:getLogger).and_wrap_original do |m, *args|
        logger = m.call(*args)
        expect( logger ).to receive(:error) do |_, e|
@@ -84,8 +104,7 @@ describe LogStash::Inputs::Jdbc, :integration => true do
        end.and_call_original
        logger
      end
-      q = Queue.new
-      expect{ plugin.run(q) }.not_to raise_error
+      expect{ plugin.register }.to raise_error(::LogStash::ConfigurationError)
     end
   end
 end