logstash-input-elasticsearch 4.21.2 → 4.23.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,9 +21,10 @@ module LogStash
21
21
  @pipeline_id = plugin.pipeline_id
22
22
  end
23
23
 
24
- def do_run(output_queue)
25
- return retryable_search(output_queue) if @slices.nil? || @slices <= 1
24
+ def do_run(output_queue, query)
25
+ @query = query
26
26
 
27
+ return retryable_search(output_queue) if @slices.nil? || @slices <= 1
27
28
  retryable_slice_search(output_queue)
28
29
  end
29
30
 
@@ -122,6 +123,13 @@ module LogStash
122
123
  PIT_JOB = "create point in time (PIT)"
123
124
  SEARCH_AFTER_JOB = "search_after paginated search"
124
125
 
126
+ attr_accessor :cursor_tracker
127
+
128
+ def do_run(output_queue, query)
129
+ super(output_queue, query)
130
+ @cursor_tracker.checkpoint_cursor(intermediate: false) if @cursor_tracker
131
+ end
132
+
125
133
  def pit?(id)
126
134
  !!id&.is_a?(String)
127
135
  end
@@ -192,6 +200,8 @@ module LogStash
192
200
  end
193
201
  end
194
202
 
203
+ @cursor_tracker.checkpoint_cursor(intermediate: true) if @cursor_tracker
204
+
195
205
  logger.info("Query completed", log_details)
196
206
  end
197
207
 
@@ -73,6 +73,8 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
73
73
 
74
74
  require 'logstash/inputs/elasticsearch/paginated_search'
75
75
  require 'logstash/inputs/elasticsearch/aggregation'
76
+ require 'logstash/inputs/elasticsearch/cursor_tracker'
77
+ require 'logstash/inputs/elasticsearch/esql'
76
78
 
77
79
  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
78
80
  include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck
@@ -95,15 +97,21 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
95
97
  # The index or alias to search.
96
98
  config :index, :validate => :string, :default => "logstash-*"
97
99
 
98
- # The query to be executed. Read the Elasticsearch query DSL documentation
99
- # for more info
100
- # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
100
+ # The type of Elasticsearch query provided in `query`. This determines how the query shape and other params are validated.
101
+ config :query_type, :validate => %w[dsl esql], :default => 'dsl'
102
+
103
+ # The query to be executed. DSL or ES|QL (when `query_type => 'esql'`) query shape is accepted.
104
+ # Read the following documentations for more info
105
+ # Query DSL: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
106
+ # ES|QL: https://www.elastic.co/guide/en/elasticsearch/reference/current/esql.html
101
107
  config :query, :validate => :string, :default => '{ "sort": [ "_doc" ] }'
102
108
 
103
- # This allows you to speccify the response type: either hits or aggregations
104
- # where hits: normal search request
105
- # aggregations: aggregation request
106
- config :response_type, :validate => ['hits', 'aggregations'], :default => 'hits'
109
+ # This allows you to specify the DSL response type: one of [hits, aggregations]
110
+ # where
111
+ # hits: normal search request
112
+ # aggregations: aggregation request
113
+ # Note that this param is invalid when `query_type => 'esql'`, ES|QL response shape is always a tabular format
114
+ config :response_type, :validate => %w[hits aggregations], :default => 'hits'
107
115
 
108
116
  # This allows you to set the maximum number of hits returned per scroll.
109
117
  config :size, :validate => :number, :default => 1000
@@ -124,6 +132,20 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
124
132
  # by this pipeline input.
125
133
  config :slices, :validate => :number
126
134
 
135
+ # Enable tracking the value of a given field to be used as a cursor
136
+ # Main concerns:
137
+ # * using anything other than _event.timestamp easily leads to data loss
138
+ # * the first "synchronization" run can take a long time
139
+ config :tracking_field, :validate => :string
140
+
141
+ # Define the initial seed value of the tracking_field
142
+ config :tracking_field_seed, :validate => :string, :default => "1970-01-01T00:00:00.000000000Z"
143
+
144
+ # The location of where the tracking field value will be stored
145
+ # The value is persisted after each scheduled run (and not per result)
146
+ # If it's not set it defaults to '${path.data}/plugins/inputs/elasticsearch/<pipeline_id>/last_run_value'
147
+ config :last_run_metadata_path, :validate => :string
148
+
127
149
  # If set, include Elasticsearch document information such as index, type, and
128
150
  # the id in the event.
129
151
  #
@@ -262,6 +284,10 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
262
284
  # exactly once.
263
285
  config :schedule, :validate => :string
264
286
 
287
+ # Allow scheduled runs to overlap (enabled by default). Setting to false will
288
+ # only start a new scheduled run after the previous one completes.
289
+ config :schedule_overlap, :validate => :boolean
290
+
265
291
  # If set, the _source of each hit will be added nested under the target instead of at the top-level
266
292
  config :target, :validate => :field_reference
267
293
 
@@ -274,6 +300,9 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
274
300
  DEFAULT_EAV_HEADER = { "Elastic-Api-Version" => "2023-10-31" }.freeze
275
301
  INTERNAL_ORIGIN_HEADER = { 'x-elastic-product-origin' => 'logstash-input-elasticsearch'}.freeze
276
302
 
303
+ LS_ESQL_SUPPORT_VERSION = "8.17.4" # the version started using elasticsearch-ruby v8
304
+ ES_ESQL_SUPPORT_VERSION = "8.11.0"
305
+
277
306
  def initialize(params={})
278
307
  super(params)
279
308
 
@@ -290,10 +319,17 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
290
319
  fill_hosts_from_cloud_id
291
320
  setup_ssl_params!
292
321
 
293
- @base_query = LogStash::Json.load(@query)
294
- if @slices
295
- @base_query.include?('slice') && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `query` option cannot specify specific `slice` when configured to manage parallel slices with `slices` option")
296
- @slices < 1 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `slices` option must be greater than zero, got `#{@slices}`")
322
+ if @query_type == 'esql'
323
+ validate_ls_version_for_esql_support!
324
+ validate_esql_query!
325
+ not_allowed_options = original_params.keys & %w(index size slices search_api docinfo docinfo_target docinfo_fields response_type tracking_field)
326
+ raise(LogStash::ConfigurationError, "Configured #{not_allowed_options} params are not allowed while using ES|QL query") if not_allowed_options&.size > 1
327
+ else
328
+ @base_query = LogStash::Json.load(@query)
329
+ if @slices
330
+ @base_query.include?('slice') && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `query` option cannot specify specific `slice` when configured to manage parallel slices with `slices` option")
331
+ @slices < 1 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `slices` option must be greater than zero, got `#{@slices}`")
332
+ end
297
333
  end
298
334
 
299
335
  @retries < 0 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `retries` option must be equal or greater than zero, got `#{@retries}`")
@@ -329,21 +365,27 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
329
365
 
330
366
  test_connection!
331
367
 
368
+ validate_es_for_esql_support!
369
+
332
370
  setup_serverless
333
371
 
334
372
  setup_search_api
335
373
 
336
- setup_query_executor
374
+ @query_executor = create_query_executor
375
+
376
+ setup_cursor_tracker
337
377
 
338
378
  @client
339
379
  end
340
380
 
341
381
  def run(output_queue)
342
382
  if @schedule
343
- scheduler.cron(@schedule) { @query_executor.do_run(output_queue) }
383
+ scheduler.cron(@schedule, :overlap => @schedule_overlap) do
384
+ @query_executor.do_run(output_queue, get_query_object())
385
+ end
344
386
  scheduler.join
345
387
  else
346
- @query_executor.do_run(output_queue)
388
+ @query_executor.do_run(output_queue, get_query_object())
347
389
  end
348
390
  end
349
391
 
@@ -354,6 +396,28 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
354
396
  event = event_from_hit(hit, root_field)
355
397
  decorate(event)
356
398
  output_queue << event
399
+ record_last_value(event)
400
+ end
401
+
402
+ def decorate_event(event)
403
+ decorate(event)
404
+ end
405
+
406
+ private
407
+
408
+ def get_query_object
409
+ return @query if @query_type == 'esql'
410
+ if @cursor_tracker
411
+ query = @cursor_tracker.inject_cursor(@query)
412
+ @logger.debug("new query is #{query}")
413
+ else
414
+ query = @query
415
+ end
416
+ LogStash::Json.load(query)
417
+ end
418
+
419
+ def record_last_value(event)
420
+ @cursor_tracker.record_last_value(event) if @tracking_field
357
421
  end
358
422
 
359
423
  def event_from_hit(hit, root_field)
@@ -383,8 +447,6 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
383
447
  event.set(@docinfo_target, docinfo_target)
384
448
  end
385
449
 
386
- private
387
-
388
450
  def hosts_default?(hosts)
389
451
  hosts.nil? || ( hosts.is_a?(Array) && hosts.empty? )
390
452
  end
@@ -662,18 +724,38 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
662
724
 
663
725
  end
664
726
 
665
- def setup_query_executor
666
- @query_executor = case @response_type
667
- when 'hits'
668
- if @resolved_search_api == "search_after"
669
- LogStash::Inputs::Elasticsearch::SearchAfter.new(@client, self)
670
- else
671
- logger.warn("scroll API is no longer recommended for pagination. Consider using search_after instead.") if es_major_version >= 8
672
- LogStash::Inputs::Elasticsearch::Scroll.new(@client, self)
673
- end
674
- when 'aggregations'
675
- LogStash::Inputs::Elasticsearch::Aggregation.new(@client, self)
676
- end
727
+ def create_query_executor
728
+ return LogStash::Inputs::Elasticsearch::Esql.new(@client, self) if @query_type == 'esql'
729
+
730
+ # DSL query executor
731
+ return LogStash::Inputs::Elasticsearch::Aggregation.new(@client, self) if @response_type == 'aggregations'
732
+ # response_type is hits, executor can be search_after or scroll type
733
+ return LogStash::Inputs::Elasticsearch::SearchAfter.new(@client, self) if @resolved_search_api == "search_after"
734
+
735
+ logger.warn("scroll API is no longer recommended for pagination. Consider using search_after instead.") if es_major_version >= 8
736
+ LogStash::Inputs::Elasticsearch::Scroll.new(@client, self)
737
+ end
738
+
739
+ def setup_cursor_tracker
740
+ return unless @tracking_field
741
+ return unless @query_executor.is_a?(LogStash::Inputs::Elasticsearch::SearchAfter)
742
+
743
+ if @resolved_search_api != "search_after" || @response_type != "hits"
744
+ raise ConfigurationError.new("The `tracking_field` feature can only be used with `search_after` non-aggregation queries")
745
+ end
746
+
747
+ @cursor_tracker = CursorTracker.new(last_run_metadata_path: last_run_metadata_path,
748
+ tracking_field: @tracking_field,
749
+ tracking_field_seed: @tracking_field_seed)
750
+ @query_executor.cursor_tracker = @cursor_tracker
751
+ end
752
+
753
+ def last_run_metadata_path
754
+ return @last_run_metadata_path if @last_run_metadata_path
755
+
756
+ last_run_metadata_path = ::File.join(LogStash::SETTINGS.get_value("path.data"), "plugins", "inputs", "elasticsearch", pipeline_id, "last_run_value")
757
+ FileUtils.mkdir_p ::File.dirname(last_run_metadata_path)
758
+ last_run_metadata_path
677
759
  end
678
760
 
679
761
  def get_transport_client_class
@@ -690,6 +772,26 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
690
772
  ::Elastic::Transport::Transport::HTTP::Manticore
691
773
  end
692
774
 
775
+ def validate_ls_version_for_esql_support!
776
+ if Gem::Version.create(LOGSTASH_VERSION) < Gem::Version.create(LS_ESQL_SUPPORT_VERSION)
777
+ fail("Current version of Logstash does not include Elasticsearch client which supports ES|QL. Please upgrade Logstash to at least #{LS_ESQL_SUPPORT_VERSION}")
778
+ end
779
+ end
780
+
781
+ def validate_esql_query!
782
+ fail(LogStash::ConfigurationError, "`query` cannot be empty") if @query.strip.empty?
783
+ source_commands = %w[FROM ROW SHOW]
784
+ contains_source_command = source_commands.any? { |source_command| @query.strip.start_with?(source_command) }
785
+ fail(LogStash::ConfigurationError, "`query` needs to start with any of #{source_commands}") unless contains_source_command
786
+ end
787
+
788
+ def validate_es_for_esql_support!
789
+ return unless @query_type == 'esql'
790
+ # make sure connected ES supports ES|QL (8.11+)
791
+ es_supports_esql = Gem::Version.create(es_version) >= Gem::Version.create(ES_ESQL_SUPPORT_VERSION)
792
+ fail("Connected Elasticsearch #{es_version} version does not supports ES|QL. ES|QL feature requires at least Elasticsearch #{ES_ESQL_SUPPORT_VERSION} version.") unless es_supports_esql
793
+ end
794
+
693
795
  module URIOrEmptyValidator
694
796
  ##
695
797
  # @override to provide :uri_or_empty validator
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-input-elasticsearch'
4
- s.version = '4.21.2'
4
+ s.version = '4.23.0'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = "Reads query results from an Elasticsearch cluster"
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -0,0 +1,72 @@
1
+ # encoding: utf-8
2
+ require "logstash/devutils/rspec/spec_helper"
3
+ require "logstash/devutils/rspec/shared_examples"
4
+ require "logstash/inputs/elasticsearch"
5
+ require "logstash/inputs/elasticsearch/cursor_tracker"
6
+
7
+ describe LogStash::Inputs::Elasticsearch::CursorTracker do
8
+
9
+ let(:last_run_metadata_path) { Tempfile.new('cursor_tracker_testing').path }
10
+ let(:tracking_field_seed) { "1980-01-01T23:59:59.999999999Z" }
11
+ let(:options) do
12
+ {
13
+ :last_run_metadata_path => last_run_metadata_path,
14
+ :tracking_field => "my_field",
15
+ :tracking_field_seed => tracking_field_seed
16
+ }
17
+ end
18
+
19
+ subject { described_class.new(**options) }
20
+
21
+ it "creating a class works" do
22
+ expect(subject).to be_a described_class
23
+ end
24
+
25
+ describe "checkpoint_cursor" do
26
+ before(:each) do
27
+ subject.checkpoint_cursor(intermediate: false) # store seed value
28
+ [
29
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-03T23:59:59.999999999Z")) },
30
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-01T23:59:59.999999999Z")) },
31
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-02T23:59:59.999999999Z")) },
32
+ ].each(&:join)
33
+ end
34
+ context "when doing intermediate checkpoint" do
35
+ it "persists the smallest value" do
36
+ subject.checkpoint_cursor(intermediate: true)
37
+ expect(IO.read(last_run_metadata_path)).to eq("2025-01-01T23:59:59.999999999Z")
38
+ end
39
+ end
40
+ context "when doing non-intermediate checkpoint" do
41
+ it "persists the largest value" do
42
+ subject.checkpoint_cursor(intermediate: false)
43
+ expect(IO.read(last_run_metadata_path)).to eq("2025-01-03T23:59:59.999999999Z")
44
+ end
45
+ end
46
+ end
47
+
48
+ describe "inject_cursor" do
49
+ let(:new_value) { "2025-01-03T23:59:59.999999999Z" }
50
+ let(:fake_now) { "2026-09-19T23:59:59.999999999Z" }
51
+
52
+ let(:query) do
53
+ %q[
54
+ { "query": { "range": { "event.ingested": { "gt": :last_value, "lt": :present}}}, "sort": [ { "event.ingested": {"order": "asc", "format": "strict_date_optional_time_nanos", "numeric_type" : "date_nanos" } } ] }
55
+ ]
56
+ end
57
+
58
+ before(:each) do
59
+ subject.record_last_value(LogStash::Event.new("my_field" => new_value))
60
+ subject.checkpoint_cursor(intermediate: false)
61
+ allow(subject).to receive(:now_minus_30s).and_return(fake_now)
62
+ end
63
+
64
+ it "injects the value of the cursor into json query if it contains :last_value" do
65
+ expect(subject.inject_cursor(query)).to match(/#{new_value}/)
66
+ end
67
+
68
+ it "injects current time into json query if it contains :present" do
69
+ expect(subject.inject_cursor(query)).to match(/#{fake_now}/)
70
+ end
71
+ end
72
+ end
@@ -0,0 +1,180 @@
1
+ # encoding: utf-8
2
+ require "logstash/devutils/rspec/spec_helper"
3
+ require "logstash/inputs/elasticsearch"
4
+ require "elasticsearch"
5
+
6
+ describe LogStash::Inputs::Elasticsearch::Esql do
7
+ let(:client) { instance_double(Elasticsearch::Client) }
8
+ let(:esql_client) { double("esql-client") }
9
+
10
+ let(:plugin) { instance_double(LogStash::Inputs::Elasticsearch, params: plugin_config, decorate_event: nil) }
11
+ let(:plugin_config) do
12
+ {
13
+ "query" => "FROM test-index | STATS count() BY field",
14
+ "retries" => 3
15
+ }
16
+ end
17
+ let(:esql_executor) { described_class.new(client, plugin) }
18
+
19
+ describe "#initialization" do
20
+ it "sets up the ESQL client with correct parameters" do
21
+ expect(esql_executor.instance_variable_get(:@query)).to eq(plugin_config["query"])
22
+ expect(esql_executor.instance_variable_get(:@retries)).to eq(plugin_config["retries"])
23
+ expect(esql_executor.instance_variable_get(:@target_field)).to eq(nil)
24
+ end
25
+ end
26
+
27
+ describe "#execution" do
28
+ let(:output_queue) { Queue.new }
29
+
30
+ context "when faces error while retrying" do
31
+ it "retries the given block the specified number of times" do
32
+ attempts = 0
33
+ result = esql_executor.retryable("Test Job") do
34
+ attempts += 1
35
+ raise StandardError if attempts < 3
36
+ "success"
37
+ end
38
+ expect(attempts).to eq(3)
39
+ expect(result).to eq("success")
40
+ end
41
+
42
+ it "returns false if the block fails all attempts" do
43
+ result = esql_executor.retryable("Test Job") do
44
+ raise StandardError
45
+ end
46
+ expect(result).to eq(false)
47
+ end
48
+ end
49
+
50
+ context "when executing chain of processes" do
51
+ let(:response) { { 'values' => [%w[foo bar]], 'columns' => [{ 'name' => 'a.b.1.d', 'type' => 'keyword' },
52
+ { 'name' => 'h_g.k$l.m.0', 'type' => 'keyword' }] } }
53
+
54
+ before do
55
+ allow(esql_executor).to receive(:retryable).and_yield
56
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
57
+ end
58
+
59
+ it "executes the ESQL query and processes the results" do
60
+ allow(response).to receive(:headers).and_return({})
61
+ esql_executor.do_run(output_queue, plugin_config["query"])
62
+ expect(output_queue.size).to eq(1)
63
+
64
+ event = output_queue.pop
65
+ expect(event.get('[a][b][1][d]')).to eq('foo')
66
+ expect(event.get('[h_g][k$l][m][0]')).to eq('bar')
67
+ end
68
+
69
+ it "logs a warning if the response contains a warning header" do
70
+ allow(response).to receive(:headers).and_return({ "warning" => "some warning" })
71
+ expect(esql_executor.logger).to receive(:warn).with("ES|QL executor received warning", { :warning_message => "some warning" })
72
+ esql_executor.do_run(output_queue, plugin_config["query"])
73
+ end
74
+
75
+ it "does not log a warning if the response does not contain a warning header" do
76
+ allow(response).to receive(:headers).and_return({})
77
+ expect(esql_executor.logger).not_to receive(:warn)
78
+ esql_executor.do_run(output_queue, plugin_config["query"])
79
+ end
80
+ end
81
+
82
+ describe "multiple rows in the result" do
83
+ let(:response) { { 'values' => rows, 'columns' => [{ 'name' => 'key.1', 'type' => 'keyword' },
84
+ { 'name' => 'key.2', 'type' => 'keyword' }] } }
85
+
86
+ before do
87
+ allow(esql_executor).to receive(:retryable).and_yield
88
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
89
+ allow(response).to receive(:headers).and_return({})
90
+ end
91
+
92
+ context "when mapping" do
93
+ let(:rows) { [%w[foo bar], %w[hello world]] }
94
+
95
+ it "1:1 maps rows to events" do
96
+ esql_executor.do_run(output_queue, plugin_config["query"])
97
+ expect(output_queue.size).to eq(2)
98
+
99
+ event_1 = output_queue.pop
100
+ expect(event_1.get('[key][1]')).to eq('foo')
101
+ expect(event_1.get('[key][2]')).to eq('bar')
102
+
103
+ event_2 = output_queue.pop
104
+ expect(event_2.get('[key][1]')).to eq('hello')
105
+ expect(event_2.get('[key][2]')).to eq('world')
106
+ end
107
+ end
108
+
109
+ context "when partial nil values appear" do
110
+ let(:rows) { [[nil, "bar"], ["hello", nil]] }
111
+
112
+ it "ignores the nil values" do
113
+ esql_executor.do_run(output_queue, plugin_config["query"])
114
+ expect(output_queue.size).to eq(2)
115
+
116
+ event_1 = output_queue.pop
117
+ expect(event_1.get('[key][1]')).to eq(nil)
118
+ expect(event_1.get('[key][2]')).to eq('bar')
119
+
120
+ event_2 = output_queue.pop
121
+ expect(event_2.get('[key][1]')).to eq('hello')
122
+ expect(event_2.get('[key][2]')).to eq(nil)
123
+ end
124
+ end
125
+ end
126
+
127
+ context "when sub-elements occur in the result" do
128
+ let(:response) { {
129
+ 'values' => [[50, 1, 100], [50, 0, 1000], [50, 9, 99999]],
130
+ 'columns' =>
131
+ [
132
+ { 'name' => 'time', 'type' => 'long' },
133
+ { 'name' => 'time.min', 'type' => 'long' },
134
+ { 'name' => 'time.max', 'type' => 'long' },
135
+ ]
136
+ } }
137
+
138
+ before do
139
+ allow(esql_executor).to receive(:retryable).and_yield
140
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
141
+ allow(response).to receive(:headers).and_return({})
142
+ end
143
+
144
+ it "includes 1st depth elements into event" do
145
+ esql_executor.do_run(output_queue, plugin_config["query"])
146
+
147
+ expect(output_queue.size).to eq(3)
148
+ 3.times do
149
+ event = output_queue.pop
150
+ expect(event.get('time')).to eq(50)
151
+ expect(event.get('[time][min]')).to eq(nil)
152
+ expect(event.get('[time][max]')).to eq(nil)
153
+ end
154
+ end
155
+ end
156
+ end
157
+
158
+ describe "#column spec" do
159
+ let(:valid_spec) { { 'name' => 'field.name', 'type' => 'keyword' } }
160
+ let(:column_spec) { LogStash::Inputs::Elasticsearch::ColumnSpec.new(valid_spec) }
161
+
162
+ context "when initializes" do
163
+ it "sets the name and type attributes" do
164
+ expect(column_spec.name).to eq("field.name")
165
+ expect(column_spec.type).to eq("keyword")
166
+ end
167
+
168
+ it "freezes the name and type attributes" do
169
+ expect(column_spec.name).to be_frozen
170
+ expect(column_spec.type).to be_frozen
171
+ end
172
+ end
173
+
174
+ context "when calls the field reference" do
175
+ it "returns the correct field reference format" do
176
+ expect(column_spec.field_reference).to eq("[field][name]")
177
+ end
178
+ end
179
+ end
180
+ end if LOGSTASH_VERSION >= LogStash::Inputs::Elasticsearch::LS_ESQL_SUPPORT_VERSION