logstash-input-elasticsearch 5.0.2 → 5.2.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -21,9 +21,10 @@ module LogStash
21
21
  @pipeline_id = plugin.pipeline_id
22
22
  end
23
23
 
24
- def do_run(output_queue)
25
- return retryable_search(output_queue) if @slices.nil? || @slices <= 1
24
+ def do_run(output_queue, query)
25
+ @query = query
26
26
 
27
+ return retryable_search(output_queue) if @slices.nil? || @slices <= 1
27
28
  retryable_slice_search(output_queue)
28
29
  end
29
30
 
@@ -122,6 +123,13 @@ module LogStash
122
123
  PIT_JOB = "create point in time (PIT)"
123
124
  SEARCH_AFTER_JOB = "search_after paginated search"
124
125
 
126
+ attr_accessor :cursor_tracker
127
+
128
+ def do_run(output_queue, query)
129
+ super(output_queue, query)
130
+ @cursor_tracker.checkpoint_cursor(intermediate: false) if @cursor_tracker
131
+ end
132
+
125
133
  def pit?(id)
126
134
  !!id&.is_a?(String)
127
135
  end
@@ -192,6 +200,8 @@ module LogStash
192
200
  end
193
201
  end
194
202
 
203
+ @cursor_tracker.checkpoint_cursor(intermediate: true) if @cursor_tracker
204
+
195
205
  logger.info("Query completed", log_details)
196
206
  end
197
207
 
@@ -73,6 +73,8 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
73
73
 
74
74
  require 'logstash/inputs/elasticsearch/paginated_search'
75
75
  require 'logstash/inputs/elasticsearch/aggregation'
76
+ require 'logstash/inputs/elasticsearch/cursor_tracker'
77
+ require 'logstash/inputs/elasticsearch/esql'
76
78
 
77
79
  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
78
80
  include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck
@@ -95,15 +97,21 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
95
97
  # The index or alias to search.
96
98
  config :index, :validate => :string, :default => "logstash-*"
97
99
 
98
- # The query to be executed. Read the Elasticsearch query DSL documentation
99
- # for more info
100
- # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
100
+ # A type of Elasticsearch query, provided by @query. This will validate query shape and other params.
101
+ config :query_type, :validate => %w[dsl esql], :default => 'dsl'
102
+
103
+ # The query to be executed. DSL or ES|QL (when `query_type => 'esql'`) query shape is accepted.
104
+ # Read the following documentations for more info
105
+ # Query DSL: https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
106
+ # ES|QL: https://www.elastic.co/guide/en/elasticsearch/reference/current/esql.html
101
107
  config :query, :validate => :string, :default => '{ "sort": [ "_doc" ] }'
102
108
 
103
- # This allows you to speccify the response type: either hits or aggregations
104
- # where hits: normal search request
105
- # aggregations: aggregation request
106
- config :response_type, :validate => ['hits', 'aggregations'], :default => 'hits'
109
+ # This allows you to specify the DSL response type: one of [hits, aggregations]
110
+ # where
111
+ # hits: normal search request
112
+ # aggregations: aggregation request
113
+ # Note that this param is invalid when `query_type => 'esql'`, ES|QL response shape is always a tabular format
114
+ config :response_type, :validate => %w[hits aggregations], :default => 'hits'
107
115
 
108
116
  # This allows you to set the maximum number of hits returned per scroll.
109
117
  config :size, :validate => :number, :default => 1000
@@ -124,6 +132,20 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
124
132
  # by this pipeline input.
125
133
  config :slices, :validate => :number
126
134
 
135
+ # Enable tracking the value of a given field to be used as a cursor
136
+ # Main concerns:
137
+ # * using anything other than _event.timestamp easily leads to data loss
138
+ # * the first "synchronization run" can take a long time
139
+ config :tracking_field, :validate => :string
140
+
141
+ # Define the initial seed value of the tracking_field
142
+ config :tracking_field_seed, :validate => :string, :default => "1970-01-01T00:00:00.000000000Z"
143
+
144
+ # The location of where the tracking field value will be stored
145
+ # The value is persisted after each scheduled run (and not per result)
146
+ # If it's not set it defaults to '${path.data}/plugins/inputs/elasticsearch/<pipeline_id>/last_run_value'
147
+ config :last_run_metadata_path, :validate => :string
148
+
127
149
  # If set, include Elasticsearch document information such as index, type, and
128
150
  # the id in the event.
129
151
  #
@@ -250,6 +272,10 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
250
272
  # exactly once.
251
273
  config :schedule, :validate => :string
252
274
 
275
+ # Allow scheduled runs to overlap (enabled by default). Setting to false will
276
+ # only start a new scheduled run after the previous one completes.
277
+ config :schedule_overlap, :validate => :boolean
278
+
253
279
  # If set, the _source of each hit will be added nested under the target instead of at the top-level
254
280
  config :target, :validate => :field_reference
255
281
 
@@ -267,6 +293,9 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
267
293
  DEFAULT_EAV_HEADER = { "Elastic-Api-Version" => "2023-10-31" }.freeze
268
294
  INTERNAL_ORIGIN_HEADER = { 'x-elastic-product-origin' => 'logstash-input-elasticsearch'}.freeze
269
295
 
296
+ LS_ESQL_SUPPORT_VERSION = "8.17.4" # the version started using elasticsearch-ruby v8
297
+ ES_ESQL_SUPPORT_VERSION = "8.11.0"
298
+
270
299
  def initialize(params={})
271
300
  super(params)
272
301
 
@@ -283,10 +312,17 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
283
312
  fill_hosts_from_cloud_id
284
313
  setup_ssl_params!
285
314
 
286
- @base_query = LogStash::Json.load(@query)
287
- if @slices
288
- @base_query.include?('slice') && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `query` option cannot specify specific `slice` when configured to manage parallel slices with `slices` option")
289
- @slices < 1 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `slices` option must be greater than zero, got `#{@slices}`")
315
+ if @query_type == 'esql'
316
+ validate_ls_version_for_esql_support!
317
+ validate_esql_query!
318
+ not_allowed_options = original_params.keys & %w(index size slices search_api docinfo docinfo_target docinfo_fields response_type tracking_field)
319
+ raise(LogStash::ConfigurationError, "Configured #{not_allowed_options} params are not allowed while using ES|QL query") if not_allowed_options&.size > 1
320
+ else
321
+ @base_query = LogStash::Json.load(@query)
322
+ if @slices
323
+ @base_query.include?('slice') && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `query` option cannot specify specific `slice` when configured to manage parallel slices with `slices` option")
324
+ @slices < 1 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `slices` option must be greater than zero, got `#{@slices}`")
325
+ end
290
326
  end
291
327
 
292
328
  @retries < 0 && fail(LogStash::ConfigurationError, "Elasticsearch Input Plugin's `retries` option must be equal or greater than zero, got `#{@retries}`")
@@ -322,21 +358,27 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
322
358
 
323
359
  test_connection!
324
360
 
361
+ validate_es_for_esql_support!
362
+
325
363
  setup_serverless
326
364
 
327
365
  setup_search_api
328
366
 
329
- setup_query_executor
367
+ @query_executor = create_query_executor
368
+
369
+ setup_cursor_tracker
330
370
 
331
371
  @client
332
372
  end
333
373
 
334
374
  def run(output_queue)
335
375
  if @schedule
336
- scheduler.cron(@schedule) { @query_executor.do_run(output_queue) }
376
+ scheduler.cron(@schedule, :overlap => @schedule_overlap) do
377
+ @query_executor.do_run(output_queue, get_query_object())
378
+ end
337
379
  scheduler.join
338
380
  else
339
- @query_executor.do_run(output_queue)
381
+ @query_executor.do_run(output_queue, get_query_object())
340
382
  end
341
383
  end
342
384
 
@@ -347,6 +389,28 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
347
389
  event = event_from_hit(hit, root_field)
348
390
  decorate(event)
349
391
  output_queue << event
392
+ record_last_value(event)
393
+ end
394
+
395
+ def decorate_event(event)
396
+ decorate(event)
397
+ end
398
+
399
+ private
400
+
401
+ def get_query_object
402
+ return @query if @query_type == 'esql'
403
+ if @cursor_tracker
404
+ query = @cursor_tracker.inject_cursor(@query)
405
+ @logger.debug("new query is #{query}")
406
+ else
407
+ query = @query
408
+ end
409
+ LogStash::Json.load(query)
410
+ end
411
+
412
+ def record_last_value(event)
413
+ @cursor_tracker.record_last_value(event) if @tracking_field
350
414
  end
351
415
 
352
416
  def event_from_hit(hit, root_field)
@@ -376,8 +440,6 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
376
440
  event.set(@docinfo_target, docinfo_target)
377
441
  end
378
442
 
379
- private
380
-
381
443
  def hosts_default?(hosts)
382
444
  hosts.nil? || ( hosts.is_a?(Array) && hosts.empty? )
383
445
  end
@@ -626,18 +688,38 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
626
688
 
627
689
  end
628
690
 
629
- def setup_query_executor
630
- @query_executor = case @response_type
631
- when 'hits'
632
- if @resolved_search_api == "search_after"
633
- LogStash::Inputs::Elasticsearch::SearchAfter.new(@client, self)
634
- else
635
- logger.warn("scroll API is no longer recommended for pagination. Consider using search_after instead.") if es_major_version >= 8
636
- LogStash::Inputs::Elasticsearch::Scroll.new(@client, self)
637
- end
638
- when 'aggregations'
639
- LogStash::Inputs::Elasticsearch::Aggregation.new(@client, self)
640
- end
691
+ def create_query_executor
692
+ return LogStash::Inputs::Elasticsearch::Esql.new(@client, self) if @query_type == 'esql'
693
+
694
+ # DSL query executor
695
+ return LogStash::Inputs::Elasticsearch::Aggregation.new(@client, self) if @response_type == 'aggregations'
696
+ # response_type is hits, executor can be search_after or scroll type
697
+ return LogStash::Inputs::Elasticsearch::SearchAfter.new(@client, self) if @resolved_search_api == "search_after"
698
+
699
+ logger.warn("scroll API is no longer recommended for pagination. Consider using search_after instead.") if es_major_version >= 8
700
+ LogStash::Inputs::Elasticsearch::Scroll.new(@client, self)
701
+ end
702
+
703
+ def setup_cursor_tracker
704
+ return unless @tracking_field
705
+ return unless @query_executor.is_a?(LogStash::Inputs::Elasticsearch::SearchAfter)
706
+
707
+ if @resolved_search_api != "search_after" || @response_type != "hits"
708
+ raise ConfigurationError.new("The `tracking_field` feature can only be used with `search_after` non-aggregation queries")
709
+ end
710
+
711
+ @cursor_tracker = CursorTracker.new(last_run_metadata_path: last_run_metadata_path,
712
+ tracking_field: @tracking_field,
713
+ tracking_field_seed: @tracking_field_seed)
714
+ @query_executor.cursor_tracker = @cursor_tracker
715
+ end
716
+
717
+ def last_run_metadata_path
718
+ return @last_run_metadata_path if @last_run_metadata_path
719
+
720
+ last_run_metadata_path = ::File.join(LogStash::SETTINGS.get_value("path.data"), "plugins", "inputs", "elasticsearch", pipeline_id, "last_run_value")
721
+ FileUtils.mkdir_p ::File.dirname(last_run_metadata_path)
722
+ last_run_metadata_path
641
723
  end
642
724
 
643
725
  def get_transport_client_class
@@ -654,6 +736,26 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
654
736
  ::Elastic::Transport::Transport::HTTP::Manticore
655
737
  end
656
738
 
739
+ def validate_ls_version_for_esql_support!
740
+ if Gem::Version.create(LOGSTASH_VERSION) < Gem::Version.create(LS_ESQL_SUPPORT_VERSION)
741
+ fail("Current version of Logstash does not include Elasticsearch client which supports ES|QL. Please upgrade Logstash to at least #{LS_ESQL_SUPPORT_VERSION}")
742
+ end
743
+ end
744
+
745
+ def validate_esql_query!
746
+ fail(LogStash::ConfigurationError, "`query` cannot be empty") if @query.strip.empty?
747
+ source_commands = %w[FROM ROW SHOW]
748
+ contains_source_command = source_commands.any? { |source_command| @query.strip.start_with?(source_command) }
749
+ fail(LogStash::ConfigurationError, "`query` needs to start with any of #{source_commands}") unless contains_source_command
750
+ end
751
+
752
+ def validate_es_for_esql_support!
753
+ return unless @query_type == 'esql'
754
+ # make sure connected ES supports ES|QL (8.11+)
755
+ es_supports_esql = Gem::Version.create(es_version) >= Gem::Version.create(ES_ESQL_SUPPORT_VERSION)
756
+ fail("Connected Elasticsearch #{es_version} version does not supports ES|QL. ES|QL feature requires at least Elasticsearch #{ES_ESQL_SUPPORT_VERSION} version.") unless es_supports_esql
757
+ end
758
+
657
759
  module URIOrEmptyValidator
658
760
  ##
659
761
  # @override to provide :uri_or_empty validator
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-input-elasticsearch'
4
- s.version = '5.0.2'
4
+ s.version = '5.2.0'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = "Reads query results from an Elasticsearch cluster"
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -0,0 +1,72 @@
1
+ # encoding: utf-8
2
+ require "logstash/devutils/rspec/spec_helper"
3
+ require "logstash/devutils/rspec/shared_examples"
4
+ require "logstash/inputs/elasticsearch"
5
+ require "logstash/inputs/elasticsearch/cursor_tracker"
6
+
7
+ describe LogStash::Inputs::Elasticsearch::CursorTracker do
8
+
9
+ let(:last_run_metadata_path) { Tempfile.new('cursor_tracker_testing').path }
10
+ let(:tracking_field_seed) { "1980-01-01T23:59:59.999999999Z" }
11
+ let(:options) do
12
+ {
13
+ :last_run_metadata_path => last_run_metadata_path,
14
+ :tracking_field => "my_field",
15
+ :tracking_field_seed => tracking_field_seed
16
+ }
17
+ end
18
+
19
+ subject { described_class.new(**options) }
20
+
21
+ it "creating a class works" do
22
+ expect(subject).to be_a described_class
23
+ end
24
+
25
+ describe "checkpoint_cursor" do
26
+ before(:each) do
27
+ subject.checkpoint_cursor(intermediate: false) # store seed value
28
+ [
29
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-03T23:59:59.999999999Z")) },
30
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-01T23:59:59.999999999Z")) },
31
+ Thread.new(subject) {|subject| subject.record_last_value(LogStash::Event.new("my_field" => "2025-01-02T23:59:59.999999999Z")) },
32
+ ].each(&:join)
33
+ end
34
+ context "when doing intermediate checkpoint" do
35
+ it "persists the smallest value" do
36
+ subject.checkpoint_cursor(intermediate: true)
37
+ expect(IO.read(last_run_metadata_path)).to eq("2025-01-01T23:59:59.999999999Z")
38
+ end
39
+ end
40
+ context "when doing non-intermediate checkpoint" do
41
+ it "persists the largest value" do
42
+ subject.checkpoint_cursor(intermediate: false)
43
+ expect(IO.read(last_run_metadata_path)).to eq("2025-01-03T23:59:59.999999999Z")
44
+ end
45
+ end
46
+ end
47
+
48
+ describe "inject_cursor" do
49
+ let(:new_value) { "2025-01-03T23:59:59.999999999Z" }
50
+ let(:fake_now) { "2026-09-19T23:59:59.999999999Z" }
51
+
52
+ let(:query) do
53
+ %q[
54
+ { "query": { "range": { "event.ingested": { "gt": :last_value, "lt": :present}}}, "sort": [ { "event.ingested": {"order": "asc", "format": "strict_date_optional_time_nanos", "numeric_type" : "date_nanos" } } ] }
55
+ ]
56
+ end
57
+
58
+ before(:each) do
59
+ subject.record_last_value(LogStash::Event.new("my_field" => new_value))
60
+ subject.checkpoint_cursor(intermediate: false)
61
+ allow(subject).to receive(:now_minus_30s).and_return(fake_now)
62
+ end
63
+
64
+ it "injects the value of the cursor into json query if it contains :last_value" do
65
+ expect(subject.inject_cursor(query)).to match(/#{new_value}/)
66
+ end
67
+
68
+ it "injects current time into json query if it contains :present" do
69
+ expect(subject.inject_cursor(query)).to match(/#{fake_now}/)
70
+ end
71
+ end
72
+ end
@@ -0,0 +1,180 @@
1
+ # encoding: utf-8
2
+ require "logstash/devutils/rspec/spec_helper"
3
+ require "logstash/inputs/elasticsearch"
4
+ require "elasticsearch"
5
+
6
+ describe LogStash::Inputs::Elasticsearch::Esql do
7
+ let(:client) { instance_double(Elasticsearch::Client) }
8
+ let(:esql_client) { double("esql-client") }
9
+
10
+ let(:plugin) { instance_double(LogStash::Inputs::Elasticsearch, params: plugin_config, decorate_event: nil) }
11
+ let(:plugin_config) do
12
+ {
13
+ "query" => "FROM test-index | STATS count() BY field",
14
+ "retries" => 3
15
+ }
16
+ end
17
+ let(:esql_executor) { described_class.new(client, plugin) }
18
+
19
+ describe "#initialization" do
20
+ it "sets up the ESQL client with correct parameters" do
21
+ expect(esql_executor.instance_variable_get(:@query)).to eq(plugin_config["query"])
22
+ expect(esql_executor.instance_variable_get(:@retries)).to eq(plugin_config["retries"])
23
+ expect(esql_executor.instance_variable_get(:@target_field)).to eq(nil)
24
+ end
25
+ end
26
+
27
+ describe "#execution" do
28
+ let(:output_queue) { Queue.new }
29
+
30
+ context "when faces error while retrying" do
31
+ it "retries the given block the specified number of times" do
32
+ attempts = 0
33
+ result = esql_executor.retryable("Test Job") do
34
+ attempts += 1
35
+ raise StandardError if attempts < 3
36
+ "success"
37
+ end
38
+ expect(attempts).to eq(3)
39
+ expect(result).to eq("success")
40
+ end
41
+
42
+ it "returns false if the block fails all attempts" do
43
+ result = esql_executor.retryable("Test Job") do
44
+ raise StandardError
45
+ end
46
+ expect(result).to eq(false)
47
+ end
48
+ end
49
+
50
+ context "when executing chain of processes" do
51
+ let(:response) { { 'values' => [%w[foo bar]], 'columns' => [{ 'name' => 'a.b.1.d', 'type' => 'keyword' },
52
+ { 'name' => 'h_g.k$l.m.0', 'type' => 'keyword' }] } }
53
+
54
+ before do
55
+ allow(esql_executor).to receive(:retryable).and_yield
56
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
57
+ end
58
+
59
+ it "executes the ESQL query and processes the results" do
60
+ allow(response).to receive(:headers).and_return({})
61
+ esql_executor.do_run(output_queue, plugin_config["query"])
62
+ expect(output_queue.size).to eq(1)
63
+
64
+ event = output_queue.pop
65
+ expect(event.get('[a][b][1][d]')).to eq('foo')
66
+ expect(event.get('[h_g][k$l][m][0]')).to eq('bar')
67
+ end
68
+
69
+ it "logs a warning if the response contains a warning header" do
70
+ allow(response).to receive(:headers).and_return({ "warning" => "some warning" })
71
+ expect(esql_executor.logger).to receive(:warn).with("ES|QL executor received warning", { :warning_message => "some warning" })
72
+ esql_executor.do_run(output_queue, plugin_config["query"])
73
+ end
74
+
75
+ it "does not log a warning if the response does not contain a warning header" do
76
+ allow(response).to receive(:headers).and_return({})
77
+ expect(esql_executor.logger).not_to receive(:warn)
78
+ esql_executor.do_run(output_queue, plugin_config["query"])
79
+ end
80
+ end
81
+
82
+ describe "multiple rows in the result" do
83
+ let(:response) { { 'values' => rows, 'columns' => [{ 'name' => 'key.1', 'type' => 'keyword' },
84
+ { 'name' => 'key.2', 'type' => 'keyword' }] } }
85
+
86
+ before do
87
+ allow(esql_executor).to receive(:retryable).and_yield
88
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
89
+ allow(response).to receive(:headers).and_return({})
90
+ end
91
+
92
+ context "when mapping" do
93
+ let(:rows) { [%w[foo bar], %w[hello world]] }
94
+
95
+ it "1:1 maps rows to events" do
96
+ esql_executor.do_run(output_queue, plugin_config["query"])
97
+ expect(output_queue.size).to eq(2)
98
+
99
+ event_1 = output_queue.pop
100
+ expect(event_1.get('[key][1]')).to eq('foo')
101
+ expect(event_1.get('[key][2]')).to eq('bar')
102
+
103
+ event_2 = output_queue.pop
104
+ expect(event_2.get('[key][1]')).to eq('hello')
105
+ expect(event_2.get('[key][2]')).to eq('world')
106
+ end
107
+ end
108
+
109
+ context "when partial nil values appear" do
110
+ let(:rows) { [[nil, "bar"], ["hello", nil]] }
111
+
112
+ it "ignores the nil values" do
113
+ esql_executor.do_run(output_queue, plugin_config["query"])
114
+ expect(output_queue.size).to eq(2)
115
+
116
+ event_1 = output_queue.pop
117
+ expect(event_1.get('[key][1]')).to eq(nil)
118
+ expect(event_1.get('[key][2]')).to eq('bar')
119
+
120
+ event_2 = output_queue.pop
121
+ expect(event_2.get('[key][1]')).to eq('hello')
122
+ expect(event_2.get('[key][2]')).to eq(nil)
123
+ end
124
+ end
125
+ end
126
+
127
+ context "when sub-elements occur in the result" do
128
+ let(:response) { {
129
+ 'values' => [[50, 1, 100], [50, 0, 1000], [50, 9, 99999]],
130
+ 'columns' =>
131
+ [
132
+ { 'name' => 'time', 'type' => 'long' },
133
+ { 'name' => 'time.min', 'type' => 'long' },
134
+ { 'name' => 'time.max', 'type' => 'long' },
135
+ ]
136
+ } }
137
+
138
+ before do
139
+ allow(esql_executor).to receive(:retryable).and_yield
140
+ allow(client).to receive_message_chain(:esql, :query).and_return(response)
141
+ allow(response).to receive(:headers).and_return({})
142
+ end
143
+
144
+ it "includes 1st depth elements into event" do
145
+ esql_executor.do_run(output_queue, plugin_config["query"])
146
+
147
+ expect(output_queue.size).to eq(3)
148
+ 3.times do
149
+ event = output_queue.pop
150
+ expect(event.get('time')).to eq(50)
151
+ expect(event.get('[time][min]')).to eq(nil)
152
+ expect(event.get('[time][max]')).to eq(nil)
153
+ end
154
+ end
155
+ end
156
+ end
157
+
158
+ describe "#column spec" do
159
+ let(:valid_spec) { { 'name' => 'field.name', 'type' => 'keyword' } }
160
+ let(:column_spec) { LogStash::Inputs::Elasticsearch::ColumnSpec.new(valid_spec) }
161
+
162
+ context "when initializes" do
163
+ it "sets the name and type attributes" do
164
+ expect(column_spec.name).to eq("field.name")
165
+ expect(column_spec.type).to eq("keyword")
166
+ end
167
+
168
+ it "freezes the name and type attributes" do
169
+ expect(column_spec.name).to be_frozen
170
+ expect(column_spec.type).to be_frozen
171
+ end
172
+ end
173
+
174
+ context "when calls the field reference" do
175
+ it "returns the correct field reference format" do
176
+ expect(column_spec.field_reference).to eq("[field][name]")
177
+ end
178
+ end
179
+ end
180
+ end if LOGSTASH_VERSION >= LogStash::Inputs::Elasticsearch::LS_ESQL_SUPPORT_VERSION