logstash-input-opensearch 1.0.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +0 -0
- data/ADMINS.md +28 -0
- data/CODE_OF_CONDUCT.md +24 -0
- data/CONTRIBUTING.md +121 -0
- data/DEVELOPER_GUIDE.md +77 -0
- data/Gemfile +14 -0
- data/MAINTAINERS.md +82 -0
- data/README.md +62 -0
- data/RELEASING.md +111 -0
- data/SECURITY.md +3 -0
- data/lib/logstash/inputs/opensearch/patches/_opensearch_transport_connections_selector.rb +52 -0
- data/lib/logstash/inputs/opensearch/patches/_opensearch_transport_http_manticore.rb +44 -0
- data/lib/logstash/inputs/opensearch.rb +432 -0
- data/logstash-input-opensearch.gemspec +52 -0
- data/spec/fixtures/test_certs/ca.crt +20 -0
- data/spec/fixtures/test_certs/ca.key +27 -0
- data/spec/fixtures/test_certs/es.crt +20 -0
- data/spec/fixtures/test_certs/es.key +27 -0
- data/spec/inputs/integration/opensearch_spec.rb +83 -0
- data/spec/inputs/opensearch_spec.rb +877 -0
- data/spec/opensearch_helper.rb +47 -0
- data.tar.gz.sig +1 -0
- metadata +312 -0
- metadata.gz.sig +0 -0
@@ -0,0 +1,877 @@
# Copyright OpenSearch Contributors
# SPDX-License-Identifier: Apache-2.0

# encoding: utf-8
require "logstash/devutils/rspec/spec_helper"
require "logstash/devutils/rspec/shared_examples"
require "logstash/inputs/opensearch"
require "opensearch"
require "timecop"
require "stud/temporary"
require "time"
require "date"
require "cabin"
require "webrick"
require "uri"

require 'logstash/plugin_mixins/ecs_compatibility_support/spec_helper'

describe LogStash::Inputs::OpenSearch, :ecs_compatibility_support do

  let(:plugin) { described_class.new(config) }
  let(:queue) { Queue.new }

  before(:each) do
    OpenSearch::Client.send(:define_method, :ping) { } # define no-action ping method
  end

  context "register" do
    let(:config) do
      {
        "schedule" => "* * * * * UTC"
      }
    end

    context "against authentic OpenSearch" do
      it "should not raise an exception" do
        expect { plugin.register }.to_not raise_error
      end
    end

    context "against not authentic OpenSearch" do
      before(:each) do
        OpenSearch::Client.send(:define_method, :ping) { raise OpenSearch::UnsupportedProductError.new("Fake error") } # define error ping method
      end

      it "should raise ConfigurationError" do
        expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
      end
    end
  end

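  # "an interruptible input plugin" is a shared example that, to our understanding,
  # comes from logstash-devutils (required at the top of this file); it verifies
  # that a running input shuts down promptly when `stop` is invoked mid-run.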
  it_behaves_like "an interruptible input plugin" do
    let(:client) { double("opensearch-client") }
    let(:config) do
      {
        "schedule" => "* * * * * UTC"
      }
    end

    before :each do
      allow(OpenSearch::Client).to receive(:new).and_return(client)
      hit = {
        "_index" => "logstash-2014.10.12",
        "_type" => "logs",
        "_id" => "C5b2xLQwTZa76jBmHIbwHQ",
        "_score" => 1.0,
        "_source" => { "message" => ["ohayo"] }
      }
      allow(client).to receive(:search) { { "hits" => { "hits" => [hit] } } }
      allow(client).to receive(:scroll) { { "hits" => { "hits" => [hit] } } }
      allow(client).to receive(:clear_scroll).and_return(nil)
      allow(client).to receive(:ping)
    end
  end

  ecs_compatibility_matrix(:disabled, :v1, :v8) do |ecs_select|

    before(:each) do
      allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
    end

    let(:config) do
      %q[
        input {
          opensearch {
            hosts => ["localhost"]
            query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
          }
        }
      ]
    end

    let(:mock_response) do
      {
        "_scroll_id" => "cXVlcnlUaGVuRmV0Y2g",
        "took" => 27,
        "timed_out" => false,
        "_shards" => {
          "total" => 169,
          "successful" => 169,
          "failed" => 0
        },
        "hits" => {
          "total" => 1,
          "max_score" => 1.0,
          "hits" => [ {
            "_index" => "logstash-2014.10.12",
            "_type" => "logs",
            "_id" => "C5b2xLQwTZa76jBmHIbwHQ",
            "_score" => 1.0,
            "_source" => { "message" => ["ohayo"] }
          } ]
        }
      }
    end

    let(:mock_scroll_response) do
      {
        "_scroll_id" => "r453Wc1jh0caLJhSDg",
        "hits" => { "hits" => [] }
      }
    end

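    # The stubs below walk the plugin through one full scroll cycle: the initial
    # search returns a page of hits plus a `_scroll_id`, the plugin passes that id
    # back via `scroll` (with a "1m" keep-alive) until a page comes back empty, and
    # then releases server-side resources with `clear_scroll`. The `input(config)`
    # helper used in the examples appears to come from the logstash-devutils spec
    # helpers required above: it builds a pipeline from the config string, runs it,
    # and yields the pipeline and its queue.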
    before(:each) do
      client = OpenSearch::Client.new
      expect(OpenSearch::Client).to receive(:new).with(any_args).and_return(client)
      expect(client).to receive(:search).with(any_args).and_return(mock_response)
      expect(client).to receive(:scroll).with({ :body => { :scroll_id => "cXVlcnlUaGVuRmV0Y2g" }, :scroll => "1m" }).and_return(mock_scroll_response)
      expect(client).to receive(:clear_scroll).and_return(nil)
      expect(client).to receive(:ping)
    end

    it 'creates the events from the hits' do
      event = input(config) do |pipeline, queue|
        queue.pop
      end

      expect(event).to be_a(LogStash::Event)
      expect(event.get("message")).to eql [ "ohayo" ]
    end

    context 'when a target is set' do
      let(:config) do
        %q[
          input {
            opensearch {
              hosts => ["localhost"]
              query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
              target => "[@metadata][_source]"
            }
          }
        ]
      end

      it 'creates the event using the target' do
        event = input(config) do |pipeline, queue|
          queue.pop
        end

        expect(event).to be_a(LogStash::Event)
        expect(event.get("[@metadata][_source][message]")).to eql [ "ohayo" ]
      end
    end

  end

  # This spec is an adapter-spec, ensuring that we send the right sequence of messages to our OpenSearch Client
  # to support sliced scrolling. The underlying implementation will spawn its own threads to consume, so we must be
  # careful to use thread-safe constructs.
  context "with managed sliced scrolling" do
    let(:config) do
      {
        'query' => "#{LogStash::Json.dump(query)}",
        'slices' => slices,
        'docinfo' => true, # include ids
        'docinfo_target' => '[@metadata]'
      }
    end
    let(:query) do
      {
        "query" => {
          "match" => { "city_name" => "Okinawa" }
        },
        "fields" => ["message"]
      }
    end
    let(:slices) { 2 }

    context 'with `slices => 0`' do
      let(:slices) { 0 }
      it 'fails to register' do
        expect { plugin.register }.to raise_error(LogStash::ConfigurationError)
      end
    end

    context 'with `slices => 1`' do
      let(:slices) { 1 }
      it 'runs just one slice' do
        expect(plugin).to receive(:do_run_slice).with(duck_type(:<<))
        expect(Thread).to_not receive(:new)

        plugin.register
        plugin.run([])
      end
    end

    context 'without slices directive' do
      let(:config) { super().tap { |h| h.delete('slices') } }
      it 'runs just one slice' do
        expect(plugin).to receive(:do_run_slice).with(duck_type(:<<))
        expect(Thread).to_not receive(:new)

        plugin.register
        plugin.run([])
      end
    end

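    # Each slice issues its own search whose body merges a slice clause into the
    # configured query; for `slices => 2` the two request bodies take the shape
    # (inferred from the adapter/integration mocks below):
    #   { "query" => { ... }, "slice" => { "id" => 0, "max" => 2 } }
    #   { "query" => { ... }, "slice" => { "id" => 1, "max" => 2 } }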
    2.upto(8) do |slice_count|
      context "with `slices => #{slice_count}`" do
        let(:slices) { slice_count }
        it "runs #{slice_count} independent slices" do
          expect(Thread).to receive(:new).and_call_original.exactly(slice_count).times
          slice_count.times do |slice_id|
            expect(plugin).to receive(:do_run_slice).with(duck_type(:<<), slice_id)
          end

          plugin.register
          plugin.run([])
        end
      end
    end

    # This section of specs heavily mocks the OpenSearch::Client, and ensures that the OpenSearch Input Plugin
    # behaves as expected when handling a series of sliced, scrolled requests/responses.
    context 'adapter/integration' do
      let(:response_template) do
        {
          "took" => 12,
          "timed_out" => false,
          "shards" => {
            "total" => 6,
            "successful" => 6,
            "failed" => 0
          }
        }
      end

      let(:hits_template) do
        {
          "total" => 4,
          "max_score" => 1.0,
          "hits" => []
        }
      end

      let(:hit_template) do
        {
          "_index" => "logstash-2018.08.23",
          "_type" => "logs",
          "_score" => 1.0,
          "_source" => { "message" => ["hello, world"] }
        }
      end

      # BEGIN SLICE 0: a sequence of THREE scrolled responses containing 2, 1, and 0 items.
      # End-of-slice is reached when slice0_response2 is empty.
      begin
        let(:slice0_response0) do
          response_template.merge({
            "_scroll_id" => slice0_scroll1,
            "hits" => hits_template.merge("hits" => [
              hit_template.merge('_id' => "slice0-response0-item0"),
              hit_template.merge('_id' => "slice0-response0-item1")
            ])
          })
        end
        let(:slice0_scroll1) { 'slice:0,scroll:1' }
        let(:slice0_response1) do
          response_template.merge({
            "_scroll_id" => slice0_scroll2,
            "hits" => hits_template.merge("hits" => [
              hit_template.merge('_id' => "slice0-response1-item0")
            ])
          })
        end
        let(:slice0_scroll2) { 'slice:0,scroll:2' }
        let(:slice0_response2) do
          response_template.merge(
            "_scroll_id" => slice0_scroll3,
            "hits" => hits_template.merge({ "hits" => [] })
          )
        end
        let(:slice0_scroll3) { 'slice:0,scroll:3' }
      end
      # END SLICE 0

      # BEGIN SLICE 1: a sequence of TWO scrolled responses containing 2 and 2 items.
      # End-of-slice is reached when slice1_response1 does not contain a next scroll id.
      begin
        let(:slice1_response0) do
          response_template.merge({
            "_scroll_id" => slice1_scroll1,
            "hits" => hits_template.merge("hits" => [
              hit_template.merge('_id' => "slice1-response0-item0"),
              hit_template.merge('_id' => "slice1-response0-item1")
            ])
          })
        end
        let(:slice1_scroll1) { 'slice:1,scroll:1' }
        let(:slice1_response1) do
          response_template.merge({
            "hits" => hits_template.merge("hits" => [
              hit_template.merge('_id' => "slice1-response1-item0"),
              hit_template.merge('_id' => "slice1-response1-item1")
            ])
          })
        end
      end
      # END SLICE 1

      let(:client) { OpenSearch::Client.new }

      # RSpec mocks validations are not threadsafe.
      # Allow caller to synchronize.
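      # The helper below wraps a method in a Mutex so the per-slice threads invoke
      # the mocked methods one at a time, while still delegating to the original
      # implementation.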
      def synchronize_method!(object, method_name)
        original_method = object.method(method_name)
        mutex = Mutex.new
        allow(object).to receive(method_name).with(any_args) do |*method_args, &method_block|
          mutex.synchronize do
            original_method.call(*method_args, &method_block)
          end
        end
      end

      before(:each) do
        expect(OpenSearch::Client).to receive(:new).with(any_args).and_return(client)
        plugin.register

        expect(client).to receive(:clear_scroll).and_return(nil)

        # SLICE0 is a three-page scroll in which the last page is empty
        slice0_query = LogStash::Json.dump(query.merge('slice' => { 'id' => 0, 'max' => 2 }))
        expect(client).to receive(:search).with(hash_including(:body => slice0_query)).and_return(slice0_response0)
        expect(client).to receive(:scroll).with(hash_including(:body => { :scroll_id => slice0_scroll1 })).and_return(slice0_response1)
        expect(client).to receive(:scroll).with(hash_including(:body => { :scroll_id => slice0_scroll2 })).and_return(slice0_response2)
        allow(client).to receive(:ping)

        # SLICE1 is a two-page scroll in which the last page has no next scroll id
        slice1_query = LogStash::Json.dump(query.merge('slice' => { 'id' => 1, 'max' => 2 }))
        expect(client).to receive(:search).with(hash_including(:body => slice1_query)).and_return(slice1_response0)
        expect(client).to receive(:scroll).with(hash_including(:body => { :scroll_id => slice1_scroll1 })).and_return(slice1_response1)

        synchronize_method!(plugin, :scroll_request)
        synchronize_method!(plugin, :search_request)
      end

      let(:emitted_events) do
        queue = Queue.new # since we are running slices in threads, we need a thread-safe queue
        plugin.run(queue)
        events = []
        events << queue.pop until queue.empty?
        events
      end

      let(:emitted_event_ids) do
        emitted_events.map { |event| event.get('[@metadata][_id]') }
      end

      it 'emits the hits on the first page of the first slice' do
        expect(emitted_event_ids).to include('slice0-response0-item0')
        expect(emitted_event_ids).to include('slice0-response0-item1')
      end
      it 'emits the hits on the second page of the first slice' do
        expect(emitted_event_ids).to include('slice0-response1-item0')
      end

      it 'emits the hits on the first page of the second slice' do
        expect(emitted_event_ids).to include('slice1-response0-item0')
        expect(emitted_event_ids).to include('slice1-response0-item1')
      end

      it 'emits the hits on the second page of the second slice' do
        expect(emitted_event_ids).to include('slice1-response1-item0')
        expect(emitted_event_ids).to include('slice1-response1-item1')
      end

      it 'does not double-emit' do
        expect(emitted_event_ids.uniq).to eq(emitted_event_ids)
      end

      it 'emits events with appropriate fields' do
        emitted_events.each do |event|
          expect(event).to be_a(LogStash::Event)
          expect(event.get('message')).to eq(['hello, world'])
          expect(event.get('[@metadata][_id]')).to_not be_nil
          expect(event.get('[@metadata][_id]')).to_not be_empty
          expect(event.get('[@metadata][_index]')).to start_with('logstash-')
        end
      end
    end
  end

  context "with OpenSearch document information" do
    let!(:response) do
      {
        "_scroll_id" => "cXVlcnlUaGVuRmV0Y2g",
        "took" => 27,
        "timed_out" => false,
        "_shards" => {
          "total" => 169,
          "successful" => 169,
          "failed" => 0
        },
        "hits" => {
          "total" => 1,
          "max_score" => 1.0,
          "hits" => [ {
            "_index" => "logstash-2014.10.12",
            "_type" => "logs",
            "_id" => "C5b2xLQwTZa76jBmHIbwHQ",
            "_score" => 1.0,
            "_source" => {
              "message" => ["ohayo"],
              "metadata_with_hash" => { "awesome" => "logstash" },
              "metadata_with_string" => "a string"
            }
          } ]
        }
      }
    end

    let(:scroll_response) do
      {
        "_scroll_id" => "r453Wc1jh0caLJhSDg",
        "hits" => { "hits" => [] }
      }
    end

    let(:client) { OpenSearch::Client.new }

    before do
      expect(OpenSearch::Client).to receive(:new).with(any_args).and_return(client)
      expect(client).to receive(:search).with(any_args).and_return(response)
      allow(client).to receive(:scroll).with({ :body => { :scroll_id => "cXVlcnlUaGVuRmV0Y2g" }, :scroll => "1m" }).and_return(scroll_response)
      allow(client).to receive(:clear_scroll).and_return(nil)
      allow(client).to receive(:ping).and_return(nil)
    end

    ecs_compatibility_matrix(:disabled, :v1, :v8) do |ecs_select|

      before(:each) do
        allow_any_instance_of(described_class).to receive(:ecs_compatibility).and_return(ecs_compatibility)
      end

      context 'with docinfo enabled' do
        let(:config_metadata) do
          %q[
            input {
              opensearch {
                hosts => ["localhost"]
                query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
                docinfo => true
              }
            }
          ]
        end

        it "provides document info under metadata" do
          event = input(config_metadata) do |pipeline, queue|
            queue.pop
          end

          if ecs_select.active_mode == :disabled
            expect(event.get("[@metadata][_index]")).to eq('logstash-2014.10.12')
            expect(event.get("[@metadata][_type]")).to eq('logs')
            expect(event.get("[@metadata][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
          else
            expect(event.get("[@metadata][input][opensearch][_index]")).to eq('logstash-2014.10.12')
            expect(event.get("[@metadata][input][opensearch][_type]")).to eq('logs')
            expect(event.get("[@metadata][input][opensearch][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
          end
        end

        it 'merges values if the `docinfo_target` already exists in the `_source` document' do
          config_metadata_with_hash = %Q[
            input {
              opensearch {
                hosts => ["localhost"]
                query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
                docinfo => true
                docinfo_target => 'metadata_with_hash'
              }
            }
          ]

          event = input(config_metadata_with_hash) do |pipeline, queue|
            queue.pop
          end

          expect(event.get("[metadata_with_hash][_index]")).to eq('logstash-2014.10.12')
          expect(event.get("[metadata_with_hash][_type]")).to eq('logs')
          expect(event.get("[metadata_with_hash][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
          expect(event.get("[metadata_with_hash][awesome]")).to eq("logstash")
        end

        context 'if the `docinfo_target` exists but is not of type hash' do
          let(:config) { {
            "hosts" => ["localhost"],
            "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
            "docinfo" => true,
            "docinfo_target" => 'metadata_with_string'
          } }
          it 'throws an exception if the `docinfo_target` exists but is not of type hash' do
            expect(client).not_to receive(:clear_scroll)
            plugin.register
            expect { plugin.run([]) }.to raise_error(Exception, /incompatible event/)
          end
        end

        it 'should move the document information to the specified field' do
          config = %q[
            input {
              opensearch {
                hosts => ["localhost"]
                query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
                docinfo => true
                docinfo_target => 'meta'
              }
            }
          ]
          event = input(config) do |pipeline, queue|
            queue.pop
          end

          expect(event.get("[meta][_index]")).to eq('logstash-2014.10.12')
          expect(event.get("[meta][_type]")).to eq('logs')
          expect(event.get("[meta][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
        end

        it "allows specifying which fields from the document info to save to metadata" do
          fields = ["_index"]
          config = %Q[
            input {
              opensearch {
                hosts => ["localhost"]
                query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
                docinfo => true
                docinfo_fields => #{fields}
              }
            }]

          event = input(config) do |pipeline, queue|
            queue.pop
          end

          meta_base = event.get(ecs_select.active_mode == :disabled ? "@metadata" : "[@metadata][input][opensearch]")
          expect(meta_base.keys).to eq(fields)
        end

        it 'should be able to reference metadata fields in `add_field` decorations' do
          config = %q[
            input {
              opensearch {
                hosts => ["localhost"]
                query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
                docinfo => true
                add_field => {
                  'identifier' => "foo:%{[@metadata][_type]}:%{[@metadata][_id]}"
                }
              }
            }
          ]

          event = input(config) do |pipeline, queue|
            queue.pop
          end

          expect(event.get('identifier')).to eq('foo:logs:C5b2xLQwTZa76jBmHIbwHQ')
        end if ecs_select.active_mode == :disabled

      end

    end

    context "when not defining the docinfo" do
      it 'should keep the document information in the root of the event' do
        config = %q[
          input {
            opensearch {
              hosts => ["localhost"]
              query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
            }
          }
        ]
        event = input(config) do |pipeline, queue|
          queue.pop
        end

        expect(event.get("[@metadata]")).to be_empty
      end
    end
  end

  describe "client" do
    let(:config) do
      {}
    end
    let(:plugin) { described_class.new(config) }
    let(:event) { LogStash::Event.new({}) }

    describe "proxy" do
      let(:config) { super().merge({ 'proxy' => 'http://localhost:1234' }) }

      it "should set proxy" do
        plugin.register
        client = plugin.send(:client)
        proxy = extract_transport(client).options[:transport_options][:proxy]

        expect(proxy).to eql "http://localhost:1234"
      end

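      # `${A_MISSING_ENV_VAR:}` uses Logstash's environment-variable substitution
      # with an empty default, so the proxy option presumably resolves to an empty
      # string and the client ends up configured without a proxy.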
      context 'invalid' do
        let(:config) { super().merge({ 'proxy' => '${A_MISSING_ENV_VAR:}' }) }

        it "should not set proxy" do
          plugin.register
          client = plugin.send(:client)

          expect(extract_transport(client).options[:transport_options]).to_not include(:proxy)
        end
      end
    end

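    # Minimal embeddable HTTP server used to observe the plugin's outgoing requests.
    # It binds WEBrick to port 0 (an ephemeral port chosen by the OS) on a daemon
    # thread, and hands the first received request back to the test thread through
    # a CountDownLatch.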
    class StoppableServer

      attr_reader :port

      def initialize()
        queue = Queue.new
        @first_req_waiter = java.util.concurrent.CountDownLatch.new(1)
        @first_request = nil

        @t = java.lang.Thread.new(
          proc do
            begin
              @server = WEBrick::HTTPServer.new :Port => 0, :DocumentRoot => ".",
                :Logger => Cabin::Channel.get, # silence WEBrick logging
                :StartCallback => Proc.new {
                  queue.push("started")
                }
              @port = @server.config[:Port]
              @server.mount_proc '/' do |req, res|
                res.body = '''
                  {
                    "name": "ce7ccfb438e8",
                    "cluster_name": "docker-cluster",
                    "cluster_uuid": "DyR1hN03QvuCWXRy3jtb0g",
                    "version": {
                      "number": "7.13.1",
                      "build_flavor": "default",
                      "build_type": "docker",
                      "build_hash": "9a7758028e4ea59bcab41c12004603c5a7dd84a9",
                      "build_date": "2021-05-28T17:40:59.346932922Z",
                      "build_snapshot": false,
                      "lucene_version": "8.8.2",
                      "minimum_wire_compatibility_version": "6.8.0",
                      "minimum_index_compatibility_version": "6.0.0-beta1"
                    },
                    "tagline": "You Know, for Search"
                  }
                '''
                res.status = 200
                res['Content-Type'] = 'application/json'
                @first_request = req
                @first_req_waiter.countDown()
              end

              @server.mount_proc '/logstash_unit_test/_search' do |req, res|
                res.body = '''
                  {
                    "took" : 1,
                    "timed_out" : false,
                    "_shards" : {
                      "total" : 1,
                      "successful" : 1,
                      "skipped" : 0,
                      "failed" : 0
                    },
                    "hits" : {
                      "total" : {
                        "value" : 10000,
                        "relation" : "gte"
                      },
                      "max_score" : 1.0,
                      "hits" : [
                        {
                          "_index" : "test_bulk_index_2",
                          "_type" : "_doc",
                          "_id" : "sHe6A3wBesqF7ydicQvG",
                          "_score" : 1.0,
                          "_source" : {
                            "@timestamp" : "2021-09-20T15:02:02.557Z",
                            "message" : "{\"name\": \"Andrea\"}",
                            "@version" : "1",
                            "host" : "kalispera",
                            "sequence" : 5
                          }
                        }
                      ]
                    }
                  }
                '''
                res.status = 200
                res['Content-Type'] = 'application/json'
                @first_request = req
                @first_req_waiter.countDown()
              end

              @server.start
            rescue => e
              puts "Error in webserver thread #{e}"
              # ignore
            end
          end
        )
        @t.daemon = true
        @t.start
        queue.pop # blocks until the server is up
      end

      def stop
        @server.shutdown
      end

      def wait_receive_request
        @first_req_waiter.await(2, java.util.concurrent.TimeUnit::SECONDS)
        @first_request
      end
    end


    describe "'user-agent' header" do
      let!(:webserver) { StoppableServer.new } # webserver must be started before the call, so no lazy "let"

      after :each do
        webserver.stop
      end

      it "server should be started" do
        require 'net/http'
        response = nil
        Net::HTTP.start('localhost', webserver.port) { |http|
          response = http.request_get('/')
        }
        expect(response.code.to_i).to eq(200)
      end

      context "used by plugin" do
        let(:config) do
          {
            "hosts" => ["localhost:#{webserver.port}"],
            "query" => '{ "query": { "match": { "statuscode": 200 } }, "sort": [ "_doc" ] }',
            "index" => "logstash_unit_test"
          }
        end
        let(:plugin) { described_class.new(config) }
        let(:event) { LogStash::Event.new({}) }

        it "client should send the expected user-agent" do
          plugin.register

          queue = []
          plugin.run(queue)

          request = webserver.wait_receive_request

          expect(request.header['user-agent'].size).to eq(1)
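          # A matching value would look like (hypothetical example):
          #   logstash/8.4.0 (OS=Linux-5.15.0 amd64; JVM=17.0.2) logstash-input-opensearch/1.0.0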
          expect(request.header['user-agent'][0]).to match(/logstash\/\d*\.\d*\.\d* \(OS=.*; JVM=.*\) logstash-input-opensearch\/\d*\.\d*\.\d*/)
        end
      end
    end


    shared_examples 'configurable timeout' do |config_name, manticore_transport_option|
      let(:config_value) { fail NotImplementedError }
      let(:config) { super().merge(config_name => config_value) }
      {
        :string => 'banana',
        :negative => -123,
        :zero => 0,
      }.each do |value_desc, value|
        context "with an invalid #{value_desc} value" do
          # defined inside the context so each invalid value is actually exercised
          let(:config_value) { value }
          it 'prevents instantiation with a helpful message' do
            expect(described_class.logger).to receive(:error).with(/Expected positive whole number/)
            expect { described_class.new(config) }.to raise_error(LogStash::ConfigurationError)
          end
        end
      end

      context 'with a valid value' do
        let(:config_value) { 17 }

        it "instantiates the opensearch client with the timeout value set via #{manticore_transport_option} in the transport options" do
          expect(OpenSearch::Client).to receive(:new) do |new_opensearch_client_params|
            # We rely on Manticore-specific transport options, fail early if we are using a different
            # transport or are allowing the client to determine its own transport class.
            expect(new_opensearch_client_params).to include(:transport_class)
            expect(new_opensearch_client_params[:transport_class].name).to match(/\bManticore\b/)

            expect(new_opensearch_client_params).to include(:transport_options)
            transport_options = new_opensearch_client_params[:transport_options]
            expect(transport_options).to include(manticore_transport_option)
            expect(transport_options[manticore_transport_option]).to eq(config_value.to_i)
            mock_client = double("fake_client")
            allow(mock_client).to receive(:ping)
            mock_client
          end

          plugin.register
        end
      end
    end

    context 'connect_timeout_seconds' do
      include_examples('configurable timeout', 'connect_timeout_seconds', :connect_timeout)
    end
    context 'request_timeout_seconds' do
      include_examples('configurable timeout', 'request_timeout_seconds', :request_timeout)
    end
    context 'socket_timeout_seconds' do
      include_examples('configurable timeout', 'socket_timeout_seconds', :socket_timeout)
    end
  end

  context "when scheduling" do
    let(:config) do
      {
        "hosts" => ["localhost"],
        "query" => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }',
        "schedule" => "* * * * * UTC"
      }
    end

    before do
      plugin.register
    end

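    # Timecop pins the clock to a known instant and then runs simulated time at
    # 60x, so the three real seconds slept below cover several simulated minutes
    # and the every-minute cron schedule ("* * * * * UTC") fires at least twice.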
    it "should properly schedule" do
      Timecop.travel(Time.new(2000))
      Timecop.scale(60)
      runner = Thread.new do
        expect(plugin).to receive(:do_run) {
          queue << LogStash::Event.new({})
        }.at_least(:twice)

        plugin.run(queue)
      end
      sleep 3
      plugin.stop
      runner.kill
      runner.join
      expect(queue.size).to eq(2)
      Timecop.return
    end

  end

  # @note can be removed once we depend on opensearch gem >= 6.x
  def extract_transport(client) # on 7.x client.transport is an OpenSearch::Transport::Client
    client.transport.respond_to?(:transport) ? client.transport.transport : client.transport
  end

end