logstash-integration-elastic_enterprise_search 2.2.1 → 3.0.1
This diff shows the changes between publicly released versions of the package as they appear in their respective public registries, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +10 -0
- data/README.md +10 -0
- data/lib/logstash/outputs/elastic_app_search.rb +51 -79
- data/lib/logstash/outputs/elastic_workplace_search.rb +75 -37
- data/lib/logstash/plugin_mixins/enterprise_search/client.rb +35 -0
- data/lib/logstash/plugin_mixins/enterprise_search/manticore_transport.rb +84 -0
- data/lib/logstash/plugin_mixins/enterprise_search/ssl_configs.rb +32 -0
- data/logstash-integration-elastic_enterprise_search.gemspec +3 -4
- data/spec/fixtures/certificates/generate.sh +10 -0
- data/spec/fixtures/certificates/root_ca.crt +34 -0
- data/spec/fixtures/certificates/root_ca.key +52 -0
- data/spec/fixtures/certificates/root_keystore.jks +0 -0
- data/spec/fixtures/certificates/root_untrusted_ca.crt +34 -0
- data/spec/fixtures/certificates/root_untrusted_keystore.jks +0 -0
- data/spec/fixtures/certificates/root_untrusted_keystore.p12 +0 -0
- data/spec/integration/outputs/elastic_app_search_spec.rb +133 -105
- data/spec/integration/outputs/elastic_workplace_search_spec.rb +162 -95
- data/spec/unit/outputs/client_spec.rb +26 -0
- data/spec/unit/outputs/elastic_app_search_spec.rb +117 -0
- data/spec/unit/outputs/elastic_workplace_search_spec.rb +117 -0
- data/spec/unit/outputs/manticore_transport_spec.rb +124 -0
- metadata +55 -40
- data/spec/unit/outputs/appsearch_spec.rb +0 -64
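The new plugin mixins and certificate fixtures listed above back the main change in 3.x: shared App Search / Workplace Search client classes running over a Manticore transport with configurable TLS (`ssl_verification_mode`, `ssl_certificate_authorities`, `ssl_truststore_path`/`ssl_truststore_password`, `ssl_supported_protocols`, `ssl_cipher_suites`). As orientation for the hunks that follow, here is a minimal sketch of how the Workplace Search output is driven in the new specs; only the option and method names come from this diff, while the URL, source id, and token values are placeholders.

```ruby
require 'logstash/outputs/elastic_workplace_search'
require 'logstash/event'

# Minimal sketch mirroring the integration spec below; every value here is hypothetical.
output = LogStash::Outputs::ElasticWorkplaceSearch.new(
  'url'                         => 'https://enterprise-search.example.com:3002', # placeholder endpoint
  'source'                      => 'my-custom-source-id',                        # placeholder content source id
  'access_token'                => 'my-access-token',                            # placeholder token
  'ssl_certificate_authorities' => 'spec/fixtures/certificates/root_ca.crt',     # new 3.x SSL option (fixture path from the list above)
  'ssl_verification_mode'       => 'full'                                        # new 3.x SSL option; the specs exercise 'full' and 'none'
)

output.register                                                                  # validates config and builds the client
output.multi_receive([LogStash::Event.new('message' => 'an event to index')])    # sends one document
```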

--- a/data/spec/integration/outputs/elastic_workplace_search_spec.rb
+++ b/data/spec/integration/outputs/elastic_workplace_search_spec.rb
@@ -1,126 +1,193 @@
 # encoding: utf-8
-require
-require
-require
-require
-require
-require "base64"
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/codecs/plain'
+require 'logstash/event'
+require 'json'
+require 'base64'
 
-describe
+describe 'indexing against running Workplace Search', :integration => true do
 
-
-ENV['ELASTIC_STACK_VERSION'].strip.start_with?("7")
-end
+  require 'logstash/outputs/elastic_workplace_search'
 
+  let(:is_version7) { ENV['ELASTIC_STACK_VERSION'].strip.start_with?('7') }
   let(:url) { ENV['ENTERPRISE_SEARCH_URL'] }
-let(:
-let(:
-
-
-
-{"get_token" => true},
-{"Content-Type" => "application/json",
-"Accept" => "application/json",
-"Authorization" => "Basic #{auth}"}
-)
-JSON.load(response.body)
-else
-# Workplace Search v8.0+ provides the api_tokens API to create or retrieve
-# the key to be use as access_token
-conn = Faraday.new(url: url)
-conn.basic_auth(ENV['ENTERPRISE_SEARCH_USERNAME'], ENV['ENTERPRISE_SEARCH_PASSWORD'])
-response = conn.post('/ws/org/api_tokens',
-'{"name":"ls-integration-test-key"}',
-{"Content-Type" => "application/json", "Accept" => "application/json"})
-create_response_json = JSON.load(response.body)
-if create_response_json.has_key?("errors") && create_response_json["errors"].include?("Name is already taken")
-# when a key with the name already exists, retrieve it
-response = conn.get('/ws/org/api_tokens', nil, {"Content-Type" => "application/json", "Accept" => "application/json"})
-retrieve_token_response_json = JSON.load(response.body)
-response_json = retrieve_token_response_json["results"].find {|res| res["id"] == "ls-integration-test-key"}
-else
-response_json = create_response_json
-end
-
-conn.close
-response_json
-end
-end
-let(:access_token) do
-if is_version7?
-source.fetch("access_token")
-else
-source.fetch("key")
-end
-end
-let(:source_id) do
-response = Faraday.post(
-"#{url}/api/ws/v1/sources",
-JSON.dump("service_type" => "custom", "name" => "whatever"),
-{"Content-Type" => "application/json",
-"Accept" => "application/json",
-"Authorization" => "Bearer #{access_token}"}
-)
-source_response_json = JSON.load(response.body)
-source_response_json.fetch("id")
-end
-
+  let(:username) { ENV['ENTERPRISE_SEARCH_USERNAME'] }
+  let(:password) { ENV['ENTERPRISE_SEARCH_PASSWORD'] }
+  let(:basic_auth_header) { Base64.strict_encode64("#{username}:#{password}") }
+  let(:access_token) { fetch_access_token }
+  let(:source_id) { fetch_source_id }
   let(:config) do
     {
-
-
-
+      'url' => url,
+      'source' => source_id,
+      'access_token' => access_token
     }
   end
 
   subject(:workplace_search_output) { LogStash::Outputs::ElasticWorkplaceSearch.new(config) }
 
-
+  describe 'indexing' do
+    let(:config) { super().merge('ssl_verification_mode' => 'none') }
+    let(:total_property_keys) { %w[meta page total_pages] }
+    let(:register) { true }
 
-
-
+    before(:each) { workplace_search_output.register if register }
+
+    describe 'single event' do
+      let(:event_message) { 'an event to index' }
+      let(:event) { LogStash::Event.new('message' => event_message) }
+
+      it 'should be indexed' do
+        workplace_search_output.multi_receive([event])
+        expect_indexed(1, total_property_keys, event_message)
+      end
 
-
-
+      context 'using sprintf-ed source' do
+        let(:config) { super().merge('source' => '%{source_field}') }
+        let(:event) { LogStash::Event.new('message' => 'an sprintf-ed event', 'source_field' => source_id) }
 
-
-
-
-
-expect(parsed_resp.dig("meta", "page", "total_pages")).to eq(1)
-parsed_resp["results"]
+        it 'should be indexed' do
+          workplace_search_output.multi_receive([event])
+          expect_indexed(1, total_property_keys, 'an sprintf-ed event')
+        end
       end
-expect(results.first.fetch("message")).to eq "an event to index"
     end
-end
 
-
-
-
-
-
-
-
-
-
-
-
+    describe 'multiple events' do
+      let(:events) { generate_events(200, 'multiple events to index') } # 2 times the slice size used to batch
+
+      it 'all should be indexed' do
+        workplace_search_output.multi_receive(events)
+        expect_indexed(200, %w[meta page total_results], 'multiple events to index')
+      end
+    end
+
+    describe 'with ssl enabled using a self-signed certificate', :secure_integration => true do
+      let(:ca_cert) { 'spec/fixtures/certificates/root_ca.crt' }
+      let(:event_message) { 'an event to index with ssl enabled' }
+      let(:event) { LogStash::Event.new('message' => event_message) }
+
+      context 'and ssl_verification_mode set to `full`' do
+        let(:config) { super().merge('ssl_verification_mode' => 'full') }
+        let(:register) { false }
+
+        it 'should raise an error' do
+          allow(workplace_search_output).to receive(:check_connection!).and_return(nil)
+          workplace_search_output.register
+          workplace_search_output.instance_variable_set(:@retry_disabled, true)
+
+          expect { workplace_search_output.multi_receive([event]) }.to raise_error(/PKIX path/)
+        end
+      end
+
+      context 'and ssl_certificate_authorities set to a valid CA' do
+        let(:config) { super().merge('ssl_certificate_authorities' => ca_cert) }
+        it 'should be indexed' do
+          workplace_search_output.multi_receive([event])
+          expect_indexed( 1, total_property_keys, event_message)
+        end
+      end
+
+      context 'and ssl_truststore_path set to a valid CA' do
+        let(:config) do
+          super().merge(
+            'ssl_truststore_path' => 'spec/fixtures/certificates/root_keystore.jks',
+            'ssl_truststore_password' => 'changeme'
+          )
+        end
+
+        it 'should be indexed' do
+          workplace_search_output.multi_receive([event])
+          expect_indexed(1, total_property_keys, event_message)
+        end
+      end
+
+      context 'and ssl_supported_protocols configured' do
+        let(:config) { super().merge('ssl_certificate_authorities' => ca_cert, 'ssl_supported_protocols' => 'TLSv1.3') }
+
+        it 'should be indexed' do
+          workplace_search_output.multi_receive([event])
+          expect_indexed(1, total_property_keys, event_message)
+        end
+      end
+
+      context 'and ssl_cipher_suites configured' do
+        let(:config) { super().merge('ssl_certificate_authorities' => ca_cert, 'ssl_cipher_suites' => 'TLS_AES_256_GCM_SHA384') }
+
+        it 'should be indexed' do
+          workplace_search_output.multi_receive([event])
+          expect_indexed(1, total_property_keys, event_message)
+        end
       end
-expect(results.first.fetch("message")).to start_with("an event to index")
     end
   end
 
   private
+
   def execute_search_call
-
+    faraday_client.post(
       "#{url}/ws/org/sources/#{source_id}/documents",
       nil,
-
-
+      'Accept' => 'application/json',
+      'Authorization' => "Basic #{basic_auth_header}"
     )
   end
 
-def generate_events(num_events)
-(1..num_events).map { |i| LogStash::Event.new(
+  def generate_events(num_events, message_prefix = 'an event to index')
+    (1..num_events).map { |i| LogStash::Event.new('message' => "#{message_prefix} #{i}") }
+  end
+
+  def faraday_client
+    Faraday.new(url, ssl: { verify: false })
+  end
+
+  def fetch_access_token
+    if is_version7
+      response = faraday_client.get("#{url}/api/ws/v1/whoami",
+                                    { 'get_token' => true },
+                                    { 'Content-Type' => 'application/json',
+                                      'Accept' => 'application/json',
+                                      'Authorization' => "Basic #{basic_auth_header}" })
+
+      return JSON.load(response.body).fetch('access_token')
+    end
+
+    client = faraday_client
+    client.headers['Authorization'] = "Basic #{basic_auth_header}"
+    response = client.post('/ws/org/api_tokens',
+                           '{"name":"ls-integration-test-key"}',
+                           { 'Content-Type' => 'application/json', 'Accept' => 'application/json' })
+
+    response_json = JSON.load(response.body)
+    # when a key with the name already exists, retrieve it
+    if response_json.key?('errors') && response_json['errors'].include?('Name is already taken')
+      response = client.get('/ws/org/api_tokens', nil, { 'Content-Type' => 'application/json', 'Accept' => 'application/json' })
+      response_json = JSON.load(response.body)['results'].find { |res| res['id'] == 'ls-integration-test-key' }
+    end
+
+    client.close
+    response_json.fetch('key')
+  end
+
+  def fetch_source_id
+    response = faraday_client.post("#{url}/api/ws/v1/sources",
+                                   JSON.dump('service_type' => 'custom', 'name' => 'whatever'),
+                                   { 'Content-Type' => 'application/json',
+                                     'Accept' => 'application/json',
+                                     'Authorization' => "Bearer #{access_token}" })
+
+    source_response_json = JSON.load(response.body)
+    source_response_json.fetch('id')
+  end
+
+  def expect_indexed(total_expected, total_property_keys, expected_message_prefix)
+    results = Stud.try(20.times, RSpec::Expectations::ExpectationNotMetError) do
+      attempt_response = execute_search_call
+      expect(attempt_response.status).to eq(200)
+      parsed_resp = JSON.parse(attempt_response.body)
+      expect(parsed_resp.dig(*total_property_keys)).to eq(total_expected)
+      parsed_resp['results']
+    end
+    expect(results.first.fetch('message')).to start_with(expected_message_prefix)
   end
 end

--- /dev/null
+++ b/data/spec/unit/outputs/client_spec.rb
@@ -0,0 +1,26 @@
+require "logstash/devutils/rspec/spec_helper"
+require 'logstash/plugin_mixins/enterprise_search/client'
+
+describe LogStash::PluginMixins::EnterpriseSearch::AppSearch::Client do
+  subject(:client) { described_class.new({}, params: {}) }
+
+  it 'should inherit Elastic::EnterpriseSearch::AppSearch::Client' do
+    expect(described_class.ancestors).to include(Elastic::EnterpriseSearch::AppSearch::Client)
+  end
+
+  it 'should include LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport' do
+    expect(described_class.ancestors).to include(LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport)
+  end
+end
+
+describe LogStash::PluginMixins::EnterpriseSearch::WorkplaceSearch::Client do
+  subject(:client) { described_class.new({}, params: {}) }
+
+  it 'should inherit Elastic::EnterpriseSearch::AppSearch::Client' do
+    expect(described_class.ancestors).to include(Elastic::EnterpriseSearch::WorkplaceSearch::Client)
+  end
+
+  it 'should include LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport' do
+    expect(described_class.ancestors).to include(LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport)
+  end
+end

--- /dev/null
+++ b/data/spec/unit/outputs/elastic_app_search_spec.rb
@@ -0,0 +1,117 @@
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/outputs/elastic_app_search'
+require 'logstash/codecs/plain'
+require 'logstash/event'
+
+describe LogStash::Outputs::ElasticAppSearch do
+  let(:event) { LogStash::Event.new('message' => 'An event') }
+  let(:api_key) { 'my_key' }
+  let(:engine) { 'test-engine' }
+  let(:config) { { 'api_key' => api_key, 'engine' => engine } }
+  let(:client) { double('Client') }
+
+  subject(:plugin) { described_class.new(config) }
+
+  before(:each) do
+    allow(plugin).to receive(:check_connection!)
+    plugin.instance_variable_set(:@client, client)
+  end
+
+  describe '#register' do
+    context 'when engine is defined in sprintf format' do
+      let(:config) { super().merge('engine' => '%{engine_name_field}') }
+      it 'does not raise an error' do
+        expect { plugin.register }.to_not raise_error
+      end
+    end
+  end
+
+  describe '#multi_receive' do
+    let(:response) { double('Response') }
+    let(:response_status) { 200 }
+    let(:response_body) { [{ 'errors' => [] }] }
+
+    before(:each) do
+      allow(response).to receive(:status).and_return(response_status)
+      allow(response).to receive(:body).and_return(response_body)
+    end
+
+    it 'should remove @timestamp and @version fields' do
+      allow(client).to receive(:index_documents) do |_, arguments|
+        expect(arguments[:documents].length).to eq(1)
+        expect(arguments[:documents].first).to_not include('@timestamp', '@version')
+        response
+      end
+
+      plugin.multi_receive([event])
+    end
+
+    context 'with :document_id configured' do
+      let(:config) { super().merge('document_id' => 'foo') }
+
+      it 'should include `id` field' do
+        allow(client).to receive(:index_documents) do |_, arguments|
+          expect(arguments[:documents].length).to eq(1)
+          expect(arguments[:documents].first).to include('id')
+          expect(arguments[:documents].first['id']).to eq('foo')
+          response
+        end
+
+        plugin.multi_receive([event])
+      end
+    end
+
+    context 'with :timestamp_destination configured' do
+      let(:config) { super().merge('timestamp_destination' => 'copied_timestamp') }
+
+      it 'should copy @timestamp value to :timestamp_destination field' do
+        allow(client).to receive(:index_documents) do |_, arguments|
+          expect(arguments[:documents].length).to eq(1)
+          expect(arguments[:documents].first).to include('copied_timestamp')
+          expect(arguments[:documents].first['copied_timestamp']).to_not be_nil
+          response
+        end
+
+        plugin.multi_receive([event])
+      end
+    end
+
+    context 'when multiple engines are defined in sprintf format' do
+      let(:response_body) { [{ 'errors' => [] }, { 'errors' => [] }] }
+      let(:config) { { 'api_key' => api_key, 'engine' => '%{engine_field}' } }
+
+      it 'should index events grouped by resolved engine' do
+        event_engine_a = LogStash::Event.new('message' => 'engine_a', 'engine_field' => 'engine_a')
+        event_engine_b = LogStash::Event.new('message' => 'engine_b', 'engine_field' => 'engine_b')
+
+        allow(client).to receive(:index_documents).twice do |resolved_engine, arguments|
+          docs = arguments[:documents]
+          expect(docs.length).to eq(1)
+          expect(arguments[:documents].first['message']).to eq(resolved_engine)
+          response
+        end
+
+        plugin.multi_receive([event_engine_a, event_engine_b])
+      end
+    end
+
+    context 'when indexing fail' do
+      let(:response_status) { 400 }
+      let(:response_body) { [{ 'errors' => ['failed'] }, { 'errors' => [] }] }
+
+      it 'should log warn message' do
+        allow(client).to receive(:index_documents).and_return(response)
+        allow(plugin.logger).to receive(:warn)
+
+        successful_event = LogStash::Event.new
+        plugin.multi_receive([successful_event, event])
+
+        successful_document = successful_event.to_hash
+        successful_document.delete('@timestamp')
+        successful_document.delete('@version')
+
+        expect(plugin.logger).to have_received(:warn).with('Document failed to index. Dropping..', :document => successful_document, :errors => ['failed']).once
+      end
+    end
+  end
+end

--- /dev/null
+++ b/data/spec/unit/outputs/elastic_workplace_search_spec.rb
@@ -0,0 +1,117 @@
+require 'logstash/devutils/rspec/spec_helper'
+require 'logstash/outputs/elastic_workplace_search'
+require 'logstash/codecs/plain'
+require 'logstash/event'
+
+describe LogStash::Outputs::ElasticWorkplaceSearch do
+  let(:event) { LogStash::Event.new('message' => 'An event') }
+  let(:access_token) { 'my_key' }
+  let(:source) { 'test-source' }
+  let(:config) { { 'access_token' => access_token, 'source' => source } }
+  let(:client) { double('Client') }
+
+
+  subject(:plugin) { described_class.new(config) }
+
+  before(:each) do
+    allow(plugin).to receive(:check_connection!)
+    plugin.instance_variable_set(:@client, client)
+  end
+
+  describe '#register' do
+    context 'when source is defined in sprintf format' do
+      let(:config) { super().merge('source' => '%{source_name_field}') }
+      it 'does not raise an error' do
+        expect { plugin.register }.to_not raise_error
+      end
+    end
+  end
+
+  describe '#multi_receive' do
+    let(:response) { double('Response') }
+    let(:response_status) { 200 }
+    let(:response_body) { {} }
+
+    before(:each) do
+      allow(response).to receive(:status).and_return(response_status)
+      allow(response).to receive(:body).and_return(response_body)
+    end
+
+    it 'should remove @timestamp and @version fields' do
+      allow(client).to receive(:index_documents) do |_, arguments|
+        expect(arguments[:documents].length).to eq(1)
+        expect(arguments[:documents].first).to_not include('@timestamp', '@version')
+        response
+      end
+
+      plugin.multi_receive([event])
+    end
+
+    context 'with :document_id configured' do
+      let(:config) { super().merge('document_id' => 'foo') }
+
+      it 'should include `id` field' do
+        allow(client).to receive(:index_documents) do |_, arguments|
+          expect(arguments[:documents].length).to eq(1)
+          expect(arguments[:documents].first).to include('id')
+          expect(arguments[:documents].first['id']).to eq('foo')
+          response
+        end
+
+        plugin.multi_receive([event])
+      end
+    end
+
+    context 'with :timestamp_destination configured' do
+      let(:config) { super().merge('timestamp_destination' => 'copied_timestamp') }
+
+      it 'should copy @timestamp value to :timestamp_destination field' do
+        allow(client).to receive(:index_documents) do |_, arguments|
+          expect(arguments[:documents].length).to eq(1)
+          expect(arguments[:documents].first).to include('copied_timestamp')
+          expect(arguments[:documents].first['copied_timestamp']).to_not be_nil
+          response
+        end
+
+        plugin.multi_receive([event])
+      end
+    end
+
+    context 'when multiple sources are defined in sprintf format' do
+      let(:config) { { 'access_token' => access_token, 'source' => '%{source_field}' } }
+
+      it 'should index events grouped by resolved source' do
+        event_source_a = LogStash::Event.new('message' => 'source_a', 'source_field' => 'source_a')
+        event_source_b = LogStash::Event.new('message' => 'source_b', 'source_field' => 'source_b')
+
+        allow(client).to receive(:index_documents).twice do |resolved_source, arguments|
+          docs = arguments[:documents]
+          expect(docs.length).to eq(1)
+          expect(arguments[:documents].first['message']).to eq(resolved_source)
+          response
+        end
+
+        plugin.multi_receive([event_source_a, event_source_b])
+      end
+    end
+
+    context 'when indexing fail' do
+      let(:response_status) { 400 }
+      let(:response_body) { { 'results' => [{ 'errors' => ['failed'] }, { 'errors' => [] }] } }
+
+      it 'should log warn message' do
+        allow(client).to receive(:index_documents).and_return(response)
+        allow(plugin.logger).to receive(:warn)
+
+        successful_event = LogStash::Event.new
+        plugin.multi_receive([successful_event, event])
+
+        successful_document = successful_event.to_hash
+        successful_document.delete('@timestamp')
+        successful_document.delete('@version')
+
+        expect(plugin.logger).to have_received(:warn).with('Document failed to index. Dropping..', :document => successful_document, :errors => ['failed']).once
+      end
+    end
+  end
+end

--- /dev/null
+++ b/data/spec/unit/outputs/manticore_transport_spec.rb
@@ -0,0 +1,124 @@
+require 'logstash/devutils/rspec/spec_helper'
+require 'stud/temporary'
+require 'logstash/plugin_mixins/enterprise_search/manticore_transport'
+
+describe LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport do
+  describe 'Client class' do
+    subject(:client_class) do
+      Class.new(Elastic::EnterpriseSearch::Client) do
+        attr_reader :params
+        include LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport
+
+        def initialize(options, params: {})
+          @params = params
+          super options
+        end
+      end
+    end
+
+    context "#transport" do
+      let(:client) { client_class.new({}, params: {}) }
+
+      it 'should override #transport' do
+        expect(client.method(:transport).owner).to eq(LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport)
+      end
+
+      it 'should use manticore setting the :ssl argument' do
+        ssl_config = { ssl: { verify: :disable } }
+        allow(client).to receive(:build_ssl_config).and_return(ssl_config)
+
+        result = client.transport
+
+        if LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport.eps_version_7?
+          expect(result.transport).to be_a(Elasticsearch::Transport::Transport::HTTP::Manticore)
+        else
+          expect(result.transport).to be_a(Elastic::Transport::Transport::HTTP::Manticore)
+        end
+
+        expect(result.instance_variable_get(:@arguments)[:ssl]).to eq(ssl_config)
+      end
+    end
+
+    context '#build_ssl_config' do
+      let(:params) { {} }
+      let(:client) { client_class.new({}, params: params) }
+      let(:built_ssl_options) { client.send(:build_ssl_config) }
+
+      [{ param_value: 'full', client_value: :strict },
+       { param_value: 'none', client_value: :disable }].each do |config|
+        context "when ssl_verification_mode is `#{config[:param_value]}`" do
+          let(:params) { super().merge('ssl_verification_mode' => config[:param_value]) }
+          it "should set :verify to #{config[:client_value]}" do
+            expect(built_ssl_options[:verify]).to eq(config[:client_value])
+          end
+        end
+      end
+
+      context 'when ssl_certificate_authorities is set' do
+        let(:ca_path) { 'spec/fixtures/certificates/root_ca.crt'}
+        let(:params) { super().merge('ssl_certificate_authorities' => [ca_path]) }
+
+        it 'should set :ca_file' do
+          expect(built_ssl_options[:ca_file]).to eq(ca_path)
+        end
+      end
+
+      context 'when ssl_cipher_suites is set' do
+        let(:params) { super().merge('ssl_cipher_suites' => ['TLS_FOO_BAR']) }
+
+        it 'should set :cipher_suites' do
+          expect(built_ssl_options[:cipher_suites]).to eq(['TLS_FOO_BAR'])
+        end
+      end
+
+      context 'when ssl_supported_protocols is set' do
+        let(:params) { super().merge('ssl_supported_protocols' => %w[TLSv1.2 TLSv1.3]) }
+
+        it 'should set :protocols' do
+          expect(built_ssl_options[:protocols]).to eq( %w[TLSv1.2 TLSv1.3])
+        end
+      end
+
+      context 'when ssl_truststore options are set' do
+        let(:keystore_path) { 'spec/fixtures/certificates/root_keystore.jks'}
+        let(:keystore_password) { LogStash::Util::Password.new('changeme') }
+
+        let(:params) do
+          super().merge('ssl_truststore_path' => keystore_path,
+                        'ssl_truststore_type' => 'jks',
+                        'ssl_truststore_password' => keystore_password)
+        end
+
+        it 'should set :truststore options' do
+          expect(built_ssl_options[:truststore]).to eq(keystore_path)
+          expect(built_ssl_options[:truststore_type]).to eq('jks')
+          expect(built_ssl_options[:truststore_password]).to eq('changeme')
+        end
+      end
+    end
+  end
+
+  describe 'Client class with no :params' do
+    subject(:client_class) { Class.new(Elastic::EnterpriseSearch::Client) }
+
+    context 'when included' do
+      it 'should raise an ArgumentError' do
+        expect do
+          client_class.include LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport
+        end.to raise_error(ArgumentError).with_message(/must respond to :params/)
+      end
+    end
+  end
+
+  describe 'No client class' do
+    subject(:client) { Class.new }
+
+    context 'when included' do
+      it 'should raise an ArgumentError' do
+        expect do
+          client.include LogStash::PluginMixins::EnterpriseSearch::ManticoreTransport
+        end.to raise_error(ArgumentError).with_message(/must inherit Elastic::EnterpriseSearch::Client/)
+      end
+    end
+  end
+end