logstash-output-elasticsearch 7.4.2-java → 7.4.3-java
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/CHANGELOG.md +4 -0
- data/docs/index.asciidoc +5 -4
- data/lib/logstash/outputs/elasticsearch/common.rb +9 -2
- data/lib/logstash/outputs/elasticsearch/common_configs.rb +3 -0
- data/lib/logstash/outputs/elasticsearch/http_client.rb +15 -13
- data/logstash-output-elasticsearch.gemspec +1 -1
- data/spec/integration/outputs/painless_update_spec.rb +22 -13
- data/spec/integration/outputs/parent_spec.rb +7 -14
- data/spec/unit/outputs/elasticsearch/http_client_spec.rb +54 -8
- metadata +6 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 138d01e9314f05b8eee7f9bd1115f3c7aeb33117544e840caa83b3b3e2d1ac54
|
4
|
+
data.tar.gz: 7516db608590f0261e1d66b957648fae9878433bec8e4b67b3325207ceb8604a
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 5f3e2c6287de60def0469b17d4fac31336cd986c35f04f9fd20e7d0f710d8dfdd75163383994da0e627d3f7e78aa61e1fb5d4613e92f0c9f2b2a38e6a147b84a
|
7
|
+
data.tar.gz: 401784c66a69b13c905586f3b5643babc78c37d5996d448c4f7b16d0b3534999236f0efe7717cd56fedd61bc226c12ab0add7b13ec50913e5ea36906a6369ced
|
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,7 @@
|
|
1
|
+
## 7.4.3
|
2
|
+
- Avoid infinite loop by checking for zero body size #737
|
3
|
+
- Add support for join based parent indexing (backport of #632) #686
|
4
|
+
|
1
5
|
## 7.4.2
|
2
6
|
- Use `#response_body` instead of `#body` when debugging response from the server #679
|
3
7
|
- Docs: Add DLQ policy section
|
data/docs/index.asciidoc
CHANGED
@@ -67,8 +67,8 @@ HTTP requests to the bulk API are expected to return a 200 response code. All ot
|
|
67
67
|
|
68
68
|
The following document errors are handled as follows:
|
69
69
|
|
70
|
-
* 400 and 404 errors are sent to the dead letter queue (DLQ), if enabled. If a DLQ is not enabled, a log message will be emitted, and the event will be dropped. See <<dlq-policy>> for more info.
|
71
|
-
* 409 errors (conflict) are logged as a warning and dropped.
|
70
|
+
* 400 and 404 errors are sent to the dead letter queue (DLQ), if enabled. If a DLQ is not enabled, a log message will be emitted, and the event will be dropped. See <<dlq-policy>> for more info.
|
71
|
+
* 409 errors (conflict) are logged as a warning and dropped.
|
72
72
|
|
73
73
|
Note that 409 exceptions are no longer retried. Please set a higher `retry_on_conflict` value if you experience 409 exceptions.
|
74
74
|
It is more performant for Elasticsearch to retry these exceptions than this plugin.
|
@@ -495,7 +495,8 @@ Set script name for scripted update mode
|
|
495
495
|
* Value type is <<string,string>>
|
496
496
|
* Default value is `"painless"`
|
497
497
|
|
498
|
-
Set the language of the used script. If not set, this defaults to painless in ES 5.0
|
498
|
+
Set the language of the used script. If not set, this defaults to painless in ES 5.0.
|
499
|
+
When using indexed (stored) scripts on Elasticsearch 6 and higher, you must set this parameter to `""` (empty string).
|
499
500
|
|
500
501
|
[id="plugins-{type}s-{plugin}-script_type"]
|
501
502
|
===== `script_type`
|
@@ -695,4 +696,4 @@ See also https://www.elastic.co/guide/en/elasticsearch/reference/current/docs-in
|
|
695
696
|
|
696
697
|
|
697
698
|
[id="plugins-{type}s-{plugin}-common-options"]
|
698
|
-
include::{include_path}/{type}.asciidoc[]
|
699
|
+
include::{include_path}/{type}.asciidoc[]
|
@@ -176,8 +176,15 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
176
176
|
params[:pipeline] = event.sprintf(@pipeline)
|
177
177
|
end
|
178
178
|
|
179
|
-
|
180
|
-
|
179
|
+
if @parent
|
180
|
+
if @join_field
|
181
|
+
join_value = event.get(@join_field)
|
182
|
+
parent_value = event.sprintf(@parent)
|
183
|
+
event.set(@join_field, { "name" => join_value, "parent" => parent_value })
|
184
|
+
params[:_routing] = event.sprintf(@parent)
|
185
|
+
else
|
186
|
+
params[:parent] = event.sprintf(@parent)
|
187
|
+
end
|
181
188
|
end
|
182
189
|
|
183
190
|
if @action == 'update'
|
@@ -78,6 +78,9 @@ module LogStash; module Outputs; class ElasticSearch
|
|
78
78
|
# This can be dynamic using the `%{foo}` syntax.
|
79
79
|
mod.config :parent, :validate => :string, :default => nil
|
80
80
|
|
81
|
+
# For child documents, name of the join field
|
82
|
+
mod.config :join_field, :validate => :string, :default => nil
|
83
|
+
|
81
84
|
# Sets the host(s) of the remote instance. If given an array it will load balance requests across the hosts specified in the `hosts` parameter.
|
82
85
|
# Remember the `http` protocol uses the http://www.elastic.co/guide/en/elasticsearch/reference/current/modules-http.html#modules-http[http] address (eg. 9200, not 9300).
|
83
86
|
# `"127.0.0.1"`
|
@@ -50,11 +50,11 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
50
50
|
# through a special http path, such as using mod_rewrite.
|
51
51
|
def initialize(options={})
|
52
52
|
@logger = options[:logger]
|
53
|
-
|
53
|
+
|
54
54
|
# Again, in case we use DEFAULT_OPTIONS in the future, uncomment this.
|
55
55
|
# @options = DEFAULT_OPTIONS.merge(options)
|
56
56
|
@options = options
|
57
|
-
|
57
|
+
|
58
58
|
@url_template = build_url_template
|
59
59
|
|
60
60
|
@pool = build_pool(@options)
|
@@ -62,7 +62,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
62
62
|
# connection pool at the same time
|
63
63
|
@bulk_path = @options[:bulk_path]
|
64
64
|
end
|
65
|
-
|
65
|
+
|
66
66
|
def build_url_template
|
67
67
|
{
|
68
68
|
:scheme => self.scheme,
|
@@ -106,7 +106,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
106
106
|
if http_compression
|
107
107
|
body_stream.set_encoding "BINARY"
|
108
108
|
stream_writer = Zlib::GzipWriter.new(body_stream, Zlib::DEFAULT_COMPRESSION, Zlib::DEFAULT_STRATEGY)
|
109
|
-
else
|
109
|
+
else
|
110
110
|
stream_writer = body_stream
|
111
111
|
end
|
112
112
|
bulk_responses = []
|
@@ -115,7 +115,9 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
115
115
|
action.map {|line| LogStash::Json.dump(line)}.join("\n") :
|
116
116
|
LogStash::Json.dump(action)
|
117
117
|
as_json << "\n"
|
118
|
-
|
118
|
+
if body_stream.size > 0 && (body_stream.size + as_json.bytesize) > TARGET_BULK_BYTES
|
119
|
+
bulk_responses << bulk_send(body_stream)
|
120
|
+
end
|
119
121
|
stream_writer.write(as_json)
|
120
122
|
end
|
121
123
|
stream_writer.close if http_compression
|
@@ -133,7 +135,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
133
135
|
|
134
136
|
def bulk_send(body_stream)
|
135
137
|
params = http_compression ? {:headers => {"Content-Encoding" => "gzip"}} : {}
|
136
|
-
# Discard the URL
|
138
|
+
# Discard the URL
|
137
139
|
response = @pool.post(@bulk_path, params, body_stream.string)
|
138
140
|
if !body_stream.closed?
|
139
141
|
body_stream.truncate(0)
|
@@ -206,7 +208,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
206
208
|
else
|
207
209
|
nil
|
208
210
|
end
|
209
|
-
|
211
|
+
|
210
212
|
calculated_scheme = calculate_property(uris, :scheme, explicit_scheme, sniffing)
|
211
213
|
|
212
214
|
if calculated_scheme && calculated_scheme !~ /https?/
|
@@ -226,7 +228,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
226
228
|
# Enter things like foo:123, bar and wind up with foo:123, bar:9200
|
227
229
|
calculate_property(uris, :port, nil, sniffing) || 9200
|
228
230
|
end
|
229
|
-
|
231
|
+
|
230
232
|
def uris
|
231
233
|
@options[:hosts]
|
232
234
|
end
|
@@ -240,12 +242,12 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
240
242
|
end
|
241
243
|
|
242
244
|
def http_compression
|
243
|
-
client_settings.fetch(:http_compression,
|
245
|
+
client_settings.fetch(:http_compression, false)
|
244
246
|
end
|
245
247
|
|
246
248
|
def build_adapter(options)
|
247
249
|
timeout = options[:timeout] || 0
|
248
|
-
|
250
|
+
|
249
251
|
adapter_options = {
|
250
252
|
:socket_timeout => timeout,
|
251
253
|
:request_timeout => timeout,
|
@@ -266,11 +268,11 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
266
268
|
end
|
267
269
|
|
268
270
|
adapter_options[:ssl] = ssl_options if self.scheme == 'https'
|
269
|
-
|
271
|
+
|
270
272
|
adapter_class = ::LogStash::Outputs::ElasticSearch::HttpClient::ManticoreAdapter
|
271
273
|
adapter = adapter_class.new(@logger, adapter_options)
|
272
274
|
end
|
273
|
-
|
275
|
+
|
274
276
|
def build_pool(options)
|
275
277
|
adapter = build_adapter(options)
|
276
278
|
|
@@ -319,7 +321,7 @@ module LogStash; module Outputs; class ElasticSearch;
|
|
319
321
|
h.query
|
320
322
|
end
|
321
323
|
prefixed_raw_query = raw_query && !raw_query.empty? ? "?#{raw_query}" : nil
|
322
|
-
|
324
|
+
|
323
325
|
raw_url = "#{raw_scheme}://#{postfixed_userinfo}#{raw_host}:#{raw_port}#{prefixed_raw_path}#{prefixed_raw_query}"
|
324
326
|
|
325
327
|
::LogStash::Util::SafeURI.new(raw_url)
|
@@ -1,6 +1,6 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-output-elasticsearch'
|
3
|
-
s.version = '7.4.2'
|
3
|
+
s.version = '7.4.3'
|
4
4
|
s.licenses = ['apache-2.0']
|
5
5
|
s.summary = "Logstash Output to Elasticsearch"
|
6
6
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
@@ -93,20 +93,29 @@ if ESHelper.es_version_satisfies?(">= 5")
|
|
93
93
|
insist { r["_source"]["counter"] } == 3
|
94
94
|
end
|
95
95
|
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
@es.
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
96
|
+
context 'with an indexed script' do
|
97
|
+
it "should increment a counter with event/doc 'count' variable with indexed script" do
|
98
|
+
if ESHelper.es_version_satisfies?('<6')
|
99
|
+
@es.perform_request(:put, "_scripts/painless/indexed_update", {}, {"script" => "ctx._source.counter += params.event.count" })
|
100
|
+
else
|
101
|
+
@es.perform_request(:put, "_scripts/indexed_update", {}, {"script" => {"source" => "ctx._source.counter += params.event.count", "lang" => "painless"}})
|
102
|
+
end
|
103
|
+
|
104
|
+
plugin_parameters = {
|
105
|
+
'document_id' => "123",
|
106
|
+
'script' => 'indexed_update',
|
107
|
+
'script_type' => 'indexed'
|
108
|
+
}
|
109
|
+
|
110
|
+
if ESHelper.es_version_satisfies?('>= 6.0.0')
|
111
|
+
plugin_parameters.merge!('script_lang' => '')
|
109
112
|
end
|
113
|
+
|
114
|
+
subject = get_es_output(plugin_parameters)
|
115
|
+
subject.register
|
116
|
+
subject.multi_receive([LogStash::Event.new("count" => 4 )])
|
117
|
+
r = @es.get(:index => 'logstash-update', :type => 'logs', :id => "123", :refresh => true)
|
118
|
+
insist { r["_source"]["counter"] } == 5
|
110
119
|
end
|
111
120
|
end
|
112
121
|
end
|
@@ -1,7 +1,7 @@
|
|
1
1
|
require_relative "../../../spec/es_spec_helper"
|
2
2
|
require "logstash/outputs/elasticsearch"
|
3
3
|
|
4
|
-
if ESHelper.es_version_satisfies?("<= 5.x")
|
4
|
+
if ESHelper.es_version_satisfies?("< 6")
|
5
5
|
context "when using elasticsearch 5.x and before", :integration => true do
|
6
6
|
shared_examples "a type based parent indexer" do
|
7
7
|
let(:index) { 10.times.collect { rand(10).to_s }.join("") }
|
@@ -17,11 +17,10 @@ if ESHelper.es_version_satisfies?("<= 5.x")
|
|
17
17
|
before do
|
18
18
|
# Add mapping and a parent document
|
19
19
|
index_url = "http://#{get_host_port()}/#{index}"
|
20
|
-
ftw = FTW::Agent.new
|
21
20
|
mapping = { "mappings" => { "#{type}" => { "_parent" => { "type" => "#{type}_parent" } } } }
|
22
|
-
|
21
|
+
Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
|
23
22
|
pdoc = { "foo" => "bar" }
|
24
|
-
|
23
|
+
Manticore.put("#{index_url}/#{type}_parent/test", {:body => pdoc.to_json, :headers => default_headers}).call
|
25
24
|
|
26
25
|
subject.register
|
27
26
|
subject.multi_receive(event_count.times.map { LogStash::Event.new("link_to" => "test", "message" => "Hello World!", "type" => type) })
|
@@ -31,15 +30,13 @@ if ESHelper.es_version_satisfies?("<= 5.x")
|
|
31
30
|
it "ships events" do
|
32
31
|
index_url = "http://#{get_host_port()}/#{index}"
|
33
32
|
|
34
|
-
|
35
|
-
ftw.post!("#{index_url}/_refresh")
|
33
|
+
Manticore.post("#{index_url}/_refresh").call
|
36
34
|
|
37
35
|
# Wait until all events are available.
|
38
36
|
Stud::try(10.times) do
|
39
37
|
query = { "query" => { "has_parent" => { "type" => "#{type}_parent", "query" => { "match" => { "foo" => "bar" } } } } }
|
40
|
-
|
41
|
-
|
42
|
-
response.read_body { |chunk| data << chunk }
|
38
|
+
response = Manticore.post("#{index_url}/_count", {:body => query.to_json, :headers => default_headers})
|
39
|
+
data = response.body
|
43
40
|
result = LogStash::Json.load(data)
|
44
41
|
cur_count = result["count"]
|
45
42
|
insist { cur_count } == event_count
|
@@ -76,7 +73,6 @@ end
|
|
76
73
|
|
77
74
|
if ESHelper.es_version_satisfies?(">= 5.6")
|
78
75
|
context "when using elasticsearch 5.6 and above", :integration => true do
|
79
|
-
|
80
76
|
shared_examples "a join field based parent indexer" do
|
81
77
|
let(:index) { 10.times.collect { rand(10).to_s }.join("") }
|
82
78
|
let(:type) { 10.times.collect { rand(10).to_s }.join("") }
|
@@ -108,10 +104,7 @@ if ESHelper.es_version_satisfies?(">= 5.6")
|
|
108
104
|
}
|
109
105
|
}
|
110
106
|
if ESHelper.es_version_satisfies?('<6')
|
111
|
-
mapping.merge!({
|
112
|
-
"settings" => {
|
113
|
-
"mapping.single_type" => true
|
114
|
-
}})
|
107
|
+
mapping.merge!({ "settings" => { "mapping.single_type" => true }})
|
115
108
|
end
|
116
109
|
Manticore.put("#{index_url}", {:body => mapping.to_json, :headers => default_headers}).call
|
117
110
|
pdoc = { "message" => "ohayo", join_field => parent_relation }
|
@@ -29,7 +29,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
|
|
29
29
|
let(:http_hostname_port) { ::LogStash::Util::SafeURI.new("http://#{hostname_port}") }
|
30
30
|
let(:https_hostname_port) { ::LogStash::Util::SafeURI.new("https://#{hostname_port}") }
|
31
31
|
let(:http_hostname_port_path) { ::LogStash::Util::SafeURI.new("http://#{hostname_port}/path") }
|
32
|
-
|
32
|
+
|
33
33
|
shared_examples("proper host handling") do
|
34
34
|
it "should properly transform a host:port string to a URL" do
|
35
35
|
expect(subject.host_to_url(hostname_port_uri).to_s).to eq(http_hostname_port.to_s + "/")
|
@@ -58,7 +58,7 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
|
|
58
58
|
context "when SSL is false" do
|
59
59
|
let(:ssl) { false }
|
60
60
|
let(:base_options) { super.merge(:hosts => [https_hostname_port]) }
|
61
|
-
|
61
|
+
|
62
62
|
it "should refuse to handle an https url" do
|
63
63
|
expect {
|
64
64
|
subject.host_to_url(https_hostname_port)
|
@@ -72,32 +72,32 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
|
|
72
72
|
subject
|
73
73
|
expect(subject.host_to_url(https_hostname_port).to_s).to eq(https_hostname_port.to_s + "/")
|
74
74
|
end
|
75
|
-
end
|
75
|
+
end
|
76
76
|
end
|
77
77
|
|
78
78
|
describe "path" do
|
79
79
|
let(:url) { http_hostname_port_path }
|
80
80
|
let(:base_options) { super.merge(:hosts => [url]) }
|
81
|
-
|
81
|
+
|
82
82
|
it "should allow paths in a url" do
|
83
83
|
expect(subject.host_to_url(url)).to eq(url)
|
84
84
|
end
|
85
85
|
|
86
86
|
context "with the path option set" do
|
87
87
|
let(:base_options) { super.merge(:client_settings => {:path => "/otherpath"}) }
|
88
|
-
|
88
|
+
|
89
89
|
it "should not allow paths in two places" do
|
90
90
|
expect {
|
91
91
|
subject.host_to_url(url)
|
92
92
|
}.to raise_error(LogStash::ConfigurationError)
|
93
93
|
end
|
94
94
|
end
|
95
|
-
|
95
|
+
|
96
96
|
context "with a path missing a leading /" do
|
97
97
|
let(:url) { http_hostname_port }
|
98
98
|
let(:base_options) { super.merge(:client_settings => {:path => "otherpath"}) }
|
99
|
-
|
100
|
-
|
99
|
+
|
100
|
+
|
101
101
|
it "should automatically insert a / in front of path overlays" do
|
102
102
|
expected = url.clone
|
103
103
|
expected.path = url.path + "/otherpath"
|
@@ -180,6 +180,52 @@ describe LogStash::Outputs::ElasticSearch::HttpClient do
|
|
180
180
|
end
|
181
181
|
end
|
182
182
|
|
183
|
+
describe "#bulk" do
|
184
|
+
subject { described_class.new(base_options) }
|
185
|
+
let(:message) { "hey" }
|
186
|
+
let(:actions) { [
|
187
|
+
["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message}],
|
188
|
+
]}
|
189
|
+
|
190
|
+
context "if a message is over TARGET_BULK_BYTES" do
|
191
|
+
let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
|
192
|
+
let(:message) { "a" * (target_bulk_bytes + 1) }
|
193
|
+
|
194
|
+
it "sends the message as its own bulk payload" do
|
195
|
+
allow(subject).to receive(:join_bulk_responses)
|
196
|
+
expect(subject).to receive(:bulk_send).once do |data|
|
197
|
+
expect(data.size).to be > target_bulk_bytes
|
198
|
+
end
|
199
|
+
s = subject.send(:bulk, actions)
|
200
|
+
end
|
201
|
+
end
|
202
|
+
|
203
|
+
context "with two messages" do
|
204
|
+
let(:message1) { "hey" }
|
205
|
+
let(:message2) { "you" }
|
206
|
+
let(:actions) { [
|
207
|
+
["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message1}],
|
208
|
+
["index", {:_id=>nil, :_index=>"logstash"}, {"message"=> message2}],
|
209
|
+
]}
|
210
|
+
|
211
|
+
it "executes one bulk_send operation" do
|
212
|
+
allow(subject).to receive(:join_bulk_responses)
|
213
|
+
expect(subject).to receive(:bulk_send).once
|
214
|
+
s = subject.send(:bulk, actions)
|
215
|
+
end
|
216
|
+
|
217
|
+
context "if one exceeds TARGET_BULK_BYTES" do
|
218
|
+
let(:target_bulk_bytes) { LogStash::Outputs::ElasticSearch::TARGET_BULK_BYTES }
|
219
|
+
let(:message1) { "a" * (target_bulk_bytes + 1) }
|
220
|
+
it "executes two bulk_send operations" do
|
221
|
+
allow(subject).to receive(:join_bulk_responses)
|
222
|
+
expect(subject).to receive(:bulk_send).twice
|
223
|
+
s = subject.send(:bulk, actions)
|
224
|
+
end
|
225
|
+
end
|
226
|
+
end
|
227
|
+
end
|
228
|
+
|
183
229
|
describe "sniffing" do
|
184
230
|
let(:client) { LogStash::Outputs::ElasticSearch::HttpClient.new(base_options.merge(client_opts)) }
|
185
231
|
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-output-elasticsearch
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 7.4.2
|
4
|
+
version: 7.4.3
|
5
5
|
platform: java
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date:
|
11
|
+
date: 2018-05-11 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
@@ -196,7 +196,9 @@ dependencies:
|
|
196
196
|
- - ">="
|
197
197
|
- !ruby/object:Gem::Version
|
198
198
|
version: '0'
|
199
|
-
description: This gem is a Logstash plugin required to be installed on top of the
|
199
|
+
description: This gem is a Logstash plugin required to be installed on top of the
|
200
|
+
Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
|
201
|
+
gem is not a stand-alone program
|
200
202
|
email: info@elastic.co
|
201
203
|
executables: []
|
202
204
|
extensions: []
|
@@ -280,7 +282,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
280
282
|
version: '0'
|
281
283
|
requirements: []
|
282
284
|
rubyforge_project:
|
283
|
-
rubygems_version: 2.
|
285
|
+
rubygems_version: 2.6.13
|
284
286
|
signing_key:
|
285
287
|
specification_version: 4
|
286
288
|
summary: Logstash Output to Elasticsearch
|