logstash-output-elasticsearch 4.1.3-java → 5.1.0-java

This diff represents the content of publicly available package versions as released to one of the supported registries; it is provided for informational purposes only and reflects the changes between the two versions as they appear in their public registry.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 98d7cc25afb0c139cbf7adda1638712c99b2b34e
4
- data.tar.gz: b9282f9c32662a1ad98205a6a7c7feb3a1f1a807
3
+ metadata.gz: 8e747b0ecd3b13e55c194f3baec4991095a6fe35
4
+ data.tar.gz: dce871709afbdaf6d303a273d7743dc92b964212
5
5
  SHA512:
6
- metadata.gz: d4ef1719f91ad38f76e5a62f191f1cf70515b946cb0928c0ae8f51cda01993ef889c2311d3fd3687e7bd9cc77b1e5301d548bf4752e8841417c6f50b29e9e4ab
7
- data.tar.gz: d51521544af110612bf369562d8e1374c49d69f2535a083e8128824f81cccc3923e51e74db3f175ea922d6f578ae407c512df41c7be501fa525f4889d2ead3e4
6
+ metadata.gz: 8ec8d71aea257c1bdb59122e45ac0472681dd91efd074b360678e273c7bdd15ef7211e2984a2bac84b3ebb4b2b6277c456451abf300f222e632680f40d3f2a6a
7
+ data.tar.gz: f315ec0e2e6f5ed3b9fc91ec30082a9bc2cf5910def149539ba4bf62def32f45b084fd26c0e948f47f142a5ca6d86496bd2cba7d9d41aa717c6af66b6396f11e
data/CHANGELOG.md CHANGED
@@ -1,5 +1,21 @@
1
+ ## 5.1.0
2
+ - Add check_connection_timeout client option, exposed as the `validate_after_inactivity` plugin setting (default 10000 ms)
3
+ - Set default timeout to 60s
4
+
5
+ ## 5.0.0
6
+ - Breaking Change: The index template for Elasticsearch 5.0 has been changed to reflect Elasticsearch's mapping changes. Most importantly,
7
+ the subfield for string multi-fields has changed from `.raw` to `.keyword` to match ES default behavior. ([#386](https://github.com/logstash-plugins/logstash-output-elasticsearch/issues/386))
8
+
9
+ **Users installing ES 5.x and LS 5.x**
10
+ This change will not affect you and you will continue to use the ES defaults.
11
+
12
+ **Users upgrading from LS 2.x to LS 5.x with ES 5.x**
13
+ LS will not force-upgrade the template if a `logstash` template already exists. This means you will still use
14
+ `.raw` for sub-fields coming from 2.x. If you choose to use the new template, you will have to reindex your data after
15
+ the new template is installed.
16
+
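For users upgrading in place, here is a minimal sketch of what the template swap involves, assuming the `elasticsearch` Ruby gem and a node on `localhost:9200` (host and index names are illustrative, not part of this release):

```ruby
require "elasticsearch"

# Assumed host; adjust to your cluster.
client = Elasticsearch::Client.new(hosts: ["http://localhost:9200"])

# Inspect the installed template; "raw" sub-fields indicate the 2.x-era template.
existing = client.indices.get_template(name: "logstash") rescue nil

if existing
  # Remove it so the plugin (or template_overwrite => true) can install the
  # 5.x template with ".keyword" sub-fields on the next Logstash start.
  client.indices.delete_template(name: "logstash")
end

# Existing indices keep their old mappings; reindex them to pick up the new template.
client.perform_request("POST", "_reindex", {}, {
  "source" => { "index" => "logstash-2016.08.30" },           # illustrative index
  "dest"   => { "index" => "logstash-2016.08.30-reindexed" }
})
```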
1
17
  ## 4.1.3
2
- - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
18
+ - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
3
19
 
4
20
  ## 4.1.2
5
21
 
@@ -20,6 +20,19 @@ require "uri" # for escaping user input
20
20
  #
21
21
  # You can learn more about Elasticsearch at <https://www.elastic.co/products/elasticsearch>
22
22
  #
23
+ # ==== Template management for Elasticsearch 5.x
24
+ # The index template for this version (Logstash 5.0) has been changed to reflect Elasticsearch's mapping changes in version 5.0.
25
+ # Most importantly, the subfield for string multi-fields has changed from `.raw` to `.keyword` to match ES default
26
+ # behavior.
27
+ #
28
+ # **Users installing ES 5.x and LS 5.x**
29
+ # This change will not affect you and you will continue to use the ES defaults.
30
+ #
31
+ # **Users upgrading from LS 2.x to LS 5.x with ES 5.x**
32
+ # LS will not force-upgrade the template if a `logstash` template already exists. This means you will still use
33
+ # `.raw` for sub-fields coming from 2.x. If you choose to use the new template, you will have to reindex your data after
34
+ # the new template is installed.
35
+ #
23
36
  # ==== Retry Policy
24
37
  #
25
38
  # The retry policy has changed significantly in the 2.2.0 release.
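Since the plugin only skips the upgrade when a `logstash` template is already present, users who want to stay on a particular template can pin one explicitly. A small sketch, in the same style as the integration specs in this diff, using the existing `template`, `template_name`, `template_overwrite`, and `manage_template` options (the file path is illustrative):

```ruby
require "logstash/outputs/elasticsearch"

# Keep full control of the template instead of relying on the bundled
# version-specific defaults (elasticsearch-template-es2x.json / -es5x.json).
settings = {
  "hosts"              => ["localhost:9200"],
  "manage_template"    => true,
  "template"           => "/etc/logstash/my-template.json",  # illustrative path
  "template_name"      => "logstash",
  "template_overwrite" => true
}

output = LogStash::Outputs::ElasticSearch.new(settings)
output.register  # installs the pinned template on registration
```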
@@ -131,7 +144,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
131
144
 
132
145
  # Set the timeout, in seconds, for network operations and requests sent to Elasticsearch. If
133
146
  # a timeout occurs, the request will be retried.
134
- config :timeout, :validate => :number
147
+ config :timeout, :validate => :number, :default => 60
135
148
 
136
149
  # Set the Elasticsearch errors in the whitelist that you don't want to log.
137
150
  # A useful example is when you want to skip all 409 errors
@@ -160,6 +173,16 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
160
173
  # Resurrection is the process by which backend endpoints marked 'down' are checked
161
174
  # to see if they have come back to life
162
175
  config :resurrect_delay, :validate => :number, :default => 5
176
+
177
+ # How long to wait, in milliseconds, before checking whether a keepalive connection has gone stale prior to executing a request on it.
178
+ # You may want to set this lower if you regularly see connection errors.
179
+ # Quoting the Apache Commons docs (this client is based on Apache Commons):
180
+ # 'Defines period of inactivity in milliseconds after which persistent connections must
181
+ # be re-validated prior to being leased to the consumer. Non-positive value passed to
182
+ # this method disables connection validation. This check helps detect connections that
183
+ # have become stale (half-closed) while kept inactive in the pool.'
184
+ # See https://hc.apache.org/httpcomponents-client-ga/httpclient/apidocs/org/apache/http/impl/conn/PoolingHttpClientConnectionManager.html#setValidateAfterInactivity(int)[these docs for more info]
185
+ config :validate_after_inactivity, :validate => :number, :default => 10000
163
186
 
164
187
  def build_client
165
188
  @client = ::LogStash::Outputs::ElasticSearch::HttpClientBuilder.build(@logger, @hosts, params)
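To illustrate how the two new knobs fit together, a hedged sketch of an output configured with a shorter request timeout and a more aggressive stale-connection check (the values are arbitrary examples, not recommendations):

```ruby
require "logstash/outputs/elasticsearch"

settings = {
  "hosts"                     => ["localhost:9200"],
  "timeout"                   => 30,    # seconds; requests are retried on timeout (new default is 60)
  "validate_after_inactivity" => 2000   # ms; passed to the HTTP client as check_connection_timeout
}

output = LogStash::Outputs::ElasticSearch.new(settings)
output.register
```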
@@ -0,0 +1,95 @@
1
+ {
2
+ "template" : "logstash-*",
3
+ "settings" : {
4
+ "index.refresh_interval" : "5s"
5
+ },
6
+ "mappings" : {
7
+ "_default_" : {
8
+ "_all" : {"enabled" : true, "omit_norms" : true},
9
+ "dynamic_templates" : [ {
10
+ "message_field" : {
11
+ "match" : "message",
12
+ "match_mapping_type" : "string",
13
+ "mapping" : {
14
+ "type" : "string", "index" : "analyzed", "omit_norms" : true,
15
+ "fielddata" : { "format" : "disabled" }
16
+ }
17
+ }
18
+ }, {
19
+ "string_fields" : {
20
+ "match" : "*",
21
+ "match_mapping_type" : "string",
22
+ "mapping" : {
23
+ "type" : "string", "index" : "analyzed", "omit_norms" : true,
24
+ "fielddata" : { "format" : "disabled" },
25
+ "fields" : {
26
+ "raw" : {"type": "string", "index" : "not_analyzed", "doc_values" : true, "ignore_above" : 256}
27
+ }
28
+ }
29
+ }
30
+ }, {
31
+ "float_fields" : {
32
+ "match" : "*",
33
+ "match_mapping_type" : "float",
34
+ "mapping" : { "type" : "float", "doc_values" : true }
35
+ }
36
+ }, {
37
+ "double_fields" : {
38
+ "match" : "*",
39
+ "match_mapping_type" : "double",
40
+ "mapping" : { "type" : "double", "doc_values" : true }
41
+ }
42
+ }, {
43
+ "byte_fields" : {
44
+ "match" : "*",
45
+ "match_mapping_type" : "byte",
46
+ "mapping" : { "type" : "byte", "doc_values" : true }
47
+ }
48
+ }, {
49
+ "short_fields" : {
50
+ "match" : "*",
51
+ "match_mapping_type" : "short",
52
+ "mapping" : { "type" : "short", "doc_values" : true }
53
+ }
54
+ }, {
55
+ "integer_fields" : {
56
+ "match" : "*",
57
+ "match_mapping_type" : "integer",
58
+ "mapping" : { "type" : "integer", "doc_values" : true }
59
+ }
60
+ }, {
61
+ "long_fields" : {
62
+ "match" : "*",
63
+ "match_mapping_type" : "long",
64
+ "mapping" : { "type" : "long", "doc_values" : true }
65
+ }
66
+ }, {
67
+ "date_fields" : {
68
+ "match" : "*",
69
+ "match_mapping_type" : "date",
70
+ "mapping" : { "type" : "date", "doc_values" : true }
71
+ }
72
+ }, {
73
+ "geo_point_fields" : {
74
+ "match" : "*",
75
+ "match_mapping_type" : "geo_point",
76
+ "mapping" : { "type" : "geo_point", "doc_values" : true }
77
+ }
78
+ } ],
79
+ "properties" : {
80
+ "@timestamp": { "type": "date", "doc_values" : true },
81
+ "@version": { "type": "string", "index": "not_analyzed", "doc_values" : true },
82
+ "geoip" : {
83
+ "type" : "object",
84
+ "dynamic": true,
85
+ "properties" : {
86
+ "ip": { "type": "ip", "doc_values" : true },
87
+ "location" : { "type" : "geo_point", "doc_values" : true },
88
+ "latitude" : { "type" : "float", "doc_values" : true },
89
+ "longitude" : { "type" : "float", "doc_values" : true }
90
+ }
91
+ }
92
+ }
93
+ }
94
+ }
95
+ }
@@ -5,13 +5,13 @@
5
5
  },
6
6
  "mappings" : {
7
7
  "_default_" : {
8
- "_all" : {"enabled" : true, "omit_norms" : true},
8
+ "_all" : {"enabled" : true, "norms" : false},
9
9
  "dynamic_templates" : [ {
10
10
  "message_field" : {
11
11
  "match" : "message",
12
12
  "match_mapping_type" : "string",
13
13
  "mapping" : {
14
- "type" : "string", "index" : "analyzed", "omit_norms" : true,
14
+ "type" : "string", "index" : "analyzed", "norms" : false,
15
15
  "fielddata" : { "format" : "disabled" }
16
16
  }
17
17
  }
@@ -20,24 +20,23 @@
20
20
  "match" : "*",
21
21
  "match_mapping_type" : "string",
22
22
  "mapping" : {
23
- "type" : "string", "index" : "analyzed", "omit_norms" : true,
24
- "fielddata" : { "format" : "disabled" },
23
+ "type" : "text", "norms" : false,
25
24
  "fields" : {
26
- "raw" : {"type": "string", "index" : "not_analyzed", "ignore_above" : 256}
25
+ "keyword" : { "type": "keyword" }
27
26
  }
28
27
  }
29
28
  }
30
29
  } ],
31
30
  "properties" : {
32
- "@timestamp": { "type": "date" },
33
- "@version": { "type": "string", "index": "not_analyzed" },
31
+ "@timestamp": { "type": "date", "include_in_all": false },
32
+ "@version": { "type": "keyword", "include_in_all": false },
34
33
  "geoip" : {
35
34
  "dynamic": true,
36
35
  "properties" : {
37
36
  "ip": { "type": "ip" },
38
37
  "location" : { "type" : "geo_point" },
39
- "latitude" : { "type" : "float" },
40
- "longitude" : { "type" : "float" }
38
+ "latitude" : { "type" : "half_float" },
39
+ "longitude" : { "type" : "half_float" }
41
40
  }
42
41
  }
43
42
  }
@@ -30,6 +30,11 @@ module LogStash; module Outputs; class ElasticSearch;
30
30
  template_put(name, template)
31
31
  end
32
32
 
33
+ def get_version
34
+ url, response = @pool.get("")
35
+ LogStash::Json.load(response.body)["version"]
36
+ end
37
+
33
38
  def bulk(actions)
34
39
  @action_count ||= 0
35
40
  @action_count += actions.size
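The root endpoint of an Elasticsearch node returns a JSON body whose `version` object is what `get_version` hands back; the template manager below derives the major version from its `number` field. A rough sketch (response body abbreviated, based on the fixtures in template_manager_spec.rb later in this diff):

```ruby
require "json"

# Abbreviated root-endpoint response; real bodies also carry build_hash, build_date, etc.
body = '{ "name" : "node-1", "version" : { "number" : "2.3.4", "lucene_version" : "5.5.0" } }'

version = JSON.parse(body)["version"]  # what HttpClient#get_version returns
major   = version["number"][0]         # => "2"; "1" is later mapped to the es2x template
```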
@@ -94,6 +99,8 @@ module LogStash; module Outputs; class ElasticSearch;
94
99
 
95
100
  adapter_options[:proxy] = client_settings[:proxy] if client_settings[:proxy]
96
101
 
102
+ adapter_options[:check_connection_timeout] = client_settings[:check_connection_timeout] if client_settings[:check_connection_timeout]
103
+
97
104
  # Having this explicitly set to nil is an error
98
105
  if client_settings[:pool_max]
99
106
  adapter_options[:pool_max] = client_settings[:pool_max]
@@ -4,6 +4,7 @@ module LogStash; module Outputs; class ElasticSearch;
4
4
  client_settings = {
5
5
  :pool_max => params["pool_max"],
6
6
  :pool_max_per_route => params["pool_max_per_route"],
7
+ :check_connection_timeout => params["validate_after_inactivity"]
7
8
  }
8
9
 
9
10
  common_options = {
@@ -4,7 +4,7 @@ module LogStash; module Outputs; class ElasticSearch
4
4
  def self.install_template(plugin)
5
5
  return unless plugin.manage_template
6
6
  plugin.logger.info("Using mapping template from", :path => plugin.template)
7
- template = get_template(plugin.template)
7
+ template = get_template(plugin.template, get_es_major_version(plugin.client))
8
8
  plugin.logger.info("Attempting to install template", :manage_template => template)
9
9
  install(plugin.client, plugin.template_name, template, plugin.template_overwrite)
10
10
  rescue => e
@@ -12,9 +12,16 @@ module LogStash; module Outputs; class ElasticSearch
12
12
  end
13
13
 
14
14
  private
15
+ def self.get_es_version(client)
16
+ client.get_version
17
+ end
18
+
19
+ def self.get_es_major_version(client)
20
+ get_es_version(client)["number"][0]
21
+ end
15
22
 
16
- def self.get_template(path)
17
- template_path = path || default_template_path
23
+ def self.get_template(path, es_major_version)
24
+ template_path = path || default_template_path(es_major_version)
18
25
  read_template_file(template_path)
19
26
  end
20
27
 
@@ -22,8 +29,10 @@ module LogStash; module Outputs; class ElasticSearch
22
29
  client.template_install(template_name, template, template_overwrite)
23
30
  end
24
31
 
25
- def self.default_template_path
26
- ::File.expand_path('elasticsearch-template.json', ::File.dirname(__FILE__))
32
+ def self.default_template_path(es_major_version)
33
+ template_version = es_major_version == "1" ? "2" : es_major_version
34
+ default_template_name = "elasticsearch-template-es#{template_version}x.json"
35
+ ::File.expand_path(default_template_name, ::File.dirname(__FILE__))
27
36
  end
28
37
 
29
38
  def self.read_template_file(template_path)
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-output-elasticsearch'
4
- s.version = '4.1.3'
4
+ s.version = '5.1.0'
5
5
  s.licenses = ['apache-2.0']
6
6
  s.summary = "Logstash Output to Elasticsearch"
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -1,6 +1,6 @@
1
1
  require_relative "../../../spec/es_spec_helper"
2
2
 
3
- describe "Ingest pipeline execution behavior", :integration => true, :version_5x => true do
3
+ describe "Ingest pipeline execution behavior", :integration => true, :version_greater_than_equal_to_5x => true do
4
4
  subject! do
5
5
  require "logstash/outputs/elasticsearch"
6
6
  settings = {
@@ -19,7 +19,7 @@ describe "Ingest pipeline execution behavior", :integration => true, :version_5x
19
19
  {
20
20
  "grok": {
21
21
  "field": "message",
22
- "pattern": "%{COMBINEDAPACHELOG}"
22
+ "patterns": ["%{COMBINEDAPACHELOG}"]
23
23
  }
24
24
  }
25
25
  ]
@@ -0,0 +1,93 @@
1
+ require_relative "../../../spec/es_spec_helper"
2
+
3
+ # This file is a copy of the template test for 2.x. We can DRY this up later.
4
+ describe "index template expected behavior for 5.x", :integration => true, :version_greater_than_equal_to_5x => true do
5
+ subject! do
6
+ require "logstash/outputs/elasticsearch"
7
+ settings = {
8
+ "manage_template" => true,
9
+ "template_overwrite" => true,
10
+ "hosts" => "#{get_host_port()}"
11
+ }
12
+ next LogStash::Outputs::ElasticSearch.new(settings)
13
+ end
14
+
15
+ before :each do
16
+ # Delete all templates first.
17
+ require "elasticsearch"
18
+
19
+ # Clean ES of data before we start.
20
+ @es = get_client
21
+ @es.indices.delete_template(:name => "*")
22
+
23
+ # This can fail if there are no indices; ignore the failure.
24
+ @es.indices.delete(:index => "*") rescue nil
25
+
26
+ subject.register
27
+
28
+ subject.multi_receive([
29
+ LogStash::Event.new("message" => "sample message here"),
30
+ LogStash::Event.new("somevalue" => 100),
31
+ LogStash::Event.new("somevalue" => 10),
32
+ LogStash::Event.new("somevalue" => 1),
33
+ LogStash::Event.new("country" => "us"),
34
+ LogStash::Event.new("country" => "at"),
35
+ LogStash::Event.new("geoip" => { "location" => [ 0.0, 0.0 ] })
36
+ ])
37
+
38
+ @es.indices.refresh
39
+
40
+ # Wait or fail until everything's indexed.
41
+ Stud::try(20.times) do
42
+ r = @es.search
43
+ insist { r["hits"]["total"] } == 7
44
+ end
45
+ end
46
+
47
+ it "permits phrase searching on string fields" do
48
+ results = @es.search(:q => "message:\"sample message\"")
49
+ insist { results["hits"]["total"] } == 1
50
+ insist { results["hits"]["hits"][0]["_source"]["message"] } == "sample message here"
51
+ end
52
+
53
+ it "numbers dynamically map to a numeric type and permit range queries" do
54
+ results = @es.search(:q => "somevalue:[5 TO 105]")
55
+ insist { results["hits"]["total"] } == 2
56
+
57
+ values = results["hits"]["hits"].collect { |r| r["_source"]["somevalue"] }
58
+ insist { values }.include?(10)
59
+ insist { values }.include?(100)
60
+ reject { values }.include?(1)
61
+ end
62
+
63
+ it "does not create .keyword field for the message field" do
64
+ results = @es.search(:q => "message.keyword:\"sample message here\"")
65
+ insist { results["hits"]["total"] } == 0
66
+ end
67
+
68
+ it "creates .keyword field from any string field which is not_analyzed" do
69
+ results = @es.search(:q => "country.keyword:\"us\"")
70
+ insist { results["hits"]["total"] } == 1
71
+ insist { results["hits"]["hits"][0]["_source"]["country"] } == "us"
72
+
73
+ # A partial term should not match.
74
+ results = @es.search(:q => "country.keyword:\"u\"")
75
+ insist { results["hits"]["total"] } == 0
76
+ end
77
+
78
+ it "makes [geoip][location] a geo_point" do
79
+ expect(@es.indices.get_template(name: "logstash")["logstash"]["mappings"]["_default_"]["properties"]["geoip"]["properties"]["location"]["type"]).to eq("geo_point")
80
+ end
81
+
82
+ it "aggregates .keyword results correctly" do
83
+ results = @es.search(:body => { "aggregations" => { "my_agg" => { "terms" => { "field" => "country.keyword" } } } })["aggregations"]["my_agg"]
84
+ terms = results["buckets"].collect { |b| b["key"] }
85
+
86
+ insist { terms }.include?("us")
87
+
88
+ # 'at' is a stopword; make sure stopwords are not ignored.
89
+ insist { terms }.include?("at")
90
+ end
91
+ end
92
+
93
+
@@ -1,6 +1,6 @@
1
1
  require_relative "../../../spec/es_spec_helper"
2
2
 
3
- describe "index template expected behavior", :integration => true do
3
+ describe "index template expected behavior", :integration => true, :version_less_than_5x => true do
4
4
  subject! do
5
5
  require "logstash/outputs/elasticsearch"
6
6
  settings = {
@@ -1,6 +1,6 @@
1
1
  require_relative "../../../spec/es_spec_helper"
2
2
 
3
- describe "Update actions", :integration => true, :version_2x_plus => true do
3
+ describe "Update actions", :integration => true, :version_greater_than_equal_to_2x => true do
4
4
  require "logstash/outputs/elasticsearch"
5
5
 
6
6
  def get_es_output( options={} )
@@ -0,0 +1,57 @@
1
+ require "logstash/devutils/rspec/spec_helper"
2
+ require "logstash/outputs/elasticsearch/http_client"
3
+ require "java"
4
+ require "json"
5
+
6
+ describe LogStash::Outputs::ElasticSearch::TemplateManager do
7
+
8
+ describe ".get_es_major_version" do
9
+ let(:es_1x_version) { '{ "number" : "1.7.0", "build_hash" : "929b9739cae115e73c346cb5f9a6f24ba735a743", "build_timestamp" : "2015-07-16T14:31:07Z", "build_snapshot" : false, "lucene_version" : "4.10.4" }' }
10
+ let(:es_2x_version) { '{ "number" : "2.3.4", "build_hash" : "e455fd0c13dceca8dbbdbb1665d068ae55dabe3f", "build_timestamp" : "2016-06-30T11:24:31Z", "build_snapshot" : false, "lucene_version" : "5.5.0" }' }
11
+ let(:es_5x_version) { '{ "number" : "5.0.0-alpha4", "build_hash" : "b0da471", "build_date" : "2016-06-22T12:33:48.164Z", "build_snapshot" : false, "lucene_version" : "6.1.0" }' }
12
+ let(:client) { double("client") }
13
+ context "elasticsearch 1.x" do
14
+ before(:each) do
15
+ allow(client).to receive(:get_version).and_return(JSON.parse(es_1x_version))
16
+ end
17
+ it "detects major version is 1" do
18
+ expect(described_class.get_es_major_version(client)).to eq("1")
19
+ end
20
+ end
21
+ context "elasticsearch 2.x" do
22
+ before(:each) do
23
+ allow(client).to receive(:get_version).and_return(JSON.parse(es_2x_version))
24
+ end
25
+ it "detects major version is 2" do
26
+ expect(described_class.get_es_major_version(client)).to eq("2")
27
+ end
28
+ end
29
+ context "elasticsearch 5.x" do
30
+ before(:each) do
31
+ allow(client).to receive(:get_version).and_return(JSON.parse(es_5x_version))
32
+ end
33
+ it "detects major version is 5" do
34
+ expect(described_class.get_es_major_version(client)).to eq("5")
35
+ end
36
+ end
37
+ end
38
+
39
+ describe ".default_template_path" do
40
+ context "elasticsearch 1.x" do
41
+ it "chooses the 2x template" do
42
+ expect(described_class.default_template_path("1")).to match(/elasticsearch-template-es2x.json/)
43
+ end
44
+ end
45
+ context "elasticsearch 2.x" do
46
+ it "chooses the 2x template" do
47
+ expect(described_class.default_template_path("2")).to match(/elasticsearch-template-es2x.json/)
48
+ end
49
+ end
50
+ context "elasticsearch 5.x" do
51
+ it "chooses the 2x template" do
52
+ expect(described_class.default_template_path("5")).to match(/elasticsearch-template-es5x.json/)
53
+ end
54
+ end
55
+ end
56
+
57
+ end
@@ -279,4 +279,20 @@ describe "outputs/elasticsearch" do
279
279
  end
280
280
  end
281
281
  end
282
+
283
+ describe "stale connection check" do
284
+ let(:validate_after_inactivity) { 123 }
285
+ subject(:eso) { LogStash::Outputs::ElasticSearch.new("validate_after_inactivity" => validate_after_inactivity) }
286
+
287
+ before do
288
+ allow(::Manticore::Client).to receive(:new).with(any_args)
289
+ subject.register
290
+ end
291
+
292
+ it "should set the correct http client option for 'validate_after_inactivity'" do
293
+ expect(::Manticore::Client).to have_received(:new) do |options|
294
+ expect(options[:check_connection_timeout]).to eq(validate_after_inactivity)
295
+ end
296
+ end
297
+ end
282
298
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.1.3
4
+ version: 5.1.0
5
5
  platform: java
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-07-14 00:00:00.000000000 Z
11
+ date: 2016-08-31 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -183,7 +183,8 @@ files:
183
183
  - lib/logstash/outputs/elasticsearch.rb
184
184
  - lib/logstash/outputs/elasticsearch/common.rb
185
185
  - lib/logstash/outputs/elasticsearch/common_configs.rb
186
- - lib/logstash/outputs/elasticsearch/elasticsearch-template.json
186
+ - lib/logstash/outputs/elasticsearch/elasticsearch-template-es2x.json
187
+ - lib/logstash/outputs/elasticsearch/elasticsearch-template-es5x.json
187
188
  - lib/logstash/outputs/elasticsearch/http_client.rb
188
189
  - lib/logstash/outputs/elasticsearch/http_client/manticore_adapter.rb
189
190
  - lib/logstash/outputs/elasticsearch/http_client/pool.rb
@@ -202,12 +203,14 @@ files:
202
203
  - spec/integration/outputs/retry_spec.rb
203
204
  - spec/integration/outputs/routing_spec.rb
204
205
  - spec/integration/outputs/secure_spec.rb
206
+ - spec/integration/outputs/templates_5x_spec.rb
205
207
  - spec/integration/outputs/templates_spec.rb
206
208
  - spec/integration/outputs/update_spec.rb
207
209
  - spec/unit/http_client_builder_spec.rb
208
210
  - spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb
209
211
  - spec/unit/outputs/elasticsearch/http_client/pool_spec.rb
210
212
  - spec/unit/outputs/elasticsearch/http_client_spec.rb
213
+ - spec/unit/outputs/elasticsearch/template_manager_spec.rb
211
214
  - spec/unit/outputs/elasticsearch_proxy_spec.rb
212
215
  - spec/unit/outputs/elasticsearch_spec.rb
213
216
  - spec/unit/outputs/elasticsearch_ssl_spec.rb
@@ -234,7 +237,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
234
237
  version: '0'
235
238
  requirements: []
236
239
  rubyforge_project:
237
- rubygems_version: 2.6.3
240
+ rubygems_version: 2.4.8
238
241
  signing_key:
239
242
  specification_version: 4
240
243
  summary: Logstash Output to Elasticsearch
@@ -251,12 +254,14 @@ test_files:
251
254
  - spec/integration/outputs/retry_spec.rb
252
255
  - spec/integration/outputs/routing_spec.rb
253
256
  - spec/integration/outputs/secure_spec.rb
257
+ - spec/integration/outputs/templates_5x_spec.rb
254
258
  - spec/integration/outputs/templates_spec.rb
255
259
  - spec/integration/outputs/update_spec.rb
256
260
  - spec/unit/http_client_builder_spec.rb
257
261
  - spec/unit/outputs/elasticsearch/http_client/manticore_adapter_spec.rb
258
262
  - spec/unit/outputs/elasticsearch/http_client/pool_spec.rb
259
263
  - spec/unit/outputs/elasticsearch/http_client_spec.rb
264
+ - spec/unit/outputs/elasticsearch/template_manager_spec.rb
260
265
  - spec/unit/outputs/elasticsearch_proxy_spec.rb
261
266
  - spec/unit/outputs/elasticsearch_spec.rb
262
267
  - spec/unit/outputs/elasticsearch_ssl_spec.rb