logstash-filter-elasticsearch 3.3.0 → 3.3.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/CHANGELOG.md +6 -0
- data/docs/index.asciidoc +62 -37
- data/lib/logstash/filters/elasticsearch.rb +5 -1
- data/logstash-filter-elasticsearch.gemspec +1 -1
- data/spec/filters/elasticsearch_spec.rb +68 -4
- data/spec/filters/fixtures/request_size0_agg.json +19 -0
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: f0392952d180a690b095f3639f5b7e6b02ab3bb37df033c7d1cbc588a563060c
|
4
|
+
data.tar.gz: a5673bdde9c5f497b1ab6e258d8b90bd193ca0a63d20f73c09f2a06e1fdafe60
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: e18f6af27ef9effc88d17fc96e7146c67efb2ea575a7a851c4e9e7edb2d860d91202da7ad49bb2bed0a2581fc0ac68f8fff522afa08b7eac282aafe809eefe95
|
7
|
+
data.tar.gz: 78cfbdd8d6204b645921bf5efe3e293968a65117f957487df021aa581d48000fa90927cf1662ce7e39b180fb44d250e8db76d6172654afdf1cc51992b4b6eae2
|
data/CHANGELOG.md
CHANGED
@@ -1,3 +1,9 @@
|
|
1
|
+
## 3.3.1
|
2
|
+
- Fix: The filter now only calls `filter_matched` on events that actually matched.
|
3
|
+
This fixes issues where all events would have success-related actions applied
|
4
|
+
when no match had actually happened (`add_tag`, `add_field`, `remove_tag`,
|
5
|
+
`remove_field`)
|
6
|
+
|
1
7
|
## 3.3.0
|
2
8
|
- Enhancement : if elasticsearch response contains any shard failure, then `tag_on_failure` tags are added to Logstash event
|
3
9
|
- Enhancement : add support for nested fields
|
data/docs/index.asciidoc
CHANGED
@@ -40,50 +40,56 @@ filter to find the matching "start" event based on some operation identifier.
|
|
40
40
|
Then it copies the `@timestamp` field from the "start" event into a new field on
|
41
41
|
the "end" event. Finally, using a combination of the "date" filter and the
|
42
42
|
"ruby" filter, we calculate the time duration in hours between the two events.
|
43
|
+
|
43
44
|
[source,ruby]
|
44
45
|
--------------------------------------------------
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
The example below reproduces the above example but utilises the query_template. This query_template represents a full
|
63
|
-
Elasticsearch query DSL and supports the standard Logstash field substitution syntax. The example below issues
|
64
|
-
the same query as the first example but uses the template shown.
|
65
|
-
|
66
|
-
if [type] == "end" {
|
67
|
-
elasticsearch {
|
68
|
-
hosts => ["es-server"]
|
69
|
-
query_template => "template.json"
|
70
|
-
fields => { "@timestamp" => "started" }
|
71
|
-
}
|
46
|
+
if [type] == "end" {
|
47
|
+
elasticsearch {
|
48
|
+
hosts => ["es-server"]
|
49
|
+
query => "type:start AND operation:%{[opid]}"
|
50
|
+
fields => { "@timestamp" => "started" }
|
51
|
+
}
|
52
|
+
|
53
|
+
date {
|
54
|
+
match => ["[started]", "ISO8601"]
|
55
|
+
target => "[started]"
|
56
|
+
}
|
57
|
+
|
58
|
+
ruby {
|
59
|
+
code => "event.set('duration_hrs', (event.get('@timestamp') - event.get('started')) / 3600)"
|
60
|
+
}
|
61
|
+
}
|
62
|
+
--------------------------------------------------
|
72
63
|
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
64
|
+
The example below reproduces the above example but utilises the query_template.
|
65
|
+
This query_template represents a full Elasticsearch query DSL and supports the
|
66
|
+
standard Logstash field substitution syntax. The example below issues
|
67
|
+
the same query as the first example but uses the template shown.
|
77
68
|
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
69
|
+
[source,ruby]
|
70
|
+
--------------------------------------------------
|
71
|
+
if [type] == "end" {
|
72
|
+
elasticsearch {
|
73
|
+
hosts => ["es-server"]
|
74
|
+
query_template => "template.json"
|
75
|
+
fields => { "@timestamp" => "started" }
|
76
|
+
}
|
82
77
|
|
78
|
+
date {
|
79
|
+
match => ["[started]", "ISO8601"]
|
80
|
+
target => "[started]"
|
81
|
+
}
|
83
82
|
|
83
|
+
ruby {
|
84
|
+
code => "event.set('duration_hrs', (event.get('@timestamp') - event.get('started')) / 3600)"
|
85
|
+
}
|
86
|
+
}
|
87
|
+
--------------------------------------------------
|
84
88
|
|
85
|
-
|
89
|
+
template.json:
|
86
90
|
|
91
|
+
[source,json]
|
92
|
+
--------------------------------------------------
|
87
93
|
{
|
88
94
|
"query": {
|
89
95
|
"query_string": {
|
@@ -92,12 +98,31 @@ the "end" event. Finally, using a combination of the "date" filter and the
|
|
92
98
|
},
|
93
99
|
"_source": ["@timestamp"]
|
94
100
|
}
|
101
|
+
--------------------------------------------------
|
95
102
|
|
96
|
-
As illustrated above, through the use of 'opid', fields from the Logstash
|
103
|
+
As illustrated above, through the use of 'opid', fields from the Logstash
|
104
|
+
events can be referenced within the template.
|
97
105
|
The template will be populated per event prior to being used to query Elasticsearch.
|
98
106
|
|
107
|
+
Note that when you use `query_template`, the Logstash attributes `result_size`
|
108
|
+
and `sort` will be ignored. They should be specified directly in the JSON
|
109
|
+
template. Example:
|
110
|
+
|
111
|
+
[source,json]
|
112
|
+
--------------------------------------------------
|
113
|
+
{
|
114
|
+
"size": 1,
|
115
|
+
"sort" : [ { "@timestamp" : "desc" } ],
|
116
|
+
"query": {
|
117
|
+
"query_string": {
|
118
|
+
"query": "type:start AND operation:%{[opid]}"
|
119
|
+
}
|
120
|
+
},
|
121
|
+
"_source": ["@timestamp"]
|
122
|
+
}
|
99
123
|
--------------------------------------------------
|
100
124
|
|
125
|
+
|
101
126
|
[id="plugins-{type}s-{plugin}-options"]
|
102
127
|
==== Elasticsearch Filter Configuration Options
|
103
128
|
|
@@ -74,6 +74,7 @@ class LogStash::Filters::Elasticsearch < LogStash::Filters::Base
|
|
74
74
|
end # def register
|
75
75
|
|
76
76
|
def filter(event)
|
77
|
+
matched = false
|
77
78
|
begin
|
78
79
|
params = {:index => event.sprintf(@index) }
|
79
80
|
|
@@ -94,6 +95,7 @@ class LogStash::Filters::Elasticsearch < LogStash::Filters::Base
|
|
94
95
|
|
95
96
|
resultsHits = results["hits"]["hits"]
|
96
97
|
if !resultsHits.nil? && !resultsHits.empty?
|
98
|
+
matched = true
|
97
99
|
@fields.each do |old_key, new_key|
|
98
100
|
old_key_path = extract_path(old_key)
|
99
101
|
set = resultsHits.map do |doc|
|
@@ -112,6 +114,7 @@ class LogStash::Filters::Elasticsearch < LogStash::Filters::Base
|
|
112
114
|
|
113
115
|
resultsAggs = results["aggregations"]
|
114
116
|
if !resultsAggs.nil? && !resultsAggs.empty?
|
117
|
+
matched = true
|
115
118
|
@aggregation_fields.each do |agg_name, ls_field|
|
116
119
|
event.set(ls_field, resultsAggs[agg_name])
|
117
120
|
end
|
@@ -120,8 +123,9 @@ class LogStash::Filters::Elasticsearch < LogStash::Filters::Base
|
|
120
123
|
rescue => e
|
121
124
|
@logger.warn("Failed to query elasticsearch for previous event", :index => @index, :query => query, :event => event, :error => e)
|
122
125
|
@tag_on_failure.each{|tag| event.tag(tag)}
|
126
|
+
else
|
127
|
+
filter_matched(event) if matched
|
123
128
|
end
|
124
|
-
filter_matched(event)
|
125
129
|
end # def filter
|
126
130
|
|
127
131
|
private
|
@@ -1,7 +1,7 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
|
3
3
|
s.name = 'logstash-filter-elasticsearch'
|
4
|
-
s.version = '3.3.0'
|
4
|
+
s.version = '3.3.1'
|
5
5
|
s.licenses = ['Apache License (2.0)']
|
6
6
|
s.summary = "Copies fields from previous log events in Elasticsearch to current events "
|
7
7
|
s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
|
@@ -124,13 +124,77 @@ describe LogStash::Filters::Elasticsearch do
|
|
124
124
|
end
|
125
125
|
end
|
126
126
|
|
127
|
+
# Tagging test for positive results
|
128
|
+
context "Tagging should occur if query returns results" do
|
129
|
+
let(:config) do
|
130
|
+
{
|
131
|
+
"index" => "foo*",
|
132
|
+
"hosts" => ["localhost:9200"],
|
133
|
+
"query" => "response: 404",
|
134
|
+
"add_tag" => ["tagged"]
|
135
|
+
}
|
136
|
+
end
|
137
|
+
|
138
|
+
let(:response) do
|
139
|
+
LogStash::Json.load(File.read(File.join(File.dirname(__FILE__), "fixtures", "request_x_10.json")))
|
140
|
+
end
|
141
|
+
|
142
|
+
it "should tag the current event if results returned" do
|
143
|
+
plugin.filter(event)
|
144
|
+
expect(event.to_hash["tags"]).to include("tagged")
|
145
|
+
end
|
146
|
+
end
|
147
|
+
|
148
|
+
context "an aggregation search with size 0 that matches" do
|
149
|
+
let(:config) do
|
150
|
+
{
|
151
|
+
"index" => "foo*",
|
152
|
+
"hosts" => ["localhost:9200"],
|
153
|
+
"query" => "response: 404",
|
154
|
+
"add_tag" => ["tagged"],
|
155
|
+
"result_size" => 0,
|
156
|
+
"aggregation_fields" => { "bytes_avg" => "bytes_avg_ls_field" }
|
157
|
+
}
|
158
|
+
end
|
159
|
+
|
160
|
+
let(:response) do
|
161
|
+
LogStash::Json.load(File.read(File.join(File.dirname(__FILE__), "fixtures", "request_size0_agg.json")))
|
162
|
+
end
|
163
|
+
|
164
|
+
it "should tag the current event" do
|
165
|
+
plugin.filter(event)
|
166
|
+
expect(event.get("tags")).to include("tagged")
|
167
|
+
expect(event.get("bytes_avg_ls_field")["value"]).to eq(294)
|
168
|
+
end
|
169
|
+
end
|
170
|
+
|
171
|
+
# Tagging test for negative results
|
172
|
+
context "Tagging should not occur if query has no results" do
|
173
|
+
let(:config) do
|
174
|
+
{
|
175
|
+
"index" => "foo*",
|
176
|
+
"hosts" => ["localhost:9200"],
|
177
|
+
"query" => "response: 404",
|
178
|
+
"add_tag" => ["tagged"]
|
179
|
+
}
|
180
|
+
end
|
181
|
+
|
182
|
+
let(:response) do
|
183
|
+
LogStash::Json.load(File.read(File.join(File.dirname(__FILE__), "fixtures", "request_error.json")))
|
184
|
+
end
|
185
|
+
|
186
|
+
it "should not tag the current event" do
|
187
|
+
plugin.filter(event)
|
188
|
+
expect(event.to_hash["tags"]).to_not include("tagged")
|
189
|
+
end
|
190
|
+
end
|
127
191
|
context "testing a simple query template" do
|
128
192
|
let(:config) do
|
129
193
|
{
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
194
|
+
"hosts" => ["localhost:9200"],
|
195
|
+
"query_template" => File.join(File.dirname(__FILE__), "fixtures", "query_template.json"),
|
196
|
+
"fields" => { "response" => "code" },
|
197
|
+
"result_size" => 1
|
134
198
|
}
|
135
199
|
end
|
136
200
|
|
@@ -0,0 +1,19 @@
|
|
1
|
+
{
|
2
|
+
"took": 49,
|
3
|
+
"timed_out": false,
|
4
|
+
"_shards": {
|
5
|
+
"total": 155,
|
6
|
+
"successful": 155,
|
7
|
+
"failed": 0
|
8
|
+
},
|
9
|
+
"hits": {
|
10
|
+
"total": 13476,
|
11
|
+
"max_score": 1,
|
12
|
+
"hits": []
|
13
|
+
},
|
14
|
+
"aggregations": {
|
15
|
+
"bytes_avg": {
|
16
|
+
"value": 294
|
17
|
+
}
|
18
|
+
}
|
19
|
+
}
|
metadata
CHANGED
@@ -1,14 +1,14 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-filter-elasticsearch
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 3.3.0
|
4
|
+
version: 3.3.1
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Elastic
|
8
8
|
autorequire:
|
9
9
|
bindir: bin
|
10
10
|
cert_chain: []
|
11
|
-
date: 2018-
|
11
|
+
date: 2018-05-08 00:00:00.000000000 Z
|
12
12
|
dependencies:
|
13
13
|
- !ruby/object:Gem::Dependency
|
14
14
|
requirement: !ruby/object:Gem::Requirement
|
@@ -85,6 +85,7 @@ files:
|
|
85
85
|
- spec/filters/elasticsearch_spec.rb
|
86
86
|
- spec/filters/fixtures/query_template.json
|
87
87
|
- spec/filters/fixtures/request_error.json
|
88
|
+
- spec/filters/fixtures/request_size0_agg.json
|
88
89
|
- spec/filters/fixtures/request_x_1.json
|
89
90
|
- spec/filters/fixtures/request_x_10.json
|
90
91
|
- spec/filters/integration/elasticsearch_spec.rb
|
@@ -118,6 +119,7 @@ test_files:
|
|
118
119
|
- spec/filters/elasticsearch_spec.rb
|
119
120
|
- spec/filters/fixtures/query_template.json
|
120
121
|
- spec/filters/fixtures/request_error.json
|
122
|
+
- spec/filters/fixtures/request_size0_agg.json
|
121
123
|
- spec/filters/fixtures/request_x_1.json
|
122
124
|
- spec/filters/fixtures/request_x_10.json
|
123
125
|
- spec/filters/integration/elasticsearch_spec.rb
|