logstash-filter-aggregate 0.1.5 → 2.0.2
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- data/BUILD.md +81 -81
- data/CHANGELOG.md +14 -15
- data/CONTRIBUTORS +10 -10
- data/Gemfile +2 -2
- data/LICENSE +13 -13
- data/README.md +147 -147
- data/lib/logstash/filters/aggregate.rb +277 -277
- data/logstash-filter-aggregate.gemspec +24 -24
- data/spec/filters/aggregate_spec.rb +177 -185
- data/spec/filters/aggregate_spec_helper.rb +49 -49
- metadata +17 -17
@@ -1,24 +1,24 @@
|
|
1
|
-
Gem::Specification.new do |s|
|
2
|
-
s.name = 'logstash-filter-aggregate'
|
3
|
-
s.version = '0.1.5'
|
4
|
-
s.licenses = ['Apache License (2.0)']
|
5
|
-
s.summary = "The aim of this filter is to aggregate information available among several events (typically log lines) belonging to a same task, and finally push aggregated information into final task event."
|
6
|
-
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
|
7
|
-
s.authors = ["Elastic", "Fabien Baligand"]
|
8
|
-
s.email = 'info@elastic.co'
|
9
|
-
s.homepage = "https://github.com/logstash-plugins/logstash-filter-aggregate"
|
10
|
-
s.require_paths = ["lib"]
|
11
|
-
|
12
|
-
# Files
|
13
|
-
s.files = Dir['lib/**/*','spec/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE']
|
14
|
-
|
15
|
-
# Tests
|
16
|
-
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
17
|
-
|
18
|
-
# Special flag to let us know this is actually a logstash plugin
|
19
|
-
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
|
20
|
-
|
21
|
-
# Gem dependencies
|
22
|
-
s.add_runtime_dependency "logstash-core", ">= 1.4.0", "< 2.0.0"
|
23
|
-
s.add_development_dependency 'logstash-devutils', '~> 0'
|
24
|
-
end
|
1
|
+
Gem::Specification.new do |s|
|
2
|
+
s.name = 'logstash-filter-aggregate'
|
3
|
+
s.version = '2.0.2'
|
4
|
+
s.licenses = ['Apache License (2.0)']
|
5
|
+
s.summary = "The aim of this filter is to aggregate information available among several events (typically log lines) belonging to a same task, and finally push aggregated information into final task event."
|
6
|
+
s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
|
7
|
+
s.authors = ["Elastic", "Fabien Baligand"]
|
8
|
+
s.email = 'info@elastic.co'
|
9
|
+
s.homepage = "https://github.com/logstash-plugins/logstash-filter-aggregate"
|
10
|
+
s.require_paths = ["lib"]
|
11
|
+
|
12
|
+
# Files
|
13
|
+
s.files = Dir['lib/**/*','spec/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE']
|
14
|
+
|
15
|
+
# Tests
|
16
|
+
s.test_files = s.files.grep(%r{^(test|spec|features)/})
|
17
|
+
|
18
|
+
# Special flag to let us know this is actually a logstash plugin
|
19
|
+
s.metadata = { "logstash_plugin" => "true", "logstash_group" => "filter" }
|
20
|
+
|
21
|
+
# Gem dependencies
|
22
|
+
s.add_runtime_dependency "logstash-core", ">= 2.0.0.beta2", "< 3.0.0"
|
23
|
+
s.add_development_dependency 'logstash-devutils', '~> 0'
|
24
|
+
end
|
@@ -1,185 +1,177 @@
|
|
1
|
-
# encoding: utf-8
|
2
|
-
require "logstash/devutils/rspec/spec_helper"
|
3
|
-
require "logstash/filters/aggregate"
|
4
|
-
require_relative "aggregate_spec_helper"
|
5
|
-
|
6
|
-
describe LogStash::Filters::Aggregate do
|
7
|
-
|
8
|
-
|
9
|
-
|
10
|
-
|
11
|
-
|
12
|
-
|
13
|
-
|
14
|
-
|
15
|
-
|
16
|
-
|
17
|
-
|
18
|
-
|
19
|
-
|
20
|
-
|
21
|
-
|
22
|
-
|
23
|
-
|
24
|
-
|
25
|
-
|
26
|
-
|
27
|
-
|
28
|
-
|
29
|
-
|
30
|
-
|
31
|
-
|
32
|
-
|
33
|
-
|
34
|
-
|
35
|
-
|
36
|
-
|
37
|
-
|
38
|
-
|
39
|
-
|
40
|
-
|
41
|
-
|
42
|
-
|
43
|
-
|
44
|
-
|
45
|
-
|
46
|
-
|
47
|
-
|
48
|
-
|
49
|
-
|
50
|
-
|
51
|
-
|
52
|
-
|
53
|
-
|
54
|
-
|
55
|
-
|
56
|
-
|
57
|
-
|
58
|
-
|
59
|
-
|
60
|
-
|
61
|
-
|
62
|
-
|
63
|
-
|
64
|
-
|
65
|
-
|
66
|
-
|
67
|
-
|
68
|
-
|
69
|
-
|
70
|
-
|
71
|
-
|
72
|
-
|
73
|
-
|
74
|
-
|
75
|
-
|
76
|
-
|
77
|
-
|
78
|
-
|
79
|
-
|
80
|
-
|
81
|
-
|
82
|
-
|
83
|
-
|
84
|
-
|
85
|
-
|
86
|
-
|
87
|
-
|
88
|
-
|
89
|
-
|
90
|
-
|
91
|
-
|
92
|
-
|
93
|
-
|
94
|
-
|
95
|
-
|
96
|
-
|
97
|
-
|
98
|
-
|
99
|
-
|
100
|
-
|
101
|
-
|
102
|
-
|
103
|
-
|
104
|
-
|
105
|
-
|
106
|
-
|
107
|
-
|
108
|
-
|
109
|
-
|
110
|
-
|
111
|
-
|
112
|
-
|
113
|
-
|
114
|
-
|
115
|
-
|
116
|
-
|
117
|
-
|
118
|
-
|
119
|
-
|
120
|
-
|
121
|
-
|
122
|
-
|
123
|
-
|
124
|
-
|
125
|
-
|
126
|
-
|
127
|
-
|
128
|
-
|
129
|
-
|
130
|
-
|
131
|
-
|
132
|
-
|
133
|
-
|
134
|
-
|
135
|
-
|
136
|
-
|
137
|
-
|
138
|
-
|
139
|
-
|
140
|
-
|
141
|
-
|
142
|
-
|
143
|
-
|
144
|
-
|
145
|
-
|
146
|
-
|
147
|
-
|
148
|
-
|
149
|
-
|
150
|
-
|
151
|
-
|
152
|
-
|
153
|
-
|
154
|
-
|
155
|
-
|
156
|
-
|
157
|
-
|
158
|
-
|
159
|
-
|
160
|
-
|
161
|
-
|
162
|
-
|
163
|
-
|
164
|
-
|
165
|
-
|
166
|
-
|
167
|
-
|
168
|
-
|
169
|
-
|
170
|
-
|
171
|
-
|
172
|
-
|
173
|
-
|
174
|
-
|
175
|
-
|
176
|
-
|
177
|
-
|
178
|
-
@end_filter.flush()
|
179
|
-
expect(aggregate_maps).to be_empty
|
180
|
-
end
|
181
|
-
end
|
182
|
-
|
183
|
-
end
|
184
|
-
|
185
|
-
end
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/devutils/rspec/spec_helper"
|
3
|
+
require "logstash/filters/aggregate"
|
4
|
+
require_relative "aggregate_spec_helper"
|
5
|
+
|
6
|
+
describe LogStash::Filters::Aggregate do
|
7
|
+
|
8
|
+
before(:each) do
|
9
|
+
set_eviction_instance(nil)
|
10
|
+
aggregate_maps.clear()
|
11
|
+
@start_filter = setup_filter({ "map_action" => "create", "code" => "map['sql_duration'] = 0" })
|
12
|
+
@update_filter = setup_filter({ "map_action" => "update", "code" => "map['sql_duration'] += event['duration']" })
|
13
|
+
@end_filter = setup_filter({ "map_action" => "update", "code" => "event.to_hash.merge!(map)", "end_of_task" => true, "timeout" => 5 })
|
14
|
+
end
|
15
|
+
|
16
|
+
context "Start event" do
|
17
|
+
describe "and receiving an event without task_id" do
|
18
|
+
it "does not record it" do
|
19
|
+
@start_filter.filter(event())
|
20
|
+
expect(aggregate_maps).to be_empty
|
21
|
+
end
|
22
|
+
end
|
23
|
+
describe "and receiving an event with task_id" do
|
24
|
+
it "records it" do
|
25
|
+
event = start_event("taskid" => "id123")
|
26
|
+
@start_filter.filter(event)
|
27
|
+
|
28
|
+
expect(aggregate_maps.size).to eq(1)
|
29
|
+
expect(aggregate_maps["id123"]).not_to be_nil
|
30
|
+
expect(aggregate_maps["id123"].creation_timestamp).to be >= event["@timestamp"]
|
31
|
+
expect(aggregate_maps["id123"].map["sql_duration"]).to eq(0)
|
32
|
+
end
|
33
|
+
end
|
34
|
+
|
35
|
+
describe "and receiving two 'start events' for the same task_id" do
|
36
|
+
it "keeps the first one and does nothing with the second one" do
|
37
|
+
|
38
|
+
first_start_event = start_event("taskid" => "id124")
|
39
|
+
@start_filter.filter(first_start_event)
|
40
|
+
|
41
|
+
first_update_event = update_event("taskid" => "id124", "duration" => 2)
|
42
|
+
@update_filter.filter(first_update_event)
|
43
|
+
|
44
|
+
sleep(1)
|
45
|
+
second_start_event = start_event("taskid" => "id124")
|
46
|
+
@start_filter.filter(second_start_event)
|
47
|
+
|
48
|
+
expect(aggregate_maps.size).to eq(1)
|
49
|
+
expect(aggregate_maps["id124"].creation_timestamp).to be < second_start_event["@timestamp"]
|
50
|
+
expect(aggregate_maps["id124"].map["sql_duration"]).to eq(first_update_event["duration"])
|
51
|
+
end
|
52
|
+
end
|
53
|
+
end
|
54
|
+
|
55
|
+
context "End event" do
|
56
|
+
describe "receiving an event without a previous 'start event'" do
|
57
|
+
describe "but without a previous 'start event'" do
|
58
|
+
it "does nothing with the event" do
|
59
|
+
end_event = end_event("taskid" => "id124")
|
60
|
+
@end_filter.filter(end_event)
|
61
|
+
|
62
|
+
expect(aggregate_maps).to be_empty
|
63
|
+
expect(end_event["sql_duration"]).to be_nil
|
64
|
+
end
|
65
|
+
end
|
66
|
+
end
|
67
|
+
end
|
68
|
+
|
69
|
+
context "Start/end events interaction" do
|
70
|
+
describe "receiving a 'start event'" do
|
71
|
+
before(:each) do
|
72
|
+
@task_id_value = "id_123"
|
73
|
+
@start_event = start_event({"taskid" => @task_id_value})
|
74
|
+
@start_filter.filter(@start_event)
|
75
|
+
expect(aggregate_maps.size).to eq(1)
|
76
|
+
end
|
77
|
+
|
78
|
+
describe "and receiving an end event" do
|
79
|
+
describe "and without an id" do
|
80
|
+
it "does nothing" do
|
81
|
+
end_event = end_event()
|
82
|
+
@end_filter.filter(end_event)
|
83
|
+
expect(aggregate_maps.size).to eq(1)
|
84
|
+
expect(end_event["sql_duration"]).to be_nil
|
85
|
+
end
|
86
|
+
end
|
87
|
+
|
88
|
+
describe "and an id different from the one of the 'start event'" do
|
89
|
+
it "does nothing" do
|
90
|
+
different_id_value = @task_id_value + "_different"
|
91
|
+
@end_filter.filter(end_event("taskid" => different_id_value))
|
92
|
+
|
93
|
+
expect(aggregate_maps.size).to eq(1)
|
94
|
+
expect(aggregate_maps[@task_id_value]).not_to be_nil
|
95
|
+
end
|
96
|
+
end
|
97
|
+
|
98
|
+
describe "and the same id of the 'start event'" do
|
99
|
+
it "add 'sql_duration' field to the end event and deletes the aggregate map associated to taskid" do
|
100
|
+
expect(aggregate_maps.size).to eq(1)
|
101
|
+
|
102
|
+
@update_filter.filter(update_event("taskid" => @task_id_value, "duration" => 2))
|
103
|
+
|
104
|
+
end_event = end_event("taskid" => @task_id_value)
|
105
|
+
@end_filter.filter(end_event)
|
106
|
+
|
107
|
+
expect(aggregate_maps).to be_empty
|
108
|
+
expect(end_event["sql_duration"]).to eq(2)
|
109
|
+
end
|
110
|
+
|
111
|
+
end
|
112
|
+
end
|
113
|
+
end
|
114
|
+
end
|
115
|
+
|
116
|
+
context "Event which causes an exception when code call" do
|
117
|
+
it "intercepts exception, logs the error and tags the event with '_aggregateexception'" do
|
118
|
+
@start_filter = setup_filter({ "code" => "fail 'Test'" })
|
119
|
+
start_event = start_event("taskid" => "id124")
|
120
|
+
@start_filter.filter(start_event)
|
121
|
+
|
122
|
+
expect(start_event["tags"]).to eq(["_aggregateexception"])
|
123
|
+
end
|
124
|
+
end
|
125
|
+
|
126
|
+
context "flush call" do
|
127
|
+
before(:each) do
|
128
|
+
@end_filter.timeout = 1
|
129
|
+
expect(@end_filter.timeout).to eq(1)
|
130
|
+
@task_id_value = "id_123"
|
131
|
+
@start_event = start_event({"taskid" => @task_id_value})
|
132
|
+
@start_filter.filter(@start_event)
|
133
|
+
expect(aggregate_maps.size).to eq(1)
|
134
|
+
end
|
135
|
+
|
136
|
+
describe "no timeout defined in none filter" do
|
137
|
+
it "defines a default timeout on a default filter" do
|
138
|
+
set_eviction_instance(nil)
|
139
|
+
expect(eviction_instance).to be_nil
|
140
|
+
@end_filter.flush()
|
141
|
+
expect(eviction_instance).to eq(@end_filter)
|
142
|
+
expect(@end_filter.timeout).to eq(LogStash::Filters::Aggregate::DEFAULT_TIMEOUT)
|
143
|
+
end
|
144
|
+
end
|
145
|
+
|
146
|
+
describe "timeout is defined on another filter" do
|
147
|
+
it "eviction_instance is not updated" do
|
148
|
+
expect(eviction_instance).not_to be_nil
|
149
|
+
@start_filter.flush()
|
150
|
+
expect(eviction_instance).not_to eq(@start_filter)
|
151
|
+
expect(eviction_instance).to eq(@end_filter)
|
152
|
+
end
|
153
|
+
end
|
154
|
+
|
155
|
+
describe "no timeout defined on the filter" do
|
156
|
+
it "event is not removed" do
|
157
|
+
sleep(2)
|
158
|
+
@start_filter.flush()
|
159
|
+
expect(aggregate_maps.size).to eq(1)
|
160
|
+
end
|
161
|
+
end
|
162
|
+
|
163
|
+
describe "timeout defined on the filter" do
|
164
|
+
it "event is not removed if not expired" do
|
165
|
+
@end_filter.flush()
|
166
|
+
expect(aggregate_maps.size).to eq(1)
|
167
|
+
end
|
168
|
+
it "event is removed if expired" do
|
169
|
+
sleep(2)
|
170
|
+
@end_filter.flush()
|
171
|
+
expect(aggregate_maps).to be_empty
|
172
|
+
end
|
173
|
+
end
|
174
|
+
|
175
|
+
end
|
176
|
+
|
177
|
+
end
|
@@ -1,49 +1,49 @@
|
|
1
|
-
# encoding: utf-8
|
2
|
-
require "logstash/filters/aggregate"
|
3
|
-
|
4
|
-
def event(data = {})
|
5
|
-
data["message"] ||= "Log message"
|
6
|
-
data["@timestamp"] ||= Time.now
|
7
|
-
LogStash::Event.new(data)
|
8
|
-
end
|
9
|
-
|
10
|
-
def start_event(data = {})
|
11
|
-
data["logger"] = "TASK_START"
|
12
|
-
event(data)
|
13
|
-
end
|
14
|
-
|
15
|
-
def update_event(data = {})
|
16
|
-
data["logger"] = "SQL"
|
17
|
-
event(data)
|
18
|
-
end
|
19
|
-
|
20
|
-
def end_event(data = {})
|
21
|
-
data["logger"] = "TASK_END"
|
22
|
-
event(data)
|
23
|
-
end
|
24
|
-
|
25
|
-
def setup_filter(config = {})
|
26
|
-
config["task_id"] ||= "%{taskid}"
|
27
|
-
filter = LogStash::Filters::Aggregate.new(config)
|
28
|
-
filter.register()
|
29
|
-
return filter
|
30
|
-
end
|
31
|
-
|
32
|
-
def filter(event)
|
33
|
-
@start_filter.filter(event)
|
34
|
-
@update_filter.filter(event)
|
35
|
-
@end_filter.filter(event)
|
36
|
-
end
|
37
|
-
|
38
|
-
def aggregate_maps()
|
39
|
-
LogStash::Filters::Aggregate.class_variable_get(:@@aggregate_maps)
|
40
|
-
end
|
41
|
-
|
42
|
-
def eviction_instance()
|
43
|
-
LogStash::Filters::Aggregate.class_variable_get(:@@eviction_instance)
|
44
|
-
end
|
45
|
-
|
46
|
-
def set_eviction_instance(new_value)
|
47
|
-
LogStash::Filters::Aggregate.class_variable_set(:@@eviction_instance, new_value)
|
48
|
-
end
|
49
|
-
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/filters/aggregate"
|
3
|
+
|
4
|
+
def event(data = {})
|
5
|
+
data["message"] ||= "Log message"
|
6
|
+
data["@timestamp"] ||= Time.now
|
7
|
+
LogStash::Event.new(data)
|
8
|
+
end
|
9
|
+
|
10
|
+
def start_event(data = {})
|
11
|
+
data["logger"] = "TASK_START"
|
12
|
+
event(data)
|
13
|
+
end
|
14
|
+
|
15
|
+
def update_event(data = {})
|
16
|
+
data["logger"] = "SQL"
|
17
|
+
event(data)
|
18
|
+
end
|
19
|
+
|
20
|
+
def end_event(data = {})
|
21
|
+
data["logger"] = "TASK_END"
|
22
|
+
event(data)
|
23
|
+
end
|
24
|
+
|
25
|
+
def setup_filter(config = {})
|
26
|
+
config["task_id"] ||= "%{taskid}"
|
27
|
+
filter = LogStash::Filters::Aggregate.new(config)
|
28
|
+
filter.register()
|
29
|
+
return filter
|
30
|
+
end
|
31
|
+
|
32
|
+
def filter(event)
|
33
|
+
@start_filter.filter(event)
|
34
|
+
@update_filter.filter(event)
|
35
|
+
@end_filter.filter(event)
|
36
|
+
end
|
37
|
+
|
38
|
+
def aggregate_maps()
|
39
|
+
LogStash::Filters::Aggregate.class_variable_get(:@@aggregate_maps)
|
40
|
+
end
|
41
|
+
|
42
|
+
def eviction_instance()
|
43
|
+
LogStash::Filters::Aggregate.class_variable_get(:@@eviction_instance)
|
44
|
+
end
|
45
|
+
|
46
|
+
def set_eviction_instance(new_value)
|
47
|
+
LogStash::Filters::Aggregate.class_variable_set(:@@eviction_instance, new_value)
|
48
|
+
end
|
49
|
+
|