logstash-filter-aggregate 2.5.2 → 2.6.0
This diff shows the changes between two publicly released versions of the package, as they appear in their public registry, and is provided for informational purposes only.
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/CONTRIBUTORS +1 -0
- data/Gemfile +9 -0
- data/{NOTICE.txt → NOTICE.TXT} +0 -0
- data/README.md +65 -288
- data/docs/index.asciidoc +552 -0
- data/lib/logstash/filters/aggregate.rb +179 -90
- data/logstash-filter-aggregate.gemspec +2 -2
- data/spec/filters/aggregate_spec.rb +69 -13
- metadata +4 -3
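The functional change in this release is a new `inactivity_timeout` option, exercised by the spec diff below, which complements the existing `timeout`. As a rough illustration only (not code from the package), the following Ruby sketch shows the two options side by side; it assumes the standard Logstash plugin API that the spec's `setup_filter` helper wraps (a string-keyed params hash passed to `.new`, then `#register`), and all values and field names are made up:

```ruby
# Hedged sketch, not taken from the diff; requires a Logstash (JRuby) environment.
require "logstash/filters/aggregate"

filter = LogStash::Filters::Aggregate.new(
  "task_id"            => "%{taskid}",
  "code"               => "map['sql_duration'] ||= 0",
  "timeout"            => 4,  # absolute limit: the map is flushed ~4s after it was created
  "inactivity_timeout" => 2   # idle limit: the map is flushed ~2s after the last event for the task
)
# Per the new validation spec, register raises LogStash::ConfigurationError
# when inactivity_timeout is greater than timeout.
filter.register
```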
data/logstash-filter-aggregate.gemspec CHANGED

@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-filter-aggregate'
-  s.version = '2.5.2'
+  s.version = '2.6.0'
   s.licenses = ['Apache License (2.0)']
   s.summary = 'The aim of this filter is to aggregate information available among several events (typically log lines) belonging to a same task, and finally push aggregated information into final task event.'
   s.description = 'This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program'
@@ -10,7 +10,7 @@ Gem::Specification.new do |s|
   s.require_paths = ['lib']

   # Files
-  s.files = Dir[
+  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
data/spec/filters/aggregate_spec.rb CHANGED

@@ -9,7 +9,7 @@ describe LogStash::Filters::Aggregate do
     reset_static_variables()
     @start_filter = setup_filter({ "map_action" => "create", "code" => "map['sql_duration'] = 0" })
     @update_filter = setup_filter({ "map_action" => "update", "code" => "map['sql_duration'] += event.get('duration')" })
-    @end_filter = setup_filter({"timeout_task_id_field" => "my_id", "push_map_as_event_on_timeout" => true, "map_action" => "update", "code" => "event.set('sql_duration', map['sql_duration'])", "end_of_task" => true, "timeout" => 5, "timeout_code" => "event.set('test', 'testValue')", "timeout_tags" => ["tag1", "tag2"] })
+    @end_filter = setup_filter({"timeout_task_id_field" => "my_id", "push_map_as_event_on_timeout" => true, "map_action" => "update", "code" => "event.set('sql_duration', map['sql_duration'])", "end_of_task" => true, "timeout" => 5, "inactivity_timeout" => 2, "timeout_code" => "event.set('test', 'testValue')", "timeout_tags" => ["tag1", "tag2"] })
   end

   context "Validation" do
@@ -20,8 +20,16 @@ describe LogStash::Filters::Aggregate do
         }.to raise_error(LogStash::ConfigurationError)
       end
     end
+    describe "and register a filter with inactivity_timeout longer than timeout" do
+      it "raises a LogStash::ConfigurationError" do
+        expect {
+          # use a diffrent task_id pattern, otherwise the timeout settings cannot be updated
+          setup_filter({ "task_id" => "%{taskid2}", "code" => "", "timeout" => 2, "inactivity_timeout" => 3 })
+        }.to raise_error(LogStash::ConfigurationError)
+      end
+    end
   end
-
+
   context "Start event" do
     describe "and receiving an event without task_id" do
       it "does not record it" do
@@ -46,10 +54,10 @@ describe LogStash::Filters::Aggregate do

         first_start_event = start_event("taskid" => "id124")
         @start_filter.filter(first_start_event)
-
+
         first_update_event = update_event("taskid" => "id124", "duration" => 2)
         @update_filter.filter(first_update_event)
-
+
         sleep(1)
         second_start_event = start_event("taskid" => "id124")
         @start_filter.filter(second_start_event)
@@ -190,7 +198,7 @@ describe LogStash::Filters::Aggregate do
         entries = @end_filter.flush()
         expect(aggregate_maps["%{taskid}"]).to be_empty
         expect(entries.size).to eq(1)
-        expect(entries[0].get("my_id")).to eq("id_123") # task id
+        expect(entries[0].get("my_id")).to eq("id_123") # task id
         expect(entries[0].get("sql_duration")).to eq(0) # Aggregation map
         expect(entries[0].get("test")).to eq("testValue") # Timeout code
         expect(entries[0].get("tags")).to eq(["tag1", "tag2"]) # Timeout tags
@@ -206,6 +214,54 @@ describe LogStash::Filters::Aggregate do
         expect(entries).to be_empty
       end
     end
+
+    context "inactivity_timeout" do
+      before(:each) do
+        @end_filter.timeout = 4
+        expect(@end_filter.timeout).to eq(4)
+        @end_filter.inactivity_timeout = 2
+        expect(@end_filter.inactivity_timeout).to eq(2)
+        @task_id_value = "id_123"
+        @start_event = start_event({"taskid" => @task_id_value})
+        @start_filter.filter(@start_event)
+        expect(aggregate_maps["%{taskid}"].size).to eq(1)
+      end
+      describe "event arrives before inactivity_timeout" do
+        it "does not remove event if another" do
+          expect(aggregate_maps["%{taskid}"].size).to eq(1)
+          sleep(1)
+          @start_filter.filter(start_event({"task_id" => @task_id_value}))
+          entries = @end_filter.flush()
+          expect(aggregate_maps["%{taskid}"].size).to eq(1)
+          expect(entries).to be_empty
+        end
+      end
+      describe "no event arrives after inactivity_timeout" do
+        it "removes event" do
+          expect(aggregate_maps["%{taskid}"].size).to eq(1)
+          sleep(3)
+          entries = @end_filter.flush()
+          expect(aggregate_maps["%{taskid}"]).to be_empty
+          expect(entries.size).to eq(1)
+        end
+      end
+      describe "timeout expires while events arrive within inactivity_timeout" do
+        it "removes event" do
+          expect(aggregate_maps["%{taskid}"].size).to eq(1)
+          sleep(1)
+          @start_filter.filter(start_event({"task_id" => @task_id_value}))
+          sleep(1)
+          @start_filter.filter(start_event({"task_id" => @task_id_value}))
+          sleep(1)
+          @start_filter.filter(start_event({"task_id" => @task_id_value}))
+          sleep(2)
+          @start_filter.filter(start_event({"task_id" => @task_id_value}))
+          entries = @end_filter.flush()
+          expect(aggregate_maps["%{taskid}"]).to be_empty
+          expect(entries.size).to eq(1)
+        end
+      end
+    end
   end

   context "aggregate_maps_path option is defined, " do
@@ -213,7 +269,7 @@ describe LogStash::Filters::Aggregate do
       it "stores aggregate maps to configured file and then loads aggregate maps from file" do
         store_file = "aggregate_maps"
         expect(File.exist?(store_file)).to be false
-
+
         one_filter = setup_filter({ "task_id" => "%{one_special_field}", "code" => ""})
         store_filter = setup_filter({ "code" => "map['sql_duration'] = 0", "aggregate_maps_path" => store_file })
         expect(aggregate_maps["%{one_special_field}"]).to be_empty
@@ -222,10 +278,10 @@ describe LogStash::Filters::Aggregate do
         start_event = start_event("taskid" => 124)
         filter = store_filter.filter(start_event)
         expect(aggregate_maps["%{taskid}"].size).to eq(1)
-
+
         @end_filter.close()
         expect(aggregate_maps).not_to be_empty
-
+
         store_filter.close()
         expect(File.exist?(store_file)).to be true
         expect(aggregate_maps).to be_empty
@@ -247,7 +303,7 @@ describe LogStash::Filters::Aggregate do
       end
     end
   end
-
+
   context "Logstash reload occurs, " do
     describe "close method is called, " do
       it "reinitializes static variables" do
@@ -256,14 +312,14 @@ describe LogStash::Filters::Aggregate do
        expect(taskid_eviction_instance).to be_nil
        expect(static_close_instance).not_to be_nil
        expect(aggregate_maps_path_set).to be false
-
+
        @end_filter.register()
        expect(static_close_instance).to be_nil
      end
    end
  end

-  context "push_previous_map_as_event option is defined, " do
+  context "push_previous_map_as_event option is defined, " do
    describe "when push_previous_map_as_event option is activated on another filter with same task_id pattern" do
      it "should throw a LogStash::ConfigurationError" do
        expect {
@@ -271,7 +327,7 @@ describe LogStash::Filters::Aggregate do
        }.to raise_error(LogStash::ConfigurationError)
      end
    end
-
+
    describe "when a new task id is detected, " do
      it "should push previous map as new event" do
        push_filter = setup_filter({ "task_id" => "%{ppm_id}", "code" => "map['ppm_id'] = event.get('ppm_id')", "push_previous_map_as_event" => true, "timeout" => 5, "timeout_task_id_field" => "timeout_task_id_field" })
@@ -310,6 +366,6 @@ describe LogStash::Filters::Aggregate do
      end
    end
  end
-
+

 end
metadata CHANGED

@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: logstash-filter-aggregate
 version: !ruby/object:Gem::Version
-  version: 2.5.2
+  version: 2.6.0
 platform: ruby
 authors:
 - Elastic
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-
+date: 2017-06-11 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -56,8 +56,9 @@ files:
 - CONTRIBUTORS
 - Gemfile
 - LICENSE
-- NOTICE.txt
+- NOTICE.TXT
 - README.md
+- docs/index.asciidoc
 - lib/logstash/filters/aggregate.rb
 - logstash-filter-aggregate.gemspec
 - spec/filters/aggregate_spec.rb