logstash-filter-foreach 0.2.0 → 0.2.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/lib/logstash/filters/foreach.rb +15 -12
- data/logstash-filter-foreach.gemspec +3 -3
- data/spec/filters/foreach_spec.rb +84 -0
- metadata +7 -4
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
|
-
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
2
|
+
SHA1:
|
3
|
+
metadata.gz: c4dd177028408de6e56bf069a42bba2bacd89364
|
4
|
+
data.tar.gz: ce69da20c37d7121b650cb4258e1f329887abda4
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: 27aee6615ab0eeda4f41de716975e7fe8c9dfbfc507ad455bb1f043f68cacb174265ce77c6450fba117470ead6d1fab0c0baf216a542c3fd6708d1357590b6c9
|
7
|
+
data.tar.gz: cf2e7fc701fdac53d74025ebcd6fd60fd3ca48d81a5e27951d074a29ce0857b7dd621c7fd8e9f9910e0e2ec7b3643374efdf5b4afd8994be9a4100d13ccd3807
|
@@ -119,7 +119,7 @@ class LogStash::Filters::Foreach < LogStash::Filters::Base
|
|
119
119
|
|
120
120
|
@logger.trace("Foreach plugin: elsif @@event_data.has_key?(task_id)");
|
121
121
|
|
122
|
-
@logger.warn("Foreach plugin: task_id
|
122
|
+
@logger.warn("Foreach plugin: task_id should be unique. Duplicate value found: '#{task_id}'. Passing through")
|
123
123
|
event.tag(FAILURE_TAG)
|
124
124
|
passthrough = true
|
125
125
|
|
@@ -163,7 +163,7 @@ class LogStash::Filters::Foreach < LogStash::Filters::Base
|
|
163
163
|
|
164
164
|
@logger.trace("Foreach plugin: if !@@event_data.has_key?(task_id)");
|
165
165
|
|
166
|
-
@logger.warn("Foreach plugin: found `end` event
|
166
|
+
@logger.warn("Foreach plugin: found `end` event for task_id = '#{task_id}' without `start` event. Passing through")
|
167
167
|
event.tag(FAILURE_TAG)
|
168
168
|
passthrough = true
|
169
169
|
|
@@ -186,18 +186,13 @@ class LogStash::Filters::Foreach < LogStash::Filters::Base
|
|
186
186
|
|
187
187
|
event_data.lastevent_timestamp = Time.now()
|
188
188
|
|
189
|
-
|
190
|
-
event_data.join_fields[join_field] += [*event.get(join_field)]
|
191
|
-
end
|
189
|
+
event_data.add_join_fields_values(event)
|
192
190
|
event_data.counter -= 1
|
193
191
|
|
194
192
|
if event_data.counter == 0
|
195
193
|
|
196
194
|
@logger.trace("Foreach plugin: if event_data.counter == 0");
|
197
195
|
|
198
|
-
configuration.join_fields.each do |join_field|
|
199
|
-
event_data.initial_event.set(join_field, event_data.join_fields[join_field])
|
200
|
-
end
|
201
196
|
ret_event = event_data.event()
|
202
197
|
filter_matched(ret_event)
|
203
198
|
yield ret_event
|
@@ -233,10 +228,7 @@ class LogStash::Filters::Foreach < LogStash::Filters::Base
|
|
233
228
|
if obj.lastevent_timestamp < Time.now() - obj.configuration.timeout
|
234
229
|
if obj.counter < obj.sub_events_count
|
235
230
|
@logger.warn("Foreach plugin: Flushing partly processed event with task_id = '#{obj.initial_event.sprintf(@task_id)}' after timeout = '#{obj.configuration.timeout.to_s}'")
|
236
|
-
obj.configuration.join_fields.each do |join_field|
|
237
|
-
obj.initial_event.set(join_field, obj.join_fields[join_field])
|
238
|
-
end
|
239
|
-
events_to_flush << obj.initial_event
|
231
|
+
events_to_flush << obj.event()
|
240
232
|
else
|
241
233
|
@logger.warn("Foreach plugin: Removing unprocessed event with task_id = '#{obj.initial_event.sprintf(@task_id)}' after timeout = '#{obj.configuration.timeout.to_s}'")
|
242
234
|
end
|
@@ -291,8 +283,19 @@ class LogStash::Filters::Foreach::Element
|
|
291
283
|
end
|
292
284
|
end
|
293
285
|
|
286
|
+
def add_join_fields_values(event)
|
287
|
+
@configuration.join_fields.each do |join_field|
|
288
|
+
@join_fields[join_field] += [event.get(join_field)].flatten
|
289
|
+
end
|
290
|
+
end
|
291
|
+
|
294
292
|
def event()
|
295
293
|
e = @initial_event.clone
|
294
|
+
@configuration.join_fields.each do |join_field|
|
295
|
+
if @join_fields[join_field].length > 0
|
296
|
+
e.set(join_field, @join_fields[join_field])
|
297
|
+
end
|
298
|
+
end
|
296
299
|
e.set('@metadata', @initial_metadata)
|
297
300
|
return e
|
298
301
|
end
|
@@ -1,12 +1,12 @@
|
|
1
1
|
Gem::Specification.new do |s|
|
2
2
|
s.name = 'logstash-filter-foreach'
|
3
|
-
s.version = '0.2.0'
|
3
|
+
s.version = '0.2.3'
|
4
4
|
s.licenses = ['Apache-2.0']
|
5
5
|
s.summary = 'Process filters for every array item'
|
6
6
|
s.description = 'Plugin splits event for every item in array, then you could process other filters for every item and then join event back'
|
7
7
|
s.homepage = 'https://github.com/IIIEII/logstash-filter-foreach'
|
8
|
-
s.authors = ['IIIEII']
|
9
|
-
s.email = 'al.iiieii@gmail.com'
|
8
|
+
s.authors = ['IIIEII', 'Boris Gorbylev']
|
9
|
+
s.email = ['al.iiieii@gmail.com', 'ekho@ekho.name']
|
10
10
|
s.require_paths = ['lib']
|
11
11
|
|
12
12
|
# Files
|
@@ -492,5 +492,89 @@ describe LogStash::Filters::Foreach do
|
|
492
492
|
insist { subject.get("[@metadata][unchanged]") } == "unchanged_value"
|
493
493
|
end
|
494
494
|
end
|
495
|
+
|
496
|
+
describe "should not set empty arrays" do
|
497
|
+
let(:config) do
|
498
|
+
<<-CONFIG
|
499
|
+
filter {
|
500
|
+
foreach {
|
501
|
+
task_id => "%{task_id}"
|
502
|
+
array_field => "array"
|
503
|
+
join_fields => ["join", "join2"]
|
504
|
+
}
|
505
|
+
|
506
|
+
mutate {
|
507
|
+
add_field => { "join" => "%{array}_changed" }
|
508
|
+
}
|
509
|
+
|
510
|
+
foreach {
|
511
|
+
task_id => "%{task_id}"
|
512
|
+
end => true
|
513
|
+
}
|
514
|
+
}
|
515
|
+
CONFIG
|
516
|
+
end
|
517
|
+
|
518
|
+
sample("task_id" => 1, "array" => ["big", "bird", "sesame street"], "unchanged" => "unchanged_value") do
|
519
|
+
insist { subject.is_a?(LogStash::Event) } == true
|
520
|
+
insist { subject.get("array").is_a?(Array) } == true
|
521
|
+
insist { subject.get("array") } == ["big", "bird", "sesame street"]
|
522
|
+
insist { subject.get("join").is_a?(Array) } == true
|
523
|
+
insist { subject.get("join") } == ["big_changed", "bird_changed", "sesame street_changed"]
|
524
|
+
insist { subject.get("join2").nil? } == true
|
525
|
+
end
|
526
|
+
end
|
527
|
+
|
528
|
+
describe "should work with nested loops" do
|
529
|
+
let(:config) do
|
530
|
+
<<-CONFIG
|
531
|
+
filter {
|
532
|
+
foreach {
|
533
|
+
task_id => "%{task_id}"
|
534
|
+
array_field => "array"
|
535
|
+
join_fields => ["join", "join2"]
|
536
|
+
}
|
537
|
+
|
538
|
+
mutate {
|
539
|
+
add_field => { "join" => "%{[array][str]}_changed" }
|
540
|
+
}
|
541
|
+
|
542
|
+
foreach {
|
543
|
+
task_id => "%{task_id}_%{[array][str]}"
|
544
|
+
array_field => "[array][nested]"
|
545
|
+
join_fields => ["join2"]
|
546
|
+
}
|
547
|
+
|
548
|
+
mutate {
|
549
|
+
add_field => { "join2" => [ "%{[array][nested]}_changed", "%{[array][nested]}_changed2" ] }
|
550
|
+
}
|
551
|
+
|
552
|
+
foreach {
|
553
|
+
task_id => "%{task_id}_%{[array][str]}"
|
554
|
+
end => true
|
555
|
+
}
|
556
|
+
|
557
|
+
foreach {
|
558
|
+
task_id => "%{task_id}"
|
559
|
+
end => true
|
560
|
+
}
|
561
|
+
}
|
562
|
+
CONFIG
|
563
|
+
end
|
564
|
+
|
565
|
+
sample("task_id" => 1, "array" => [{"str" => "big", "nested" => ["nested_big1", "nested_big2"]}, {"str" => "bird", "nested" => ["nested_bird1", "nested_bird2"]}, {"str" => "sesame street", "nested" => ["nested_sesame street1", "nested_sesame street2"]}], "unchanged" => "unchanged_value") do
|
566
|
+
insist { subject.is_a?(LogStash::Event) } == true
|
567
|
+
insist { subject.get("join").is_a?(Array) } == true
|
568
|
+
insist { subject.get("join") } == ["big_changed", "bird_changed", "sesame street_changed"]
|
569
|
+
insist { subject.get("join2").is_a?(Array) } == true
|
570
|
+
insist { subject.get("join2") } == [
|
571
|
+
"nested_big1_changed", "nested_big1_changed2", "nested_big2_changed", "nested_big2_changed2",
|
572
|
+
"nested_bird1_changed", "nested_bird1_changed2", "nested_bird2_changed", "nested_bird2_changed2",
|
573
|
+
"nested_sesame street1_changed", "nested_sesame street1_changed2", "nested_sesame street2_changed", "nested_sesame street2_changed2"
|
574
|
+
]
|
575
|
+
insist { subject.get("unchanged").is_a?(String) } == true
|
576
|
+
insist { subject.get("unchanged") } == "unchanged_value"
|
577
|
+
end
|
578
|
+
end
|
495
579
|
end
|
496
580
|
end
|
metadata
CHANGED
@@ -1,14 +1,15 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: logstash-filter-foreach
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 0.2.0
|
4
|
+
version: 0.2.3
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- IIIEII
|
8
|
+
- Boris Gorbylev
|
8
9
|
autorequire:
|
9
10
|
bindir: bin
|
10
11
|
cert_chain: []
|
11
|
-
date:
|
12
|
+
date: 2018-02-16 00:00:00.000000000 Z
|
12
13
|
dependencies:
|
13
14
|
- !ruby/object:Gem::Dependency
|
14
15
|
requirement: !ruby/object:Gem::Requirement
|
@@ -74,7 +75,9 @@ dependencies:
|
|
74
75
|
version: 1.3.1
|
75
76
|
description: Plugin splits event for every item in array, then you could process other
|
76
77
|
filters for every item and then join event back
|
77
|
-
email:
|
78
|
+
email:
|
79
|
+
- al.iiieii@gmail.com
|
80
|
+
- ekho@ekho.name
|
78
81
|
executables: []
|
79
82
|
extensions: []
|
80
83
|
extra_rdoc_files: []
|
@@ -111,7 +114,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
|
|
111
114
|
version: '0'
|
112
115
|
requirements: []
|
113
116
|
rubyforge_project:
|
114
|
-
rubygems_version: 2.6.
|
117
|
+
rubygems_version: 2.6.14
|
115
118
|
signing_key:
|
116
119
|
specification_version: 4
|
117
120
|
summary: Process filters for every array item
|