logstash-filter-aggregate 2.3.0 → 2.3.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 89d2a20ef87157c9155e42001819efcd8b65c908
- data.tar.gz: ad218ff47a177f75185534405db70ffa2c7fd7ca
+ metadata.gz: eaa83c85ffda851fdae41182c4ec2d010483dbbe
+ data.tar.gz: bfbb53a989e67654304b4e4c9cf89f2b91411c6a
  SHA512:
- metadata.gz: cfd494edb94755712121a5a72325eeac1252d0b78e8ddcdffa69b861bc08b682b4cb08b6759e76be8b71066ddbe93d89f786b69891d7c5736ed4bc15ec0e7232
- data.tar.gz: 1910c47decb6a4fc25cb03315c323a261071a69f7182d62f74f8b393ae02d58102fb9491802f34cb0a2a4dfee3c663b05eaa048e18e23b6346fa8e410a59a249
+ metadata.gz: 0adfe2d3916570f84230a5823ad6b038f661a5837bb78600e66c3a58d4b066630cf31cf974f939a8071f2ab2549698756a24cb768e6b15df983a2546f0dc6490
+ data.tar.gz: 7720f2d38e7757145294e05903376874dcd88a600bcd9174c5da1ac2ef0ca3ba4bd2545c01a107317cf037f99d8be8005cd3830aba62af32d85c6e8e5f57f1c7
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
+ ## 2.3.1
+ - new feature: Add new option "timeout_tags" so that you can add tags to generated timeout events
+
  ## 2.3.0
  - new feature: Add new option "push_map_as_event_on_timeout" so that when a task timeout happens the aggregation map can be yielded as a new event
  - new feature: Add new option "timeout_code" which takes the timeout event populated with the aggregation map and executes code on it. This works for "push_map_as_event_on_timeout" as well as "push_previous_map_as_event"
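To put the 2.3.1 change in context, here is a minimal configuration sketch combining the new `timeout_tags` option with the 2.3.0 options listed above. It mirrors the README example touched by this diff; the `code` block and the `user_id`/`clicks` fields are illustrative only.

```
filter {
  aggregate {
    task_id => "%{user_id}"
    # illustrative aggregation code: count click events per user
    code => "map['clicks'] ||= 0; map['clicks'] += 1;"
    push_map_as_event_on_timeout => true
    timeout_task_id_field => "user_id"
    timeout => 600 # 10 minutes timeout
    # new in 2.3.1: tag the generated timeout event
    timeout_tags => ['_aggregatetimeout']
    timeout_code => "event['several_clicks'] = (event['clicks'] > 1)"
  }
}
```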
data/README.md CHANGED
@@ -132,7 +132,8 @@ We can also add 'timeout_task_id_field' so we can correlate the task_id, which i
  push_map_as_event_on_timeout => true
  timeout_task_id_field => "user_id"
  timeout => 600 # 10 minutes timeout
- timeout_code => "event.tag('_aggregatetimeout')"
+ timeout_tags => ['_aggregatetimeout']
+ timeout_code => "event['several_clicks'] = (event['clicks'] > 1)"
  }
  }
  ```
@@ -141,9 +142,10 @@ We can also add 'timeout_task_id_field' so we can correlate the task_id, which i
 
  ``` json
  {
- "user_id" : "12345",
- "clicks" : 3,
- "tags" : [
+ "user_id": "12345",
+ "clicks": 3,
+ "several_clicks": true,
+ "tags": [
  "_aggregatetimeout"
  ]
  }
@@ -174,7 +176,6 @@ In that case, you don't want to wait task timeout to flush aggregation map.
  aggregate {
  task_id => "%{country_name}"
  code => "
- map['tags'] ||= ['aggregated']
  map['town_name'] ||= []
  event.to_hash.each do |key,value|
  map[key] = value unless map.has_key?(key)
@@ -183,6 +184,7 @@ In that case, you don't want to wait task timeout to flush aggregation map.
  "
  push_previous_map_as_event => true
  timeout => 5
+ timeout_tags => ['aggregated']
  }
 
  if "aggregated" not in [tags] {
@@ -262,7 +264,7 @@ This enables to detect and process task timeouts in logstash, but also to manage
  The code to execute to complete timeout generated event, when 'push_map_as_event_on_timeout' or 'push_previous_map_as_event' is set to true.
  The code block will have access to the newly generated timeout event that is pre-populated with the aggregation map.
  If 'timeout_task_id_field' is set, the event is also populated with the task_id value
- Example value: `"event.tag('_aggregatetimeout')"`
+ Example value: `"event['state'] = 'timeout'"`
 
  - **timeout_task_id_field**
  This option indicates the timeout generated event's field for the "task_id" value.
@@ -272,6 +274,10 @@ Example:
  If the task_id is "12345" and this field is set to "my_id", the generated event will have:
  `event[ "my_id" ] = "12345"`
 
+ - **timeout_tags**
+ Defines tags to add when a timeout event is generated and yield.
+ Default value: `[]`
+
  ## Changelog
 
  Read [CHANGELOG.md](CHANGELOG.md).
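For the two timeout-related options documented above, here is a minimal sketch of how they are typically combined; the `transaction_id` field, the `lines` counter, and the 120-second timeout are illustrative only.

```
filter {
  aggregate {
    task_id => "%{transaction_id}"
    # illustrative aggregation code
    code => "map['lines'] ||= 0; map['lines'] += 1;"
    push_map_as_event_on_timeout => true
    timeout => 120
    # the generated timeout event gets event['my_id'] set to the task_id value
    timeout_task_id_field => "my_id"
    # without this option, no tags are added (default value is [])
    timeout_tags => ['_aggregatetimeout']
  }
}
```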
data/lib/logstash/filters/aggregate.rb CHANGED
@@ -3,6 +3,7 @@
  require "logstash/filters/base"
  require "logstash/namespace"
  require "thread"
+ require "logstash/util/decorators"
 
  #
  # The aim of this filter is to aggregate information available among several events (typically log lines) belonging to a same task,
@@ -143,7 +144,8 @@ require "thread"
  # push_map_as_event_on_timeout => true
  # timeout_task_id_field => "user_id"
  # timeout => 600 # 10 minutes timeout
- # timeout_code => "event.tag('_aggregatetimeout')"
+ # timeout_tags => ['_aggregatetimeout']
+ # timeout_code => "event['several_clicks'] = (event['clicks'] > 1)"
  # }
  # }
  # ----------------------------------
@@ -153,9 +155,10 @@ require "thread"
  # [source,json]
  # ----------------------------------
  # {
- # "user_id" : "12345",
- # "clicks" : 3,
- # "tags" : [
+ # "user_id": "12345",
+ # "clicks": 3,
+ # "several_clicks": true,
+ # "tags": [
  # "_aggregatetimeout"
  # ]
  # }
@@ -188,7 +191,6 @@ require "thread"
  # aggregate {
  # task_id => "%{country_name}"
  # code => "
- # map['tags'] ||= ['aggregated']
  # map['town_name'] ||= []
  # event.to_hash.each do |key,value|
  # map[key] = value unless map.has_key?(key)
@@ -197,6 +199,7 @@ require "thread"
  # "
  # push_previous_map_as_event => true
  # timeout => 5
+ # timeout_tags => ['aggregated']
  # }
  #
  # if "aggregated" not in [tags] {
@@ -256,7 +259,7 @@ class LogStash::Filters::Aggregate < LogStash::Filters::Base
  #
  # If 'timeout_task_id_field' is set, the event is also populated with the task_id value
  #
- # Example value: `"event.tag('_aggregatetimeout')"`
+ # Example value: `"event['state'] = 'timeout'"`
  config :timeout_code, :validate => :string, :required => false
 
 
@@ -310,10 +313,15 @@ class LogStash::Filters::Aggregate < LogStash::Filters::Base
  # When this option is enabled, each time a task timeout is detected, it pushes task aggregation map as a new logstash event.
  # This enables to detect and process task timeouts in logstash, but also to manage tasks that have no explicit end event.
  config :push_map_as_event_on_timeout, :validate => :boolean, :required => false, :default => false
+
+ # Defines tags to add when a timeout event is generated and yield
+ config :timeout_tags, :validate => :array, :required => false, :default => []
 
+
  # Default timeout (in seconds) when not defined in plugin configuration
  DEFAULT_TIMEOUT = 1800
 
+
  # This is the state of the filter.
  # For each entry, key is "task_id" and value is a map freely updatable by 'code' config
  @@aggregate_maps = {}
@@ -452,6 +460,8 @@ class LogStash::Filters::Aggregate < LogStash::Filters::Base
  if @timeout_task_id_field
  event_to_yield[@timeout_task_id_field] = task_id
  end
+
+ LogStash::Util::Decorators.add_tags(@timeout_tags,event_to_yield,"filters/#{self.class.name}")
 
  # Call code block if available
  if @timeout_code
data/logstash-filter-aggregate.gemspec CHANGED
@@ -1,6 +1,6 @@
  Gem::Specification.new do |s|
  s.name = 'logstash-filter-aggregate'
- s.version = '2.3.0'
+ s.version = '2.3.1'
  s.licenses = ['Apache License (2.0)']
  s.summary = "The aim of this filter is to aggregate information available among several events (typically log lines) belonging to a same task, and finally push aggregated information into final task event."
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/filters/aggregate_spec.rb CHANGED
@@ -10,7 +10,7 @@ describe LogStash::Filters::Aggregate do
  aggregate_maps.clear()
  @start_filter = setup_filter({ "map_action" => "create", "code" => "map['sql_duration'] = 0" })
  @update_filter = setup_filter({ "map_action" => "update", "code" => "map['sql_duration'] += event['duration']" })
- @end_filter = setup_filter({"timeout_task_id_field" => "my_id", "push_map_as_event_on_timeout" => true, "map_action" => "update", "code" => "event.to_hash.merge!(map)", "end_of_task" => true, "timeout" => 5, "timeout_code" => "event['test'] = 'testValue'" })
+ @end_filter = setup_filter({"timeout_task_id_field" => "my_id", "push_map_as_event_on_timeout" => true, "map_action" => "update", "code" => "event.to_hash.merge!(map)", "end_of_task" => true, "timeout" => 5, "timeout_code" => "event['test'] = 'testValue'", "timeout_tags" => ["tag1", "tag2"] })
  end
 
  context "Start event" do
@@ -182,6 +182,7 @@ describe LogStash::Filters::Aggregate do
  expect(entries[0]['my_id']).to eq("id_123") # task id
  expect(entries[0]["sql_duration"]).to eq(0) # Aggregation map
  expect(entries[0]['test']).to eq("testValue") # Timeout code
+ expect(entries[0]['tags']).to eq(["tag1", "tag2"]) # Timeout tags
  end
  end
 
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: logstash-filter-aggregate
  version: !ruby/object:Gem::Version
- version: 2.3.0
+ version: 2.3.1
  platform: ruby
  authors:
  - Elastic
@@ -9,7 +9,7 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-08-03 00:00:00.000000000 Z
+ date: 2016-10-01 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement