logstash-filter-transaction_time 1.0.3 → 1.0.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: fd604885e2f901a408bb03385458f31c1671003a
4
- data.tar.gz: 7694726b5fb532e2cc6b81a5b5bbf7ea23d0ec70
3
+ metadata.gz: '03089c0ec29302fde4807201a27eedabd29f9dc5'
4
+ data.tar.gz: aadea35e5b5cdd92321e1bbe7310faa0252c1a02
5
5
  SHA512:
6
- metadata.gz: c2a6efc82adfa6debb09aeece93b06e22743c0ac07151b632f20fbd7a9633bf0c74fab6f1c0d846fa98938ec6b15ac66b4dd0c1527f39c03c4265b1d8a9be2dc
7
- data.tar.gz: 9a1f33409749d7adb38befec19d2dd9b5ce0bfb59f513c96fba99b911a4f44b6f3efee4055cd5135f58836490f8d85fc4dc881554ccd335e46e1a4883b6ebcaa
6
+ metadata.gz: f8b2558058d717b92ba30c3a4b0172997d138702169b12cf3cbc46af193d922d22fe11dd8c9fd8bb8493c3e808e3a85ff5a4a30d32fabc00544b83adf0dc3feb
7
+ data.tar.gz: 9a1e6678888c794ce841ad2d9a1dbbe6ba086ca72d2cd03ce6eea4c043a5c8ab1a88412996064d54c7d680dca26cd1d26e47196864e2f4d5b6b91d46759344ad
@@ -19,6 +19,10 @@ require "logstash/namespace"
19
19
  # replace_timestamp => ['keep', 'oldest', 'newest']
20
20
  # filter_tag => "transaction tag"
21
21
  # attach_event => ['first','last','oldest','newest','none']
22
+ # release_expired => [true,false]
23
+ # store_data_oldest => []
24
+ # store_data_newest => []
25
+ # periodic_flush => [true,false]
22
26
  # }
23
27
  # }
24
28
  #
@@ -71,11 +75,42 @@ require "logstash/namespace"
71
75
  # The attach_event parameter can be used to append information from one of the events to the
72
76
  # new transaction_time event. The default is to not attach anything.
73
77
  # The memory footprint is kept to a minimum by using the default value.
78
+ #
79
+ # The release_expired parameter determines if the first event in an expired transaction
80
+ # should be released or not. Defaults to true
81
+ #
82
+ # The parameters store_data_oldest and store_data_newest are both used in order to attach
83
+ # specific fields from the oldest and newest events, respectively. An example of this could be:
84
+ #
85
+ # store_data_oldest => ["@timestamp", "work_unit", "work_center", "message_type"]
86
+ # store_data_newest => ["@timestamp", "work_unit", "work_center", "message_type"]
87
+ #
88
+ # Which will result in the generated transaction event including the specified fields from
89
+ # oldest and newest events in a hashmap named oldest/newest under the hash named "transaction_data"
90
+ # Example of output data:
91
+ # "transaction_data" => {
92
+ # "oldest" => {
93
+ # "message_type" => "MaterialIdentified",
94
+ # "@timestamp" => 2018-10-31T07:36:23.072Z,
95
+ # "work_unit" => "WT000743",
96
+ # "work_center" => "WR000046"
97
+ # },
98
+ # "newest" => {
99
+ # "message_type" => "Recipe",
100
+ # "@timestamp" => 2018-10-31T07:36:28.188Z,
101
+ # "work_unit" => "WT000743",
102
+ # "work_center" => "WR000046"
103
+ # }
104
+ # }
105
+
106
+
74
107
 
75
108
  class LogStash::Filters::TransactionTime < LogStash::Filters::Base
76
109
 
77
110
  HOST_FIELD = "host"
78
111
  TRANSACTION_TIME_TAG = "TransactionTime"
112
+ TRANSACTION_TIME_DATA = "transaction_data"
113
+ TRANSACTION_TIME_EXPIRED_TAG = "TransactionTimeExpired"
79
114
  TRANSACTION_TIME_FIELD = "transaction_time"
80
115
  TRANSACTION_UID_FIELD = "transaction_uid"
81
116
  TIMESTAMP_START_FIELD = "timestamp_start"
@@ -96,14 +131,28 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
96
131
  # Whether or not to attach one or none of the events in a transaction to the output event.
97
132
  # Defaults to 'none' - which reduces memory footprint by not adding the event to the transactionlist.
98
133
  config :attach_event, :validate => ['first','last','oldest','newest','none'], :default => 'none'
134
+ # Whether or not to release the first event in expired transactions
135
+ config :release_expired, :validate => :boolean, :default => true
136
+
137
+ # Store data from the oldest message. Specify array of keys to store.
138
+ config :store_data_oldest, :validate => :array, :default => []
139
+ # Store data from the newest message. Specify array of keys to store
140
+ config :store_data_newest, :validate => :array, :default => []
141
+
142
+ # This filter must have its flush function called periodically to be able to purge
143
+ # expired stored start events.
144
+ config :periodic_flush, :validate => :boolean, :default => true
99
145
 
100
146
  public
101
147
  def register
102
148
  # Add instance variables
103
149
  @transactions = Hash.new
104
150
  @mutex = Mutex.new
105
- @storeEvent = !(@attach_event.eql?"none")
151
+ @attachData = (!@store_data_oldest.nil? && @store_data_oldest.any?) || (!@store_data_newest.nil? && @store_data_newest.any?)
152
+ @storeEvent = (!(@attach_event.eql?"none") || @attachData)
106
153
  @@timestampTag = @timestamp_tag
154
+ @logger.info("Setting up INFO")
155
+ @logger.debug("Setting up DEBUG")
107
156
  end # def register
108
157
 
109
158
  def transactions
@@ -121,24 +170,30 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
121
170
 
122
171
  @logger.debug("Received UID", uid: uid)
123
172
 
173
+
124
174
  #Dont use filter-plugin on events created by this filter-plugin
125
175
  #Dont use filter on anything else but events with the filter_tag if specified
126
- if (!uid.nil? && (event.get("tags").nil? || !event.get("tags").include?(TRANSACTION_TIME_TAG)) &&
176
+ if (!uid.nil? && (event.get("tags").nil? || !event.get("tags").include?(TRANSACTION_TIME_TAG)) &&
177
+ (event.get("tags").nil? || !event.get("tags").include?(TRANSACTION_TIME_EXPIRED_TAG)) &&
127
178
  (@filter_tag.nil? || (!event.get("tags").nil? && event.get("tags").include?(@filter_tag))))
128
- filter_matched(event)
179
+
180
+
129
181
  @mutex.synchronize do
130
182
  if(!@transactions.has_key?(uid))
131
183
  @transactions[uid] = LogStash::Filters::TransactionTime::Transaction.new(event, uid, @storeEvent)
132
184
 
133
185
  else #End of transaction
134
186
  @transactions[uid].addSecond(event,@storeEvent)
135
- transaction_event = new_transactiontime_event(@transactions[uid])
187
+ transaction_event = new_transactiontime_event(@transactions[uid], @attachData)
136
188
  filter_matched(transaction_event)
137
189
  yield transaction_event if block_given?
138
190
  @transactions.delete(uid)
139
191
  end
140
192
  end
141
193
  end
194
+ #Always forward original event
195
+ filter_matched(event)
196
+ return event
142
197
 
143
198
  end # def filter
144
199
 
@@ -146,12 +201,17 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
146
201
  # The method is invoked by LogStash every 5 seconds.
147
202
  def flush(options = {})
148
203
  expired_elements = []
149
-
204
+ #@logger.info("FLUSH")
150
205
  @mutex.synchronize do
151
206
  increment_age_by(5)
152
207
  expired_elements = remove_expired_elements()
153
208
  end
154
209
 
210
+ expired_elements.each do |element|
211
+ filter_matched(element)
212
+ end
213
+ return expired_elements
214
+ #yield expired_elements if block_given?
155
215
  #return create_expired_events_from(expired_elements)
156
216
  end
157
217
 
@@ -162,13 +222,15 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
162
222
  end
163
223
  end
164
224
 
165
- # Remove the expired "start events" from the internal
225
+ # Remove the expired "events" from the internal
166
226
  # buffer and return them.
167
227
  def remove_expired_elements()
168
228
  expired = []
169
229
  @transactions.delete_if do |key, transaction|
170
230
  if(transaction.age >= @timeout)
171
- expired << transaction
231
+ #print("Deleting expired_elements")
232
+ transaction.tag(TRANSACTION_TIME_EXPIRED_TAG)
233
+ (expired << transaction.getEvents()).flatten!
172
234
  next true
173
235
  end
174
236
  next false
@@ -176,7 +238,7 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
176
238
  return expired
177
239
  end
178
240
 
179
- def new_transactiontime_event(transaction)
241
+ def new_transactiontime_event(transaction, attachData)
180
242
 
181
243
 
182
244
  case @attach_event
@@ -199,12 +261,17 @@ class LogStash::Filters::TransactionTime < LogStash::Filters::Base
199
261
  transaction_event.set(TRANSACTION_UID_FIELD, transaction.uid)
200
262
  transaction_event.set(TIMESTAMP_START_FIELD, transaction.getOldestTimestamp())
201
263
 
264
+ #Attach transaction data if any
265
+ if(attachData)
266
+ transaction_data = transaction.getData(store_data_oldest,store_data_newest)
267
+ transaction_event.set(TRANSACTION_TIME_DATA,transaction_data)
268
+ end
269
+
202
270
  if(@replace_timestamp.eql?'oldest')
203
271
  transaction_event.set("@timestamp", transaction.getOldestTimestamp())
204
272
  elsif (@replace_timestamp.eql?'newest')
205
273
  transaction_event.set("@timestamp", transaction.getNewestTimestamp())
206
274
  end
207
-
208
275
 
209
276
  return transaction_event
210
277
  end
@@ -219,7 +286,7 @@ end # class LogStash::Filters::TransactionTime
219
286
 
220
287
 
221
288
  class LogStash::Filters::TransactionTime::Transaction
222
- attr_accessor :firstEvent, :lastEvent,:firstTimestamp, :secondTimestamp, :uid, :age, :diff
289
+ attr_accessor :firstEvent, :lastEvent,:firstTimestamp, :secondTimestamp, :uid, :age, :diff, :data
223
290
 
224
291
  def initialize(firstEvent, uid, storeEvent = false)
225
292
  if(storeEvent)
@@ -230,6 +297,33 @@ class LogStash::Filters::TransactionTime::Transaction
230
297
  @age = 0
231
298
  end
232
299
 
300
+ def getData(oldestKeys, newestKeys)
301
+ if(oldestKeys.any?)
302
+ storeData("oldest",oldestKeys,getOldestEvent())
303
+ end
304
+ if(newestKeys.any?)
305
+ storeData("newest",newestKeys,getNewestEvent())
306
+ end
307
+ return @data
308
+ end
309
+
310
+ def storeData(subDataName, dataKeys, dataEvent)
311
+ if(@data.nil?)
312
+ @data = Hash.new
313
+ end
314
+
315
+ if(@data[subDataName].nil?)
316
+ hashData = Hash.new
317
+ else
318
+ hashData = @data.get(subDataName)
319
+ end
320
+
321
+ dataKeys.each do |dataKey|
322
+ hashData[dataKey] = dataEvent.get(dataKey)
323
+ @data[subDataName] = hashData
324
+ end
325
+ end
326
+
233
327
  def addSecond(lastEvent,storeEvent = false)
234
328
  if(storeEvent)
235
329
  @lastEvent = lastEvent
@@ -282,4 +376,24 @@ class LogStash::Filters::TransactionTime::Transaction
282
376
 
283
377
  return getNewestTimestamp() - getOldestTimestamp()
284
378
  end
379
+
380
+ def tag(value)
381
+ if(!firstEvent.nil?)
382
+ firstEvent.tag(value)
383
+ end
384
+ if(!lastEvent.nil?)
385
+ lastEvent.tag(value)
386
+ end
387
+ end
388
+
389
+ def getEvents()
390
+ events = []
391
+ if(!firstEvent.nil?)
392
+ events << firstEvent
393
+ end
394
+ if(!lastEvent.nil?)
395
+ events << lastEvent
396
+ end
397
+ return events
398
+ end
285
399
  end
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-filter-transaction_time'
3
- s.version = '1.0.3'
3
+ s.version = '1.0.4'
4
4
  s.licenses = ['Apache-2.0','Apache License (2.0)']
5
5
  s.summary = 'Writes the time difference between two events in a transaction to a new event'
6
6
  s.description = 'This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program. Source-code and documentation available at github: https://github.com/AddinITAB/logstash-filter-transaction_time'
@@ -5,6 +5,7 @@ require "logstash/filters/transaction_time"
5
5
 
6
6
  describe LogStash::Filters::TransactionTime do
7
7
  UID_FIELD = "uniqueIdField"
8
+
8
9
  TIMEOUT = 30
9
10
 
10
11
 
@@ -147,6 +148,21 @@ describe LogStash::Filters::TransactionTime do
147
148
  @filter.flush()
148
149
  insist { @filter.transactions.size } == 0
149
150
  end
151
+ it "releases the transactions to output" do
152
+ @filter.filter(event("message" => "Log message", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.000+0100"))
153
+ insist { @filter.transactions.size } == 1
154
+ @filter.filter(event("message" => "Log message", UID_FIELD => uid2, "@timestamp" => "2018-04-22T09:46:22.000+0100"))
155
+ insist { @filter.transactions.size } == 2
156
+ ((TIMEOUT/5)-1).times do
157
+ @filter.flush()
158
+ end
159
+ counter = 0
160
+ @filter.flush().each do |event|
161
+ counter+=1
162
+ #print(event)
163
+ end
164
+ insist { counter } == 2
165
+ end
150
166
  it "does not flush newer transactions" do
151
167
  @filter.filter(event("message" => "Log message", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.000+0100"))
152
168
  insist { @filter.transactions.size } == 1
@@ -225,7 +241,6 @@ describe LogStash::Filters::TransactionTime do
225
241
  end
226
242
  end
227
243
 
228
-
229
244
  context "Testing attach_event." do
230
245
  uid = "9ACCA7B7-D0E9-4E52-A023-9D588E5BE42C"
231
246
  describe "Config attach_event" do
@@ -316,4 +331,50 @@ describe LogStash::Filters::TransactionTime do
316
331
  end
317
332
  end
318
333
 
334
+ context "Testing store_data." do
335
+ uid = "9ACCA7B7-D0E9-4E52-A023-9D588E5BE42C"
336
+ describe "Config store_data_oldest" do
337
+ it "attaches data from oldest event into transaction" do
338
+ config = {"store_data_oldest" => ["datafield1"]}
339
+ @config.merge!(config)
340
+
341
+ @filter = LogStash::Filters::TransactionTime.new(@config)
342
+ @filter.register
343
+
344
+ @filter.filter(event("message" => "first", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.000+0100", "datafield1" => "OldestData"))
345
+ @filter.filter(event("message" => "last", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.100+0100", "datafield1" => "NewestData")) do | new_event |
346
+ insist { new_event } != nil
347
+ insist { new_event.get("tags").include?("TransactionTime") }
348
+ insist { new_event.get("message") } != nil
349
+ insist { new_event.get("message") } == "first"
350
+ insist { new_event.get("transaction_data") } != nil
351
+ insist { new_event.get("transaction_data")["oldest"]["datafield1"] } != nil
352
+ insist { new_event.get("transaction_data")["oldest"]["datafield1"] } == "OldestData"
353
+ end
354
+ end
355
+
356
+ describe "and store_data_newest" do
357
+ it "attaches data from both newest and oldest event into transaction" do
358
+ config = {"store_data_newest" => ["datafield1"], "store_data_oldest" => ["datafield1"]}
359
+ @config.merge!(config)
360
+
361
+ @filter = LogStash::Filters::TransactionTime.new(@config)
362
+ @filter.register
363
+
364
+ @filter.filter(event("message" => "first", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.000+0100", "datafield1" => "OldestData"))
365
+ @filter.filter(event("message" => "last", UID_FIELD => uid, "@timestamp" => "2018-04-22T09:46:22.100+0100", "datafield1" => "NewestData")) do | new_event |
366
+ insist { new_event } != nil
367
+ insist { new_event.get("tags").include?("TransactionTime") }
368
+ insist { new_event.get("message") } != nil
369
+ insist { new_event.get("message") } == "first"
370
+ insist { new_event.get("transaction_data") } != nil
371
+ insist { new_event.get("transaction_data")["newest"]["datafield1"] } != nil
372
+ insist { new_event.get("transaction_data")["newest"]["datafield1"] } == "NewestData"
373
+ insist { new_event.get("transaction_data")["oldest"]["datafield1"] } != nil
374
+ insist { new_event.get("transaction_data")["oldest"]["datafield1"] } == "OldestData"
375
+ end
376
+ end
377
+ end
378
+ end
379
+ end
319
380
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-filter-transaction_time
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.0.3
4
+ version: 1.0.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Tommy Welleby
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2018-04-27 00:00:00.000000000 Z
11
+ date: 2018-10-31 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement