fluent-plugin-site24x7 0.1.2 → 0.1.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: c2391265977dd8a33dde6dab1876296406fae59c7633aa0ec0045821bb5688f1
-  data.tar.gz: 6bd7a51fca919c3c38c4f12cf226f403772c473d12f8c2a716f72ccb844f5ec7
+  metadata.gz: eaa27dafc08120c093de5e57ba15b129209637db8759e68a20c2053513cb55e9
+  data.tar.gz: 6fc10de6e63e332c816935a656cfb234e733710c7849f7578ebaa035eec7a8d4
 SHA512:
-  metadata.gz: aab17017da06b62df8c7ebc8b2ee5e045e9833687945fd1f262890bd32bc9478f58208e1e07675e4295dd5c05fc6691bad50a19c35ea635c50d627438bb6dd01
-  data.tar.gz: de638056920dcc7cce4a00c1bddad3a4628ce5342e3b837e430d599f5f0db949ffcd81ff18efa55eca2e2d07b8dec761f3bd673367a076af3c69e8dcee6dd7ef
+  metadata.gz: 58d44f268199174dabf93bc3b0fc3ceb0add74b2718001aaab95c3c380bf4f2b18bec949162a47a778a7d78d80a067302cc509506ceac6f8a2f21d5dedcc60ba
+  data.tar.gz: bd0dc479e64382935a9154006b281b46cad6d93c883b18d5ddb1cbd026ae49d427a35c0e4836ee6cf7eb1996d6d3557cd5d3bb4f862d9149af5655be862ad1d5
data/Gemfile CHANGED
File without changes
data/LICENSE CHANGED
File without changes
data/README.md CHANGED
@@ -1,6 +1,6 @@
 # Fluentd output plugin for Site24x7

-This output plugin allows parse and sending logs directly from Fluentd to Site2x7 - so you don't have to use a separate log shipper.
+With the Site24x7 plugin for Fluentd, you can parse and send logs directly from Fluentd, without having to use a separate log shipper.


 ## Installation
@@ -10,7 +10,7 @@ To add the plugin to your fluentd agent, use the following command:
 ```
 $ gem install fluent-plugin-site24x7
 ```
-If you installed the td-agent instead
+If you have installed td-agent instead, use:

 ```
 $ /usr/sbin/td-agent-gem install fluent-plugin-site24x7
@@ -20,7 +20,7 @@ $ /usr/sbin/td-agent-gem install fluent-plugin-site24x7

 **Configure the output plugin**

-To match events and send them to Site24x7, simply add the following code to your configuration file.
+To match events and send them to Site24x7, add the following code to your configuration file.

 ```cfg
 # Match events tagged with "site24x7.**" and send them to Site24x7
@@ -49,21 +49,22 @@ To match events and send them to Site24x7, simply add the following code to your

 </match>
 ```
-After a restart of FluentD, any events tagged with site24x7 are shipped to site24x7 platform.
+After restarting Fluentd, any events tagged with site24x7 are shipped to the Site24x7 platform.

 ## Parameters
-As fluent-plugin-site24x7 is an output_buffer, you can set all output_buffer properties like it's describe in the [fluentd documentation](http://docs.fluentd.org/articles/output-plugin-overview#buffered-output-parameters).
+As fluent-plugin-site24x7 is an output_buffer, you can set all the output_buffer properties as described in the [fluentd documentation](http://docs.fluentd.org/articles/output-plugin-overview#buffered-output-parameters).

 Property | Description | Default Value
 ------------ | -------------|------------
-log_type_config | log_type_config for your configured log type in site24x7 | nil
-max_retry | How many times to resend failed uploads | 3
-retry_interval | How long to sleep initially between retries, exponential step-off | 2 seconds
-http_idle_timeout | Timeout in seconds that the http persistent connection will stay open without traffic | 5 seconds
-http_read_timeout | Timeout in seconds when the socket connects until the connection breaks | 30 secods
+log_type_config | log_type_config of your configured log type in Site24x7 | nil
+max_retry | Number of times to resend failed uploads | 3
+retry_interval | Initial interval to sleep between retries, with exponential back-off | 2 seconds
+http_idle_timeout | Timeout (in seconds) for which the HTTP persistent connection stays open without traffic | 5 seconds
+http_read_timeout | Timeout (in seconds) from when the socket connects until the connection breaks | 30 seconds
 http_proxy | Your proxy uri | nil

 ## Release Notes
-
+* 0.1.2 - Fixed a timezone parsing issue
+* 0.1.1 - Minor changes
 * 0.1.0 - Initial Release
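
The concrete `<match>` block is elided from this diff, so here is a minimal sketch of how the parameters in the table above might be wired together. The `@type site24x7` directive and the placeholder `log_type_config` value are assumptions, not taken from this diff; substitute the log_type_config string from your own Site24x7 log profile.

```cfg
# Hypothetical example: match events tagged "site24x7.**" and ship them to Site24x7
<match site24x7.**>
  @type site24x7                        # assumed plugin type name
  log_type_config YOUR_LOG_TYPE_CONFIG  # placeholder; from your Site24x7 log profile
  max_retry 3                           # resend failed uploads up to 3 times
  retry_interval 2                      # initial sleep between retries
  http_idle_timeout 5                   # seconds an idle persistent connection stays open
  http_read_timeout 30                  # seconds to wait for a response
  # http_proxy http://user:pass@proxy.example.com:3128  # optional proxy URI
</match>
```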
data/Rakefile CHANGED
File without changes
@@ -3,7 +3,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

 Gem::Specification.new do |spec|
   spec.name = "fluent-plugin-site24x7"
-  spec.version = "0.1.2"
+  spec.version = "0.1.4"
   spec.authors = ["Magesh Rajan"]
   spec.email = ["magesh.rajan@zohocorp.com"]

@@ -5,6 +5,8 @@ require "yajl"
 require "zlib"
 require "date"
 require "fluent/plugin/output"
+require 'digest'
+require 'json'

 class Fluent::Site24x7Output < Fluent::Plugin::Output

@@ -73,6 +75,80 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   @logtype_config = Yajl::Parser.parse(base64_url_decode(@log_type_config))
   @s247_custom_regex = if @logtype_config.has_key? 'regex' then Regexp.compile(@logtype_config['regex'].gsub('?P<','?<')) else nil end
   @s247_ignored_fields = if @logtype_config.has_key? 'ignored_fields' then @logtype_config['ignored_fields'] else [] end
+  @datetime_regex = if @logtype_config.has_key? 'dateRegex' then Regexp.compile(@logtype_config['dateRegex'].gsub('?P<','?<')) else nil end
+
+  @ml_regex = if @logtype_config.has_key? 'ml_regex' then Regexp.compile(@logtype_config['ml_regex'].gsub('?P<','?<')) else nil end
+  @ml_end_regex = if @logtype_config.has_key? 'ml_end_regex' then Regexp.compile(@logtype_config['ml_end_regex'].gsub('?P<','?<')) else nil end
+  @max_ml_count = if @logtype_config.has_key? 'ml_regex' then @s247_custom_regex.inspect.scan('\<NewLine\>').length else nil end
+  @max_trace_line = 100
+  @ml_trace = ''
+  @ml_trace_buffer = ''
+  @ml_found = false
+  @ml_end_line_found = false
+  @ml_data = nil
+  @ml_count = 0
+
+  @json_data = ''
+  @sub_pattern = {}
+
+  if !(@logtype_config.has_key?('jsonPath'))
+    @message_key = get_last_group_inregex(@s247_custom_regex)
+  end
+
+  if @logtype_config.has_key?('jsonPath')
+    @logtype_config['jsonPath'].each_with_index do |key, index|
+      if key.has_key?('pattern')
+        begin
+          if Regexp.new(key['pattern'].gsub('?P<','?<'))
+            @sub_pattern[key['name']] = Regexp.compile(key['pattern'].gsub('?P<','?<'))
+          end
+        rescue Exception => e
+          log.error "Invalid subpattern regex #{e.backtrace}"
+        end
+      end
+    end
+  end
+
+  @old_formatted_line = {}
+  @formatted_line = {}
+
+  @masking_config = if @logtype_config.has_key? 'maskingConfig' then @logtype_config['maskingConfig'] else nil end
+  @hashing_config = if @logtype_config.has_key? 'hashingConfig' then @logtype_config['hashingConfig'] else nil end
+  @derived_config = if @logtype_config.has_key? 'derivedConfig' then @logtype_config['derivedConfig'] else nil end
+  @general_regex = Regexp.compile("(.*)")
+
+  if @derived_config
+    @derived_fields = {}
+    for key, value in @derived_config do
+      @derived_fields[key] = []
+      for values in @derived_config[key] do
+        @derived_fields[key].push(Regexp.compile(values.gsub('\\\\', '\\')))
+      end
+    end
+  end
+
+  if @masking_config
+    for key, value in @masking_config do
+      @masking_config[key]["regex"] = Regexp.compile(@masking_config[key]["regex"])
+    end
+  end
+
+  if @hashing_config
+    for key, value in @hashing_config do
+      @hashing_config[key]["regex"] = Regexp.compile(@hashing_config[key]["regex"])
+    end
+  end
+
+  if @logtype_config.has_key? 'filterConfig'
+    for field, rules in @logtype_config['filterConfig'] do
+      temp = []
+      for value in @logtype_config['filterConfig'][field]['values'] do
+        temp.push(Regexp.compile(value))
+      end
+      @logtype_config['filterConfig'][field]['values'] = temp.join('|')
+    end
+  end
+
   @s247_tz = {'hrs': 0, 'mins': 0} #UTC
   @log_source = Socket.gethostname
   @valid_logtype = true
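
The hunk above repeatedly translates Python-style named groups (`?P<name>`), as delivered in the Site24x7 log profile, into Ruby's `?<name>` syntax before compiling. A standalone sketch of that trick, not part of the gem itself:

```ruby
# Site24x7 profiles carry Python-style named groups ("?P<name>");
# Ruby's regex engine spells the same thing "?<name>".
python_style = '(?P<ip>\S+) \S+ \S+ \[(?P<time>[^\]]+)\]'
ruby_regex   = Regexp.compile(python_style.gsub('?P<', '?<'))

m = ruby_regex.match('127.0.0.1 - - [10/Oct/2023:13:55:36 +0000]')
puts m.named_captures  # => {"ip"=>"127.0.0.1", "time"=>"10/Oct/2023:13:55:36 +0000"}
```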
@@ -87,6 +163,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
   @is_timezone_present = if @s247_datetime_format_string.include? '%z' then true else false end
   if !@is_timezone_present && @logtype_config.has_key?('timezone')
+    @s247_datetime_format_string += '%z'
     tz_value = @logtype_config['timezone']
     if tz_value.start_with?('+')
       @s247_tz['hrs'] = Integer('-' + tz_value[1..4])
@@ -97,6 +174,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
       end
     end
   end
+  Thread.new { timer_task() }
 end
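
The change above moves the one-time `'%z'` append out of `get_timestamp` (see the next hunk) and into `configure()`, so the format string is no longer mutated on every call. A minimal sketch of the underlying technique, parsing a zone-less timestamp by attaching a known fixed offset; note the gem additionally stores a sign-flipped offset in `@s247_tz`, which this sketch deliberately does not reproduce:

```ruby
require 'date'

fmt = '%d/%b/%Y:%H:%M:%S'     # datetime format without %z
raw = '10/Oct/2023:13:55:36'  # log line's datetime field, no zone designator

fmt += '%z'                   # done once, as in configure() above
epoch_ms = Integer(DateTime.strptime(raw + '+0530', fmt).strftime('%Q'))
puts epoch_ms                 # milliseconds since the epoch, offset applied
```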
 def init_http_client(logtype_config)
@@ -116,7 +194,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   @s247_http_client.override_headers["User-Agent"] = 'Fluentd'
   if !@s247_http_client.proxy_uri.nil?
     log.info "Using HTTP proxy #{@s247_http_client.proxy_uri.scheme}://#{@s247_http_client.proxy_uri.host}:#{@s247_http_client.proxy_uri.port} username: #{@s247_http_client.proxy_uri.user ? "configured" : "not configured"}, password: #{@s247_http_client.proxy_uri.password ? "configured" : "not configured"}"
   end
 end

 def get_timestamp(datetime_string)
@@ -127,113 +205,262 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
   datetime_string += if !@is_year_present then ' '+String(Time.new.year) else '' end
   if !@is_timezone_present && @logtype_config.has_key?('timezone')
-    @s247_datetime_format_string += '%z'
     time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
     datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
   end
   datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
   return Integer(datetime_data.strftime('%Q'))
-rescue
+rescue Exception => e
+  log.error "Exception in parsing date: #{e.backtrace}"
   return 0
 end

+def log_line_filter()
+  applyMasking()
+  applyHashing()
+  getDerivedFields()
+end
+
+def get_last_group_inregex(s247_custom_regex)
+  return @s247_custom_regex.names[-1]
+end
+
+def remove_ignored_fields()
+  @s247_ignored_fields.each do |field_name|
+    @log_size -= if @log_fields.has_key? field_name then @log_fields.delete(field_name).bytesize else 0 end
+  end
+end
+
+def add_message_metadata()
+  @log_fields.update({'_zl_timestamp' => get_timestamp(@log_fields[@logtype_config['dateField']]), 's247agentuid' => @log_source})
+end
+
 def parse_lines(lines)
   parsed_lines = []
-  log_size = 0
   lines.each do |line|
-    if !line.empty?
-      begin
-        if match = line.match(@s247_custom_regex)
-          log_size += line.bytesize
-          log_fields = match&.named_captures
-          removed_log_size=0
-          @s247_ignored_fields.each do |field_name|
-            removed_log_size += if log_fields.has_key?field_name then log_fields.delete(field_name).bytesize else 0 end
-          end
-          formatted_line = {'_zl_timestamp' => get_timestamp(log_fields[@logtype_config['dateField']]), 's247agentuid' => @log_source}
-          formatted_line.merge!(log_fields)
-          parsed_lines.push(formatted_line)
-          log_size -= removed_log_size
-        else
-          log.debug "pattern not matched regex : #{@s247_custom_regex} and received line : #{line}"
-        end
-      rescue Exception => e
-        log.error "Exception in parse_line #{e.backtrace}"
-      end
-    end
+    if !line.empty?
+      begin
+        @logged = false
+        match = line.match(@s247_custom_regex)
+        if match
+          @formatted_line.update(@old_formatted_line)
+          @log_size += @old_log_size
+          @old_log_size = line.bytesize
+          @log_fields = match&.named_captures
+          remove_ignored_fields()
+          add_message_metadata()
+          @old_formatted_line = @log_fields
+          @last_line_matched = true
+          @trace_started = false
+        elsif @last_line_matched || @trace_started
+          is_date_present = !(line.scan(@datetime_regex).empty?)
+          @trace_started = !(is_date_present)
+          if !(is_date_present) && @old_formatted_line
+            if @old_formatted_line.has_key?(@message_key)
+              @old_formatted_line[@message_key] += '\n' + line
+              @old_log_size += line.bytesize
+              @trace_started = true
+              @last_line_matched = false
+            end
+          end
+        end
+        if @formatted_line.has_key?('_zl_timestamp')
+          log_line_filter()
+          parsed_lines.push(@formatted_line)
+          @formatted_line = {}
+        end
+      rescue Exception => e
+        log.error "Exception in parse_line #{e.backtrace}"
+        @formatted_line = {}
+      end
+    end
   end
-  return parsed_lines, log_size
+  return parsed_lines
 end
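
The rewritten `parse_lines` above holds each matched record open so that following lines without a timestamp (a stack trace, say) can be appended to it. A self-contained sketch of that hold-and-append strategy, with illustrative regexes rather than the gem's profile-driven ones:

```ruby
# A line matching the main regex starts a new record; the previous record is
# held. A line containing no datetime is treated as a continuation and is
# appended to the held record's message field.
line_regex     = /(?<time>\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}) (?<message>.*)/
datetime_regex = /\d{4}-\d{2}-\d{2} \d{2}:\d{2}:\d{2}/

records = []
held = nil
[
  '2023-10-10 13:55:36 NullPointerException',
  '  at com.example.Main.run(Main.java:42)',
  '2023-10-10 13:55:37 next request'
].each do |line|
  if (m = line.match(line_regex))
    records << held if held                 # previous record is now complete
    held = m.named_captures
  elsif held && line.scan(datetime_regex).empty?
    held['message'] += "\n" + line          # continuation line, no timestamp
  end
end
records << held if held
puts records.length  # => 2; the first record carries the appended trace line
```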
-def is_filters_matched(formatted_line)
-  if @logtype_config.has_key?'filterConfig'
-    @logtype_config['filterConfig'].each do |config|
-      if formatted_line.has_key?config && (filter_config[config]['match'] ^ (filter_config[config]['values'].include?formatted_line[config]))
-        return false
-      end
-    end
-  end
-  return true
-end
+def is_filters_matched()
+  begin
+    if @logtype_config.has_key?'filterConfig'
+      @logtype_config['filterConfig'].each do |config, value|
+        if @formatted_line[config].scan(Regexp.new(@logtype_config['filterConfig'][config]['values'])).length > 0
+          val = true
+        else
+          val = false
+        end
+        if (@formatted_line.has_key?config) && (@logtype_config['filterConfig'][config]['match'] ^ (val))
+          return false
+        end
+      end
+    end
+  rescue Exception => e
+    log.error "Exception occurred in filter: #{e.backtrace}"
+  end
+  return true
+end
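
The XOR in `is_filters_matched` collapses allow-lists and deny-lists into a single drop test: `match` says which kind of list the regexes form, and `match ^ hit` is true exactly when the record should be dropped. A tiny sketch of that semantics, with an illustrative filter:

```ruby
# 'match' => true means the values form an allow-list; false, a deny-list.
filter = { 'match' => true, 'values' => 'GET|POST' }  # allow only GET/POST
field  = 'DELETE'

hit  = field.scan(Regexp.new(filter['values'])).length > 0
drop = filter['match'] ^ hit   # allow-list with no hit, or deny-list with a hit
puts drop                      # => true, so this record would be filtered out
```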
 def get_json_value(obj, key, datatype=nil)
   if obj != nil && (obj.has_key?key)
     if datatype and datatype == 'json-object'
       arr_json = []
       child_obj = obj[key]
       if child_obj.class == String
         child_obj = Yajl::Parser.parse(child_obj.gsub('\\','\\\\'))
       end
       child_obj.each do |key, value|
         arr_json.push({'key' => key, 'value' => String(value)})
       end
       return arr_json
     else
       return (if obj.has_key?key then obj[key] else obj[key.downcase] end)
     end
   elsif key.include?'.'
     parent_key = key[0..key.index('.')-1]
     child_key = key[key.index('.')+1..-1]
     child_obj = obj[if obj.has_key?parent_key then parent_key else parent_key.capitalize() end]
     if child_obj.class == String
       child_obj = Yajl::Parser.parse(child_obj.replace('\\','\\\\'))
     end
-    return get_json_value(child_obj, child_key)
+    return get_json_value(child_obj, child_key, datatype)
   end
 end
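
`get_json_value` resolves dotted keys like `response.status` by splitting on the first dot and recursing, so nested JSON fields can be addressed from a flat `jsonPath` configuration. A stripped-down sketch of just that traversal, without the gem's type handling and capitalization fallbacks:

```ruby
# Resolve "a.b.c" against nested hashes by recursing one level per dot.
def dig_dotted(obj, key)
  return obj[key] if obj.has_key?(key)
  if key.include?('.')
    parent, child = key.split('.', 2)
    dig_dotted(obj[parent], child) if obj.has_key?(parent)
  end
end

event = { 'response' => { 'status' => 200, 'bytes' => 512 } }
puts dig_dotted(event, 'response.status')  # => 200
```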
-def json_log_parser(lines_read)
-  log_size = 0
-  parsed_lines = []
-  lines_read.each do |line|
-    if !line.empty?
-      current_log_size = 0
-      formatted_line = {}
-      event_obj = Yajl::Parser.parse(line)
-      @logtype_config['jsonPath'].each do |path_obj|
-        value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
-        if value
-          formatted_line[path_obj['name']] = value
-          current_log_size+= String(value).size - (if value.class == Array then value.size*20 else 0 end)
-        end
-      end
-      if is_filters_matched(formatted_line)
-        formatted_line['_zl_timestamp'] = get_timestamp(formatted_line[@logtype_config['dateField']])
-        formatted_line['s247agentuid'] = @log_source
-        parsed_lines.push(formatted_line)
-        log_size+=current_log_size
-      end
-    end
-  end
-  return parsed_lines, log_size
-end
+def json_log_applier(line)
+  json_log_size=0
+  @formatted_line = {}
+  @log_fields = {}
+  event_obj = if line.is_a?(String) then Yajl::Parser.parse(line) else line end
+  @logtype_config['jsonPath'].each do |path_obj|
+    value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
+    if value
+      @log_fields[path_obj['name']] = value
+      json_log_size += String(value).bytesize - (if value.class == Array then value.size*20 else 0 end)
+    end
+  end
+  for key, regex in @sub_pattern do
+    if @log_fields.has_key?(key)
+      matcher = regex.match(@log_fields.delete(key))
+      if matcher
+        @log_fields.update(matcher.named_captures)
+        remove_ignored_fields()
+        @formatted_line.update(@log_fields)
+      end
+    end
+  end
+  if !(is_filters_matched())
+    return false
+  else
+    add_message_metadata()
+    @formatted_line.update(@log_fields)
+    log_line_filter()
+    @log_size += json_log_size
+    return true
+  end
+end
+
+def json_log_parser(lines)
+  parsed_lines = []
+  lines.each do |line|
+    begin
+      @logged = false
+      if !line.empty?
+        if line[0] == '{' && @json_data[-1] == '}'
+          if json_log_applier(@json_data)
+            parsed_lines.push(@formatted_line)
+          end
+          @json_data=''
+        end
+        @json_data += line
+      end
+    rescue Exception => e
+      log.error "Exception in parse_line #{e.backtrace}"
+    end
+  end
+  return parsed_lines
+end
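
The new `json_log_parser` no longer assumes one JSON event per physical line: it buffers lines and only applies the buffer once the next line opens with `{` while the buffer already closes with `}`. A runnable sketch of that assembly rule:

```ruby
# Accumulate lines into a buffer; parse the buffer as one record only when a
# new record visibly begins, which tolerates JSON split across several lines.
require 'json'

records = []
buffer = ''
[
  '{"level":"info",',
  '"msg":"started"}',
  '{"level":"error","msg":"boom"}'
].each do |line|
  if line[0] == '{' && buffer[-1] == '}'
    records << JSON.parse(buffer)   # previous record is complete
    buffer = ''
  end
  buffer += line
end
records << JSON.parse(buffer) unless buffer.empty?  # flush the final record
puts records.length  # => 2
```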

+def ml_regex_applier(ml_trace, ml_data)
+  begin
+    @log_size += @ml_trace.bytesize
+    matcher = @s247_custom_regex.match(@ml_trace)
+    @log_fields = matcher.named_captures
+    @log_fields.update(@ml_data)
+    if @s247_ignored_fields
+      remove_ignored_fields()
+    end
+    add_message_metadata()
+    @formatted_line.update(@log_fields)
+    log_line_filter()
+  rescue Exception => e
+    log.error "Exception occurred in ml_parser : #{e.backtrace}"
+    @formatted_line = {}
+  end
+end
+
+def ml_log_parser(lines)
+  parsed_lines = []
+  lines.each do |line|
+    if !line.empty?
+      begin
+        @logged = false
+        ml_start_matcher = @ml_regex.match(line)
+        if ml_start_matcher || @ml_end_line_found
+          @ml_found = ml_start_matcher
+          @ml_end_line_found = false
+          @formatted_line = {}
+          if @ml_trace.length > 0
+            begin
+              ml_regex_applier(@ml_trace, @ml_data)
+              if @ml_trace_buffer && @formatted_line
+                @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+                @log_size += @ml_trace_buffer.bytesize
+              end
+              parsed_lines.push(@formatted_line)
+              @ml_trace = ''
+              @ml_trace_buffer = ''
+              if @ml_found
+                @ml_data = ml_start_matcher.named_captures
+                @log_size += line.bytesize
+              else
+                @ml_data = {}
+              end
+              @ml_count = 0
+            rescue Exception => e
+              log.error "Exception occurred in ml_parser : #{e.backtrace}"
+            end
+          elsif @ml_found
+            @log_size += line.bytesize
+            @ml_data = ml_start_matcher.named_captures
+          end
+        elsif @ml_found
+          if @ml_count < @max_ml_count
+            @ml_trace += '<NewLine>' + line
+          elsif @ml_end_regex && @ml_end_regex.match(line)
+            @ml_end_line_found = true
+          elsif (@ml_count - @max_ml_count) < @max_trace_line
+            @ml_trace_buffer += "\n" + line
+          end
+          @ml_count += 1
+        end
+      rescue Exception => e
+        log.error "Exception occurred in ml_parser : #{e.backtrace}"
+      end
+    end
+  end
+  return parsed_lines
+end
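
Unlike `parse_lines`, which infers continuations from a missing datetime, `ml_log_parser` groups multi-line events with an explicit start regex and joins buffered lines with a `<NewLine>` placeholder. A short sketch of that grouping idea, without the gem's count limits and end-regex handling:

```ruby
# A configured start regex opens a multi-line event; non-matching lines are
# joined onto it, and the buffered trace is emitted when the next start line
# arrives.
start_regex = /^(?<time>\d{2}:\d{2}:\d{2}) ERROR/

traces = []
trace = nil
[
  '13:55:36 ERROR request failed',
  'caused by: connection reset',
  '13:55:37 ERROR second failure'
].each do |line|
  if start_regex.match(line)
    traces << trace if trace        # previous trace is complete
    trace = line
  elsif trace
    trace += '<NewLine>' + line     # continuation joined with the placeholder
  end
end
traces << trace if trace
puts traces.first  # => "13:55:36 ERROR request failed<NewLine>caused by: connection reset"
```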

 def format(tag, time, record)
   if @valid_logtype && (@log_upload_allowed || (time.to_i - @log_upload_stopped_time > S247_LOG_UPLOAD_CHECK_INTERVAL))
-    [record['message']].to_msgpack
+    if (record.size == 1)
+      if record.has_key?'message'
+        [record['message']].to_msgpack
+      end
+    else
+      [record.to_json].to_msgpack
+    end
   end
 end

 def write(chunk)
   begin
@@ -249,12 +476,15 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end

 def process_http_events(events)
+  @before_time = Time.now
   batches = batch_http_events(events)
   batches.each do |batched_event|
-    formatted_events, log_size = format_http_event_batch(batched_event)
-    formatted_events = gzip_compress(formatted_events)
-    send_logs_to_s247(formatted_events, log_size)
+    formatted_events, @log_size = format_http_event_batch(batched_event)
+    if (formatted_events.length > 0)
+      formatted_events = gzip_compress(formatted_events)
+      send_logs_to_s247(formatted_events, @log_size)
+    end
   end
 end

 def batch_http_events(encoded_events)
@@ -262,7 +492,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   current_batch = []
   current_batch_size = 0
   encoded_events.each_with_index do |encoded_event, i|
-    current_event_size = encoded_event.bytesize
+    current_event_size = String(encoded_event).bytesize
     if current_event_size > S247_MAX_RECORD_SIZE
       encoded_event = encoded_event[0..(S247_MAX_RECORD_SIZE-DD_TRUNCATION_SUFFIX.length)]+DD_TRUNCATION_SUFFIX
       current_event_size = encoded_event.bytesize
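
`batch_http_events` truncates oversized events with a marker suffix and closes a batch whenever the next event would push it past the size cap. The rest of the method is not shown in this diff, so here is a sketch under that assumption; the constants are illustrative stand-ins for `S247_MAX_RECORD_SIZE`, the batch cap, and the truncation suffix:

```ruby
MAX_RECORD_SIZE   = 500           # assumed stand-in for S247_MAX_RECORD_SIZE
MAX_BATCH_SIZE    = 1000          # assumed batch cap
TRUNCATION_SUFFIX = '#TRUNCATED#'

def batch_events(events)
  batches, current, size = [], [], 0
  events.each do |event|
    if event.bytesize > MAX_RECORD_SIZE
      # keep the record but mark that its tail was cut off
      event = event[0...(MAX_RECORD_SIZE - TRUNCATION_SUFFIX.length)] + TRUNCATION_SUFFIX
    end
    if size + event.bytesize > MAX_BATCH_SIZE && !current.empty?
      batches << current            # close the full batch
      current, size = [], 0
    end
    current << event
    size += event.bytesize
  end
  batches << current unless current.empty?
  batches
end

puts batch_events(['a' * 600, 'b' * 300, 'c' * 300]).length  # => 2 batches
```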
@@ -283,13 +513,19 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output

 def format_http_event_batch(events)
   parsed_lines = []
-  log_size = 0
+  @log_size = 0
+  @old_log_size = 0
   if @logtype_config.has_key?'jsonPath'
-    parsed_lines, log_size = json_log_parser(events)
+    parsed_lines = json_log_parser(events)
+  elsif @logtype_config.has_key?'ml_regex'
+    parsed_lines = ml_log_parser(events)
   else
-    parsed_lines, log_size = parse_lines(events)
+    parsed_lines = parse_lines(events)
+  end
+  if (parsed_lines.length > 0)
+    return Yajl.dump(parsed_lines), @log_size
   end
-  return Yajl.dump(parsed_lines), log_size
+  return [], 0
 end

 def gzip_compress(payload)
@@ -307,7 +543,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
 def send_logs_to_s247(gzipped_parsed_lines, log_size)
   request = Net::HTTP::Post.new @uri.request_uri
   request.body = gzipped_parsed_lines
-  @s247_http_client.override_headers["Log-Size"] = log_size
+  @s247_http_client.override_headers["Log-Size"] = @log_size
   sleep_interval = @retry_interval
   begin
     @max_retry.times do |counter|
@@ -320,17 +556,17 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
       if resp_headers.has_key?'LOG_LICENSE_EXCEEDS' && resp_headers['LOG_LICENSE_EXCEEDS'] == 'True'
         log.error "Log license limit exceeds so not able to send logs"
         @log_upload_allowed = false
         @log_upload_stopped_time = Time.now.to_i
       elsif resp_headers.has_key?'BLOCKED_LOGTYPE' && resp_headers['BLOCKED_LOGTYPE'] == 'True'
         log.error "Max upload limit reached for log type"
         @log_upload_allowed = false
         @log_upload_stopped_time = Time.now.to_i
       elsif resp_headers.has_key?'INVALID_LOGTYPE' && resp_headers['INVALID_LOGTYPE'] == 'True'
         log.error "Log type not present in this account so stopping log collection"
         @valid_logtype = false
       else
         @log_upload_allowed = true
-        log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+        log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
       end
     else
       log.error "Response Code #{resp_headers} from Site24x7, so retrying (#{counter + 1}/#{@max_retry})"
@@ -356,4 +592,161 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
     end
   end

-end
+def log_the_holded_line()
+  @log_size = 0
+  if @logged == false
+    if (@ml_trace.length > 0)
+      ml_regex_applier(@ml_trace, @ml_data)
+      if @ml_trace_buffer
+        if !(@formatted_line.empty?)
+          @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+          @log_size += @ml_trace_buffer.bytesize
+        else
+          @ml_trace += @ml_trace_buffer.gsub('\n', '<NewLine>')
+          ml_regex_applier(@ml_trace, @ml_data)
+        end
+        @ml_trace_buffer = ''
+      end
+      @ml_trace = ''
+    elsif (@json_data.length > 0)
+      if !(json_log_applier(@json_data))
+        @formatted_line = {}
+      end
+      @json_data = ''
+    elsif @old_formatted_line
+      @formatted_line.update(@old_formatted_line)
+      log_line_filter()
+      @log_size += @old_log_size
+      @old_formatted_line = {}
+      @old_log_size = 0
+    end
+    @logged = true
+    if @format_record
+      @custom_parser.format_record()
+    end
+    if !(@formatted_line.empty?)
+      return @formatted_line
+    end
+  end
+  return nil
+end
+
+def applyMasking()
+  if @masking_config
+    begin
+      for key, value in @masking_config do
+        adjust_length = 0
+        mask_regex = @masking_config[key]["regex"]
+        if @formatted_line.has_key?key
+          field_value = @formatted_line[key]
+          if !(mask_regex.eql?(@general_regex))
+            matcher = field_value.to_enum(:scan, mask_regex).map { Regexp.last_match }
+            if matcher
+              (0..(matcher.length)-1).map do |index|
+                start = matcher[index].offset(1)[0]
+                _end = matcher[index].offset(1)[1]
+                if ((start >= 0) && (_end > 0))
+                  start = start - adjust_length
+                  _end = _end - adjust_length
+                  adjust_length += (_end - start) - @masking_config[key]['string'].bytesize
+                  field_value = field_value[0..(start-1)] + @masking_config[key]['string'] + field_value[_end..field_value.bytesize]
+                end
+              end
+            end
+            @formatted_line[key] = field_value
+            @log_size -= adjust_length
+          else
+            @log_size -= (@formatted_line[key].bytesize - @masking_config[key]['string'].bytesize)
+            @formatted_line[key] = @masking_config[key]['string']
+          end
+        end
+      end
+    rescue Exception => e
+      log.error "Exception occurred in masking : #{e.backtrace}"
+    end
+  end
+end
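
The tricky part of `applyMasking` is that every replacement changes the string's length, so a running `adjust_length` keeps the offsets from later matches (computed against the original string) valid against the already-edited one. A compact sketch of that offset juggling, written independently of the gem:

```ruby
# Replace every occurrence of capture group 1 with a fixed mask string,
# correcting match offsets for the length drift introduced by earlier edits.
value      = 'user=alice id=1234 user=bob'
mask_regex = /user=(\w+)/
mask       = '****'

adjust = 0
value.to_enum(:scan, mask_regex).map { Regexp.last_match }.each do |m|
  s, e = m.offset(1)              # span of capture group 1 in the original
  s -= adjust
  e -= adjust
  adjust += (e - s) - mask.bytesize
  value = value[0...s] + mask + value[e..-1]
end
puts value  # => "user=**** id=1234 user=****"
```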

+def applyHashing()
+  if @hashing_config
+    begin
+      for key, value in @hashing_config do
+        hash_regex = @hashing_config[key]["regex"]
+        if @formatted_line.has_key?key
+          field_value = @formatted_line[key]
+          if (hash_regex.eql?(@general_regex))
+            hash_string = Digest::SHA256.hexdigest(field_value)
+            field_value = hash_string
+          else
+            adjust_length = 0
+            matcher = field_value.to_enum(:scan, hash_regex).map { Regexp.last_match }
+            if matcher
+              (0..(matcher.length)-1).map do |index|
+                start = matcher[index].offset(1)[0]
+                _end = matcher[index].offset(1)[1]
+                if ((start >= 0) && (_end > 0))
+                  start = start - adjust_length
+                  _end = _end - adjust_length
+                  hash_string = Digest::SHA256.hexdigest(field_value[start..(_end-1)])
+                  adjust_length += (_end - start) - hash_string.bytesize
+                  field_value = field_value[0..(start-1)] + hash_string + field_value[_end..field_value.bytesize]
+                end
+              end
+            end
+          end
+          if adjust_length
+            @log_size -= adjust_length
+          else
+            @log_size -= (@formatted_line[key].bytesize - field_value.bytesize)
+          end
+          @formatted_line[key] = field_value
+        end
+      end
+    rescue Exception => e
+      log.error "Exception occurred in hashing : #{e.backtrace}"
+    end
+  end
+end
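
Where masking destroys the value, hashing pseudonymises it: with the catch-all `(.*)` regex the whole field is swapped for its SHA256 hex digest, which stays stable across occurrences of the same input. A minimal sketch of that whole-field case:

```ruby
require 'digest'

field_value = 'alice@example.com'
digest = Digest::SHA256.hexdigest(field_value)
puts digest           # 64 hex chars, identical for every occurrence of this value
puts digest.bytesize  # => 64, which is why the code adjusts @log_size afterwards
```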

+def getDerivedFields()
+  if @derived_config
+    begin
+      for key, value in @derived_fields do
+        for each in @derived_fields[key] do
+          if @formatted_line.has_key?key
+            match_derived = each.match(@formatted_line[key])
+            if match_derived
+              @formatted_line.update(match_derived.named_captures)
+              for field_name, value in match_derived.named_captures do
+                @log_size += @formatted_line[field_name].bytesize
+              end
+            end
+            break
+          end
+        end
+      end
+    rescue Exception => e
+      log.error "Exception occurred in derived fields : #{e.backtrace}"
+    end
+  end
+end
+
+def timer_task()
+  while true
+    @after_time = Time.now
+    if @before_time
+      diff = @after_time - @before_time
+      if diff.to_i > 29
+        out = log_the_holded_line()
+        if out != nil
+          out = Yajl.dump([out])
+          out = gzip_compress(out)
+          send_logs_to_s247(out, @log_size)
+        end
+      end
+    end
+    sleep(30)
+  end
+end
+
+end
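
`timer_task`, started from the `Thread.new` added in `configure()`, is what prevents the hold-and-append parsers from holding a record forever: it wakes every 30 seconds and, if the last upload began more than 29 seconds ago, flushes whatever record is still held open. A minimal sketch of that watchdog shape, with the upload path stubbed out:

```ruby
# Background flusher: wake periodically and flush the held record when the
# pipeline has gone quiet. before_time stands in for the gem's @before_time,
# which the upload path sets at the start of each batch.
before_time = nil

flusher = Thread.new do
  loop do
    sleep(30)
    next unless before_time
    if Time.now - before_time > 29
      puts 'no recent upload: flushing the held record'  # stand-in for send_logs_to_s247
    end
  end
end
flusher.join(0.1)  # in the gem the thread simply runs for the process lifetime
```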
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-site24x7
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.1.4
 platform: ruby
 authors:
 - Magesh Rajan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-08-12 00:00:00.000000000 Z
+date: 2022-12-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -152,7 +152,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.2.24
+rubygems_version: 3.1.6
 signing_key:
 specification_version: 4
 summary: Site24x7 output plugin for Fluent event collector.