fluent-plugin-site24x7 0.1.3 → 0.1.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: a33475317d8f77918035c64d70be3fbeb32ea214554da51753f83b51904c7f68
- data.tar.gz: 31284ae4e2d074f1545c7bb6641886bc12b3a4ca2ed6fd5de2a12e319409a2bc
+ metadata.gz: f625b690765ab8a387bfef2ce93ce69a8dbcd42130248fe72913e6947395e204
+ data.tar.gz: a47d573e473f76802523ac3e88951c86e058f7577aff44b4fdf1ef221e4826c9
  SHA512:
- metadata.gz: c90315b127043a5ce466c5af1aada7b26fa15614b4e498fd802ef3b88bf145d56dd8fe6e8f038b564750bc5b9d40d3be5f2c83de6c2dfcfd3ee09f5e199b7d6b
- data.tar.gz: 75c9d7cc52685aef20717a37e544dd7adc4b31951fa984449ec9ee7f23f58366ebf85bde6f54716008d88c2923d6013fb6ad836d76807a9550d1b2a2e9037298
+ metadata.gz: 450f8c60b8cf1a193d5a2ed8265b2cd9b223368f1df22c37459cdc8d5ca0d21590b6241749432c9b15b5595ae069dfb59b4810bf39e7b40f9bb43193f56eb1bd
+ data.tar.gz: d06c06451ed65bdb99b5e61cb7445d22554bb78d6777609ba0faace2a860135f51478fe003d56a023202584d0974eb2d7f8e952a4133e6137cc96bfe01e86b86
@@ -3,7 +3,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
  Gem::Specification.new do |spec|
  spec.name = "fluent-plugin-site24x7"
- spec.version = "0.1.3"
+ spec.version = "0.1.5"
  spec.authors = ["Magesh Rajan"]
  spec.email = ["magesh.rajan@zohocorp.com"]
 
@@ -5,6 +5,8 @@ require "yajl"
  require "zlib"
  require "date"
  require "fluent/plugin/output"
+ require 'digest'
+ require 'json'
 
  class Fluent::Site24x7Output < Fluent::Plugin::Output
 
@@ -73,6 +75,84 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  @logtype_config = Yajl::Parser.parse(base64_url_decode(@log_type_config))
  @s247_custom_regex = if @logtype_config.has_key? 'regex' then Regexp.compile(@logtype_config['regex'].gsub('?P<','?<')) else nil end
  @s247_ignored_fields = if @logtype_config.has_key? 'ignored_fields' then @logtype_config['ignored_fields'] else [] end
+ @datetime_regex = if @logtype_config.has_key?'dateRegex' then Regexp.compile(@logtype_config['dateRegex'].gsub('?P<','?<')) else nil end
+
+ @ml_regex = if @logtype_config.has_key? 'ml_regex' then Regexp.compile(@logtype_config['ml_regex'].gsub('?P<','?<')) else nil end
+ @ml_end_regex = if @logtype_config.has_key? 'ml_end_regex' then Regexp.compile(@logtype_config['ml_end_regex'].gsub('?P<','?<')) else nil end
+ @max_ml_count = if @logtype_config.has_key? 'ml_regex' then @s247_custom_regex.inspect.scan('\<NewLine\>').length else nil end
+ @max_trace_line = 100
+ @ml_trace = ''
+ @ml_trace_buffer = ''
+ @ml_found = false
+ @ml_end_line_found = false
+ @ml_data = nil
+ @ml_count = 0
+
+ @json_data = ''
+ @sub_pattern = {}
+
+ if !(@logtype_config.has_key?('jsonPath'))
+ @message_key = get_last_group_inregex(@s247_custom_regex)
+ end
+
+ if @logtype_config.has_key?('jsonPath')
+ @logtype_config['jsonPath'].each_with_index do | key, index |
+ if key.has_key?('pattern')
+ begin
+ if Regexp.new(key['pattern'].gsub('?P<','?<'))
+ @sub_pattern[key['name']] = Regexp.compile(key['pattern'].gsub('?P<','?<'))
+ end
+ rescue Exception => e
+ log.error "Invalid subpattern regex #{e.backtrace}"
+ end
+ end
+ end
+ end
+
+ @old_formatted_line = {}
+ @formatted_line = {}
+
+ @masking_config = if @logtype_config.has_key? 'maskingConfig' then @logtype_config['maskingConfig'] else nil end
+ @hashing_config = if @logtype_config.has_key? 'hashingConfig' then @logtype_config['hashingConfig'] else nil end
+ @derived_config = if @logtype_config.has_key? 'derivedConfig' then @logtype_config['derivedConfig'] else nil end
+ @general_regex = Regexp.compile("(.*)")
+
+ if @derived_config
+ @derived_fields = {}
+ for key,value in @derived_config do
+ @derived_fields[key] = []
+ for values in @derived_config[key] do
+ @derived_fields[key].push(Regexp.compile(values.gsub('\\\\', '\\')))
+ end
+ end
+ end
+
+ if @masking_config
+ for key,value in @masking_config do
+ @masking_config[key]["regex"] = Regexp.compile(@masking_config[key]["regex"])
+ end
+ end
+
+ if @hashing_config
+ for key,value in @hashing_config do
+ @hashing_config[key]["regex"] = Regexp.compile(@hashing_config[key]["regex"])
+ end
+ end
+
+ if @logtype_config.has_key?'filterConfig'
+ for field,rules in @logtype_config['filterConfig'] do
+ temp = []
+ for value in @logtype_config['filterConfig'][field]['values'] do
+ if @logtype_config['filterConfig'][field]['exact']
+ temp.push("\\A"+value+"\\Z")
+ else
+ temp.push(value)
+ end
+ end
+ @logtype_config['filterConfig'][field]['values'] = Regexp.compile(temp.join('|'))
+ end
+ end
+
  @s247_tz = {'hrs': 0, 'mins': 0} #UTC
  @log_source = Socket.gethostname
  @valid_logtype = true
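
The filterConfig block added above compiles each field's configured values into a single alternation regex at startup. A minimal standalone sketch of the same idea, using a hypothetical config entry (not taken from the gem):

    filter  = { 'values' => ['GET', 'POST'], 'exact' => true, 'match' => true }
    parts   = filter['values'].map { |v| filter['exact'] ? "\\A" + v + "\\Z" : v }
    pattern = Regexp.compile(parts.join('|'))   # => /\AGET\Z|\APOST\Z/
    pattern.match('GET')   # truthy, so a line whose field is "GET" passes when 'match' is true
    pattern.match('PUT')   # nil, so the line is filtered out
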
@@ -87,16 +167,18 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  @is_timezone_present = if @s247_datetime_format_string.include? '%z' then true else false end
  if !@is_timezone_present && @logtype_config.has_key?('timezone')
+ @s247_datetime_format_string += '%z'
  tz_value = @logtype_config['timezone']
- if tz_value.start_with?('+')
- @s247_tz['hrs'] = Integer('-' + tz_value[1..4])
- @s247_tz['mins'] = Integer('-' + tz_value[3..6])
- elsif tz_value.start_with?('-')
- @s247_tz['hrs'] = Integer('+' + tz_value[1..4])
- @s247_tz['mins'] = Integer('+' + tz_value[3..6])
- end
+ if tz_value.start_with?('+')
+ @s247_tz['hrs'] = Integer('-' + tz_value[1..2])
+ @s247_tz['mins'] = Integer('-' + tz_value[3..4])
+ elsif tz_value.start_with?('-')
+ @s247_tz['hrs'] = Integer('+' + tz_value[1..2])
+ @s247_tz['mins'] = Integer('+' + tz_value[3..4])
+ end
  end
  end
+ Thread.new { timer_task() }
  end
 
  def init_http_client(logtype_config)
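
For the timezone change above, the two-character slices now isolate the hour and minute fields of the offset string so the parsed timestamp can be shifted back to UTC. A small illustration with a hypothetical offset value (not code from the gem):

    tz_value = '+0530'                    # e.g. a 'timezone' entry in the log type config
    hrs  = Integer('-' + tz_value[1..2])  # => -5
    mins = Integer('-' + tz_value[3..4])  # => -30
    # 0.1.3 sliced tz_value[1..4] and tz_value[3..6], which mixed the hour and minute digits.
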
@@ -116,7 +198,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  @s247_http_client.override_headers["User-Agent"] = 'Fluentd'
  if !@s247_http_client.proxy_uri.nil?
  log.info "Using HTTP proxy #{@s247_http_client.proxy_uri.scheme}://#{@s247_http_client.proxy_uri.host}:#{@s247_http_client.proxy_uri.port} username: #{@s247_http_client.proxy_uri.user ? "configured" : "not configured"}, password: #{@s247_http_client.proxy_uri.password ? "configured" : "not configured"}"
- end
+ end
  end
 
  def get_timestamp(datetime_string)
@@ -127,113 +209,265 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  datetime_string += if !@is_year_present then ' '+String(Time.new.year) else '' end
  if !@is_timezone_present && @logtype_config.has_key?('timezone')
- @s247_datetime_format_string += '%z'
  time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
  datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
  end
  datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
  return Integer(datetime_data.strftime('%Q'))
- rescue
+ rescue Exception => e
+ log.error "Exception in parsing date: #{e.backtrace}"
  return 0
  end
  end
 
+ def data_collector()
+ if @formatted_line.has_key?('_zl_timestamp')
+ applyMasking()
+ applyHashing()
+ getDerivedFields()
+ if !is_filters_matched()
+ @formatted_line = {}
+ return
+ end
+ remove_ignored_fields()
+ log_size_calculation()
+ else
+ @formatted_line = {}
+ return
+ end
+ end
+
+ def get_last_group_inregex(s247_custom_regex)
+ return @s247_custom_regex.names[-1]
+ end
+
+ def remove_ignored_fields()
+ @s247_ignored_fields.each do |field_name|
+ if @formatted_line.has_key?field_name
+ @formatted_line.delete(field_name)
+ end
+ end
+ end
+
+ def add_message_metadata()
+ @log_fields.update({'_zl_timestamp' => get_timestamp(@log_fields[@logtype_config['dateField']]), 's247agentuid' => @log_source})
+ end
+
  def parse_lines(lines)
  parsed_lines = []
- log_size = 0
  lines.each do |line|
- if !line.empty?
- begin
- if match = line.match(@s247_custom_regex)
- log_size += line.bytesize
- log_fields = match&.named_captures
- removed_log_size=0
- @s247_ignored_fields.each do |field_name|
- removed_log_size += if log_fields.has_key?field_name then log_fields.delete(field_name).bytesize else 0 end
- end
- formatted_line = {'_zl_timestamp' => get_timestamp(log_fields[@logtype_config['dateField']]), 's247agentuid' => @log_source}
- formatted_line.merge!(log_fields)
- parsed_lines.push(formatted_line)
- log_size -= removed_log_size
- else
- log.debug "pattern not matched regex : #{@s247_custom_regex} and received line : #{line}"
- end
- rescue Exception => e
- log.error "Exception in parse_line #{e.backtrace}"
- end
- end
+ if !line.empty?
+ begin
+ @logged = false
+ match = line.match(@s247_custom_regex)
+ if match
+ @formatted_line.update(@old_formatted_line)
+ @log_fields = match&.named_captures
+ add_message_metadata()
+ @old_formatted_line = @log_fields
+ @last_line_matched = true
+ @trace_started = false
+ elsif @last_line_matched || @trace_started
+ is_date_present = !(line.scan(@datetime_regex).empty?)
+ @trace_started = !(is_date_present)
+ if !(is_date_present) && @old_formatted_line
+ if @old_formatted_line.has_key?(@message_key)
+ @old_formatted_line[@message_key] += '\n' + line
+ @old_log_size += line.bytesize
+ @trace_started = true
+ @last_line_matched = false
+ end
+ end
+ end
+ data_collector()
+ if @formatted_line.length >0
+ parsed_lines.push(@formatted_line)
+ end
+ rescue Exception => e
+ log.error "Exception in parse_line #{e.backtrace}"
+ @formatted_line = {}
+ end
+ end
  end
- return parsed_lines, log_size
+ return parsed_lines
  end
 
- def is_filters_matched(formatted_line)
- if @logtype_config.has_key?'filterConfig'
- @logtype_config['filterConfig'].each do |config|
- if formatted_line.has_key?config && (filter_config[config]['match'] ^ (filter_config[config]['values'].include?formatted_line[config]))
- return false
- end
+ def is_filters_matched()
+ begin
+ if @logtype_config.has_key?'filterConfig'
+ @logtype_config['filterConfig'].each do |config,value|
+ if (@formatted_line.has_key?config)
+ if @logtype_config['filterConfig'][config]['values'].match(@formatted_line[config])
+ val = true
+ else
+ val = false
+ end
+ end
+ if (@logtype_config['filterConfig'][config]['match'] ^ (val))
+ return false
+ end
  end
+ end
+ rescue Exception => e
+ log.error "Exception occurred in filter: #{e.backtrace}"
  end
  return true
  end
 
  def get_json_value(obj, key, datatype=nil)
  if obj != nil && (obj.has_key?key)
- if datatype and datatype == 'json-object'
- arr_json = []
- child_obj = obj[key]
- if child_obj.class == String
- child_obj = Yajl::Parser.parse(child_obj.gsub('\\','\\\\'))
- end
- child_obj.each do |key, value|
- arr_json.push({'key' => key, 'value' => String(value)})
- end
- return arr_json
- else
- return (if obj.has_key?key then obj[key] else obj[key.downcase] end)
- end
- elsif key.include?'.'
- parent_key = key[0..key.index('.')-1]
- child_key = key[key.index('.')+1..-1]
- child_obj = obj[if obj.has_key?parent_key then parent_key else parent_key.capitalize() end]
- if child_obj.class == String
- child_obj = Yajl::Parser.parse(child_obj.replace('\\','\\\\'))
+ if datatype and datatype == 'json-object'
+ arr_json = []
+ child_obj = obj[key]
+ if child_obj.class == String
+ child_obj = Yajl::Parser.parse(child_obj.gsub('\\','\\\\'))
+ end
+ child_obj.each do |key, value|
+ arr_json.push({'key' => key, 'value' => String(value)})
  end
- return get_json_value(child_obj, child_key)
+ return arr_json
+ else
+ return (if obj.has_key?key then obj[key] else obj[key.downcase] end)
+ end
+ elsif key.include?'.'
+ parent_key = key[0..key.index('.')-1]
+ child_key = key[key.index('.')+1..-1]
+ child_obj = obj[if obj.has_key?parent_key then parent_key else parent_key.capitalize() end]
+ if child_obj.class == String
+ child_obj = Yajl::Parser.parse(child_obj.replace('\\','\\\\'))
+ end
+ return get_json_value(child_obj, child_key,datatype)
  end
  end
 
- def json_log_parser(lines_read)
- log_size = 0
+ def json_log_applier(line)
+ json_log_size=0
+ @formatted_line = {}
+ @log_fields = {}
+ event_obj = if line.is_a?(String) then Yajl::Parser.parse(line) else line end
+ @logtype_config['jsonPath'].each do |path_obj|
+ value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
+ if value
+ @log_fields[path_obj['name']] = value
+ json_log_size+= String(value).bytesize - (if value.class == Array then value.size*20 else 0 end)
+ end
+ end
+ for key,regex in @sub_pattern do
+ if @log_fields.has_key?(key)
+ matcher = regex.match(@log_fields.delete(key))
+ if matcher
+ @log_fields.update(matcher.named_captures)
+ @formatted_line.update(@log_fields)
+ end
+ end
+ end
+ add_message_metadata()
+ @formatted_line.update(@log_fields)
+ data_collector()
+ if @formatted_line.length >0
+ return true
+ else
+ return false
+ end
+ end
+
+ def json_log_parser(lines)
  parsed_lines = []
- lines_read.each do |line|
+ lines.each do |line|
+ begin
+ @logged = false
  if !line.empty?
- current_log_size = 0
- formatted_line = {}
- event_obj = if line.is_a?(String) then Yajl::Parser.parse(line) else line end
- @logtype_config['jsonPath'].each do |path_obj|
- value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
- if value
- formatted_line[path_obj['name']] = value
- current_log_size+= String(value).size - (if value.class == Array then value.size*20 else 0 end)
+ if line[0] == '{' && @json_data[-1] == '}'
+ if json_log_applier(@json_data)
+ parsed_lines.push(@formatted_line)
  end
+ @json_data=''
  end
- if is_filters_matched(formatted_line)
- formatted_line['_zl_timestamp'] = get_timestamp(formatted_line[@logtype_config['dateField']])
- formatted_line['s247agentuid'] = @log_source
- parsed_lines.push(formatted_line)
- log_size+=current_log_size
+ @json_data += line
+ end
+ rescue Exception => e
+ log.error "Exception in parse_line #{e.backtrace}"
+ end
+ end
+ return parsed_lines
+ end
+
+ def ml_regex_applier(ml_trace, ml_data)
+ begin
+ matcher = @s247_custom_regex.match(@ml_trace)
+ @log_fields = matcher.named_captures
+ @log_fields.update(@ml_data)
+ add_message_metadata()
+ @formatted_line.update(@log_fields)
+ rescue Exception => e
+ log.error "Exception occurred in ml_parser : #{e.backtrace}"
+ end
+ end
+
+ def ml_log_parser(lines)
+ parsed_lines = []
+ lines.each do |line|
+ if !line.empty?
+ begin
+ @logged = false
+ ml_start_matcher = @ml_regex.match(line)
+ if ml_start_matcher || @ml_end_line_found
+ @ml_found = ml_start_matcher
+ @ml_end_line_found = false
+ @formatted_line = {}
+ if @ml_trace.length > 0
+ begin
+ ml_regex_applier(@ml_trace, @ml_data)
+ if @ml_trace_buffer && @formatted_line
+ @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+ end
+ data_collector()
+ if @formatted_line.length >0
+ parsed_lines.push(@formatted_line)
+ end
+ @ml_trace = ''
+ @ml_trace_buffer = ''
+ if @ml_found
+ @ml_data = ml_start_matcher.named_captures
+ else
+ @ml_data = {}
+ end
+ @ml_count = 0
+ rescue Exception => e
+ log.error "Exception occurred in ml_parser : #{e.backtrace}"
+ end
+ elsif @ml_found
+ @ml_data = ml_start_matcher.named_captures
+ end
+ elsif @ml_found
+ if @ml_count < @max_ml_count
+ @ml_trace += '<NewLine>' + line
+ elsif @ml_end_regex && @ml_end_regex.match(line)
+ @ml_end_line_found = true
+ elsif (@ml_count - @max_ml_count) < @max_trace_line
+ @ml_trace_buffer += "\n" + line
+ end
+ @ml_count += 1
  end
- end
+ rescue Exception => e
+ log.error "Exception occurred in ml_parser : #{e.backtrace}"
+ end
+ end
  end
- return parsed_lines, log_size
+ return parsed_lines
  end
 
  def format(tag, time, record)
  if @valid_logtype && (@log_upload_allowed || (time.to_i - @log_upload_stopped_time > S247_LOG_UPLOAD_CHECK_INTERVAL))
- if !@logtype_config.has_key?'jsonPath' then [record['message']].to_msgpack else [record].to_msgpack end
+ if (record.size == 1)
+ if record.has_key?'message'
+ [record['message']].to_msgpack
+ end
+ else
+ [record.to_json].to_msgpack
+ end
  end
- end
+ end
 
  def write(chunk)
  begin
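
To illustrate the multi-line handling above (a sketch with a hypothetical log-type regex and trace, not taken from the gem): continuation lines are appended to the trace with the literal token <NewLine>, and the whole trace is then matched once against the log type's main regex in ml_regex_applier.

    regex = /(?<timestamp>\d{4}-\d{2}-\d{2}) (?<level>\w+) (?<message>.*)/
    trace = '2023-06-09 ERROR boom' + '<NewLine>' + '  at Foo.bar(Foo.java:42)'
    regex.match(trace).named_captures
    # => {"timestamp"=>"2023-06-09", "level"=>"ERROR",
    #     "message"=>"boom<NewLine>  at Foo.bar(Foo.java:42)"}
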
@@ -249,12 +483,15 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
 
  def process_http_events(events)
+ @before_time = Time.now
  batches = batch_http_events(events)
  batches.each do |batched_event|
- formatted_events, log_size = format_http_event_batch(batched_event)
- formatted_events = gzip_compress(formatted_events)
- send_logs_to_s247(formatted_events, log_size)
- end
+ formatted_events, @log_size = format_http_event_batch(batched_event)
+ if (formatted_events.length>0)
+ formatted_events = gzip_compress(formatted_events)
+ send_logs_to_s247(formatted_events, @log_size)
+ end
+ end
  end
 
  def batch_http_events(encoded_events)
@@ -283,13 +520,19 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
 
  def format_http_event_batch(events)
  parsed_lines = []
- log_size = 0
+ @log_size = 0
+ @old_log_size=0
  if @logtype_config.has_key?'jsonPath'
- parsed_lines, log_size = json_log_parser(events)
+ parsed_lines = json_log_parser(events)
+ elsif @logtype_config.has_key?'ml_regex'
+ parsed_lines = ml_log_parser(events)
  else
- parsed_lines, log_size = parse_lines(events)
+ parsed_lines = parse_lines(events)
  end
- return Yajl.dump(parsed_lines), log_size
+ if (parsed_lines.length > 0)
+ return Yajl.dump(parsed_lines), @log_size
+ end
+ return [],0
  end
 
  def gzip_compress(payload)
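
gzip_compress itself is unchanged in this release, so its body does not appear in the diff. For context, a minimal sketch of what such a helper usually looks like in Ruby (an assumption, not the gem's exact implementation):

    require 'zlib'
    require 'stringio'

    def gzip_compress(payload)
      gz = StringIO.new('', 'w')
      writer = Zlib::GzipWriter.new(gz)
      writer.write(payload)
      writer.close          # flushes the gzip trailer; the StringIO keeps its buffer
      gz.string
    end
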
@@ -307,7 +550,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  def send_logs_to_s247(gzipped_parsed_lines, log_size)
  request = Net::HTTP::Post.new @uri.request_uri
  request.body = gzipped_parsed_lines
- @s247_http_client.override_headers["Log-Size"] = log_size
+ @s247_http_client.override_headers["Log-Size"] = @log_size
  sleep_interval = @retry_interval
  begin
  @max_retry.times do |counter|
@@ -315,22 +558,25 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  begin
  response = @s247_http_client.request @uri, request
  resp_headers = response.each_header.to_h
-
+
  if response.code == '200'
  if resp_headers.has_key?'LOG_LICENSE_EXCEEDS' && resp_headers['LOG_LICENSE_EXCEEDS'] == 'True'
  log.error "Log license limit exceeds so not able to send logs"
  @log_upload_allowed = false
- @log_upload_stopped_time =Time.now.to_i
+ @log_upload_stopped_time =Time.now.to_i
  elsif resp_headers.has_key?'BLOCKED_LOGTYPE' && resp_headers['BLOCKED_LOGTYPE'] == 'True'
  log.error "Max upload limit reached for log type"
  @log_upload_allowed = false
- @log_upload_stopped_time =Time.now.to_i
+ @log_upload_stopped_time =Time.now.to_i
  elsif resp_headers.has_key?'INVALID_LOGTYPE' && resp_headers['INVALID_LOGTYPE'] == 'True'
  log.error "Log type not present in this account so stopping log collection"
- @valid_logtype = false
+ @valid_logtype = false
+ elsif resp_headers['x-uploadid'] == nil
+ log.error "upload id is empty hence retry flag enabled #{gzipped_parsed_lines.size} / #{@log_size}"
+ need_retry = true
  else
- @log_upload_allowed = true
- log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+ @log_upload_allowed = true
+ log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
  end
  else
  log.error "Response Code #{resp_headers} from Site24x7, so retrying (#{counter + 1}/#{@max_retry})"
@@ -356,4 +602,157 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  end
 
- end
+ def log_the_holded_line()
+ @log_size = 0
+ if @logged == false
+ if (@ml_trace.length>0)
+ ml_regex_applier(@ml_trace, @ml_data)
+ if @ml_trace_buffer
+ if !(@formatted_line.empty?)
+ @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+ else
+ @ml_trace += @ml_trace_buffer.gsub('\n', '<NewLine>')
+ ml_regex_applier(@ml_trace, @ml_data)
+ end
+ @ml_trace_buffer = ''
+ end
+ @ml_trace = ''
+ elsif (@json_data.length>0)
+ if !(json_log_applier(@json_data))
+ @formatted_line={}
+ end
+ @json_data = ''
+ elsif @old_formatted_line
+ @formatted_line.update(@old_formatted_line)
+ data_collector()
+ @old_formatted_line = {}
+ end
+ @logged = true
+ if @format_record
+ @custom_parser.format_record()
+ end
+ if !(@formatted_line.empty?)
+ return @formatted_line
+ end
+ end
+ return nil
+ end
+
+ def applyMasking()
+ if @masking_config
+ begin
+ for key,value in @masking_config do
+ adjust_length = 0
+ mask_regex = @masking_config[key]["regex"]
+ if @formatted_line.has_key?key
+ field_value = @formatted_line[key]
+ if !(mask_regex.eql?(@general_regex))
+ matcher = field_value.to_enum(:scan, mask_regex).map { Regexp.last_match }
+ if matcher
+ (0..(matcher.length)-1).map do |index|
+ start = matcher[index].offset(1)[0]
+ _end = matcher[index].offset(1)[1]
+ if ((start >= 0) && (_end > 0))
+ start = start - adjust_length
+ _end = _end - adjust_length
+ adjust_length += (_end - start) - @masking_config[key]['string'].bytesize
+ field_value = field_value[0..(start-1)] + @masking_config[key]['string'] + field_value[_end..field_value.bytesize]
+ end
+ end
+ end
+ @formatted_line[key] = field_value
+ else
+ @formatted_line[key] = @masking_config[key]['string']
+ end
+ end
+ end
+ rescue Exception => e
+ log.error "Exception occurred in masking : #{e.backtrace}"
+ end
+ end
+ end
+
+ def applyHashing()
+ if @hashing_config
+ begin
+ for key,value in @hashing_config do
+ hash_regex = @hashing_config[key]["regex"]
+ if @formatted_line.has_key?key
+ field_value = @formatted_line[key]
+ if (hash_regex.eql?(@general_regex))
+ hash_string = Digest::SHA256.hexdigest(field_value)
+ field_value = hash_string
+ else
+ adjust_length = 0
+ matcher = field_value.to_enum(:scan, hash_regex).map { Regexp.last_match }
+ if matcher
+ (0..(matcher.length)-1).map do |index|
+ start = matcher[index].offset(1)[0]
+ _end = matcher[index].offset(1)[1]
+ if ((start >= 0) && (_end > 0))
+ start = start - adjust_length
+ _end = _end - adjust_length
+ hash_string = Digest::SHA256.hexdigest(field_value[start..(_end-1)])
+ adjust_length += (_end - start) - hash_string.bytesize
+ field_value = field_value[0..(start-1)] + hash_string + field_value[_end..field_value.bytesize]
+ end
+ end
+ end
+ end
+ @formatted_line[key] = field_value
+ end
+ end
+ rescue Exception => e
+ log.error "Exception occurred in hashing : #{e.backtrace}"
+ end
+ end
+ end
+
+ def getDerivedFields()
+ if @derived_config
+ begin
+ for key,value in @derived_fields do
+ for each in @derived_fields[key] do
+ if @formatted_line.has_key?key
+ match_derived = each.match(@formatted_line[key])
+ if match_derived
+ @formatted_line.update(match_derived.named_captures)
+ end
+ break
+ end
+ end
+ end
+ rescue Exception => e
+ log.error "Exception occurred in derived fields : #{e.backtrace}"
+ end
+ end
+ end
+
+ def log_size_calculation()
+ data_exclusion = ["_zl", "s247", "inode"]
+ @formatted_line.each do |field, value|
+ unless data_exclusion.any? { |exclusion| field.start_with?(exclusion) }
+ @log_size += value.to_s.bytesize
+ end
+ end
+ end
+
+ def timer_task()
+ while true
+ @after_time = Time.now
+ if @before_time
+ diff = @after_time-@before_time
+ if diff.to_i > 29
+ out = log_the_holded_line()
+ if out != nil
+ out = Yajl.dump([out])
+ out = gzip_compress(out)
+ send_logs_to_s247(out, @log_size)
+ end
+ end
+ end
+ sleep(30)
+ end
+ end
+
+ end
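
The applyMasking and applyHashing methods added above both walk the capture-group offsets of a configured regex and splice a replacement (a fixed mask string, or a SHA256 digest) into the field value, using adjust_length to keep later offsets valid as the string length changes. A standalone sketch of that offset bookkeeping with a hypothetical field and regex (not code from the gem):

    require 'digest'

    field_value = 'user=alice card=4111111111111111'
    mask_regex  = /card=(\d+)/      # hypothetical masking regex with one capture group
    mask_string = '****'

    adjust = 0
    field_value.to_enum(:scan, mask_regex).map { Regexp.last_match }.each do |m|
      s, e = m.offset(1)            # byte range of the capture group in the original string
      s -= adjust
      e -= adjust
      adjust += (e - s) - mask_string.bytesize
      field_value = field_value[0...s] + mask_string + field_value[e..-1]
    end
    field_value                                   # => "user=alice card=****"
    Digest::SHA256.hexdigest('4111111111111111')  # applyHashing stores a digest like this instead
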
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-site24x7
  version: !ruby/object:Gem::Version
- version: 0.1.3
+ version: 0.1.5
  platform: ruby
  authors:
  - Magesh Rajan
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2021-10-12 00:00:00.000000000 Z
+ date: 2023-06-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler