fluent-plugin-site24x7 0.1.4 → 0.1.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: eaa27dafc08120c093de5e57ba15b129209637db8759e68a20c2053513cb55e9
- data.tar.gz: 6fc10de6e63e332c816935a656cfb234e733710c7849f7578ebaa035eec7a8d4
+ metadata.gz: fc1338accf3055ea6b93742a4280cdbdb8ec4b5e0ab97ea4ffc79e77855f4f28
+ data.tar.gz: ce48c6512000a972e2f3209b4b142d6ef4cffcec7268720f0a56fed15e75553c
  SHA512:
- metadata.gz: 58d44f268199174dabf93bc3b0fc3ceb0add74b2718001aaab95c3c380bf4f2b18bec949162a47a778a7d78d80a067302cc509506ceac6f8a2f21d5dedcc60ba
- data.tar.gz: bd0dc479e64382935a9154006b281b46cad6d93c883b18d5ddb1cbd026ae49d427a35c0e4836ee6cf7eb1996d6d3557cd5d3bb4f862d9149af5655be862ad1d5
+ metadata.gz: a48c2d71823a1756d55ae9343017331f1a6b0579c42f9fcec1c61070062bb51071adfb614b71cd162015943fd368fa179d4f932662d2b8a6e54678ce7e167539
+ data.tar.gz: d95117ea90350c380ee4dcacc1dd2238bac3d67e8338fa973c8231a81f2f701ead6d8f02d306b82789afb396094e250507a915ba974f64509ef480339597c881
data/Gemfile CHANGED
File without changes
data/LICENSE CHANGED
File without changes
data/README.md CHANGED
File without changes
data/Rakefile CHANGED
File without changes
@@ -3,7 +3,7 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)

  Gem::Specification.new do |spec|
  spec.name = "fluent-plugin-site24x7"
- spec.version = "0.1.4"
+ spec.version = "0.1.6"
  spec.authors = ["Magesh Rajan"]
  spec.email = ["magesh.rajan@zohocorp.com"]

@@ -21,8 +21,10 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  helpers :compat_parameters

  config_param :log_type_config, :string
- config_param :max_retry, :integer, :default => 3
+ config_param :max_retry, :integer, :default => -1
  config_param :retry_interval, :integer, :default => 2
+ config_param :maxretry_interval, :integer, :default => 60 #1 minutes
+ config_param :retry_timeout, :integer, :default => 24 * 3600 # 24 hours
  config_param :http_idle_timeout, :integer, default: 5
  config_param :http_read_timeout, :integer, default: 30
  config_param :http_proxy, :string, :default => nil
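
Note: taken together with the send_logs_to_s247 changes further down, these new defaults mean the upload retry is no longer capped at a fixed count. A max_retry of -1 disables the attempt-count check, the sleep between attempts starts at retry_interval and doubles, each sleep is clamped to maxretry_interval, and retrying stops once retry_timeout elapses. A minimal illustrative Ruby sketch of the resulting backoff schedule (not plugin code; the values are the new defaults):

    retry_interval    = 2    # first sleep, in seconds
    maxretry_interval = 60   # cap on any single sleep
    sleeps = []
    interval = retry_interval
    10.times do
      sleeps << interval
      interval = [interval * 2, maxretry_interval].min  # double, then clamp
    end
    p sleeps  # => [2, 4, 8, 16, 32, 60, 60, 60, 60, 60]
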
@@ -143,9 +145,13 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  for field,rules in @logtype_config['filterConfig'] do
  temp = []
  for value in @logtype_config['filterConfig'][field]['values'] do
- temp.push(Regexp.compile(value))
+ if @logtype_config['filterConfig'][field]['exact']
+ temp.push("\\A"+value+"\\Z")
+ else
+ temp.push(value)
+ end
  end
- @logtype_config['filterConfig'][field]['values'] = temp.join('|')
+ @logtype_config['filterConfig'][field]['values'] = Regexp.compile(temp.join('|'))
  end
  end

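
Note: the filter setup now pre-compiles a single alternation per field instead of keeping one pattern per value, and when a filter's 'exact' flag is set each value is anchored with \A and \Z so only whole-field matches count. An illustrative Ruby sketch with made-up values (not plugin code):

    values = ['GET', 'POST']
    exact = Regexp.compile(values.map { |v| "\\A" + v + "\\Z" }.join('|'))  # exact => true
    loose = Regexp.compile(values.join('|'))                                # exact => false
    exact.match('GETDATA')  # => nil        (whole value must match)
    loose.match('GETDATA')  # => MatchData  (a substring match is enough)
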
@@ -165,13 +171,13 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  if !@is_timezone_present && @logtype_config.has_key?('timezone')
  @s247_datetime_format_string += '%z'
  tz_value = @logtype_config['timezone']
- if tz_value.start_with?('+')
- @s247_tz['hrs'] = Integer('-' + tz_value[1..4])
- @s247_tz['mins'] = Integer('-' + tz_value[3..6])
- elsif tz_value.start_with?('-')
- @s247_tz['hrs'] = Integer('+' + tz_value[1..4])
- @s247_tz['mins'] = Integer('+' + tz_value[3..6])
- end
+ if tz_value.start_with?('+')
+ @s247_tz['hrs'] = Integer('-' + tz_value[1..2])
+ @s247_tz['mins'] = Integer('-' + tz_value[3..4])
+ elsif tz_value.start_with?('-')
+ @s247_tz['hrs'] = Integer('+' + tz_value[1..2])
+ @s247_tz['mins'] = Integer('+' + tz_value[3..4])
+ end
  end
  end
  Thread.new { timer_task() }
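
Note: this fixes the offset parsing for timezone values such as "+0530". The old slices [1..4] and [3..6] read "0530" and "30" rather than the hour and minute pairs; the new [1..2] and [3..4] slices isolate them correctly, with the sign still inverted as before. A small illustration with an assumed offset string (not plugin code):

    tz_value = '+0530'
    Integer('-' + tz_value[1..2])  # => -5   (hours)
    Integer('-' + tz_value[3..4])  # => -30  (minutes)
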
@@ -208,18 +214,30 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
  datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
  end
+ #log.info "datetime_string : (#{datetime_string}), s247_datetime_format_string: (#{@s247_datetime_format_string})"
  datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
  return Integer(datetime_data.strftime('%Q'))
  rescue Exception => e
- @logger.error "Exception in parsing date: #{e.backtrace}"
- return 0
+ log.error "Exception in parsing date: #{e.message}"
+ return 0
  end
  end

- def log_line_filter()
- applyMasking()
- applyHashing()
- getDerivedFields()
+ def data_collector()
+ if @formatted_line.has_key?('_zl_timestamp')
+ applyMasking()
+ applyHashing()
+ getDerivedFields()
+ if !is_filters_matched()
+ @formatted_line = {}
+ return
+ end
+ remove_ignored_fields()
+ log_size_calculation()
+ else
+ @formatted_line = {}
+ return
+ end
  end

  def get_last_group_inregex(s247_custom_regex)
@@ -228,7 +246,9 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output

  def remove_ignored_fields()
  @s247_ignored_fields.each do |field_name|
- @log_size -= if @log_fields.has_key?field_name then @log_fields.delete(field_name).bytesize else 0 end
+ if @formatted_line.has_key?field_name
+ @formatted_line.delete(field_name)
+ end
  end
  end

@@ -245,10 +265,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  match = line.match(@s247_custom_regex)
  if match
  @formatted_line.update(@old_formatted_line)
- @log_size += @old_log_size
- @old_log_size = line.bytesize
  @log_fields = match&.named_captures
- remove_ignored_fields()
  add_message_metadata()
  @old_formatted_line = @log_fields
  @last_line_matched = true
@@ -262,14 +279,13 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  @old_log_size += line.bytesize
  @trace_started = true
  @last_line_matched = false
- end
+ end
  end
- end
- if @formatted_line.has_key?('_zl_timestamp')
- log_line_filter()
+ end
+ data_collector()
+ if @formatted_line.length >0
  parsed_lines.push(@formatted_line)
- @formatted_line = {}
- end
+ end
  rescue Exception => e
  log.error "Exception in parse_line #{e.backtrace}"
  @formatted_line = {}
@@ -283,12 +299,14 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  begin
  if @logtype_config.has_key?'filterConfig'
  @logtype_config['filterConfig'].each do |config,value|
- if @formatted_line[config].scan(Regexp.new(@logtype_config['filterConfig'][config]['values'])).length > 0
- val = true
- else
- val = false
+ if (@formatted_line.has_key?config)
+ if @logtype_config['filterConfig'][config]['values'].match(@formatted_line[config])
+ val = true
+ else
+ val = false
+ end
  end
- if (@formatted_line.has_key?config) && (@logtype_config['filterConfig'][config]['match'] ^ (val))
+ if (@logtype_config['filterConfig'][config]['match'] ^ (val))
  return false
  end
  end
@@ -342,20 +360,18 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  matcher = regex.match(@log_fields.delete(key))
  if matcher
  @log_fields.update(matcher.named_captures)
- remove_ignored_fields()
  @formatted_line.update(@log_fields)
  end
  end
  end
- if !(is_filters_matched())
- return false
- else
- add_message_metadata()
- @formatted_line.update(@log_fields)
- log_line_filter()
- @log_size += json_log_size
+ add_message_metadata()
+ @formatted_line.update(@log_fields)
+ data_collector()
+ if @formatted_line.length >0
  return true
- end
+ else
+ return false
+ end
  end

  def json_log_parser(lines)
@@ -367,7 +383,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  if line[0] == '{' && @json_data[-1] == '}'
  if json_log_applier(@json_data)
  parsed_lines.push(@formatted_line)
- end
+ end
  @json_data=''
  end
  @json_data += line
@@ -380,20 +396,14 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end

  def ml_regex_applier(ml_trace, ml_data)
- begin
- @log_size += @ml_trace.bytesize
+ begin
  matcher = @s247_custom_regex.match(@ml_trace)
  @log_fields = matcher.named_captures
  @log_fields.update(@ml_data)
- if @s247_ignored_fields
- remove_ignored_fields()
- end
  add_message_metadata()
  @formatted_line.update(@log_fields)
- log_line_filter()
  rescue Exception => e
  log.error "Exception occurred in ml_parser : #{e.backtrace}"
- @formatted_line = {}
  end
  end

@@ -413,14 +423,15 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  ml_regex_applier(@ml_trace, @ml_data)
  if @ml_trace_buffer && @formatted_line
  @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
- @log_size += @ml_trace_buffer.bytesize
  end
- parsed_lines.push(@formatted_line)
+ data_collector()
+ if @formatted_line.length >0
+ parsed_lines.push(@formatted_line)
+ end
  @ml_trace = ''
  @ml_trace_buffer = ''
  if @ml_found
  @ml_data = ml_start_matcher.named_captures
- @log_size += line.bytesize
  else
  @ml_data = {}
  end
@@ -429,7 +440,6 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  log.error "Exception occurred in ml_parser : #{e.backtrace}"
  end
  elsif @ml_found
- @log_size += line.bytesize
  @ml_data = ml_start_matcher.named_captures
  end
  elsif @ml_found
@@ -463,26 +473,27 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end

  def write(chunk)
+ current_chunk_id = "#{chunk.dump_unique_id_hex(chunk.unique_id)}"
  begin
- events = Array.new
- chunk.msgpack_each do |record|
- next if record.empty?
- events.push record[0]
- end
- process_http_events(events)
+ events = Array.new
+ chunk.msgpack_each do |record|
+ next if record.empty?
+ events.push record[0]
+ end
+ process_http_events(events, current_chunk_id)
  rescue Exception => e
  log.error "Exception #{e.backtrace}"
  end
  end

- def process_http_events(events)
+ def process_http_events(events, current_chunk_id)
  @before_time = Time.now
  batches = batch_http_events(events)
  batches.each do |batched_event|
  formatted_events, @log_size = format_http_event_batch(batched_event)
  if (formatted_events.length>0)
  formatted_events = gzip_compress(formatted_events)
- send_logs_to_s247(formatted_events, @log_size)
+ send_logs_to_s247(formatted_events, @log_size, current_chunk_id)
  end
  end
  end
@@ -540,49 +551,73 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  gz.string
  end

- def send_logs_to_s247(gzipped_parsed_lines, log_size)
+ def send_logs_to_s247(gzipped_parsed_lines, log_size, current_chunk_id)
  request = Net::HTTP::Post.new @uri.request_uri
  request.body = gzipped_parsed_lines
- @s247_http_client.override_headers["Log-Size"] = @log_size
+
  sleep_interval = @retry_interval
  begin
- @max_retry.times do |counter|
+ retries = 0
+ first_upload_time = Time.now
+ while true
  need_retry = false
  begin
+ @s247_http_client.override_headers["Log-Size"] = @log_size
+ @s247_http_client.override_headers["upload-id"] = current_chunk_id
  response = @s247_http_client.request @uri, request
  resp_headers = response.each_header.to_h
-
+
  if response.code == '200'
- if resp_headers.has_key?'LOG_LICENSE_EXCEEDS' && resp_headers['LOG_LICENSE_EXCEEDS'] == 'True'
- log.error "Log license limit exceeds so not able to send logs"
+ if resp_headers['x-uploadid'] == nil
+ log.error "[#{current_chunk_id}]:upload id is empty hence retry flag enabled #{resp_headers}"
+ need_retry = true
+ else
+ @log_upload_allowed = true
+ log.debug "[#{current_chunk_id}]:Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+ end
+ elsif response.code == '400'
+ if resp_headers.has_key?'log_license_exceeds' and resp_headers['log_license_exceeds'] == 'True'
+ log.error "[#{current_chunk_id}]:Log license limit exceeds so not able to send logs"
  @log_upload_allowed = false
  @log_upload_stopped_time =Time.now.to_i
- elsif resp_headers.has_key?'BLOCKED_LOGTYPE' && resp_headers['BLOCKED_LOGTYPE'] == 'True'
- log.error "Max upload limit reached for log type"
+ elsif resp_headers.has_key?'blocked_logtype' and resp_headers['blocked_logtype'] == 'True'
+ log.error "[#{current_chunk_id}]:Max upload limit reached for log type"
  @log_upload_allowed = false
  @log_upload_stopped_time =Time.now.to_i
- elsif resp_headers.has_key?'INVALID_LOGTYPE' && resp_headers['INVALID_LOGTYPE'] == 'True'
- log.error "Log type not present in this account so stopping log collection"
+ elsif resp_headers.has_key?'api_upload_enabled' and resp_headers['api_upload_enabled'] == 'False'
+ log.error "[#{current_chunk_id}]:API upload not enabled for log type : "
+ @log_upload_allowed = false
+ @log_upload_stopped_time =Time.now.to_i
+ elsif resp_headers.has_key?'invalid_logtype' and resp_headers['invalid_logtype'] == 'True'
+ log.error "[#{current_chunk_id}]:Log type not present in this account so stopping log collection"
+ @valid_logtype = false
+ elsif resp_headers.has_key?'invalid_account' and resp_headers['invalid_account'] == 'True'
+ log.error "[#{current_chunk_id}]: Invalid account so stopping log collection"
  @valid_logtype = false
  else
- @log_upload_allowed = true
- log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+ log.error "[#{current_chunk_id}]: Upload failed for reason : #{response.message}"
  end
  else
- log.error "Response Code #{resp_headers} from Site24x7, so retrying (#{counter + 1}/#{@max_retry})"
+ log.error "[#{current_chunk_id}]:Response Code #{response.code} from Site24x7, so retrying (#{retries + 1}/#{@max_retry})"
  need_retry = true
  end
  rescue StandardError => e
- log.error "Error connecting to Site24x7. exception: #{e} (#{counter + 1}/#{@max_retry})"
+ log.error "[#{current_chunk_id}]:Error connecting to Site24x7. exception: #{e} (#{retries + 1}/#{@max_retry})"
+ need_retry = true
  end

  if need_retry
- if counter == @max_retry - 1
- log.error "Could not send your logs after #{max_retry} tries"
+ retries += 1
+ if (retries >= @max_retry && @max_retry > 0) || (Time.now > first_upload_time + @retry_timeout && @retry_timeout > 0)
+ log.error "[#{current_chunk_id}]: Internal max retries(#{max_retry}) or retry_timeout : #{first_upload_time + @retry_timeout} reached"
  break
  end
+ log.info "[#{current_chunk_id}]:Going to retry the upload at #{Time.now + sleep_interval}"
  sleep(sleep_interval)
  sleep_interval *= 2
+ if sleep_interval > @maxretry_interval
+ sleep_interval = @maxretry_interval
+ end
  else
  return
  end
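
Note: with the default max_retry of -1 the count-based branch of the new stop condition never triggers, so only retry_timeout (24 hours by default) ends the loop; the Log-Size and upload-id headers are also re-set on every attempt. A quick illustrative evaluation of that condition with assumed values (not plugin code):

    max_retry       = -1
    retry_timeout   = 24 * 3600
    retries         = 500          # attempts so far
    elapsed_seconds = 2 * 3600     # time since first_upload_time
    stop = (retries >= max_retry && max_retry > 0) ||
           (elapsed_seconds > retry_timeout && retry_timeout > 0)
    stop  # => false: keep retrying until the 24-hour window runs out
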
@@ -599,8 +634,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  ml_regex_applier(@ml_trace, @ml_data)
  if @ml_trace_buffer
  if !(@formatted_line.empty?)
- @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
- @log_size += @ml_trace_buffer.bytesize
+ @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
  else
  @ml_trace += @ml_trace_buffer.gsub('\n', '<NewLine>')
  ml_regex_applier(@ml_trace, @ml_data)
@@ -615,10 +649,8 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  @json_data = ''
  elsif @old_formatted_line
  @formatted_line.update(@old_formatted_line)
- log_line_filter()
- @log_size += @old_log_size
+ data_collector()
  @old_formatted_line = {}
- @old_log_size = 0
  end
  @logged = true
  if @format_record
@@ -654,9 +686,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  end
  @formatted_line[key] = field_value
- @log_size -= adjust_length
  else
- @log_size -= (@formatted_line[key].bytesize - @masking_config[key]['string'].bytesize)
  @formatted_line[key] = @masking_config[key]['string']
  end
  end
@@ -693,11 +723,6 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  end
  end
- end
- if adjust_length
- @log_size -= adjust_length
- else
- @log_size -= (@formatted_line[key].bytesize - field_value.bytesize)
  end
  @formatted_line[key] = field_value
  end
@@ -716,10 +741,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  if @formatted_line.has_key?key
  match_derived = each.match(@formatted_line[key])
  if match_derived
- @formatted_line.update(match_derived.named_captures)
- for field_name,value in match_derived.named_captures do
- @log_size += @formatted_line[field_name].bytesize
- end
+ @formatted_line.update(match_derived.named_captures)
  end
  break
  end
@@ -727,10 +749,19 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  end
  rescue Exception => e
  log.error "Exception occurred in derived fields : #{e.backtrace}"
- end
+ end
  end

+ def log_size_calculation()
+ data_exclusion = ["_zl", "s247", "inode"]
+ @formatted_line.each do |field, value|
+ unless data_exclusion.any? { |exclusion| field.start_with?(exclusion) }
+ @log_size += value.to_s.bytesize
+ end
+ end
+ end
+
  def timer_task()
  while true
  @after_time = Time.now
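
Note: log_size_calculation replaces the per-field size bookkeeping removed throughout the hunks above; it makes one pass over the formatted record and skips internal fields whose names start with "_zl", "s247" or "inode". An illustrative run against a hypothetical record (not plugin code):

    formatted_line = {
      'message'       => 'GET /index 200',  # counted: 14 bytes
      '_zl_timestamp' => 1693000000000,     # skipped: starts with "_zl"
      's247agentuid'  => 'web-1'            # skipped: starts with "s247"
    }
    data_exclusion = ['_zl', 's247', 'inode']
    log_size = formatted_line.sum do |field, value|
      data_exclusion.any? { |ex| field.start_with?(ex) } ? 0 : value.to_s.bytesize
    end
    log_size  # => 14
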
@@ -741,7 +772,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  if out != nil
  out = Yajl.dump([out])
  out = gzip_compress(out)
- send_logs_to_s247(out, @log_size)
+ send_logs_to_s247(out, @log_size, 'holded_line')
  end
  end
  end
@@ -749,4 +780,4 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
  end
  end

- end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-site24x7
  version: !ruby/object:Gem::Version
- version: 0.1.4
+ version: 0.1.6
  platform: ruby
  authors:
  - Magesh Rajan
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-12-05 00:00:00.000000000 Z
+ date: 2023-08-26 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: bundler
@@ -152,7 +152,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.1.6
+ rubygems_version: 3.1.2
  signing_key:
  specification_version: 4
  summary: Site24x7 output plugin for Fluent event collector.