fluent-plugin-site24x7 0.1.5 → 0.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/fluent-plugin-site24x7.gemspec +1 -1
- data/lib/fluent/plugin/out_site24x7.rb +56 -31
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: fc1338accf3055ea6b93742a4280cdbdb8ec4b5e0ab97ea4ffc79e77855f4f28
+  data.tar.gz: ce48c6512000a972e2f3209b4b142d6ef4cffcec7268720f0a56fed15e75553c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: a48c2d71823a1756d55ae9343017331f1a6b0579c42f9fcec1c61070062bb51071adfb614b71cd162015943fd368fa179d4f932662d2b8a6e54678ce7e167539
+  data.tar.gz: d95117ea90350c380ee4dcacc1dd2238bac3d67e8338fa973c8231a81f2f701ead6d8f02d306b82789afb396094e250507a915ba974f64509ef480339597c881
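The new SHA256/SHA512 sums cover the metadata.gz and data.tar.gz members inside the .gem archive. As a quick local check of a downloaded fluent-plugin-site24x7-0.1.6.gem against the SHA256 values above, here is a minimal sketch; the verify_gem_checksums helper is illustrative (not part of the gem) and relies only on the RubyGems TarReader plus the Digest, Zlib and YAML standard libraries.

```ruby
require 'digest'
require 'yaml'
require 'zlib'
require 'rubygems/package'

# Illustrative helper: recompute the SHA256 sums recorded in checksums.yaml.gz
# for a locally downloaded .gem (a .gem is a tar archive whose members include
# metadata.gz, data.tar.gz and checksums.yaml.gz).
def verify_gem_checksums(gem_path)
  members = {}
  File.open(gem_path, 'rb') do |io|
    Gem::Package::TarReader.new(io) do |tar|
      tar.each { |entry| members[entry.full_name] = entry.read }
    end
  end
  expected = YAML.safe_load(Zlib.gunzip(members['checksums.yaml.gz']))
  %w[metadata.gz data.tar.gz].each do |name|
    actual = Digest::SHA256.hexdigest(members[name])
    puts "#{name}: #{actual == expected['SHA256'][name] ? 'OK' : 'MISMATCH'}"
  end
end

verify_gem_checksums('fluent-plugin-site24x7-0.1.6.gem')
```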
data/lib/fluent/plugin/out_site24x7.rb
CHANGED
@@ -21,8 +21,10 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   helpers :compat_parameters
 
   config_param :log_type_config, :string
-  config_param :max_retry, :integer, :default =>
+  config_param :max_retry, :integer, :default => -1
   config_param :retry_interval, :integer, :default => 2
+  config_param :maxretry_interval, :integer, :default => 60 #1 minutes
+  config_param :retry_timeout, :integer, :default => 24 * 3600 # 24 hours
   config_param :http_idle_timeout, :integer, default: 5
   config_param :http_read_timeout, :integer, default: 30
   config_param :http_proxy, :string, :default => nil
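Read together, the new defaults make failed uploads retry without an attempt cap (max_retry => -1), waiting retry_interval seconds at first and doubling up to the new maxretry_interval ceiling, while the new retry_timeout bounds how long a single chunk keeps being retried. A standalone sketch of that wait schedule (plain variables stand in for the plugin's config_params; nothing here is plugin code):

```ruby
# Sketch of the backoff schedule implied by the defaults above: the wait
# doubles on each retry and is capped at maxretry_interval; with max_retry => -1
# there is no attempt cap, so retry_timeout (24h) is the effective limit.
retry_interval    = 2
maxretry_interval = 60

waits = []
sleep_interval = retry_interval
8.times do
  waits << sleep_interval
  sleep_interval = [sleep_interval * 2, maxretry_interval].min
end
p waits   # => [2, 4, 8, 16, 32, 60, 60, 60]
```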
@@ -212,11 +214,12 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
         time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
         datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
       end
+      #log.info "datetime_string : (#{datetime_string}), s247_datetime_format_string: (#{@s247_datetime_format_string})"
       datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
       return Integer(datetime_data.strftime('%Q'))
     rescue Exception => e
-
-
+      log.error "Exception in parsing date: #{e.message}"
+      return 0
     end
   end
 
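The rescue branch now reports the parse failure and returns 0 instead of failing silently. For reference, the happy path combines DateTime.strptime with strftime('%Q') to get epoch milliseconds; a small standalone illustration (the timestamp and format string here are made-up examples, not values the plugin generates):

```ruby
require 'date'

# Parse a timestamp that carries an explicit offset, then emit epoch
# milliseconds, mirroring the strptime + strftime('%Q') pattern above.
datetime_string = '2023-08-26 10:15:30.123+05:30'   # example input
format_string   = '%Y-%m-%d %H:%M:%S.%L%z'          # example format

dt = DateTime.strptime(datetime_string, format_string)
puts Integer(dt.strftime('%Q'))   # => 1693025130123
```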
@@ -470,26 +473,27 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
 
   def write(chunk)
+    current_chunk_id = "#{chunk.dump_unique_id_hex(chunk.unique_id)}"
     begin
-
-
-
-
-
-
+      events = Array.new
+      chunk.msgpack_each do |record|
+        next if record.empty?
+        events.push record[0]
+      end
+      process_http_events(events, current_chunk_id)
     rescue Exception => e
       log.error "Exception #{e.backtrace}"
     end
   end
 
-  def process_http_events(events)
+  def process_http_events(events, current_chunk_id)
     @before_time = Time.now
     batches = batch_http_events(events)
     batches.each do |batched_event|
       formatted_events, @log_size = format_http_event_batch(batched_event)
       if (formatted_events.length>0)
         formatted_events = gzip_compress(formatted_events)
-        send_logs_to_s247(formatted_events, @log_size)
+        send_logs_to_s247(formatted_events, @log_size, current_chunk_id)
       end
     end
   end
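write now derives an id for every buffer chunk via dump_unique_id_hex(chunk.unique_id) and threads it through process_http_events into the upload, so a chunk's log lines can be correlated with its upload-id header. A small illustration of what that id is, using fluentd's UniqueId helpers outside a plugin class (requires the fluentd gem; illustrative only):

```ruby
require 'fluent/unique_id'

# Fluent::UniqueId backs chunk.unique_id / dump_unique_id_hex inside output
# plugins; calling it directly here just shows the shape of the id.
include Fluent::UniqueId::Mixin

raw_id = Fluent::UniqueId.generate   # 16 bytes, like a buffer chunk's unique_id
puts dump_unique_id_hex(raw_id)      # 32-character hex string used as the upload id
```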
@@ -547,52 +551,73 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
     gz.string
   end
 
-  def send_logs_to_s247(gzipped_parsed_lines, log_size)
+  def send_logs_to_s247(gzipped_parsed_lines, log_size, current_chunk_id)
     request = Net::HTTP::Post.new @uri.request_uri
     request.body = gzipped_parsed_lines
-
+
     sleep_interval = @retry_interval
     begin
-
+      retries = 0
+      first_upload_time = Time.now
+      while true
       need_retry = false
       begin
+        @s247_http_client.override_headers["Log-Size"] = @log_size
+        @s247_http_client.override_headers["upload-id"] = current_chunk_id
         response = @s247_http_client.request @uri, request
         resp_headers = response.each_header.to_h
 
         if response.code == '200'
-          if resp_headers
-          log.error "
+          if resp_headers['x-uploadid'] == nil
+            log.error "[#{current_chunk_id}]:upload id is empty hence retry flag enabled #{resp_headers}"
+            need_retry = true
+          else
+            @log_upload_allowed = true
+            log.debug "[#{current_chunk_id}]:Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+          end
+        elsif response.code == '400'
+          if resp_headers.has_key?'log_license_exceeds' and resp_headers['log_license_exceeds'] == 'True'
+            log.error "[#{current_chunk_id}]:Log license limit exceeds so not able to send logs"
             @log_upload_allowed = false
             @log_upload_stopped_time =Time.now.to_i
-          elsif resp_headers.has_key?'
-            log.error "Max upload limit reached for log type"
+          elsif resp_headers.has_key?'blocked_logtype' and resp_headers['blocked_logtype'] == 'True'
+            log.error "[#{current_chunk_id}]:Max upload limit reached for log type"
             @log_upload_allowed = false
             @log_upload_stopped_time =Time.now.to_i
-          elsif resp_headers.has_key?'
-            log.error "
+          elsif resp_headers.has_key?'api_upload_enabled' and resp_headers['api_upload_enabled'] == 'False'
+            log.error "[#{current_chunk_id}]:API upload not enabled for log type : "
+            @log_upload_allowed = false
+            @log_upload_stopped_time =Time.now.to_i
+          elsif resp_headers.has_key?'invalid_logtype' and resp_headers['invalid_logtype'] == 'True'
+            log.error "[#{current_chunk_id}]:Log type not present in this account so stopping log collection"
+            @valid_logtype = false
+          elsif resp_headers.has_key?'invalid_account' and resp_headers['invalid_account'] == 'True'
+            log.error "[#{current_chunk_id}]: Invalid account so stopping log collection"
             @valid_logtype = false
-          elsif resp_headers['x-uploadid'] == nil
-            log.error "upload id is empty hence retry flag enabled #{gzipped_parsed_lines.size} / #{@log_size}"
-            need_retry = true
           else
-
-            log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+            log.error "[#{current_chunk_id}]: Upload failed for reason : #{response.message}"
           end
         else
-          log.error "Response Code #{
+          log.error "[#{current_chunk_id}]:Response Code #{response.code} from Site24x7, so retrying (#{retries + 1}/#{@max_retry})"
           need_retry = true
         end
       rescue StandardError => e
-        log.error "Error connecting to Site24x7. exception: #{e} (#{
+        log.error "[#{current_chunk_id}]:Error connecting to Site24x7. exception: #{e} (#{retries + 1}/#{@max_retry})"
+        need_retry = true
       end
 
       if need_retry
-
-
+        retries += 1
+        if (retries >= @max_retry && @max_retry > 0) || (Time.now > first_upload_time + @retry_timeout && @retry_timeout > 0)
+          log.error "[#{current_chunk_id}]: Internal max retries(#{max_retry}) or retry_timeout : #{first_upload_time + @retry_timeout} reached"
           break
         end
+        log.info "[#{current_chunk_id}]:Going to retry the upload at #{Time.now + sleep_interval}"
         sleep(sleep_interval)
         sleep_interval *= 2
+        if sleep_interval > @maxretry_interval
+          sleep_interval = @maxretry_interval
+        end
       else
         return
       end
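The 400 handling is now driven by explicit response headers (log_license_exceeds, blocked_logtype, api_upload_enabled, invalid_logtype, invalid_account), and only a missing upload id or a non-200/400 status feeds the retry loop. A condensed restatement of that decision table as a standalone function; the classify_s247_response name is invented for illustration, and the plugin itself keeps this logic inline:

```ruby
# Condensed, illustrative restatement of the response handling above.
# Returns :ok, :retry (enter the backoff loop) or :stop (log and give up),
# mirroring how the plugin drives need_retry / @log_upload_allowed / @valid_logtype.
def classify_s247_response(code, headers)
  case code
  when '200'
    headers['x-uploadid'] ? :ok : :retry   # a 200 without an upload id is retried
  when '400'
    :stop   # license exceeded, blocked/invalid log type, invalid account,
            # or API upload disabled: reported but not retried
  else
    :retry  # any other status (and connection errors) goes through the backoff loop
  end
end

p classify_s247_response('200', { 'x-uploadid' => 'abc123' })  # => :ok
p classify_s247_response('500', {})                            # => :retry
```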
@@ -747,7 +772,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
       if out != nil
         out = Yajl.dump([out])
         out = gzip_compress(out)
-        send_logs_to_s247(out, @log_size)
+        send_logs_to_s247(out, @log_size, 'holded_line')
       end
     end
   end
@@ -755,4 +780,4 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
 end
 
-end
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-site24x7
 version: !ruby/object:Gem::Version
-  version: 0.1.5
+  version: 0.1.6
 platform: ruby
 authors:
 - Magesh Rajan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-08-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
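The gemspec metadata bumps the version to 0.1.6 with a release date of 2023-08-26. After upgrading, the installed version can be confirmed from Ruby with the standard RubyGems API (nothing plugin-specific here):

```ruby
require 'rubygems'

# Standard RubyGems lookup; raises Gem::MissingSpecError if the gem is absent.
spec = Gem::Specification.find_by_name('fluent-plugin-site24x7')
puts spec.version   # expected: 0.1.6 after the upgrade
puts spec.date      # expected: 2023-08-26 00:00:00 UTC, per the metadata above
```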