fluent-plugin-site24x7 0.1.5 → 0.1.7
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/fluent-plugin-site24x7.gemspec +1 -1
- data/lib/fluent/plugin/out_site24x7.rb +56 -33
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 0b5f4e324f1115eea47583eb6dbc1467394841aadf376f08af4b07e6b0618578
+  data.tar.gz: '08f30541235f4294db3a0546c5de0ee24d46cb0e68d76ecb96f3199175362844'
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8b47d505b63ae3bca0f033678cb01932d02732e2eecaab9d6c84f88765772daeddbe6380538d85c669cf5565df7378a63ba893e7408de704b8656d2794f77e10
+  data.tar.gz: 357ecae7bc8d57752cb0e66c94d9d3f69645eaf0258340909903759b00deaf8711c0de5aa1e10a17ca6c785dcc2a005f942f3fa40f460b958120823df34f9f4c
data/lib/fluent/plugin/out_site24x7.rb
CHANGED
@@ -21,8 +21,10 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   helpers :compat_parameters
 
   config_param :log_type_config, :string
-  config_param :max_retry, :integer, :default =>
+  config_param :max_retry, :integer, :default => -1
   config_param :retry_interval, :integer, :default => 2
+  config_param :maxretry_interval, :integer, :default => 60 #1 minutes
+  config_param :retry_timeout, :integer, :default => 24 * 3600 # 24 hours
   config_param :http_idle_timeout, :integer, default: 5
   config_param :http_read_timeout, :integer, default: 30
   config_param :http_proxy, :string, :default => nil
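Taken together, the new max_retry, maxretry_interval, and retry_timeout parameters, alongside the existing retry_interval, describe a capped exponential backoff: the wait starts at retry_interval seconds and doubles on each failed attempt but never exceeds maxretry_interval, while max_retry (a non-positive value disables the attempt limit) and retry_timeout bound how long one chunk keeps being retried. A minimal sketch of the resulting schedule, assuming the defaults above; the variable names are illustrative, not the plugin's:

```ruby
# Sketch (not plugin code) of the retry schedule implied by the new defaults:
# retry_interval doubles per attempt, capped at maxretry_interval, while
# max_retry (<= 0 means unlimited attempts) and retry_timeout bound the total
# effort spent on one chunk.
retry_interval    = 2          # seconds before the first retry
maxretry_interval = 60         # cap on any single wait
retry_timeout     = 24 * 3600  # stop retrying a chunk after 24 hours
max_retry         = -1         # -1 disables the attempt-count limit

waits = []
sleep_interval = retry_interval
elapsed = 0
attempt = 0
while elapsed < retry_timeout && (max_retry <= 0 || attempt < max_retry) && waits.size < 8
  waits << sleep_interval
  elapsed += sleep_interval
  attempt += 1
  sleep_interval = [sleep_interval * 2, maxretry_interval].min
end

p waits  # => [2, 4, 8, 16, 32, 60, 60, 60] (only the first eight waits shown)
```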
@@ -155,7 +157,6 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
 
     @s247_tz = {'hrs': 0, 'mins': 0} #UTC
     @log_source = Socket.gethostname
-    @valid_logtype = true
     @log_upload_allowed = true
     @log_upload_stopped_time = 0
     @s247_datetime_format_string = @logtype_config['dateFormat']
@@ -212,11 +213,12 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
       time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
       datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
     end
+    #log.info "datetime_string : (#{datetime_string}), s247_datetime_format_string: (#{@s247_datetime_format_string})"
     datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
     return Integer(datetime_data.strftime('%Q'))
   rescue Exception => e
-
-
+    log.error "Exception in parsing date: #{e.message}"
+    return 0
   end
 end
 
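The reworked rescue branch now logs the parse failure and returns 0 instead of falling through silently. A self-contained sketch of the same pattern; the format string below is only an example, not the plugin's configured dateFormat:

```ruby
require 'date'

# Convert a log timestamp to epoch milliseconds, returning 0 when parsing fails,
# mirroring the rescue branch above. The format string is an assumed example.
def to_epoch_ms(datetime_string, format = '%d/%b/%Y:%H:%M:%S %z')
  Integer(DateTime.strptime(datetime_string, format).strftime('%Q'))
rescue StandardError => e
  warn "Exception in parsing date: #{e.message}"
  0
end

puts to_epoch_ms('10/Oct/2023:13:55:36 +0000')  # => 1696946136000
puts to_epoch_ms('not-a-timestamp')             # => 0
```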
|
@@ -458,7 +460,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
 
   def format(tag, time, record)
-    if
+    if (@log_upload_allowed || (time.to_i - @log_upload_stopped_time > S247_LOG_UPLOAD_CHECK_INTERVAL))
       if (record.size == 1)
         if record.has_key?'message'
           [record['message']].to_msgpack
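format now drops incoming records only while uploads are paused, and starts accepting them again once S247_LOG_UPLOAD_CHECK_INTERVAL seconds have passed since the pause. The constant's value is not part of this diff; the sketch below assumes one purely for illustration:

```ruby
# Sketch of the gate used in format(). CHECK_INTERVAL is an assumed stand-in for
# S247_LOG_UPLOAD_CHECK_INTERVAL, whose real value is defined outside this diff.
CHECK_INTERVAL = 10 * 60  # assumed: re-check ten minutes after uploads were paused

def accept_record?(event_time, upload_allowed, upload_stopped_time, interval = CHECK_INTERVAL)
  upload_allowed || (event_time.to_i - upload_stopped_time > interval)
end

stopped_at = Time.now.to_i - (11 * 60)               # uploads paused eleven minutes ago
puts accept_record?(Time.now, false, stopped_at)     # => true, the interval has elapsed
puts accept_record?(Time.now, false, Time.now.to_i)  # => false, still paused
```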
@@ -470,26 +472,27 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
   end
 
   def write(chunk)
+    current_chunk_id = "#{chunk.dump_unique_id_hex(chunk.unique_id)}"
     begin
-
-
-
-
-
-
+      events = Array.new
+      chunk.msgpack_each do |record|
+        next if record.empty?
+        events.push record[0]
+      end
+      process_http_events(events, current_chunk_id)
     rescue Exception => e
       log.error "Exception #{e.backtrace}"
     end
   end
 
-  def process_http_events(events)
+  def process_http_events(events, current_chunk_id)
     @before_time = Time.now
     batches = batch_http_events(events)
     batches.each do |batched_event|
       formatted_events, @log_size = format_http_event_batch(batched_event)
       if (formatted_events.length>0)
         formatted_events = gzip_compress(formatted_events)
-        send_logs_to_s247(formatted_events, @log_size)
+        send_logs_to_s247(formatted_events, @log_size, current_chunk_id)
       end
     end
   end
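write now derives a per-chunk id (the chunk's unique id rendered as hex) and threads it through process_http_events so every log line about a chunk can be correlated, after unpacking the msgpack entries buffered by format back into plain messages. A standalone sketch of that unpack step using the msgpack gem; the sample records and the SecureRandom id are stand-ins for Fluentd's chunk data:

```ruby
require 'msgpack'
require 'securerandom'

# format() buffers each record as a one-element msgpack array; write() unpacks
# the chunk and collects the messages. The sample data below is made up.
buffer = ''.b
['first line', 'second line', nil].each do |msg|
  buffer << (msg ? [msg] : []).to_msgpack  # an empty array stands in for a skipped record
end

current_chunk_id = SecureRandom.hex(16)  # stand-in for chunk.dump_unique_id_hex(chunk.unique_id)

events = []
unpacker = MessagePack::Unpacker.new
unpacker.feed(buffer)
unpacker.each do |record|
  next if record.empty?  # mirrors `next if record.empty?` in write()
  events << record[0]
end

puts "[#{current_chunk_id}] #{events.inspect}"  # prints the chunk id, then ["first line", "second line"]
```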
@@ -547,52 +550,72 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
     gz.string
   end
 
-  def send_logs_to_s247(gzipped_parsed_lines, log_size)
+  def send_logs_to_s247(gzipped_parsed_lines, log_size, current_chunk_id)
     request = Net::HTTP::Post.new @uri.request_uri
     request.body = gzipped_parsed_lines
-
+
     sleep_interval = @retry_interval
     begin
-
+      retries = 0
+      first_upload_time = Time.now
+      while true
       need_retry = false
       begin
+        @s247_http_client.override_headers["Log-Size"] = @log_size
+        @s247_http_client.override_headers["upload-id"] = current_chunk_id
        response = @s247_http_client.request @uri, request
        resp_headers = response.each_header.to_h
 
        if response.code == '200'
-          if resp_headers
-            log.error "
+          if resp_headers['x-uploadid'] == nil
+            log.error "[#{current_chunk_id}]:upload id is empty hence retry flag enabled #{resp_headers}"
+            need_retry = true
+          else
+            @log_upload_allowed = true
+            log.debug "[#{current_chunk_id}]:Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+          end
+        elsif response.code == '400'
+          if resp_headers.has_key?'log_license_exceeds' and resp_headers['log_license_exceeds'] == 'True'
+            log.error "[#{current_chunk_id}]:Log license limit exceeds so not able to send logs"
            @log_upload_allowed = false
            @log_upload_stopped_time =Time.now.to_i
-          elsif resp_headers.has_key?'
-            log.error "Max upload limit reached for log type"
+          elsif resp_headers.has_key?'blocked_logtype' and resp_headers['blocked_logtype'] == 'True'
+            log.error "[#{current_chunk_id}]:Max upload limit reached for log type"
            @log_upload_allowed = false
            @log_upload_stopped_time =Time.now.to_i
-          elsif resp_headers.has_key?'
-            log.error "
-
-          elsif resp_headers['
-            log.error "
-
+          elsif resp_headers.has_key?'api_upload_enabled' and resp_headers['api_upload_enabled'] == 'False'
+            log.error "[#{current_chunk_id}]:API upload not enabled for log type"
+            Fluent::Engine.stop
+          elsif resp_headers.has_key?'invalid_logtype' and resp_headers['invalid_logtype'] == 'True'
+            log.error "[#{current_chunk_id}]:Log type not present in this account so stopping log collection"
+            Fluent::Engine.stop
+          elsif resp_headers.has_key?'invalid_account' and resp_headers['invalid_account'] == 'True'
+            log.error "[#{current_chunk_id}]: Invalid account so stopping log collection"
+            Fluent::Engine.stop
          else
-
-            log.debug "Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}"
+            log.error "[#{current_chunk_id}]: Upload failed for reason : #{response.message}"
          end
        else
-          log.error "Response Code #{
+          log.error "[#{current_chunk_id}]:Response Code #{response.code} from Site24x7, so retrying (#{retries + 1}/#{@max_retry})"
          need_retry = true
        end
      rescue StandardError => e
-        log.error "Error connecting to Site24x7. exception: #{e} (#{
+        log.error "[#{current_chunk_id}]:Error connecting to Site24x7. exception: #{e} (#{retries + 1}/#{@max_retry})"
+        need_retry = true
      end
 
      if need_retry
-
-
+        retries += 1
+        if (retries >= @max_retry && @max_retry > 0) || (Time.now > first_upload_time + @retry_timeout && @retry_timeout > 0)
+          log.error "[#{current_chunk_id}]: Internal max retries(#{max_retry}) or retry_timeout : #{first_upload_time + @retry_timeout} reached"
          break
        end
+        log.info "[#{current_chunk_id}]:Going to retry the upload at #{Time.now + sleep_interval}"
        sleep(sleep_interval)
        sleep_interval *= 2
+        if sleep_interval > @maxretry_interval
+          sleep_interval = @maxretry_interval
+        end
      else
        return
      end
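Each upload attempt now pins two request headers on the persistent HTTP client: Log-Size (the uncompressed size of the batch) and upload-id (the chunk id), and a successful response is expected to echo an x-uploadid header. A sketch of preparing such a batch without any network I/O; the header names come from the diff, while the sample events and the random id are stand-ins:

```ruby
require 'json'
require 'zlib'
require 'securerandom'

# Prepare one batch roughly the way the plugin does before send_logs_to_s247:
# JSON-encode the parsed lines, remember the uncompressed size, gzip the payload.
events   = [{ 'message' => 'sample log line 1' }, { 'message' => 'sample log line 2' }]
body     = JSON.generate(events)
log_size = body.bytesize
gzipped  = Zlib.gzip(body)

# Headers the retry loop sets on the persistent client for every attempt.
headers = {
  'Log-Size'  => log_size,              # uncompressed size of this batch
  'upload-id' => SecureRandom.hex(16),  # stand-in for the Fluentd chunk id
}

puts "would send #{gzipped.bytesize} gzip bytes (raw #{log_size}) with #{headers}"
```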
@@ -747,7 +770,7 @@ class Fluent::Site24x7Output < Fluent::Plugin::Output
     if out != nil
       out = Yajl.dump([out])
       out = gzip_compress(out)
-      send_logs_to_s247(out, @log_size)
+      send_logs_to_s247(out, @log_size, 'holded_line')
     end
   end
 end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-site24x7
 version: !ruby/object:Gem::Version
-  version: 0.1.5
+  version: 0.1.7
 platform: ruby
 authors:
 - Magesh Rajan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2023-
+date: 2023-12-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -152,7 +152,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.1.
+rubygems_version: 3.1.6
 signing_key:
 specification_version: 4
 summary: Site24x7 output plugin for Fluent event collector.