logstash-output-site24x7 0.1.1 → 0.1.2
- checksums.yaml +4 -4
- data/CHANGELOG.md +2 -0
- data/CONTRIBUTORS +0 -0
- data/Gemfile +0 -0
- data/LICENSE +0 -0
- data/README.md +1 -0
- data/lib/logstash/outputs/site24x7.rb +471 -84
- data/logstash-output-site24x7.gemspec +1 -1
- data/spec/outputs/site24x7_spec.rb +0 -0
- metadata +3 -3
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f50f2f0c133e264a4bc19d63275f26bb4f80563aa8fd65c6b4da22159e8b5c26
+  data.tar.gz: 1117f9c487cdf42c1e0be5ae2e01a173230d3ae0936538b8af627b48283caa66
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: ea1bf33015d904fc2f11e4ab0275e795b2f7c6695f339abd7a101aead93bb2979dfca99ed751b233301e057b8a89d2e0bceef86fb03143d2947a641a5c677f4b
+  data.tar.gz: 75804b58617818495b5a2951fb3efcdfd5483ad80bfac78f111d2e121cf418be993f5b457953f99f8948ef27527c6170485848c891dff6710cebb22c884cb736
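The four updated digests cover `metadata.gz` and `data.tar.gz`, the two archives packed inside the `.gem` file (the old values are not shown in this view). A minimal Ruby sketch, not part of the gem, for checking the new SHA256 entries after unpacking the gem with `tar -xf logstash-output-site24x7-0.1.2.gem`:

```ruby
require "digest"

# Hypothetical verification step: compare a freshly computed digest of the
# unpacked metadata.gz against the SHA256 value recorded in checksums.yaml.
expected = "f50f2f0c133e264a4bc19d63275f26bb4f80563aa8fd65c6b4da22159e8b5c26"
actual   = Digest::SHA256.file("metadata.gz").hexdigest
puts(actual == expected ? "metadata.gz OK" : "metadata.gz MISMATCH")
```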
data/CHANGELOG.md
CHANGED
data/CONTRIBUTORS
CHANGED
File without changes

data/Gemfile
CHANGED
File without changes

data/LICENSE
CHANGED
File without changes
data/README.md
CHANGED
@@ -29,6 +29,7 @@ Property | Description | Default Value
 log_type_config | log_type_config of your configured log type in site24x7 | nil
 max_retry | Number of times to resend failed uploads | 3
 retry_interval | Time interval to sleep initially between retries, exponential step-off | 2 seconds
+log_source | Define custom values, such as the name of the server or environment from where the logs originate | Hostname
 ## Need Help?
 
 If you need any support please contact us at support@site24x7.com.
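The table above gains one row in 0.1.2. A hedged sketch of how the documented defaults fit together (the values come from the README table; the override hash is purely illustrative):

```ruby
require "socket"

# Defaults as documented in the README table; log_source is new in 0.1.2
# and falls back to the machine's hostname.
defaults = {
  "max_retry"      => 3,                  # resend failed uploads up to 3 times
  "retry_interval" => 2,                  # seconds; doubled per retry (exponential step-off)
  "log_source"     => Socket.gethostname, # custom origin label for the logs
}
user_settings = { "log_source" => "staging-web-01" } # hypothetical override
puts defaults.merge(user_settings)
```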
data/lib/logstash/outputs/site24x7.rb
CHANGED
@@ -4,6 +4,7 @@ require "zlib"
 require "date"
 require "json"
 require "base64"
+require "digest"
 
 class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
   config_name "site24x7"
@@ -47,6 +48,80 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
     @logtype_config = JSON.parse(base64_url_decode(@log_type_config))
     @s247_custom_regex = if @logtype_config.has_key? 'regex' then Regexp.compile(@logtype_config['regex'].gsub('?P<','?<')) else nil end
     @s247_ignored_fields = if @logtype_config.has_key? 'ignored_fields' then @logtype_config['ignored_fields'] else [] end
+    @datetime_regex = if @logtype_config.has_key?'dateRegex' then Regexp.compile(@logtype_config['dateRegex'].gsub('?P<','?<')) else nil end
+
+    @ml_regex = if @logtype_config.has_key? 'ml_regex' then Regexp.compile(@logtype_config['ml_regex'].gsub('?P<','?<')) else nil end
+    @ml_end_regex = if @logtype_config.has_key? 'ml_end_regex' then Regexp.compile(@logtype_config['ml_end_regex'].gsub('?P<','?<')) else nil end
+    @max_ml_count = if @logtype_config.has_key? 'ml_regex' then @s247_custom_regex.inspect.scan('\<NewLine\>').length else nil end
+    @max_trace_line = 100
+    @ml_trace = ''
+    @ml_trace_buffer = ''
+    @ml_found = false
+    @ml_end_line_found = false
+    @ml_data = nil
+    @ml_count = 0
+
+    @json_data = ''
+    @sub_pattern = {}
+
+    if !(@logtype_config.has_key?('jsonPath'))
+      @message_key = get_last_group_inregex(@s247_custom_regex)
+    end
+
+    if @logtype_config.has_key?('jsonPath')
+      @logtype_config['jsonPath'].each_with_index do | key, index |
+        if key.has_key?('pattern')
+          begin
+            if Regexp.new(key['pattern'].gsub('?P<','?<'))
+              @sub_pattern[key['name']] = Regexp.compile(key['pattern'].gsub('?P<','?<'))
+            end
+          rescue Exception => e
+            @logger.error "Invalid subpattern regex #{e.backtrace}"
+          end
+        end
+      end
+    end
+
+    @old_formatted_line = {}
+    @formatted_line = {}
+
+    @masking_config = if @logtype_config.has_key? 'maskingConfig' then @logtype_config['maskingConfig'] else nil end
+    @hashing_config = if @logtype_config.has_key? 'hashingConfig' then @logtype_config['hashingConfig'] else nil end
+    @derived_config = if @logtype_config.has_key? 'derivedConfig' then @logtype_config['derivedConfig'] else nil end
+    @general_regex = Regexp.compile("(.*)")
+
+    if @derived_config
+      @derived_fields = {}
+      for key,value in @derived_config do
+        @derived_fields[key] = []
+        for values in @derived_config[key] do
+          @derived_fields[key].push(Regexp.compile(values.gsub('\\\\', '\\')))
+        end
+      end
+    end
+
+    if @masking_config
+      for key,value in @masking_config do
+        @masking_config[key]["regex"] = Regexp.compile(@masking_config[key]["regex"])
+      end
+    end
+
+    if @hashing_config
+      for key,value in @hashing_config do
+        @hashing_config[key]["regex"] = Regexp.compile(@hashing_config[key]["regex"])
+      end
+    end
+
+    if @logtype_config.has_key?'filterConfig'
+      for field,rules in @logtype_config['filterConfig'] do
+        temp = []
+        for value in @logtype_config['filterConfig'][field]['values'] do
+          temp.push(Regexp.compile(value))
+        end
+        @logtype_config['filterConfig'][field]['values'] = temp.join('|')
+      end
+    end
+
     @s247_tz = {'hrs': 0, 'mins': 0} #UTC
     @valid_logtype = true
     @log_upload_allowed = true
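Most of the compilation above rewrites `?P<` to `?<` before calling `Regexp.compile`: Site24x7 log-type configs ship Python-style named groups `(?P<name>...)`, which Ruby's regex engine rejects. A self-contained sketch of that conversion (pattern and input are illustrative):

```ruby
# Python-style named groups are rewritten to Ruby's (?<name>...) form,
# the same gsub('?P<','?<') trick used throughout the hunk above.
python_style = '(?P<ip>\S+) (?P<method>GET|POST)'
ruby_regex = Regexp.compile(python_style.gsub('?P<', '?<'))

m = ruby_regex.match('10.0.0.1 GET')
puts m.named_captures #=> {"ip"=>"10.0.0.1", "method"=>"GET"}
```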
@@ -56,20 +131,22 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
     if !@s247_datetime_format_string.include? 'unix'
       @is_year_present = if @s247_datetime_format_string.include?('%y') || @s247_datetime_format_string.include?('%Y') then true else false end
       if !@is_year_present
-
+        @s247_datetime_format_string = @s247_datetime_format_string+ ' %Y'
       end
       @is_timezone_present = if @s247_datetime_format_string.include? '%z' then true else false end
       if !@is_timezone_present && @logtype_config.has_key?('timezone')
-
-
-
-
-
-
-
-
+        @s247_datetime_format_string += '%z'
+        tz_value = @logtype_config['timezone']
+        if tz_value.start_with?('+')
+          @s247_tz['hrs'] = Integer('-' + tz_value[1..4])
+          @s247_tz['mins'] = Integer('-' + tz_value[3..6])
+        elsif tz_value.start_with?('-')
+          @s247_tz['hrs'] = Integer('+' + tz_value[1..4])
+          @s247_tz['mins'] = Integer('+' + tz_value[3..6])
+        end
       end
     end
+    Thread.new { timer_task() }
   end
 
   def init_http_client(logtype_config)
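The new branch appends `%z` to the datetime format and stores the configured zone with its sign flipped, so an explicit offset can later be glued onto each timestamp string before parsing. A hedged illustration of what `%z` plus `%Q` buy (timestamp and offset are made up; this is not the plugin's exact arithmetic):

```ruby
require "date"

# With '%z' in the format string, strptime absorbs an explicit offset, and
# '%Q' renders the parsed instant as epoch milliseconds.
parsed = DateTime.strptime("2022-12-05 10:00:00 -0530", "%Y-%m-%d %H:%M:%S %z")
puts Integer(parsed.strftime("%Q")) #=> 1670254200000
```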
@@ -88,52 +165,96 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
     end
     datetime_string += if !@is_year_present then ' '+String(Time.new.year) else '' end
     if !@is_timezone_present && @logtype_config.has_key?('timezone')
-      @s247_datetime_format_string += '%z'
       time_zone = String(@s247_tz['hrs'])+':'+String(@s247_tz['mins'])
       datetime_string += if time_zone.start_with?('-') then time_zone else '+'+time_zone end
     end
     datetime_data = DateTime.strptime(datetime_string, @s247_datetime_format_string)
     return Integer(datetime_data.strftime('%Q'))
-  rescue
-
+    rescue Exception => e
+      @logger.error "Exception in parsing date: #{e.backtrace}"
+      return 0
+    end
+  end
+
+  def log_line_filter()
+    applyMasking()
+    applyHashing()
+    getDerivedFields()
+  end
+
+  def get_last_group_inregex(s247_custom_regex)
+    return @s247_custom_regex.names[-1]
+  end
+
+  def remove_ignored_fields()
+    @s247_ignored_fields.each do |field_name|
+      @log_size -= if @log_fields.has_key?field_name then @log_fields.delete(field_name).bytesize else 0 end
     end
   end
 
+  def add_message_metadata()
+    @log_fields.update({'_zl_timestamp' => get_timestamp(@log_fields[@logtype_config['dateField']]), 's247agentuid' => @log_source})
+  end
+
   def parse_lines(lines)
     parsed_lines = []
-    log_size = 0
     lines.each do |line|
       if !line.empty?
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        begin
+          @logged = false
+          match = line.match(@s247_custom_regex)
+          if match
+            @formatted_line.update(@old_formatted_line)
+            @log_size += @old_log_size
+            @old_log_size = line.bytesize
+            @log_fields = match&.named_captures
+            remove_ignored_fields()
+            add_message_metadata()
+            @old_formatted_line = @log_fields
+            @last_line_matched = true
+            @trace_started = false
+          elsif @last_line_matched || @trace_started
+            is_date_present = !(line.scan(@datetime_regex).empty?)
+            @trace_started = !(is_date_present)
+            if !(is_date_present) && @old_formatted_line
+              if @old_formatted_line.has_key?(@message_key)
+                @old_formatted_line[@message_key] += '\n' + line
+                @old_log_size += line.bytesize
+                @trace_started = true
+                @last_line_matched = false
+              end
+            end
+          end
+          if @formatted_line.has_key?('_zl_timestamp')
+            log_line_filter()
+            parsed_lines.push(@formatted_line)
+            @formatted_line = {}
+          end
+        rescue Exception => e
+          @logger.error "Exception in parse_line #{e.backtrace}"
+          @formatted_line = {}
+        end
+      end
     end
-    return parsed_lines
+    return parsed_lines
   end
 
-  def is_filters_matched(
-
-
-
-
-
+  def is_filters_matched()
+    begin
+      if @logtype_config.has_key?'filterConfig'
+        @logtype_config['filterConfig'].each do |config,value|
+          if @formatted_line[config].scan(Regexp.new(@logtype_config['filterConfig'][config]['values'])).length > 0
+            val = true
+          else
+            val = false
+          end
+          if (@formatted_line.has_key?config) && (@logtype_config['filterConfig'][config]['match'] ^ (val))
+            return false
+          end
         end
+      end
+    rescue Exception => e
+      @logger.error "Exception occurred in filter: #{e.backtrace}"
     end
     return true
   end
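`parse_lines` now stitches multi-line entries: a line matching the log-type regex opens a new record, while an undated continuation line (a stack-trace frame, say) is appended to the previous record's last capture group. A self-contained sketch of that idea with illustrative patterns:

```ruby
# Same stitching idea as parse_lines, reduced to plain Ruby. The last named
# group doubles as the message key, mirroring get_last_group_inregex.
line_regex     = /(?<timestamp>\d{4}-\d{2}-\d{2}) (?<level>\w+) (?<message>.*)/
datetime_regex = /\d{4}-\d{2}-\d{2}/
message_key    = line_regex.names[-1]

records = []
["2022-12-05 ERROR boom", "  at Foo.bar(Foo.java:1)", "2022-12-05 INFO ok"].each do |line|
  if (m = line.match(line_regex))
    records << m.named_captures
  elsif line.scan(datetime_regex).empty? && records.last
    records.last[message_key] += '\n' + line # undated line -> continuation
  end
end
puts records.inspect
```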
@@ -141,7 +262,7 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
   def get_json_value(obj, key, datatype=nil)
     if obj != nil && (obj.has_key?key)
       if datatype and datatype == 'json-object'
-
+        arr_json = []
         child_obj = obj[key]
         if child_obj.class == String
           child_obj = JSON.parse(child_obj.gsub('\\','\\\\'))
@@ -154,48 +275,151 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
         return (if obj.has_key?key then obj[key] else obj[key.downcase] end)
       end
     elsif key.include?'.'
-
-
-
-
-
-
-
+      parent_key = key[0..key.index('.')-1]
+      child_key = key[key.index('.')+1..-1]
+      child_obj = obj[if obj.has_key?parent_key then parent_key else parent_key.capitalize() end]
+      if child_obj.class == String
+        child_obj = JSON.parse(child_obj.replace('\\','\\\\'))
+      end
+      return get_json_value(child_obj, child_key,datatype)
     end
   end
 
+  def json_log_applier(line)
+    json_log_size=0
+    @formatted_line = {}
+    @log_fields = {}
+    event_obj = if line.is_a?(String) then JSON.parse(line) else line end
+    @logtype_config['jsonPath'].each do |path_obj|
+      value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
+      if value
+        @log_fields[path_obj['name']] = value
+        json_log_size+= String(value).bytesize - (if value.class == Array then value.size*20 else 0 end)
+      end
+    end
+    for key,regex in @sub_pattern do
+      if @log_fields.has_key?(key)
+        matcher = regex.match(@log_fields.delete(key))
+        if matcher
+          @log_fields.update(matcher.named_captures)
+          remove_ignored_fields()
+          @formatted_line.update(@log_fields)
+        end
+      end
+    end
+
+    if !(is_filters_matched())
+      return false
+    else
+      add_message_metadata()
+      @formatted_line.update(@log_fields)
+      log_line_filter()
+      @log_size += json_log_size
+      return true
+    end
+  end
+
   def json_log_parser(lines_read)
-    log_size = 0
     parsed_lines = []
     lines_read.each do |line|
+      begin
+        @logged = false
         if !line.empty?
-
-
-
-          @logtype_config['jsonPath'].each do |path_obj|
-            value = get_json_value(event_obj, path_obj[if path_obj.has_key?'key' then 'key' else 'name' end], path_obj['type'])
-            if value
-              formatted_line[path_obj['name']] = value
-              current_log_size+= String(value).size - (if value.class == Array then value.size*20 else 0 end)
+          if ((line[0] == '{') && (@json_data[-1] == '}'))
+            if json_log_applier(@json_data)
+              parsed_lines.push(@formatted_line)
             end
+            @json_data=''
           end
-
-
-
-
-
+          @json_data += line
+        end
+      rescue Exception => e
+        @logger.error "Exception in parse_line #{e.backtrace}"
+      end
+    end
+    return parsed_lines
+  end
+
+  def ml_regex_applier(ml_trace, ml_data)
+    begin
+      @log_size += @ml_trace.bytesize
+      matcher = @s247_custom_regex.match(@ml_trace)
+      @log_fields = matcher.named_captures
+      @log_fields.update(@ml_data)
+      if @s247_ignored_fields
+        remove_ignored_fields()
+      end
+      add_message_metadata()
+      @formatted_line.update(@log_fields)
+      log_line_filter()
+    rescue Exception => e
+      @logger.error "Exception occurred in ml_parser : #{e.backtrace}"
+      @formatted_line = {}
+    end
+  end
+
+  def ml_log_parser(lines)
+    parsed_lines = []
+    lines.each do |line|
+      if !line.empty?
+        begin
+          @logged = false
+          ml_start_matcher = @ml_regex.match(line)
+          if ml_start_matcher || @ml_end_line_found
+            @ml_found = ml_start_matcher
+            @ml_end_line_found = false
+            @formatted_line = {}
+            if @ml_trace.length > 0
+              begin
+                ml_regex_applier(@ml_trace, @ml_data)
+                if @ml_trace_buffer && @formatted_line
+                  @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+                  @log_size += @ml_trace_buffer.bytesize
+                end
+                parsed_lines.push(@formatted_line)
+                @ml_trace = ''
+                @ml_trace_buffer = ''
+                if @ml_found
+                  @ml_data = ml_start_matcher.named_captures
+                  @log_size += line.bytesize
+                else
+                  @ml_data = {}
+                end
+                @ml_count = 0
+              rescue Exception => e
+                @logger.error "Exception occurred in ml_parser : #{e.backtrace}"
+              end
+            elsif @ml_found
+              @log_size += line.bytesize
+              @ml_data = ml_start_matcher.named_captures
+            end
+          elsif @ml_found
+            if @ml_count < @max_ml_count
+              @ml_trace += '<NewLine>' + line
+            elsif @ml_end_regex && @ml_end_regex.match(line)
+              @ml_end_line_found = True
+            elsif (@ml_count - @max_ml_count) < @max_trace_line
+              @ml_trace_buffer += "\n" + line
+            end
+            @ml_count += 1
          end
-
+        rescue Exception => e
+          @logger.error "Exception occurred in ml_parser : #{e.backtrace}"
+        end
+      end
     end
-    return parsed_lines
+    return parsed_lines
   end
-
+
   def process_http_events(events)
+    @before_time = Time.now
     batches = batch_http_events(events)
     batches.each do |batched_event|
-      formatted_events, log_size = format_http_event_batch(batched_event)
-
-
+      formatted_events, @log_size = format_http_event_batch(batched_event)
+      if (formatted_events.length>0)
+        formatted_events = gzip_compress(formatted_events)
+        send_logs_to_s247(formatted_events, @log_size)
+      end
     end
   end
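`get_json_value` now resolves dotted paths such as `request.headers.host` by splitting on the first dot and recursing, parsing embedded JSON strings along the way. A hedged, minimal rendering of that lookup (helper name and event are illustrative):

```ruby
require "json"

# Minimal dotted-path lookup in the spirit of get_json_value's new branch.
def dig_dotted(obj, key)
  return obj[key] unless key.include?(".")
  parent, child = key.split(".", 2)
  child_obj = obj[parent]
  child_obj = JSON.parse(child_obj) if child_obj.is_a?(String) # embedded JSON
  dig_dotted(child_obj, child)
end

event = { "request" => '{"headers": {"host": "example.com"}}' }
puts dig_dotted(event, "request.headers.host") #=> example.com
```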
@@ -204,12 +428,12 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
     current_batch = []
     current_batch_size = 0
     encoded_events.each_with_index do |encoded_event, i|
-      event_message = encoded_event.to_hash['message']
+      event_message = if encoded_event.to_hash().has_key? 'AL_PARSED' then encoded_event.to_hash() else encoded_event.to_hash['message'] end
+      current_event_size = if event_message.is_a?(Hash) then event_message.to_s.bytesize else event_message.bytesize end
+      if current_event_size > S247_MAX_RECORD_SIZE
+        event_message = event_message[0..(S247_MAX_RECORD_SIZE-DD_TRUNCATION_SUFFIX.length)]+DD_TRUNCATION_SUFFIX
         current_event_size = event_message.bytesize
-
-      event_message = event_message[0..(S247_MAX_RECORD_SIZE-DD_TRUNCATION_SUFFIX.length)]+DD_TRUNCATION_SUFFIX
-      current_event_size = event_message.bytesize
-      end
+      end
 
       if (i > 0 and i % S247_MAX_RECORD_COUNT == 0) or (current_batch_size + current_event_size > S247_MAX_BATCH_SIZE)
         batches << current_batch
@@ -218,7 +442,7 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
       end
 
       current_batch_size += current_event_size
-      current_batch << event_message
+      current_batch << (if event_message.is_a?(Hash) then event_message.to_json.to_s else event_message end)
     end
     batches << current_batch
     batches
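The batching rule is unchanged in shape: cut a new batch when the record count hits `S247_MAX_RECORD_COUNT` or the byte size would exceed `S247_MAX_BATCH_SIZE`. A runnable miniature with stand-in limits (the gem's actual constants are much larger):

```ruby
# Stand-in limits for illustration only.
MAX_RECORD_COUNT = 3
MAX_BATCH_SIZE   = 40 # bytes

def batch(events)
  batches, current, size = [], [], 0
  events.each_with_index do |event, i|
    if (i > 0 && i % MAX_RECORD_COUNT == 0) || (size + event.bytesize > MAX_BATCH_SIZE)
      batches << current
      current, size = [], 0
    end
    size += event.bytesize
    current << event
  end
  batches << current
end

p batch(["aaaaaaaaaa"] * 5) # => two batches: three events, then two
```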
@@ -226,13 +450,19 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
 
   def format_http_event_batch(events)
     parsed_lines = []
-    log_size = 0
+    @log_size = 0
+    @old_log_size=0
     if @logtype_config.has_key?'jsonPath'
-      parsed_lines
+      parsed_lines = json_log_parser(events)
+    elsif @logtype_config.has_key?'ml_regex'
+      parsed_lines = ml_log_parser(events)
     else
-
+      parsed_lines = parse_lines(events)
+    end
+    if (parsed_lines.length > 0)
+      return JSON.dump(parsed_lines), @log_size
     end
-    return
+    return [],0
   end
 
   def gzip_compress(payload)
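`format_http_event_batch` now returns the JSON dump plus its size, and the result goes through `gzip_compress` before upload. A self-contained sketch of in-memory gzip over a JSON payload:

```ruby
require "json"
require "stringio"
require "zlib"

# Serialize parsed records to JSON, then gzip the string in memory,
# mirroring the JSON.dump -> gzip_compress hand-off above.
payload = JSON.dump([{ "level" => "ERROR", "message" => "boom" }])

buffer = StringIO.new
Zlib::GzipWriter.wrap(buffer) { |gz| gz.write(payload) }
puts "#{payload.bytesize} bytes -> #{buffer.string.bytesize} bytes gzipped"
```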
@@ -248,7 +478,7 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
   end
 
   def send_logs_to_s247(gzipped_parsed_lines, log_size)
-    @headers['Log-Size'] = String(log_size)
+    @headers['Log-Size'] = String(@log_size)
     sleep_interval = @retry_interval
     begin
       @max_retry.times do |counter|
@@ -260,17 +490,17 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
           if resp_headers.has_key?'LOG_LICENSE_EXCEEDS' && resp_headers['LOG_LICENSE_EXCEEDS'] == 'True'
             @logger.error("Log license limit exceeds so not able to send logs")
             @log_upload_allowed = false
-
+            @log_upload_stopped_time =Time.now.to_i
           elsif resp_headers.has_key?'BLOCKED_LOGTYPE' && resp_headers['BLOCKED_LOGTYPE'] == 'True'
             @logger.error("Max upload limit reached for log type")
             @log_upload_allowed = false
-
+            @log_upload_stopped_time =Time.now.to_i
           elsif resp_headers.has_key?'INVALID_LOGTYPE' && resp_headers['INVALID_LOGTYPE'] == 'True'
             @logger.error("Log type not present in this account so stopping log collection")
-
+            @valid_logtype = false
           else
-
-            @logger.debug("Successfully sent logs with size #{gzipped_parsed_lines.size} / #{log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}")
+            @log_upload_allowed = true
+            @logger.debug("Successfully sent logs with size #{gzipped_parsed_lines.size} / #{@log_size} to site24x7. Upload Id : #{resp_headers['x-uploadid']}")
           end
         else
           @logger.error("Response Code #{response.code} from Site24x7, so retrying (#{counter + 1}/#{@max_retry})")
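The surrounding retry loop matches the README knobs: up to `max_retry` attempts, sleeping `retry_interval` seconds and doubling it each time. A hedged miniature of that loop (the `upload` lambda stands in for the real HTTP POST):

```ruby
max_retry      = 3 # README default
sleep_interval = 2 # seconds, README default

upload = ->(attempt) { attempt == 2 } # pretend only the third attempt succeeds

max_retry.times do |counter|
  if upload.call(counter)
    puts "sent on attempt #{counter + 1}"
    break
  end
  puts "attempt #{counter + 1} failed, retrying in #{sleep_interval}s"
  sleep(sleep_interval)
  sleep_interval *= 2 # exponential step-off
end
```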
@@ -296,4 +526,161 @@ class LogStash::Outputs::Site24x7 < LogStash::Outputs::Base
     end
   end
 
-
+  def log_the_holded_line()
+    @log_size = 0
+    if @logged == false
+      if (@ml_trace.length>0)
+        ml_regex_applier(@ml_trace, @ml_data)
+        if @ml_trace_buffer
+          if !(@formatted_line.empty?)
+            @formatted_line[@message_key] = @formatted_line[@message_key] + @ml_trace_buffer
+            @log_size += @ml_trace_buffer.bytesize
+          else
+            @ml_trace += @ml_trace_buffer.gsub('\n', '<NewLine>')
+            ml_regex_applier(@ml_trace, @ml_data)
+          end
+          @ml_trace_buffer = ''
+        end
+        @ml_trace = ''
+      elsif (@json_data.length>0)
+        if !(json_log_applier(@json_data))
+          @formatted_line={}
+        end
+        @json_data = ''
+      elsif @old_formatted_line
+        @formatted_line.update(@old_formatted_line)
+        log_line_filter()
+        @log_size += @old_log_size
+        @old_formatted_line = {}
+        @old_log_size = 0
+      end
+      @logged = true
+      if @format_record
+        @custom_parser.format_record()
+      end
+      if !(@formatted_line.empty?)
+        return @formatted_line
+      end
+    end
+    return nil
+  end
+
+  def applyMasking()
+    if @masking_config
+      begin
+        for key,value in @masking_config do
+          adjust_length = 0
+          mask_regex = @masking_config[key]["regex"]
+          if @formatted_line.has_key?key
+            field_value = @formatted_line[key]
+            if !(mask_regex.eql?(@general_regex))
+              matcher = field_value.to_enum(:scan, mask_regex).map { Regexp.last_match }
+              if matcher
+                (0..(matcher.length)-1).map do |index|
+                  start = matcher[index].offset(1)[0]
+                  _end = matcher[index].offset(1)[1]
+                  if ((start >= 0) && (_end > 0))
+                    start = start - adjust_length
+                    _end = _end - adjust_length
+                    adjust_length += (_end - start) - @masking_config[key]['string'].bytesize
+                    field_value = field_value[0..(start-1)] + @masking_config[key]['string'] + field_value[_end..field_value.bytesize]
+                  end
+                end
+              end
+              @formatted_line[key] = field_value
+              @log_size -= adjust_length
+            else
+              @log_size -= (@formatted_line[key].bytesize - @masking_config[key]['string'].bytesize)
+              @formatted_line[key] = @masking_config[key]['string']
+            end
+          end
+        end
+      rescue Exception => e
+        @logger.error "Exception occurred in masking : #{e.backtrace}"
+      end
+    end
+  end
+
+  def applyHashing()
+    if @hashing_config
+      begin
+        for key,value in @hashing_config do
+          hash_regex = @hashing_config[key]["regex"]
+          if @formatted_line.has_key?key
+            field_value = @formatted_line[key]
+            if (hash_regex.eql?(@general_regex))
+              hash_string = Digest::SHA256.hexdigest(field_value)
+              field_value = hash_string
+            else
+              adjust_length = 0
+              matcher = field_value.to_enum(:scan, hash_regex).map { Regexp.last_match }
+              if matcher
+                (0..(matcher.length)-1).map do |index|
+                  start = matcher[index].offset(1)[0]
+                  _end = matcher[index].offset(1)[1]
+                  if ((start >= 0) && (_end > 0))
+                    start = start - adjust_length
+                    _end = _end - adjust_length
+                    hash_string = Digest::SHA256.hexdigest(field_value[start..(_end-1)])
+                    adjust_length += (_end - start) - hash_string.bytesize
+                    field_value = field_value[0..(start-1)] + hash_string + field_value[_end..field_value.bytesize]
+                  end
+                end
+              end
+            end
+            if adjust_length
+              @log_size -= adjust_length
+            else
+              @log_size -= (@formatted_line[key].bytesize - field_value.bytesize)
+            end
+            @formatted_line[key] = field_value
+          end
+        end
+      rescue Exception => e
+        @logger.error "Exception occurred in hashing : #{e.backtrace}"
+      end
+    end
+  end
+
+  def getDerivedFields()
+    if @derived_config
+      begin
+        for key,value in @derived_fields do
+          for each in @derived_fields[key] do
+            if @formatted_line.has_key?key
+              match_derived = each.match(@formatted_line[key])
+              if match_derived
+                @formatted_line.update(match_derived.named_captures)
+                for field_name,value in match_derived.named_captures do
+                  @log_size += @formatted_line[field_name].bytesize
+                end
+              end
+              break
+            end
+          end
+        end
+      rescue Exception => e
+        @logger.error "Exception occurred in derived fields : #{e.backtrace}"
+      end
+    end
+  end
+
+  def timer_task()
+    while true
+      @after_time = Time.now
+      if @before_time
+        diff = @after_time-@before_time
+        if diff.to_i > 29
+          out = log_the_holded_line()
+          if out != nil
+            out = JSON.dump([out])
+            out = gzip_compress(out)
+            send_logs_to_s247(out, @log_size)
+          end
+        end
+      end
+      sleep(30)
+    end
+  end
+
+end
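`applyHashing` is why `require "digest"` was added: each span captured by the configured regex is replaced with its SHA-256 hex digest (the gem tracks capture-group offsets so it can also adjust the reported byte count). A condensed, gsub-based version of the same idea with an illustrative field and pattern:

```ruby
require "digest"

# Replace every regex-matched span with its SHA-256 digest, as applyHashing
# does field by field; the gem's offset bookkeeping is omitted here.
def hash_matches(value, regex)
  value.gsub(regex) { |span| Digest::SHA256.hexdigest(span) }
end

puts hash_matches("user=alice ip=10.0.0.1", /(?<=ip=)\S+/)
```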
data/logstash-output-site24x7.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-site24x7'
-  s.version = '0.1.1'
+  s.version = '0.1.2'
   s.licenses = ['']
   s.summary = 'Site24x7 output plugin for Logstash event collector'
   s.homepage = 'https://github.com/site24x7/logstash-output-site24x7'

data/spec/outputs/site24x7_spec.rb
CHANGED
File without changes
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-site24x7
 version: !ruby/object:Gem::Version
-  version: 0.1.1
+  version: 0.1.2
 platform: ruby
 authors:
 - Magesh Rajan
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-
+date: 2022-12-05 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: logstash-core-plugin-api
@@ -121,7 +121,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.3.26
 signing_key:
 specification_version: 4
 summary: Site24x7 output plugin for Logstash event collector