fluent-plugin-oci-logging-analytics 2.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: dde9a7a3e6be01a2556efa148f43934c2c723ffce544d3c11af1c50fe46f06b7
4
+ data.tar.gz: b442ff6c28ef6bc1c3ddc6d74c6b48e30c9921902607f9581c0aa116e5ba9db2
5
+ SHA512:
6
+ metadata.gz: a106761ec570d6c14435544617bf2357846b022869ef7c6154463b848cf72eeedbc96080a7d819911518818864379655aaa7668c4bad46d4d45a80a10dffc40f
7
+ data.tar.gz: dd7fcd53fd707c1c365cd8c5cbe1f4d6612d27b0de201d9ed2e1a98235322b08cdb7c4d15f968fa3bc667175e226cae1022673c36ba6409ee73bbe0e703417aa
@@ -0,0 +1,22 @@
1
+
2
# DTO representing a group of log records that share the same
# (metadata, entity, source, path) grouping key.
class LogEvents
  attr_accessor :entityId, :entityType, :logSourceName, :logPath, :logRecords, :metadata

  # lrpe_key        - five-element array: [metadata, entityId, entityType, logSourceName, logPath]
  # fluentd_records - array of fluentd record hashes; only their 'message' field is kept
  def initialize(lrpe_key, fluentd_records)
    @metadata, @entityId, @entityType, @logSourceName, @logPath = lrpe_key
    @logRecords = fluentd_records.map { |fluentd_record| fluentd_record['message'] }
  end

  # Serializes the event group, omitting fields that are nil.
  def to_hash
    serialized = {
      metadata: @metadata,
      entityId: @entityId,
      entityType: @entityType,
      logSourceName: @logSourceName,
      logPath: @logPath,
      logRecords: @logRecords
    }
    serialized.compact
  end
end
@@ -0,0 +1,22 @@
1
+
2
+ require_relative './logEvents'
3
+
4
# DTO for the JSON payload posted to Logging Analytics: optional global
# metadata plus a list of LogEvents groups.
class LogEventsJson
  attr_accessor :metadata, :LogEvents

  # metadata  - hash of global metadata; ignored when nil or the literal string 'null'
  # logEvents - array of objects responding to #to_hash (LogEvents instances)
  def initialize(metadata, logEvents)
    # Bug fix: the original guard used '||', which is satisfied by every
    # possible value (anything is either non-nil or different from 'null'),
    # so 'null' placeholders leaked into the payload. '&&' keeps metadata
    # only when it is meaningful.
    if metadata != nil && metadata != 'null'
      @metadata = metadata
    end
    @LogEvents = logEvents
  end

  # Serializes the payload, omitting a nil metadata field.
  def to_hash
    {
      metadata: @metadata,
      logEvents: @LogEvents.map do |le|
        le.to_hash
      end
    }.compact
  end
end
@@ -0,0 +1,11 @@
1
# Simple value holder for the Prometheus label set attached to per-chunk
# metrics. Every label starts out unset; records_valid starts at zero.
class MetricsLabels
  attr_accessor :tag, :logGroupId, :logSourceName, :logSet, :drop_reason, :records_valid

  def initialize
    @tag = @logGroupId = @logSourceName = @logSet = @drop_reason = nil
    @records_valid = 0
  end
end
@@ -0,0 +1,41 @@
1
+ require 'prometheus/client'
2
+ require 'prometheus/client/registry'
3
+ require 'prometheus/client/gauge'
4
+ require 'prometheus/client/histogram'
5
+ require 'singleton'
6
+
7
# Process-wide container for the plugin's Prometheus metrics.
# Singleton: metrics are created and registered exactly once per process.
class PrometheusMetrics
  include Singleton
  attr_accessor :records_received, :records_valid, :records_dropped, :records_error, :records_posted,
                :bytes_received, :bytes_posted, :latency_avg, :chunk_upload_time_taken

  def initialize
    createMetrics
    registerMetrics
  end

  # Instantiates the gauges/histograms together with their label sets.
  def createMetrics
    gauge = Prometheus::Client::Gauge
    @records_received = gauge.new(:oci_logging_analytics_records_received, docstring: '...', labels: [:tag,:logGroupId,:logSourceName,:logSet])
    @records_valid = gauge.new(:oci_logging_analytics_records_valid, docstring: '...', labels: [:tag,:logGroupId,:logSourceName,:logSet])
    @records_dropped = gauge.new(:oci_logging_analytics_records_dropped, docstring: '...', labels: [:tag,:logGroupId,:logSourceName,:logSet,:drop_reason])
    @records_error = gauge.new(:oci_logging_analytics_records_error, docstring: '...', labels: [:tag,:logGroupId,:logSourceName,:logSet,:error_code, :error_reason])
    @records_posted = gauge.new(:oci_logging_analytics_records_posted, docstring: '...', labels: [:tag,:logGroupId,:logSourceName,:logSet])
    @bytes_received = gauge.new(:oci_logging_analytics_bytes_received, docstring: '...', labels: [:tag])
    @bytes_posted = gauge.new(:oci_logging_analytics_bytes_posted, docstring: '...', labels: [:logGroupId])
    histogram = Prometheus::Client::Histogram
    @latency_avg = histogram.new(:oci_logging_analytics_network_latency_avg, docstring: '...', labels: [:tag])
    @chunk_upload_time_taken = histogram.new(:oci_logging_analytics_chunk_upload_time_taken, docstring: '...', labels: [:logGroupId])
  end

  # Adds each metric to the default registry unless it is already present.
  def registerMetrics
    registry = Prometheus::Client.registry
    # Registry#exist? keys metrics by their Symbol name, so the guards must
    # pass Symbols — the original passed Strings, which never match and would
    # permit a duplicate-registration error if initialization ever re-ran.
    registry.register(@records_received) unless registry.exist?(:oci_logging_analytics_records_received)
    registry.register(@records_valid) unless registry.exist?(:oci_logging_analytics_records_valid)
    registry.register(@records_dropped) unless registry.exist?(:oci_logging_analytics_records_dropped)
    registry.register(@records_error) unless registry.exist?(:oci_logging_analytics_records_error)
    registry.register(@records_posted) unless registry.exist?(:oci_logging_analytics_records_posted)
    registry.register(@bytes_received) unless registry.exist?(:oci_logging_analytics_bytes_received)
    # Bug fix: this guard previously checked 'oci_logging_analytics_bytes_valid',
    # a metric name that is never created; it must guard bytes_posted.
    registry.register(@bytes_posted) unless registry.exist?(:oci_logging_analytics_bytes_posted)
    registry.register(@latency_avg) unless registry.exist?(:oci_logging_analytics_network_latency_avg)
    registry.register(@chunk_upload_time_taken) unless registry.exist?(:oci_logging_analytics_chunk_upload_time_taken)
  end
end
@@ -0,0 +1,919 @@
1
+ ## Copyright (c) 2021 Oracle and/or its affiliates.
2
+ ## The Universal Permissive License (UPL), Version 1.0 as shown at https://oss.oracle.com/licenses/upl/
3
+
4
+ require 'fluent/plugin/output'
5
+ require "benchmark"
6
+ require 'zip'
7
+ require 'oci'
8
+ require 'logger'
9
+ require_relative '../dto/logEventsJson'
10
+ require_relative '../dto/logEvents'
11
+
12
+ module Fluent::Plugin
13
+ class OutOracleOCILogAnalytics < Output
14
+ Fluent::Plugin.register_output('oci-logging-analytics', self)
15
+ helpers :thread, :event_emitter
16
+
17
# Hard upper bound on the number of files packed into a single zip payload.
MAX_FILES_PER_ZIP = 100
# Shared class-level state: plugin logger, OCI client handle, and config
# validation messages queued up before the logger exists.
@@logger = nil
@@loganalytics_client = nil
@@logger_config_errors = []


desc 'OCI Tenancy Namespace.'
config_param :namespace, :string, :default => nil
desc 'OCI config file location.'
config_param :config_file_location, :string, :default => nil
desc 'Name of the profile to be used.'
config_param :profile_name, :string, :default => 'DEFAULT'
desc 'OCI endpoint.'
config_param :endpoint, :string, :default => nil
desc 'AuthType to be used.'
config_param :auth_type, :string, :default => 'InstancePrincipal'
desc 'Enable local payload dump.'
config_param :dump_zip_file, :bool, :default => false
desc 'Payload zip File Location.'
config_param :zip_file_location, :string, :default => nil
desc 'The kubernetes_metadata_keys_mapping.'
config_param :kubernetes_metadata_keys_mapping, :hash, :default => {"container_name":"Kubernetes Container Name","namespace_name":"Kubernetes Namespace Name","pod_name":"Kubernetes Pod Name","container_image":"Kubernetes Container Image","host":"Kubernetes Node Name","master_url":"Kubernetes Master Url"}


# HTTP proxy settings ********************************************
desc 'The http proxy to be used.'
config_param :http_proxy, :string, :default => nil
desc 'The proxy_ip to be used.'
config_param :proxy_ip, :string, :default => nil
desc 'The proxy_port to be used.'
config_param :proxy_port, :integer, :default => 80
desc 'The proxy_username to be used.'
config_param :proxy_username, :string, :default => nil
desc 'The proxy_password to be used.'
config_param :proxy_password, :string, :default => nil

# Plugin-log settings; file_size + file_count take precedence over rotation
# (see initialize_logger).
desc 'OCI Output plugin log location.'
config_param :plugin_log_location, :string, :default => nil
desc 'OCI Output plugin log level.'
config_param :plugin_log_level, :string, :default => nil
desc 'OCI Output plugin log rotation.'
config_param :plugin_log_rotation, :string, :default => nil
desc 'OCI Output plugin log age.'
config_param :plugin_log_age, :string, :default => nil # Deprecated
desc 'The maximum log file size at which point the log file to be rotated.'
config_param :plugin_log_file_size, :string, :default => nil
desc 'The number of archived/rotated log files to keep.'
config_param :plugin_log_file_count, :integer, :default => 10

desc 'OCI Output plugin 4xx exception handling.' # Except '429'
config_param :plugin_retry_on_4xx, :bool, :default => false

# Fallback values used when the user-supplied plugin-log settings are invalid.
@@default_log_level = 'info'
@@default_log_rotation = 'daily'
@@validated_log_size = nil
@@default_log_size = 1 * 1024 * 1024 # 1MB
@@default_number_of_logs = 10

#************************************************************************
# following params are only for internal testing.

desc 'Default is false. When true, prohibits HTTP requests to oci.'
config_param :test_flag, :bool, :default => false
desc 'Sets the environment. Default is prod.'
config_param :environment, :string, :default => "prod"
desc 'Default log group'
config_param :default_log_group,:string, :default => nil
#*************************************************************************
# define default buffer section
config_section :buffer do
  config_set_default :type, 'file'
  config_set_default :chunk_keys, ['oci_la_log_group_id']
  desc 'The number of threads of output plugins, which is used to write chunks in parallel.'
  config_set_default :flush_thread_count, 1
  desc 'The max size of each chunks: events will be written into chunks until the size of chunks become this size.'
  config_set_default :chunk_limit_size, 2 * 1024 * 1024 # 2MB
  desc 'The size limitation of this buffer plugin instance.'
  config_set_default :total_limit_size, 5 * (1024**3) # 5GB
  desc 'Flush interval'
  config_set_default :flush_interval, 30 # seconds
  desc 'The sleep interval of threads to wait next flush trial (when no chunks are waiting).'
  config_set_default :flush_thread_interval, 0.5
  desc 'The sleep interval seconds of threads between flushes when output plugin flushes waiting chunks next to next.'
  config_set_default :flush_thread_burst_interval, 0.05
  desc 'Seconds to wait before next retry to flush, or constant factor of exponential backoff.'
  config_set_default :retry_wait, 2 # seconds
  desc 'The maximum number of times to retry to flush while failing.'
  config_set_default :retry_max_times, 17
  desc 'The base number of exponential backoff for retries.'
  config_set_default :retry_exponential_backoff_base , 2
  desc 'retry_forever'
  config_set_default :retry_forever, true
end
110
+
111
# No plugin-specific state to set up beyond what Fluent::Plugin::Output does.
def initialize
  super
end
114
+
115
# Sets up the class-level plugin logger (@@logger).
# Rotation precedence:
#   1) plugin_log_file_count + plugin_log_file_size (size-based rotation, auto purge)
#   2) plugin_log_rotation (time-based rotation, no purge)
#   3) built-in defaults (@@default_number_of_logs / @@default_log_size)
# Also surfaces any validation warnings collected in @@logger_config_errors
# by the is_valid_* helpers before the logger existed.
def initialize_logger()
  filename = nil
  is_default_log_location = false
  if is_valid(@plugin_log_location)
    # Normalize the configured directory so it always ends with '/'.
    filename = @plugin_log_location[-1] == '/' ? @plugin_log_location : @plugin_log_location +'/'
  else
    is_default_log_location = true
  end
  if !is_valid_log_level(@plugin_log_level)
    @plugin_log_level = @@default_log_level
  end
  oci_fluent_output_plugin_log = nil
  if is_default_log_location
    # No directory configured: the log file lands in fluentd's working directory.
    oci_fluent_output_plugin_log = 'oci-logging-analytics.log'
  else
    oci_fluent_output_plugin_log = filename+'oci-logging-analytics.log'
  end

  logger_config = nil

  if is_valid_number_of_logs(@plugin_log_file_count) && is_valid_log_size(@plugin_log_file_size)
    # When customer provided valid log_file_count and log_file_size.
    # logger will rotate with max log_file_count with each file having max log_file_size.
    # Older logs purged automatically.
    @@logger = Logger.new(oci_fluent_output_plugin_log, @plugin_log_file_count, @@validated_log_size)
    logger_config = 'USER_CONFIG'
  elsif is_valid_log_rotation(@plugin_log_rotation)
    # When customer provided only log_rotation.
    # logger will create a new log based on log_rotation (new file everyday if the rotation is daily).
    # This will create too many logs over a period of time as log purging is not done.
    @@logger = Logger.new(oci_fluent_output_plugin_log, @plugin_log_rotation)
    logger_config = 'FALLBACK_CONFIG'
  else
    # When customer provided invalid log config, default config is considered.
    # logger will rotate with max default log_file_count with each file having max default log_file_size.
    # Older logs purged automatically.
    @@logger = Logger.new(oci_fluent_output_plugin_log, @@default_number_of_logs, @@default_log_size)
    logger_config = 'DEFAULT_CONFIG'
  end

  logger_set_level(@plugin_log_level)

  @@logger.info {"Initializing oci-logging-analytics plugin"}
  if is_default_log_location
    @@logger.info {"plugin_log_location is not specified. oci-logging-analytics.log will be generated under directory from where fluentd is executed."}
  end

  # Report which configuration path was taken.
  case logger_config
  when 'USER_CONFIG'
    @@logger.info {"Logger for oci-logging-analytics.log is initialized with config values log size: #{@plugin_log_file_size}, number of logs: #{@plugin_log_file_count}"}
  when 'FALLBACK_CONFIG'
    @@logger.info {"Logger for oci-logging-analytics.log is initialized with log rotation: #{@plugin_log_rotation}"}
  when 'DEFAULT_CONFIG'
    @@logger.info {"Logger for oci-logging-analytics.log is initialized with default config values log size: #{@@default_log_size}, number of logs: #{@@default_number_of_logs}"}
  end
  # Flush warnings queued before the logger was available.
  if @@logger_config_errors.length > 0
    @@logger_config_errors. each {|logger_config_error|
      @@logger.warn {"#{logger_config_error}"}
    }
  end
  if is_valid_log_age(@plugin_log_age)
    @@logger.warn {"'plugin_log_age' field is deprecated. Use 'plugin_log_file_size' and 'plugin_log_file_count' instead."}
  end
end
179
+
180
# Creates the class-level LogAnalytics client (@@loganalytics_client) using
# either instance-principal or config-file authentication, then applies
# optional HTTP proxy settings.
# NOTE: the trailing rescue logs and swallows every error here (including
# the ConfigError raised for an unknown auth_type), so on failure
# @@loganalytics_client remains nil and callers must cope with that.
def initialize_loganalytics_client()
  # Presence of a config file forces ConfigFile auth regardless of auth_type.
  if is_valid(@config_file_location)
    @auth_type = "ConfigFile"
  end
  case @auth_type
  when "InstancePrincipal"
    instance_principals_signer = OCI::Auth::Signers::InstancePrincipalsSecurityTokenSigner.new
    @@loganalytics_client = OCI::LogAnalytics::LogAnalyticsClient.new(config: OCI::Config.new, signer: instance_principals_signer)
  when "ConfigFile"
    my_config = OCI::ConfigFileLoader.load_config(config_file_location: @config_file_location, profile_name: @profile_name)
    if is_valid(endpoint)
      @@loganalytics_client = OCI::LogAnalytics::LogAnalyticsClient.new(config:my_config, endpoint:@endpoint)
      @@logger.info {"loganalytics_client initialised with endpoint: #{@endpoint}"}
    else
      @@loganalytics_client = OCI::LogAnalytics::LogAnalyticsClient.new(config:my_config)
    end
  else
    # Bug fix: the configured auth type was not interpolated into the message
    # ("Invalid authType @auth_type" was emitted literally). An unreachable
    # 'abort' after this raise was also removed.
    raise Fluent::ConfigError, "Invalid authType #{@auth_type}, authType must be either InstancePrincipal or ConfigFile."
  end

  # Optional proxy support; credentials are applied only when both are set.
  if is_valid(@proxy_ip) && is_number(@proxy_port)
    if is_valid(@proxy_username) && is_valid(@proxy_password)
      @@loganalytics_client.api_client.proxy_settings = OCI::ApiClientProxySettings.new(@proxy_ip, @proxy_port, @proxy_username, @proxy_password)
    else
      @@loganalytics_client.api_client.proxy_settings = OCI::ApiClientProxySettings.new(@proxy_ip, @proxy_port)
    end
  end

rescue => ex
  @@logger.error {"Error occurred while initializing LogAnalytics Client:
  authType: #{@auth_type},
  errorMessage: #{ex}"}
end
214
+
215
# Fluentd lifecycle hook: validates plugin and buffer configuration, and
# initializes the plugin logger plus the OCI LogAnalytics client.
# Raises Fluent::ConfigError for any unusable configuration.
def configure(conf)
  super
  initialize_logger

  initialize_loganalytics_client
  # Buffer must be file-based; log the reason before raising so it reaches the plugin log.
  @@logger.error {"Error in config file : Buffer plugin must be of @type file."} unless buffer_config['@type'] == 'file'
  raise Fluent::ConfigError, "Error in config file : Buffer plugin must be of @type file." unless buffer_config['@type'] == 'file'

  is_mandatory_fields_valid,invalid_field_name = mandatory_field_validator
  if !is_mandatory_fields_valid
    @@logger.error {"Error in config file : invalid #{invalid_field_name}"}
    raise Fluent::ConfigError, "Error in config file : invalid #{invalid_field_name}"
  end

  # Get the chunk_limit_size from conf as it's not available in the buffer_config
  unless conf.elements(name: 'buffer').empty?
    buffer_conf = conf.elements(name: 'buffer').first
    chunk_limit_size_from_conf = buffer_conf['chunk_limit_size']
    unless chunk_limit_size_from_conf.nil?
      log.debug "chunk limit size as per the configuration file is #{chunk_limit_size_from_conf}"
      # Convert the human-readable size suffix (k/m/g/t) into bytes.
      case chunk_limit_size_from_conf.to_s
      when /([0-9]+)k/i
        chunk_limit_size_bytes = $~[1].to_i * 1024
      when /([0-9]+)m/i
        chunk_limit_size_bytes = $~[1].to_i * (1024 ** 2)
      when /([0-9]+)g/i
        chunk_limit_size_bytes = $~[1].to_i * (1024 ** 3)
      when /([0-9]+)t/i
        chunk_limit_size_bytes = $~[1].to_i * (1024 ** 4)
      else
        raise Fluent::ConfigError, "error parsing chunk_limit_size"
      end

      log.debug "chunk limit size in bytes as per the configuration file is #{chunk_limit_size_bytes}"
      # Upload payloads are constrained to the 1MB..2MB range per chunk.
      if !chunk_limit_size_bytes.between?(1048576, 2097152)
        raise Fluent::ConfigError, "chunk_limit_size must be between 1MB and 2MB"
      end
    end
  end

  if buffer_config.flush_interval < 10
    raise Fluent::ConfigError, "flush_interval must be greater than or equal to 10sec"
  end
  @mutex = Mutex.new
  @num_flush_threads = Float(buffer_config.flush_thread_count)
  # Worst-case chunk lifetime across retries.
  # NOTE(review): computed but not stored or used — confirm whether this
  # local is still needed.
  max_chunk_lifespan = (buffer_config.retry_type == :exponential_backoff) ?
    buffer_config.retry_wait * buffer_config.retry_exponential_backoff_base**(buffer_config.retry_max_times+1) - 1 :
    buffer_config.retry_wait * buffer_config.retry_max_times
end
264
+
265
# Resolves the effective oci_la_log_set value for a record.
# When oci_la_log_set_ext_regex is present, the raw value is matched against
# it and the captured portion is used; otherwise the raw value is returned
# UTF-8 encoded. Returns nil when the value is missing or parsing fails.
def get_or_parse_logSet(unparsed_logSet, record, record_hash, is_tag_exists)
  oci_la_log_set = nil
  parsed_logSet = nil
  if !is_valid(unparsed_logSet)
    return nil
  end
  if record_hash.has_key?("oci_la_log_set_ext_regex") && is_valid(record["oci_la_log_set_ext_regex"])
    parsed_logSet = unparsed_logSet.match(record["oci_la_log_set_ext_regex"])
    #*******************************************TO-DO**********************************************************
    # Based on the observed behaviour, below cases are handled. We need to revisit this section.
    # When trying to apply regex on a String and getting a matched substring, observed couple of scenarios.
    # For oci_la_log_set_ext_regex value = '.*\\\\/([^\\\\.]{1,40}).*' this returns an array with both input string and matched pattern
    # For oci_la_log_set_ext_regex value = '[ \\\\w-]+?(?=\\\\.)' this returns an array with only matched pattern
    # For few cases, String is returned instead of an array.
    #*******************************************End of TO-DO***************************************************
    if parsed_logSet!= nil # Based on the regex pattern, match is returning different outputs for same input.
      if parsed_logSet.is_a? String
        oci_la_log_set = parsed_logSet.encode("UTF-8") # When matched String is returned instead of an array.
      elsif parsed_logSet.length > 1 #oci_la_log_set_ext_regex '.*\\\\/([^\\\\.]{1,40}).*' this returns an array with both input string and matched pattern
        oci_la_log_set = parsed_logSet[1].encode("UTF-8")
      elsif parsed_logSet.length > 0 # oci_la_log_set_ext_regex '[ \\\\w-]+?(?=\\\\.)' this returns an array with only matched pattern
        oci_la_log_set = parsed_logSet[0].encode("UTF-8") #Encoding to handle escape characters
      else
        oci_la_log_set = nil
      end
    else
      # Regex did not match: fall back to the default log set.
      oci_la_log_set = nil
      if is_tag_exists
        @@logger.error {"Error occurred while parsing oci_la_log_set : #{unparsed_logSet} with oci_la_log_set_ext_regex : #{record["oci_la_log_set_ext_regex"]}. Default oci_la_log_set will be assigned to all the records with tag : #{record["tag"]}."}
      else
        @@logger.error {"Error occurred while parsing oci_la_log_set : #{unparsed_logSet} with oci_la_log_set_ext_regex : #{record["oci_la_log_set_ext_regex"]}. Default oci_la_log_set will be assigned."}
      end
    end
  else
    # No regex configured: pass the raw value through, normalized to UTF-8.
    oci_la_log_set = unparsed_logSet.force_encoding('UTF-8').encode("UTF-8")
  end
  return oci_la_log_set
rescue => ex
  @@logger.error {"Error occurred while parsing oci_la_log_set : #{ex}. Default oci_la_log_set will be assigned."}
  return nil
end
306
+
307
# True when the field is neither nil nor empty.
def is_valid(field)
  return false if field.nil?
  !field.empty?
end
314
+
315
# Validates plugin_log_rotation; only daily/weekly/monthly are accepted.
# Queues a warning for other values (reported once the logger exists).
def is_valid_log_rotation(log_rotation)
  if !is_valid(log_rotation)
    return false
  end
  case log_rotation.downcase
  when "daily", "weekly", "monthly"
    return true
  else
    # Bug fix: this appended to the undefined class variable
    # @@logger_config_error (raising NameError on any invalid rotation);
    # the collection is @@logger_config_errors.
    @@logger_config_errors << "Only 'daily'/'weekly'/'monthly' are supported for 'plugin_log_rotation'."
    return false
  end
end
331
+
332
# True when the deprecated plugin_log_age value is daily/weekly/monthly
# (case-insensitive); false for nil/empty/anything else.
def is_valid_log_age(param)
  return false unless is_valid(param)
  %w[daily weekly monthly].include?(param.downcase)
end
347
+
348
# True when param names one of Ruby Logger's severities
# (case-insensitive); false for nil/empty/anything else.
def is_valid_log_level(param)
  return false unless is_valid(param)
  %w[DEBUG INFO WARN ERROR FATAL UNKNOWN].include?(param.upcase)
end
369
+
370
# Maps a level string onto the corresponding Logger constant on @@logger.
# Severity order: DEBUG < INFO < WARN < ERROR < FATAL < UNKNOWN
def logger_set_level(param)
  # Bug fix: the parameter was previously ignored in favour of
  # @plugin_log_level. The sole caller passes @plugin_log_level, so
  # honouring the parameter is behaviour-compatible and makes the method
  # usable with any level string. Unrecognised values leave the level as-is.
  case param.upcase
  when "DEBUG"
    @@logger.level = Logger::DEBUG
  when "INFO"
    @@logger.level = Logger::INFO
  when "WARN"
    @@logger.level = Logger::WARN
  when "ERROR"
    @@logger.level = Logger::ERROR
  when "FATAL"
    @@logger.level = Logger::FATAL
  when "UNKNOWN"
    @@logger.level = Logger::UNKNOWN
  end
end
387
+
388
# True when the field can be coerced with Kernel#Integer, false otherwise.
def is_number(field)
  Integer(field)
  true
rescue StandardError
  false
end
391
+
392
# Validates plugin_log_file_size strings such as '10k', '5m', '1g'.
# Side effects: stores the parsed byte count in @@validated_log_size on
# success; queues an error message in @@logger_config_errors on failure.
def is_valid_log_size(log_size)
  if log_size != nil
    case log_size.to_s
    when /([0-9]+)k/i
      log_size = $~[1].to_i * 1024
    when /([0-9]+)m/i
      log_size = $~[1].to_i * (1024 ** 2)
    when /([0-9]+)g/i
      log_size = $~[1].to_i * (1024 ** 3)
    else
      # No recognised unit suffix (k/m/g).
      @@logger_config_errors << "plugin_log_file_size must be greater than 1KB."
      return false
    end
    @@validated_log_size = log_size
    return true
  else
    return false
  end
end
411
+
412
# Validates plugin_log_file_count: must be a positive integer.
# Queues an error message in @@logger_config_errors when invalid.
def is_valid_number_of_logs(number_of_logs)
  # NOTE(review): relies on '||' short-circuit — the '< 1' comparison
  # assumes number_of_logs is already an Integer (config_param :integer
  # appears to guarantee this; confirm for any other caller).
  if !is_number(number_of_logs) || number_of_logs < 1
    @@logger_config_errors << "plugin_log_file_count must be greater than zero"
    return false
  end
  return true
end
419
+
420
# Filters oci_la_metadata down to plain key/value pairs: both key and value
# must be non-nil and must not be a Hash or Array. Skipped keys are reported
# in a single warning. Returns the filtered hash, or nil when nothing usable
# remains or the input is not a Hash at all.
def get_valid_metadata(oci_la_metadata)
  return nil if oci_la_metadata.nil?
  unless oci_la_metadata.is_a?(Hash)
    @@logger.warn {"Ignoring 'oci_la_metadata'/'oci_la_global_metadata' provided in the record_transformer filter as only key-value pairs are supported."}
    return nil
  end
  valid_metadata = {}
  invalid_keys = []
  oci_la_metadata.each do |key, value|
    value_ok = value != nil && !value.is_a?(Hash) && !value.is_a?(Array)
    key_ok = key != nil && !key.is_a?(Hash) && !key.is_a?(Array)
    if value_ok && key_ok
      valid_metadata[key] = value
    else
      invalid_keys << key
    end
  end
  if invalid_keys.length > 0
    @@logger.warn {"Skipping the following oci_la_metadata/oci_la_global_metadata keys #{invalid_keys.compact.reject(&:empty?).join(',')} as the corresponding values are in invalid format."}
  end
  valid_metadata.empty? ? nil : valid_metadata
end
452
+
453
# Checks the config params this plugin cannot run without.
# Returns [true, nil] when everything required is present, otherwise
# [false, name_of_first_invalid_field].
def mandatory_field_validator
  if !is_valid(@namespace)
    [false, 'namespace']
  elsif !is_valid(@config_file_location) && @auth_type == 'ConfigFile'
    [false, 'config_file_location']
  elsif !is_valid(@profile_name) && @auth_type == 'ConfigFile'
    [false, 'profile_name']
  else
    [true, nil]
  end
end
466
+
467
# Validates that a record carries the fields required for upload:
# 'message', 'oci_la_log_group_id' and 'oci_la_log_source_name'.
# Emits a tag-specific warning when the record carries a tag.
# Returns true for a usable record, false otherwise.
def is_valid_record(record_hash,record)
  begin
    if !record_hash.has_key?("message")
      if record_hash.has_key?("tag")
        @@logger.warn {"Invalid records associated with tag : #{record["tag"]}. 'message' field is not present in the record."}
      else
        @@logger.info {"InvalidRecord: #{record}"}
        @@logger.warn {"Invalid record. 'message' field is not present in the record."}
      end
      return false
    elsif !record_hash.has_key?("oci_la_log_group_id") || !is_valid(record["oci_la_log_group_id"])
      if record_hash.has_key?("tag")
        @@logger.warn {"Invalid records associated with tag : #{record["tag"]}.'oci_la_log_group_id' must not be empty.
        Skipping all the records associated with the tag"}
      else
        @@logger.warn {"Invalid record.'oci_la_log_group_id' must not be empty"}
      end
      return false
    elsif !record_hash.has_key?("oci_la_log_source_name") || !is_valid(record["oci_la_log_source_name"])
      if record_hash.has_key?("tag")
        @@logger.warn {"Invalid records associated with tag : #{record["tag"]}.'oci_la_log_source_name' must not be empty.
        Skipping all the records associated with the tag"}
      else
        @@logger.warn {"Invalid record.'oci_la_log_source_name' must not be empty"}
      end
      return false
    else
      return true
    end
  end
end
498
+
499
# Recursively flattens a nested hash. Every original key/value is kept, and
# each nested pair is additionally exposed under a dotted "parent.child" key.
def flatten(kubernetes_metadata)
  flattened = {}
  kubernetes_metadata.each do |key, value|
    flattened[key] = value
    next unless value.is_a?(Hash)
    flatten(value).each do |nested_key, nested_value|
      flattened["#{key}.#{nested_key}"] = nested_value
    end
  end
  flattened
end
509
+
510
# Merges flattened record['kubernetes'] fields into oci_la_metadata using
# the kubernetes_metadata_keys_mapping config param; values already present
# in oci_la_metadata win. Always returns a hash (possibly the partially
# built one when an error is logged).
# NOTE(review): flatten yields String keys, so the mapping's keys must be
# Strings for has_key? to match — confirm config_param :hash yields
# String keys at runtime.
def get_kubernetes_metadata(oci_la_metadata,record)
  if oci_la_metadata == nil
    oci_la_metadata = {}
  end
  kubernetes_metadata = flatten(record["kubernetes"])
  kubernetes_metadata.each do |key, value|
    if kubernetes_metadata_keys_mapping.has_key?(key)
      # Only fill in mapped keys the user has not already provided.
      if !is_valid(oci_la_metadata[kubernetes_metadata_keys_mapping[key]])
        oci_la_metadata[kubernetes_metadata_keys_mapping[key]] = json_message_handler(value)
      end
    end
  end
  return oci_la_metadata
rescue => ex
  @@logger.error {"Error occurred while getting kubernetes oci_la_metadata:
  error message: #{ex}"}
  return oci_la_metadata
end
528
+
529
# Returns the JSON encoding of a Hash message; any other type — or a
# serialization failure — yields the message unchanged.
def json_message_handler(message)
  return message unless message.is_a?(Hash)
  JSON.generate(message)
rescue StandardError
  message
end
538
+
539
# Walks every record in the chunk, validates and normalizes it, and groups
# the surviving records by 'oci_la_log_group_id'.
# Returns four values:
#   incoming_records_per_tag - tag => count of records seen
#   invalid_records_per_tag  - tag => count of records dropped
#   tags_per_logGroupId      - logGroupId => comma-separated tag list
#   lrpes_for_logGroupId     - logGroupId => array of valid records
def group_by_logGroupId(chunk)
  begin
    current = Time.now
    current_f, current_s = current.to_f, current.strftime("%Y%m%dT%H%M%S%9NZ")
    records = []
    count = 0

    invalid_tag_set = Set.new
    incoming_records_per_tag = Hash.new
    invalid_records_per_tag = Hash.new
    tags_per_logGroupId = Hash.new
    tag_logSet_map = Hash.new
    tag_metadata_map = Hash.new

    chunk.each do |time, record|

      if !record.nil?
        # Key-presence lookup table for the record.
        record_hash = record.keys.map {|x| [x,true]}.to_h
        is_tag_exists = false
        if record_hash.has_key?("tag") && is_valid(record["tag"])
          is_tag_exists = true
        end

        if is_tag_exists && incoming_records_per_tag.has_key?(record["tag"])
          incoming_records_per_tag[record["tag"]] += 1
        elsif is_tag_exists
          incoming_records_per_tag[record["tag"]] = 1
        end
        #For any given tag, if one record fails (mandatory fields validation) then all the records from that source will be ignored
        if is_tag_exists && invalid_tag_set.include?(record["tag"])
          invalid_records_per_tag[record["tag"]] += 1
          next #This tag is already present in the invalid_tag_set, so ignoring the message.
        end
        #Setting tag/default value for oci_la_log_path, when not provided in config file.
        if !record_hash.has_key?("oci_la_log_path") || !is_valid(record["oci_la_log_path"])
          if is_tag_exists
            record["oci_la_log_path"] = record["tag"]
          else
            record["oci_la_log_path"] = 'UNDEFINED'
          end
        end

        #Extracting oci_la_log_set when oci_la_log_set_key and oci_la_log_set_ext_regex is provided.
        #1) oci_la_log_set param is not provided in config file and above logic not executed.
        #2) Valid oci_la_log_set_key + No oci_la_log_set_ext_regex
        #a) Valid key available in record with oci_la_log_set_key corresponding value (oci_la_log_set_key is a key in config file) --> oci_la_log_set
        #b) No Valid key available in record with oci_la_log_set_key corresponding value --> nil
        #3) Valid key available in record with oci_la_log_set_key corresponding value + Valid oci_la_log_set_ext_regex
        #a) Parse success --> parsed oci_la_log_set
        #b) Parse failure --> nil (as oci_la_log_set value)
        #4) No oci_la_log_set_key --> do nothing --> nil

        #Extracting oci_la_log_set when oci_la_log_set and oci_la_log_set_ext_regex is provided.
        #1) Valid oci_la_log_set + No oci_la_log_set_ext_regex --> oci_la_log_set
        #2) Valid oci_la_log_set + Valid oci_la_log_set_ext_regex
        #a) Parse success --> parsed oci_la_log_set
        #b) Parse failure --> nil (as oci_la_log_set value)
        #3) No oci_la_log_set --> do nothing --> nil

        unparsed_logSet = nil
        processed_logSet = nil
        # Log-set resolution is cached per tag in tag_logSet_map.
        if is_tag_exists && tag_logSet_map.has_key?(record["tag"])
          record["oci_la_log_set"] = tag_logSet_map[record["tag"]]
        else
          if record_hash.has_key?("oci_la_log_set_key")
            if is_valid(record["oci_la_log_set_key"]) && record_hash.has_key?(record["oci_la_log_set_key"])
              if is_valid(record[record["oci_la_log_set_key"]])
                unparsed_logSet = record[record["oci_la_log_set_key"]]
                processed_logSet = get_or_parse_logSet(unparsed_logSet,record, record_hash,is_tag_exists)
              end
            end
          end
          if !is_valid(processed_logSet) && record_hash.has_key?("oci_la_log_set")
            if is_valid(record["oci_la_log_set"])
              unparsed_logSet = record["oci_la_log_set"]
              processed_logSet = get_or_parse_logSet(unparsed_logSet,record, record_hash,is_tag_exists)
            end
          end
          record["oci_la_log_set"] = processed_logSet
          tag_logSet_map[record["tag"]] = processed_logSet
        end

        unless is_valid_record(record_hash,record)
          if is_tag_exists
            invalid_tag_set.add(record["tag"])
            invalid_records_per_tag[record["tag"]] = 1
          end
          next
        end
        #This will check for null or empty messages and only that record will be ignored.
        if !is_valid(record["message"])
          if is_tag_exists
            @@logger.warn {"'message' field has empty value, Skipping records associated with tag : #{record["tag"]}."}
            if invalid_records_per_tag.has_key?(record["tag"])
              invalid_records_per_tag[record["tag"]] += 1
            else
              invalid_records_per_tag[record["tag"]] = 1
            end
          else
            @@logger.warn {"'message' field has empty value, Skipping record."}
          end
          next
        else
          record["message"] = json_message_handler(record["message"])
        end

        if record_hash.has_key?("kubernetes")
          record["oci_la_metadata"] = get_kubernetes_metadata(record["oci_la_metadata"],record)
        end

        # NOTE(review): tag_metadata_map is read here but never written
        # anywhere in this method, and the else-branch stores metadata into
        # tags_per_logGroupId keyed by *tag* — the same hash that is keyed by
        # *log_group_id* just below. This looks like an unintended mix-up
        # (the stores should probably target tag_metadata_map); confirm
        # intended behaviour before changing.
        if tag_metadata_map.has_key?(record["tag"])
          record["oci_la_metadata"] = tag_metadata_map[record["tag"]]
        else
          if record_hash.has_key?("oci_la_metadata")
            record["oci_la_metadata"] = get_valid_metadata(record["oci_la_metadata"])
            tags_per_logGroupId[record["tag"]] = record["oci_la_metadata"]
          else
            tags_per_logGroupId[record["tag"]] = nil
          end
        end

        # Accumulate the comma-separated list of tags per log group id.
        if is_tag_exists
          if tags_per_logGroupId.has_key?(record["oci_la_log_group_id"])
            if !tags_per_logGroupId[record["oci_la_log_group_id"]].include?(record["tag"])
              tags_per_logGroupId[record["oci_la_log_group_id"]] += ", "+record["tag"]
            end
          else
            tags_per_logGroupId[record["oci_la_log_group_id"]] = record["tag"]
          end
        end

        records << record
      else
        @@logger.trace {"Record is nil, ignoring the record"}
      end
    end
    @@logger.debug {"records.length:#{records.length}"}
    lrpes_for_logGroupId = {}
    records.group_by{|record|
      oci_la_log_group_id = record['oci_la_log_group_id']
      (oci_la_log_group_id)
    }.map {|oci_la_log_group_id, records_per_logGroupId|
      lrpes_for_logGroupId[oci_la_log_group_id] = records_per_logGroupId
    }
  rescue => ex
    @@logger.error {"Error occurred while grouping records by oci_la_log_group_id:#{ex.inspect}"}
  end
  return incoming_records_per_tag,invalid_records_per_tag,tags_per_logGroupId,lrpes_for_logGroupId
end
688
# Main entry point for FluentD's flush threads, invoked when a buffer chunk
# is ready for flushing (see chunk limits and flush_intervals).
# Synchronization has been removed. See EMCLAS-28675.
#
# Steps:
#   1) Group the chunk's records by oci_la_log_group_id.
#   2) Log per-tag counts of incoming vs. dropped (invalid) records.
#   3) For each log group, build one or more in-memory zip payloads
#      (bounded by MAX_FILES_PER_ZIP) and upload each to OCI Logging Analytics.
def write(chunk)
  @@logger.info {"Received new chunk, started processing ..."}
  incoming_records_per_tag, invalid_records_per_tag, tags_per_logGroupId, lrpes_for_logGroupId = group_by_logGroupId(chunk)

  # Report per-tag record counts; dropped == records rejected during grouping.
  incoming_records_per_tag.each do |tag, incoming_count|
    dropped_messages = invalid_records_per_tag.has_key?(tag) ? invalid_records_per_tag[tag].to_i : 0
    valid_messages = incoming_count.to_i - dropped_messages
    if dropped_messages > 0
      @@logger.info {"Messages: #{incoming_count.to_i} \t Valid: #{valid_messages} \t Invalid: #{dropped_messages} \t tag:#{tag}"}
    end
    @@logger.debug {"Messages: #{incoming_count.to_i} \t Valid: #{valid_messages} \t Invalid: #{dropped_messages} \t tag:#{tag}"}
  end

  return if lrpes_for_logGroupId.nil? || lrpes_for_logGroupId.empty?

  lrpes_for_logGroupId.each do |oci_la_log_group_id, records_per_logGroupId|
    @@logger.info {"Generating payload with #{records_per_logGroupId.length} records for oci_la_log_group_id: #{oci_la_log_group_id}"}
    # Only MAX_FILES_PER_ZIP (100) files are allowed per zip, so a single
    # log group may yield several zip files and as many upload API calls.
    logSets_per_logGroupId_map, oci_la_global_metadata = get_logSets_map_per_logGroupId(oci_la_log_group_id, records_per_logGroupId)
    next if logSets_per_logGroupId_map.nil?
    logSets_per_logGroupId_map.each do |file_count, records_per_logSet_map|
      zippedstream, number_of_records = get_zipped_stream(oci_la_log_group_id, oci_la_global_metadata, records_per_logSet_map)
      next if zippedstream.nil?
      begin
        zippedstream.rewind # reposition buffer pointer to the beginning
        upload_to_oci(oci_la_log_group_id, number_of_records, zippedstream)
      ensure
        # Close every stream, not just the last one produced for this group.
        zippedstream.close
      end
    end
  end
end
744
+
745
# Each oci_la_log_set corresponds to a separate file in the zip, and only
# MAX_FILES_PER_ZIP files are allowed per zip. Log sets are therefore grouped
# into batches: once file_count reaches a multiple of MAX_FILES_PER_ZIP, the
# accumulated batch is sealed and a fresh one is started for a new zip file.
#
# Returns [logSets_per_logGroupId_map, oci_la_global_metadata] where the map
# is keyed by the running file count at the end of each batch. Returns
# [nil, nil] (via an explicit nil) when grouping fails, so callers' nil
# checks work.
def get_logSets_map_per_logGroupId(oci_la_log_group_id, records_per_logGroupId)
  file_count = 0
  oci_la_global_metadata = nil
  is_oci_la_global_metadata_assigned = false
  records_per_logSet_map = Hash.new
  logSets_per_logGroupId_map = Hash.new

  grouped = records_per_logGroupId.group_by do |record|
    # Capture global metadata once, from the first record examined.
    unless is_oci_la_global_metadata_assigned
      if record.keys.include?("oci_la_global_metadata")
        oci_la_global_metadata = record['oci_la_global_metadata']
      end
      is_oci_la_global_metadata_assigned = true
    end
    record['oci_la_log_set']
  end

  # Side-effecting iteration: use each, not map.
  grouped.each do |oci_la_log_set, records_per_logSet|
    # Start a fresh batch at every MAX_FILES_PER_ZIP boundary.
    if file_count % OutOracleOCILogAnalytics::MAX_FILES_PER_ZIP == 0
      records_per_logSet_map = Hash.new
    end
    records_per_logSet_map[oci_la_log_set] = records_per_logSet
    file_count += 1
    if file_count % OutOracleOCILogAnalytics::MAX_FILES_PER_ZIP == 0
      logSets_per_logGroupId_map[file_count] = records_per_logSet_map
    end
  end

  # Seal the final (partial) batch. Skip when empty (no records at all) or
  # already stored (file_count landed exactly on a batch boundary), so we
  # never emit a spurious zero-record zip.
  unless records_per_logSet_map.empty? || logSets_per_logGroupId_map.has_key?(file_count)
    logSets_per_logGroupId_map[file_count] = records_per_logSet_map
  end
  return logSets_per_logGroupId_map, oci_la_global_metadata
rescue => exc
  @@logger.error {"Error in mapping records to oci_la_log_set.
                   oci_la_log_group_id: #{oci_la_log_group_id},
                   error message:#{exc}"}
  nil # explicit: callers destructure this into [nil, nil]
end
783
+
784
# Takes the per-logSet record groups for one oci_la_log_group_id and converts
# them into an in-memory zipfile (one JSON file per oci_la_log_set, records
# further grouped into LogEvents by their metadata/entity/source/path key).
#
# Returns [zippedstream, number_of_records]; zippedstream is nil when payload
# generation failed (the error is logged, not re-raised).
def get_zipped_stream(oci_la_log_group_id, oci_la_global_metadata, records_per_logSet_map)
  # Fixed: was `current, = Time.now` (stray-comma destructuring typo) and an
  # unused `current_f`.
  current_s = Time.now.strftime("%Y%m%dT%H%M%S%9NZ")
  number_of_records = 0
  files_generated = 0
  zippedstream = nil
  zippedstream = Zip::OutputStream.write_buffer do |zos|
    records_per_logSet_map.each do |oci_la_log_set, records_per_logSet|
      # One LogEvents object per unique (metadata, entity id/type, source, path).
      lrpes_for_logEvents = records_per_logSet.group_by { |record|
        [
          record['oci_la_metadata'],
          record['oci_la_entity_id'],
          record['oci_la_entity_type'],
          record['oci_la_log_source_name'],
          record['oci_la_log_path']
        ]
      }.map { |lrpe_key, records_per_lrpe|
        number_of_records += records_per_lrpe.length
        LogEvents.new(lrpe_key, records_per_lrpe)
      }
      files_generated += 1
      next_entry = if is_valid(oci_la_log_set)
                     "#{oci_la_log_group_id}_#{current_s}_#{files_generated}_logSet=#{oci_la_log_set}.json"
                   else
                     "#{oci_la_log_group_id}_#{current_s}_#{files_generated}.json"
                   end
      @@logger.debug {"Added entry #{next_entry} for oci_la_log_set #{oci_la_log_set} into the zip."}
      zos.put_next_entry(next_entry)
      zos.write LogEventsJson.new(oci_la_global_metadata, lrpes_for_logEvents).to_hash.to_json
    end
  end
  zippedstream.rewind
  save_zip_to_local(oci_la_log_group_id, zippedstream, current_s) if @dump_zip_file
  return zippedstream, number_of_records
rescue => exc
  @@logger.error {"Error in generating payload.
                   oci_la_log_group_id: #{oci_la_log_group_id},
                   error message:#{exc}"}
  return zippedstream, number_of_records
end
828
+
829
# Dumps the in-memory zip stream to a local file (debug aid, enabled via
# @dump_zip_file) under @zip_file_location. Errors are logged, never raised.
def save_zip_to_local(oci_la_log_group_id, zippedstream, current_s)
  file_name = "#{oci_la_log_group_id}_#{current_s}.zip"
  file_location = @zip_file_location + file_name
  # "wb": zip content is binary; text mode would corrupt it on some platforms.
  # Block form guarantees the handle is closed even on write failure.
  File.open(file_location, "wb") do |file|
    file.write(zippedstream.sysread)
  end
rescue => ex
  # Fixed: values were previously logged as literal text (missing #{} interpolation).
  @@logger.error {"Error occurred while saving zip file.
                   oci_la_log_group_id: #{oci_la_log_group_id},
                   fileLocation: #{@zip_file_location}
                   fileName: #{file_name}
                   error message: #{ex}"}
end
845
+
846
# Uploads one zipped payload to OCI Logging Analytics for the given log group.
#
# Service errors: 4xx statuses 400/401/404 are re-raised only when
# plugin_retry_on_4xx is set; 429 and 5xx are always re-raised so FluentD
# retries the chunk. Any other exception is logged and swallowed.
def upload_to_oci(oci_la_log_group_id, number_of_records, zippedstream)
  opts = {payload_type: "ZIP"}

  # Plain positional arguments. The previous `namespace_name=@namespace, ...`
  # style looked like keyword arguments but actually created throwaway locals
  # and passed positionally — a misleading Ruby trap.
  response = @@loganalytics_client.upload_log_events_file(@namespace,
                                                          oci_la_log_group_id,
                                                          zippedstream,
                                                          opts)
  if !response.nil? && response.status == 200 then
    headers = response.headers
    @@logger.info {"The payload has been successfully uploaded to logAnalytics -
                    oci_la_log_group_id: #{oci_la_log_group_id},
                    ConsumedRecords: #{number_of_records},
                    Date: #{headers['date']},
                    Time: #{headers['timecreated']},
                    opc-request-id: #{headers['opc-request-id']},
                    opc-object-id: #{headers['opc-object-id']}"}
  end
rescue OCI::Errors::ServiceError => serviceError
  case serviceError.status_code
  when 400
    @@logger.error {"oci upload exception : Error while uploading the payload. Invalid/Incorrect/missing Parameter - opc-request-id:#{serviceError.request_id}"}
    raise serviceError if plugin_retry_on_4xx
  when 401
    @@logger.error {"oci upload exception : Error while uploading the payload. Not Authenticated.
                     opc-request-id:#{serviceError.request_id}
                     message: #{serviceError.message}"}
    raise serviceError if plugin_retry_on_4xx
  when 404
    @@logger.error {"oci upload exception : Error while uploading the payload. Authorization failed for given oci_la_log_group_id against given Tenancy Namespace.
                     oci_la_log_group_id: #{oci_la_log_group_id}
                     Namespace: #{@namespace}
                     opc-request-id: #{serviceError.request_id}
                     message: #{serviceError.message}"}
    raise serviceError if plugin_retry_on_4xx
  when 429
    @@logger.error {"oci upload exception : Error while uploading the payload. Too Many Requests - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  when 500
    @@logger.error {"oci upload exception : Error while uploading the payload. Internal Server Error - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  when 502
    @@logger.error {"oci upload exception : Error while uploading the payload. Bad Gateway - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  when 503
    @@logger.error {"oci upload exception : Error while uploading the payload. Service unavailable - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  when 504
    @@logger.error {"oci upload exception : Error while uploading the payload. Gateway Timeout - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  when 505
    @@logger.error {"oci upload exception : Error while uploading the payload. HTTP Version Not Supported - opc-request-id:#{serviceError.request_id}"}
    raise serviceError
  else
    @@logger.error {"oci upload exception : Error while uploading the payload #{serviceError.message}"}
    raise serviceError
  end
rescue => ex
  # NOTE(review): non-service errors (network failures, serialization bugs)
  # are logged and swallowed, so the chunk is NOT retried and its records are
  # lost. Preserved as-is; confirm this best-effort behavior is intentional.
  @@logger.error {"oci upload exception : Error while uploading the payload. #{ex}"}
end
918
+ end
919
+ end
metadata ADDED
@@ -0,0 +1,124 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: fluent-plugin-oci-logging-analytics
3
+ version: !ruby/object:Gem::Version
4
+ version: 2.0.0
5
+ platform: ruby
6
+ authors:
7
+ - OCI Logging Analytics Team
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2022-01-14 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: rake
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - "~>"
18
+ - !ruby/object:Gem::Version
19
+ version: '12.0'
20
+ type: :development
21
+ prerelease: false
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - "~>"
25
+ - !ruby/object:Gem::Version
26
+ version: '12.0'
27
+ - !ruby/object:Gem::Dependency
28
+ name: test-unit
29
+ requirement: !ruby/object:Gem::Requirement
30
+ requirements:
31
+ - - "~>"
32
+ - !ruby/object:Gem::Version
33
+ version: '3.0'
34
+ type: :development
35
+ prerelease: false
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "~>"
39
+ - !ruby/object:Gem::Version
40
+ version: '3.0'
41
+ - !ruby/object:Gem::Dependency
42
+ name: fluentd
43
+ requirement: !ruby/object:Gem::Requirement
44
+ requirements:
45
+ - - ">="
46
+ - !ruby/object:Gem::Version
47
+ version: 0.14.10
48
+ - - "<"
49
+ - !ruby/object:Gem::Version
50
+ version: '2'
51
+ type: :runtime
52
+ prerelease: false
53
+ version_requirements: !ruby/object:Gem::Requirement
54
+ requirements:
55
+ - - ">="
56
+ - !ruby/object:Gem::Version
57
+ version: 0.14.10
58
+ - - "<"
59
+ - !ruby/object:Gem::Version
60
+ version: '2'
61
+ - !ruby/object:Gem::Dependency
62
+ name: rubyzip
63
+ requirement: !ruby/object:Gem::Requirement
64
+ requirements:
65
+ - - "~>"
66
+ - !ruby/object:Gem::Version
67
+ version: 2.3.2
68
+ type: :runtime
69
+ prerelease: false
70
+ version_requirements: !ruby/object:Gem::Requirement
71
+ requirements:
72
+ - - "~>"
73
+ - !ruby/object:Gem::Version
74
+ version: 2.3.2
75
+ - !ruby/object:Gem::Dependency
76
+ name: oci
77
+ requirement: !ruby/object:Gem::Requirement
78
+ requirements:
79
+ - - "~>"
80
+ - !ruby/object:Gem::Version
81
+ version: '2.13'
82
+ type: :runtime
83
+ prerelease: false
84
+ version_requirements: !ruby/object:Gem::Requirement
85
+ requirements:
86
+ - - "~>"
87
+ - !ruby/object:Gem::Version
88
+ version: '2.13'
89
+ description: Fluentd Output plugin to ship logs/events to OCI Logging Analytics.
90
+ email:
91
+ - oci_la_plugins@oracle.com
92
+ executables: []
93
+ extensions: []
94
+ extra_rdoc_files: []
95
+ files:
96
+ - lib/fluent/dto/logEvents.rb
97
+ - lib/fluent/dto/logEventsJson.rb
98
+ - lib/fluent/metrics/metricsLabels.rb
99
+ - lib/fluent/metrics/prometheusMetrics.rb
100
+ - lib/fluent/plugin/out_oci-logging-analytics.rb
101
+ homepage:
102
+ licenses:
103
+ - UPL-1.0
104
+ metadata: {}
105
+ post_install_message:
106
+ rdoc_options: []
107
+ require_paths:
108
+ - lib
109
+ required_ruby_version: !ruby/object:Gem::Requirement
110
+ requirements:
111
+ - - ">="
112
+ - !ruby/object:Gem::Version
113
+ version: '0'
114
+ required_rubygems_version: !ruby/object:Gem::Requirement
115
+ requirements:
116
+ - - ">="
117
+ - !ruby/object:Gem::Version
118
+ version: '0'
119
+ requirements: []
120
+ rubygems_version: 3.0.8
121
+ signing_key:
122
+ specification_version: 4
123
+ summary: Fluentd Output plugin to ship logs/events to OCI Logging Analytics.
124
+ test_files: []