semantic_logger 4.2.0 → 4.2.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (88)
  1. checksums.yaml +4 -4
  2. data/README.md +3 -3
  3. data/Rakefile +1 -1
  4. data/lib/semantic_logger/ansi_colors.rb +11 -12
  5. data/lib/semantic_logger/appender.rb +4 -5
  6. data/lib/semantic_logger/appender/async.rb +24 -16
  7. data/lib/semantic_logger/appender/async_batch.rb +1 -4
  8. data/lib/semantic_logger/appender/bugsnag.rb +67 -63
  9. data/lib/semantic_logger/appender/elasticsearch.rb +154 -157
  10. data/lib/semantic_logger/appender/elasticsearch_http.rb +59 -55
  11. data/lib/semantic_logger/appender/file.rb +1 -3
  12. data/lib/semantic_logger/appender/graylog.rb +114 -110
  13. data/lib/semantic_logger/appender/honeybadger.rb +54 -51
  14. data/lib/semantic_logger/appender/http.rb +194 -190
  15. data/lib/semantic_logger/appender/kafka.rb +152 -149
  16. data/lib/semantic_logger/appender/mongodb.rb +3 -3
  17. data/lib/semantic_logger/appender/new_relic.rb +52 -49
  18. data/lib/semantic_logger/appender/sentry.rb +59 -54
  19. data/lib/semantic_logger/appender/splunk.rb +108 -103
  20. data/lib/semantic_logger/appender/splunk_http.rb +82 -79
  21. data/lib/semantic_logger/appender/syslog.rb +4 -5
  22. data/lib/semantic_logger/appender/tcp.rb +8 -29
  23. data/lib/semantic_logger/appender/udp.rb +2 -3
  24. data/lib/semantic_logger/appender/wrapper.rb +2 -2
  25. data/lib/semantic_logger/base.rb +18 -16
  26. data/lib/semantic_logger/concerns/compatibility.rb +0 -1
  27. data/lib/semantic_logger/core_ext/thread.rb +0 -1
  28. data/lib/semantic_logger/formatters.rb +3 -5
  29. data/lib/semantic_logger/formatters/base.rb +2 -3
  30. data/lib/semantic_logger/formatters/color.rb +29 -12
  31. data/lib/semantic_logger/formatters/default.rb +10 -10
  32. data/lib/semantic_logger/formatters/json.rb +0 -2
  33. data/lib/semantic_logger/formatters/one_line.rb +2 -2
  34. data/lib/semantic_logger/formatters/raw.rb +7 -10
  35. data/lib/semantic_logger/formatters/signalfx.rb +3 -5
  36. data/lib/semantic_logger/formatters/syslog.rb +2 -3
  37. data/lib/semantic_logger/formatters/syslog_cee.rb +2 -3
  38. data/lib/semantic_logger/jruby/garbage_collection_logger.rb +8 -5
  39. data/lib/semantic_logger/log.rb +17 -17
  40. data/lib/semantic_logger/loggable.rb +6 -9
  41. data/lib/semantic_logger/logger.rb +0 -1
  42. data/lib/semantic_logger/metric/new_relic.rb +58 -55
  43. data/lib/semantic_logger/metric/signalfx.rb +108 -106
  44. data/lib/semantic_logger/metric/statsd.rb +2 -3
  45. data/lib/semantic_logger/processor.rb +9 -9
  46. data/lib/semantic_logger/semantic_logger.rb +50 -30
  47. data/lib/semantic_logger/subscriber.rb +0 -1
  48. data/lib/semantic_logger/utils.rb +37 -37
  49. data/lib/semantic_logger/version.rb +2 -2
  50. data/test/appender/async_batch_test.rb +0 -1
  51. data/test/appender/async_test.rb +0 -1
  52. data/test/appender/bugsnag_test.rb +7 -8
  53. data/test/appender/elasticsearch_http_test.rb +5 -6
  54. data/test/appender/elasticsearch_test.rb +14 -10
  55. data/test/appender/file_test.rb +5 -6
  56. data/test/appender/graylog_test.rb +8 -8
  57. data/test/appender/honeybadger_test.rb +6 -7
  58. data/test/appender/http_test.rb +4 -5
  59. data/test/appender/kafka_test.rb +5 -6
  60. data/test/appender/mongodb_test.rb +11 -13
  61. data/test/appender/new_relic_test.rb +8 -9
  62. data/test/appender/newrelic_rpm.rb +1 -1
  63. data/test/appender/sentry_test.rb +7 -8
  64. data/test/appender/splunk_http_test.rb +4 -4
  65. data/test/appender/splunk_test.rb +1 -3
  66. data/test/appender/syslog_test.rb +3 -5
  67. data/test/appender/tcp_test.rb +4 -5
  68. data/test/appender/udp_test.rb +4 -5
  69. data/test/appender/wrapper_test.rb +2 -3
  70. data/test/concerns/compatibility_test.rb +0 -1
  71. data/test/debug_as_trace_logger_test.rb +0 -1
  72. data/test/formatters/color_test.rb +5 -6
  73. data/test/formatters/default_test.rb +16 -17
  74. data/test/formatters/one_line_test.rb +1 -2
  75. data/test/formatters/signalfx_test.rb +8 -11
  76. data/test/formatters_test.rb +3 -3
  77. data/test/in_memory_appender.rb +0 -1
  78. data/test/in_memory_appender_helper.rb +1 -1
  79. data/test/in_memory_batch_appender.rb +0 -1
  80. data/test/in_memory_metrics_appender.rb +0 -1
  81. data/test/loggable_test.rb +2 -3
  82. data/test/logger_test.rb +11 -14
  83. data/test/measure_test.rb +13 -15
  84. data/test/metric/new_relic_test.rb +2 -3
  85. data/test/metric/signalfx_test.rb +4 -5
  86. data/test/semantic_logger_test.rb +28 -3
  87. data/test/test_helper.rb +6 -7
  88. metadata +34 -34
@@ -16,206 +16,210 @@ require 'openssl'
16
16
  # appender: :http,
17
17
  # url: 'http://localhost:8088/path'
18
18
  # )
19
- class SemanticLogger::Appender::Http < SemanticLogger::Subscriber
20
- attr_accessor :username, :compress, :header,
21
- :open_timeout, :read_timeout, :continue_timeout
22
- attr_reader :http, :url, :server, :port, :path, :ssl_options
23
-
24
- # Create HTTP(S) log appender
25
- #
26
- # Parameters:
27
- # url: [String]
28
- # Valid URL to post to.
29
- # Example: http://example.com/some_path
30
- # To enable SSL include https in the URL.
31
- # Example: https://example.com/some_path
32
- # verify_mode will default: OpenSSL::SSL::VERIFY_PEER
33
- #
34
- # application: [String]
35
- # Name of this application to appear in log messages.
36
- # Default: SemanticLogger.application
37
- #
38
- # host: [String]
39
- # Name of this host to appear in log messages.
40
- # Default: SemanticLogger.host
41
- #
42
- # username: [String]
43
- # User name for basic Authentication.
44
- # Default: nil ( do not use basic auth )
45
- #
46
- # password: [String]
47
- # Password for basic Authentication.
48
- #
49
- # compress: [true|false]
50
- # Whether to compress the JSON string with GZip.
51
- # Default: false
52
- #
53
- # ssl: [Hash]
54
- # Specific SSL options: For more details see NET::HTTP.start
55
- # ca_file, ca_path, cert, cert_store, ciphers, key, ssl_timeout,
56
- # ssl_version, verify_callback, verify_depth and verify_mode.
57
- #
58
- # level: [:trace | :debug | :info | :warn | :error | :fatal]
59
- # Override the log level for this appender.
60
- # Default: SemanticLogger.default_level
61
- #
62
- # formatter: [Object|Proc]
63
- # An instance of a class that implements #call, or a Proc to be used to format
64
- # the output from this appender
65
- # Default: Use the built-in formatter (See: #call)
66
- #
67
- # filter: [Regexp|Proc]
68
- # RegExp: Only include log messages where the class name matches the supplied.
69
- # regular expression. All other messages will be ignored.
70
- # Proc: Only include log messages where the supplied Proc returns true
71
- # The Proc must return true or false.
72
- #
73
- # open_timeout: [Float]
74
- # Default: 2.0
75
- #
76
- # read_timeout: [Float]
77
- # Default: 1.0
78
- #
79
- # continue_timeout: [Float]
80
- # Default: 1.0
81
- def initialize(url:,
82
- compress: false,
83
- ssl: {},
84
- username: nil,
85
- password: nil,
86
- open_timeout: 2.0,
87
- read_timeout: 1.0,
88
- continue_timeout: 1.0,
89
- level: nil,
90
- formatter: nil,
91
- filter: nil,
92
- application: nil,
93
- host: nil,
94
- &block)
95
-
96
- @url = url
97
- @ssl_options = ssl
98
- @username = username
99
- @password = password
100
- @compress = compress
101
- @open_timeout = open_timeout
102
- @read_timeout = read_timeout
103
- @continue_timeout = continue_timeout
104
-
105
- # On Ruby v2.0 and greater, Net::HTTP.new already uses a persistent connection if the server allows it
106
- @header = {
107
- 'Accept' => 'application/json',
108
- 'Content-Type' => 'application/json',
109
- 'Connection' => 'keep-alive',
110
- 'Keep-Alive' => '300'
111
- }
112
- @header['Content-Encoding'] = 'gzip' if @compress
113
-
114
- uri = URI.parse(@url)
115
- @server = uri.host
116
- raise(ArgumentError, "Invalid format for :url: #{@url.inspect}. Should be similar to: 'http://hostname:port/path'") unless @server
117
-
118
- @port = uri.port
119
- @username = uri.user if !@username && uri.user
120
- @password = uri.password if !@password && uri.password
121
- @path = uri.path
122
- # Path cannot be empty
123
- @path = '/' if @path == ''
124
-
125
- if uri.scheme == 'https'
126
- @ssl_options[:use_ssl] = true
127
- @ssl_options[:verify_mode] ||= OpenSSL::SSL::VERIFY_PEER
128
- @port ||= HTTP.https_default_port
129
- else
130
- @port ||= HTTP.http_default_port
131
- end
132
- @http = nil
133
-
134
- super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
135
- reopen
136
- end
137
-
138
- # Re-open after process fork
139
- def reopen
140
- # Close open connection if any
141
- begin
142
- @http.finish if @http
143
- rescue IOError
144
- end
145
-
146
- @http = Net::HTTP.new(server, port)
147
-
148
- if @ssl_options
149
- @http.methods.grep(/\A(\w+)=\z/) do |meth|
150
- key = $1.to_sym
151
- @ssl_options.key?(key) or next
152
- @http.__send__(meth, @ssl_options[key])
19
+ module SemanticLogger
20
+ module Appender
21
+ class Http < SemanticLogger::Subscriber
22
+ attr_accessor :username, :compress, :header,
23
+ :open_timeout, :read_timeout, :continue_timeout
24
+ attr_reader :http, :url, :server, :port, :path, :ssl_options
25
+
26
+ # Create HTTP(S) log appender
27
+ #
28
+ # Parameters:
29
+ # url: [String]
30
+ # Valid URL to post to.
31
+ # Example: http://example.com/some_path
32
+ # To enable SSL include https in the URL.
33
+ # Example: https://example.com/some_path
34
+ # verify_mode will default: OpenSSL::SSL::VERIFY_PEER
35
+ #
36
+ # application: [String]
37
+ # Name of this application to appear in log messages.
38
+ # Default: SemanticLogger.application
39
+ #
40
+ # host: [String]
41
+ # Name of this host to appear in log messages.
42
+ # Default: SemanticLogger.host
43
+ #
44
+ # username: [String]
45
+ # User name for basic Authentication.
46
+ # Default: nil ( do not use basic auth )
47
+ #
48
+ # password: [String]
49
+ # Password for basic Authentication.
50
+ #
51
+ # compress: [true|false]
52
+ # Whether to compress the JSON string with GZip.
53
+ # Default: false
54
+ #
55
+ # ssl: [Hash]
56
+ # Specific SSL options: For more details see NET::HTTP.start
57
+ # ca_file, ca_path, cert, cert_store, ciphers, key, ssl_timeout,
58
+ # ssl_version, verify_callback, verify_depth and verify_mode.
59
+ #
60
+ # level: [:trace | :debug | :info | :warn | :error | :fatal]
61
+ # Override the log level for this appender.
62
+ # Default: SemanticLogger.default_level
63
+ #
64
+ # formatter: [Object|Proc]
65
+ # An instance of a class that implements #call, or a Proc to be used to format
66
+ # the output from this appender
67
+ # Default: Use the built-in formatter (See: #call)
68
+ #
69
+ # filter: [Regexp|Proc]
70
+ # RegExp: Only include log messages where the class name matches the supplied.
71
+ # regular expression. All other messages will be ignored.
72
+ # Proc: Only include log messages where the supplied Proc returns true
73
+ # The Proc must return true or false.
74
+ #
75
+ # open_timeout: [Float]
76
+ # Default: 2.0
77
+ #
78
+ # read_timeout: [Float]
79
+ # Default: 1.0
80
+ #
81
+ # continue_timeout: [Float]
82
+ # Default: 1.0
83
+ def initialize(url:,
84
+ compress: false,
85
+ ssl: {},
86
+ username: nil,
87
+ password: nil,
88
+ open_timeout: 2.0,
89
+ read_timeout: 1.0,
90
+ continue_timeout: 1.0,
91
+ level: nil,
92
+ formatter: nil,
93
+ filter: nil,
94
+ application: nil,
95
+ host: nil,
96
+ &block)
97
+
98
+ @url = url
99
+ @ssl_options = ssl
100
+ @username = username
101
+ @password = password
102
+ @compress = compress
103
+ @open_timeout = open_timeout
104
+ @read_timeout = read_timeout
105
+ @continue_timeout = continue_timeout
106
+
107
+ # On Ruby v2.0 and greater, Net::HTTP.new already uses a persistent connection if the server allows it
108
+ @header = {
109
+ 'Accept' => 'application/json',
110
+ 'Content-Type' => 'application/json',
111
+ 'Connection' => 'keep-alive',
112
+ 'Keep-Alive' => '300'
113
+ }
114
+ @header['Content-Encoding'] = 'gzip' if @compress
115
+
116
+ uri = URI.parse(@url)
117
+ @server = uri.host
118
+ raise(ArgumentError, "Invalid format for :url: #{@url.inspect}. Should be similar to: 'http://hostname:port/path'") unless @server
119
+
120
+ @port = uri.port
121
+ @username = uri.user if !@username && uri.user
122
+ @password = uri.password if !@password && uri.password
123
+ @path = uri.path
124
+ # Path cannot be empty
125
+ @path = '/' if @path == ''
126
+
127
+ if uri.scheme == 'https'
128
+ @ssl_options[:use_ssl] = true
129
+ @ssl_options[:verify_mode] ||= OpenSSL::SSL::VERIFY_PEER
130
+ @port ||= HTTP.https_default_port
131
+ else
132
+ @port ||= HTTP.http_default_port
133
+ end
134
+ @http = nil
135
+
136
+ super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
137
+ reopen
153
138
  end
154
- end
155
139
 
156
- @http.open_timeout = @open_timeout
157
- @http.read_timeout = @read_timeout
158
- @http.continue_timeout = @continue_timeout
159
- @http.start
160
- end
140
+ # Re-open after process fork
141
+ def reopen
142
+ # Close open connection if any
143
+ begin
144
+ @http&.finish
145
+ rescue IOError
146
+ nil
147
+ end
148
+
149
+ @http = Net::HTTP.new(server, port)
150
+
151
+ if @ssl_options
152
+ @http.methods.grep(/\A(\w+)=\z/) do |meth|
153
+ key = Regexp.last_match(1).to_sym
154
+ @ssl_options.key?(key) || next
155
+ @http.__send__(meth, @ssl_options[key])
156
+ end
157
+ end
158
+
159
+ @http.open_timeout = @open_timeout
160
+ @http.read_timeout = @read_timeout
161
+ @http.continue_timeout = @continue_timeout
162
+ @http.start
163
+ end
161
164
 
162
- # Forward log messages to HTTP Server
163
- def log(log)
164
- message = formatter.call(log, self)
165
- logger.trace(message)
166
- post(message)
167
- end
165
+ # Forward log messages to HTTP Server
166
+ def log(log)
167
+ message = formatter.call(log, self)
168
+ logger.trace(message)
169
+ post(message)
170
+ end
168
171
 
169
- private
172
+ private
170
173
 
171
- # Use JSON Formatter by default
172
- def default_formatter
173
- SemanticLogger::Formatters::Json.new
174
- end
174
+ # Use JSON Formatter by default
175
+ def default_formatter
176
+ SemanticLogger::Formatters::Json.new
177
+ end
175
178
 
176
- def compress_data(data)
177
- str = StringIO.new
178
- gz = Zlib::GzipWriter.new(str)
179
- gz << data
180
- gz.close
181
- str.string
182
- end
179
+ def compress_data(data)
180
+ str = StringIO.new
181
+ gz = Zlib::GzipWriter.new(str)
182
+ gz << data
183
+ gz.close
184
+ str.string
185
+ end
183
186
 
184
- # HTTP Post
185
- def post(body, request_uri = path)
186
- request = Net::HTTP::Post.new(request_uri, @header)
187
- process_request(request, body)
188
- end
187
+ # HTTP Post
188
+ def post(body, request_uri = path)
189
+ request = Net::HTTP::Post.new(request_uri, @header)
190
+ process_request(request, body)
191
+ end
189
192
 
190
- # HTTP Put
191
- def put(body, request_uri = path)
192
- request = Net::HTTP::Put.new(request_uri, @header)
193
- process_request(request, body)
194
- end
193
+ # HTTP Put
194
+ def put(body, request_uri = path)
195
+ request = Net::HTTP::Put.new(request_uri, @header)
196
+ process_request(request, body)
197
+ end
195
198
 
196
- # HTTP Delete
197
- def delete(request_uri = path)
198
- request = Net::HTTP::Delete.new(request_uri, @header)
199
- process_request(request)
200
- end
199
+ # HTTP Delete
200
+ def delete(request_uri = path)
201
+ request = Net::HTTP::Delete.new(request_uri, @header)
202
+ process_request(request)
203
+ end
201
204
 
202
- # Process HTTP Request
203
- def process_request(request, body = nil)
204
- if body
205
- request.body = compress ? compress_data(body) : body
206
- end
207
- request.basic_auth(@username, @password) if @username
208
- response = @http.request(request)
209
- if response.code == '200' || response.code == '201'
210
- true
211
- else
212
- # Failures are logged to the global semantic logger failsafe logger (Usually stderr or file)
213
- logger.error("Bad HTTP response from: #{url} code: #{response.code}, #{response.body}")
214
- false
205
+ # Process HTTP Request
206
+ def process_request(request, body = nil)
207
+ if body
208
+ request.body = compress ? compress_data(body) : body
209
+ end
210
+ request.basic_auth(@username, @password) if @username
211
+ response = @http.request(request)
212
+ if response.code == '200' || response.code == '201'
213
+ true
214
+ else
215
+ # Failures are logged to the global semantic logger failsafe logger (Usually stderr or file)
216
+ logger.error("Bad HTTP response from: #{url} code: #{response.code}, #{response.body}")
217
+ false
218
+ end
219
+ rescue RuntimeError => exc
220
+ reopen
221
+ raise exc
222
+ end
215
223
  end
216
- rescue RuntimeError => exc
217
- reopen
218
- raise exc
219
224
  end
220
-
221
225
  end
@@ -19,163 +19,166 @@ require 'date'
19
19
  # # Set an optional client id in order to identify the client to Kafka:
20
20
  # client_id: "my-application",
21
21
  # )
22
- class SemanticLogger::Appender::Kafka < SemanticLogger::Subscriber
23
- attr_accessor :seed_brokers, :client_id, :connect_timeout, :socket_timeout,
24
- :ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key,
25
- :delivery_threshold, :delivery_interval,
26
- :topic, :partition, :partition_key, :key
22
+ module SemanticLogger
23
+ module Appender
24
+ class Kafka < SemanticLogger::Subscriber
25
+ attr_accessor :seed_brokers, :client_id, :connect_timeout, :socket_timeout,
26
+ :ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key,
27
+ :delivery_threshold, :delivery_interval,
28
+ :topic, :partition, :partition_key, :key
27
29
 
28
- # Send log messages to Kafka in JSON format.
29
- #
30
- # Kafka Parameters:
31
- #
32
- # seed_brokers: [Array<String>, String]
33
- # The list of brokers used to initialize the client. Either an Array of connections,
34
- # or a comma separated string of connections.
35
- # Connections can either be a string of "port:protocol" or a full URI with a scheme.
36
- # If there's a scheme it's ignored and only host/port are used.
37
- #
38
- # client_id: [String]
39
- # The identifier for this application.
40
- # Default: semantic-logger
41
- #
42
- # topic: [String]
43
- # Topic to publish log messages to.
44
- # Default: 'log_messages'
45
- #
46
- # partition: [Integer]
47
- # The partition that the message should be written to.
48
- # Default: nil
49
- #
50
- # partition_key: [String]
51
- # The key that should be used to assign a partition.
52
- # Default: nil
53
- #
54
- # key: [String]
55
- # The message key.
56
- # Default: nil
57
- #
58
- # connect_timeout: [Integer]
59
- # The timeout setting for connecting to brokers.
60
- # Default: nil
61
- #
62
- # socket_timeout: [Integer]
63
- # The timeout setting for socket connections.
64
- # Default: nil
65
- #
66
- # ssl_ca_cert: [String, Array<String>]
67
- # A PEM encoded CA cert, or an Array of PEM encoded CA certs, to use with a SSL connection.
68
- # Default: nil
69
- #
70
- # ssl_client_cert: [String]
71
- # A PEM encoded client cert to use with a SSL connection.
72
- # Must be used in combination with ssl_client_cert_key.
73
- # Default: nil
74
- #
75
- # ssl_client_cert_key [String]
76
- # A PEM encoded client cert key to use with a SSL connection.
77
- # Must be used in combination with ssl_client_cert.
78
- # Default: nil
79
- #
80
- # delivery_threshold: [Integer]
81
- # Number of messages between triggering a delivery of messages to Apache Kafka.
82
- # Default: 100
83
- #
84
- # delivery_interval: [Integer]
85
- # Number of seconds between triggering a delivery of messages to Apache Kafka.
86
- # Default: 5
87
- #
88
- # Semantic Logger Parameters:
89
- #
90
- # level: [:trace | :debug | :info | :warn | :error | :fatal]
91
- # Override the log level for this appender.
92
- # Default: SemanticLogger.default_level
93
- #
94
- # formatter: [Object|Proc|Symbol|Hash]
95
- # An instance of a class that implements #call, or a Proc to be used to format
96
- # the output from this appender
97
- # Default: :raw_json (See: #call)
98
- #
99
- # filter: [Regexp|Proc]
100
- # RegExp: Only include log messages where the class name matches the supplied.
101
- # regular expression. All other messages will be ignored.
102
- # Proc: Only include log messages where the supplied Proc returns true
103
- # The Proc must return true or false.
104
- #
105
- # host: [String]
106
- # Name of this host to appear in log messages.
107
- # Default: SemanticLogger.host
108
- #
109
- # application: [String]
110
- # Name of this application to appear in log messages.
111
- # Default: SemanticLogger.application
112
- def initialize(seed_brokers:, client_id: 'semantic-logger', connect_timeout: nil, socket_timeout: nil,
113
- ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
114
- topic: 'log_messages', partition: nil, partition_key: nil, key: nil,
115
- delivery_threshold: 100, delivery_interval: 10,
116
- level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)
30
+ # Send log messages to Kafka in JSON format.
31
+ #
32
+ # Kafka Parameters:
33
+ #
34
+ # seed_brokers: [Array<String>, String]
35
+ # The list of brokers used to initialize the client. Either an Array of connections,
36
+ # or a comma separated string of connections.
37
+ # Connections can either be a string of "port:protocol" or a full URI with a scheme.
38
+ # If there's a scheme it's ignored and only host/port are used.
39
+ #
40
+ # client_id: [String]
41
+ # The identifier for this application.
42
+ # Default: semantic-logger
43
+ #
44
+ # topic: [String]
45
+ # Topic to publish log messages to.
46
+ # Default: 'log_messages'
47
+ #
48
+ # partition: [Integer]
49
+ # The partition that the message should be written to.
50
+ # Default: nil
51
+ #
52
+ # partition_key: [String]
53
+ # The key that should be used to assign a partition.
54
+ # Default: nil
55
+ #
56
+ # key: [String]
57
+ # The message key.
58
+ # Default: nil
59
+ #
60
+ # connect_timeout: [Integer]
61
+ # The timeout setting for connecting to brokers.
62
+ # Default: nil
63
+ #
64
+ # socket_timeout: [Integer]
65
+ # The timeout setting for socket connections.
66
+ # Default: nil
67
+ #
68
+ # ssl_ca_cert: [String, Array<String>]
69
+ # A PEM encoded CA cert, or an Array of PEM encoded CA certs, to use with a SSL connection.
70
+ # Default: nil
71
+ #
72
+ # ssl_client_cert: [String]
73
+ # A PEM encoded client cert to use with a SSL connection.
74
+ # Must be used in combination with ssl_client_cert_key.
75
+ # Default: nil
76
+ #
77
+ # ssl_client_cert_key [String]
78
+ # A PEM encoded client cert key to use with a SSL connection.
79
+ # Must be used in combination with ssl_client_cert.
80
+ # Default: nil
81
+ #
82
+ # delivery_threshold: [Integer]
83
+ # Number of messages between triggering a delivery of messages to Apache Kafka.
84
+ # Default: 100
85
+ #
86
+ # delivery_interval: [Integer]
87
+ # Number of seconds between triggering a delivery of messages to Apache Kafka.
88
+ # Default: 5
89
+ #
90
+ # Semantic Logger Parameters:
91
+ #
92
+ # level: [:trace | :debug | :info | :warn | :error | :fatal]
93
+ # Override the log level for this appender.
94
+ # Default: SemanticLogger.default_level
95
+ #
96
+ # formatter: [Object|Proc|Symbol|Hash]
97
+ # An instance of a class that implements #call, or a Proc to be used to format
98
+ # the output from this appender
99
+ # Default: :raw_json (See: #call)
100
+ #
101
+ # filter: [Regexp|Proc]
102
+ # RegExp: Only include log messages where the class name matches the supplied.
103
+ # regular expression. All other messages will be ignored.
104
+ # Proc: Only include log messages where the supplied Proc returns true
105
+ # The Proc must return true or false.
106
+ #
107
+ # host: [String]
108
+ # Name of this host to appear in log messages.
109
+ # Default: SemanticLogger.host
110
+ #
111
+ # application: [String]
112
+ # Name of this application to appear in log messages.
113
+ # Default: SemanticLogger.application
114
+ def initialize(seed_brokers:, client_id: 'semantic-logger', connect_timeout: nil, socket_timeout: nil,
115
+ ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
116
+ topic: 'log_messages', partition: nil, partition_key: nil, key: nil,
117
+ delivery_threshold: 100, delivery_interval: 10,
118
+ level: nil, formatter: nil, filter: nil, application: nil, host: nil, &block)
117
119
 
118
- @seed_brokers = seed_brokers
119
- @client_id = @client_id
120
- @connect_timeout = connect_timeout
121
- @socket_timeout = socket_timeout
122
- @ssl_ca_cert = ssl_ca_cert
123
- @ssl_client_cert = ssl_client_cert
124
- @ssl_client_cert_key = ssl_client_cert_key
125
- @topic = topic
126
- @partition = partition
127
- @partition_key = partition_key
128
- @key = key
129
- @delivery_threshold = delivery_threshold
130
- @delivery_interval = delivery_interval
120
+ @seed_brokers = seed_brokers
121
+ @client_id = client_id
122
+ @connect_timeout = connect_timeout
123
+ @socket_timeout = socket_timeout
124
+ @ssl_ca_cert = ssl_ca_cert
125
+ @ssl_client_cert = ssl_client_cert
126
+ @ssl_client_cert_key = ssl_client_cert_key
127
+ @topic = topic
128
+ @partition = partition
129
+ @partition_key = partition_key
130
+ @key = key
131
+ @delivery_threshold = delivery_threshold
132
+ @delivery_interval = delivery_interval
131
133
 
132
- super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
133
- reopen
134
- end
134
+ super(level: level, formatter: formatter, filter: filter, application: application, host: host, &block)
135
+ reopen
136
+ end
135
137
 
136
- def reopen
137
- @kafka = ::Kafka.new(
138
- seed_brokers: seed_brokers,
139
- client_id: client_id,
140
- connect_timeout: connect_timeout,
141
- socket_timeout: socket_timeout,
142
- ssl_ca_cert: ssl_ca_cert,
143
- ssl_client_cert: ssl_client_cert,
144
- ssl_client_cert_key: ssl_client_cert_key,
145
- logger: logger
146
- )
138
+ def reopen
139
+ @kafka = ::Kafka.new(
140
+ seed_brokers: seed_brokers,
141
+ client_id: client_id,
142
+ connect_timeout: connect_timeout,
143
+ socket_timeout: socket_timeout,
144
+ ssl_ca_cert: ssl_ca_cert,
145
+ ssl_client_cert: ssl_client_cert,
146
+ ssl_client_cert_key: ssl_client_cert_key,
147
+ logger: logger
148
+ )
147
149
 
148
- @producer = @kafka.async_producer(
149
- delivery_threshold: delivery_threshold,
150
- delivery_interval: delivery_interval
151
- )
152
- end
150
+ @producer = @kafka.async_producer(
151
+ delivery_threshold: delivery_threshold,
152
+ delivery_interval: delivery_interval
153
+ )
154
+ end
153
155
 
154
- def close
155
- @producer.shutdown if @producer
156
- @producer = nil
157
- @kafka.close if @kafka
158
- @kafka = nil
159
- end
156
+ def close
157
+ @producer&.shutdown
158
+ @producer = nil
159
+ @kafka&.close
160
+ @kafka = nil
161
+ end
160
162
 
161
- # Forward log messages to Kafka producer thread.
162
- def log(log)
163
- json = formatter.call(log, self)
164
- @producer.produce(json, topic: topic, partition: partition, partition_key: partition_key, key: key)
165
- end
163
+ # Forward log messages to Kafka producer thread.
164
+ def log(log)
165
+ json = formatter.call(log, self)
166
+ @producer.produce(json, topic: topic, partition: partition, partition_key: partition_key, key: key)
167
+ end
166
168
 
167
- # Use JSON Formatter by default.
168
- def default_formatter
169
- SemanticLogger::Formatters::Json.new
170
- end
169
+ # Use JSON Formatter by default.
170
+ def default_formatter
171
+ SemanticLogger::Formatters::Json.new
172
+ end
171
173
 
172
- # Restart producer thread since there is no other way to flush.
173
- def flush
174
- @producer.shutdown
175
- @producer = @kafka.async_producer(
176
- delivery_threshold: delivery_threshold,
177
- delivery_interval: delivery_interval
178
- )
174
+ # Restart producer thread since there is no other way to flush.
175
+ def flush
176
+ @producer.shutdown
177
+ @producer = @kafka.async_producer(
178
+ delivery_threshold: delivery_threshold,
179
+ delivery_interval: delivery_interval
180
+ )
181
+ end
182
+ end
179
183
  end
180
-
181
184
  end