semantic_logger 4.10.0 → 4.12.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 2ff7d4bcb345581f6ba19f834bb1bf2f921e97904f4b6236142b87af75a4526d
- data.tar.gz: b1069d5360296c61c9796f630b1ec065eb1a0a7dc549778c66c5e26c90d0eadb
+ metadata.gz: b09c37737f1e42edbeee95b036ba0581d5164d756e13de37aca9b22b3093cd45
+ data.tar.gz: 9f66b72480df2371a7d305164e443f9667e1acd5521c1347d54c3841efbfa4c9
  SHA512:
- metadata.gz: c5cfbac7dd8795b11cf79ed3fa9a4a086128d5ee030f0e9db40b75f30f58997c1674a4284b4b287939216bc8de83372e2b7fe00d73e7856c3cc6830310740f0d
- data.tar.gz: 2150a809562247d771103a5d387ae5852ff5b0649a4f4288a7dd07a94420bafcfb6b36f3e21077fcc053c2d8bcaffe80a110ab65b52178fb612b7368fd79d87d
+ metadata.gz: fb27aa1a6d5dc1f2e0e6505ba9348a7cec091a153763361e8c9c20c21177bf84ac11ac2f21fc37ec994988ba31763dcd5c3134ac40a169ac987ed617eceec46e
+ data.tar.gz: 5d084bc8cc57b1e836326eae912667ef6b7a284b0125c0b0acee678af78c01017b9667db9a43df7c0fb38af4998b0ab3128ac05d3e2bce266176e9c4d8fadccd
@@ -59,7 +59,7 @@ module SemanticLogger

  appender.reopen if appender.respond_to?(:reopen)

- @thread.kill if @thread&.alive?
+ @thread&.kill if @thread&.alive?
  @thread = Thread.new { process }
  end

@@ -21,7 +21,7 @@ module SemanticLogger
  class Http < SemanticLogger::Subscriber
  attr_accessor :username, :compress, :header,
  :open_timeout, :read_timeout, :continue_timeout
- attr_reader :http, :url, :server, :port, :path, :ssl_options
+ attr_reader :http, :url, :server, :port, :path, :ssl_options, :proxy_url

  # Create HTTP(S) log appender
  #
@@ -57,6 +57,16 @@ module SemanticLogger
  # ca_file, ca_path, cert, cert_store, ciphers, key, ssl_timeout,
  # ssl_version, verify_callback, verify_depth and verify_mode.
  #
+ # proxy_url: [String]
+ # URL of proxy server to use for HTTP(s) connections. Should
+ # include username and password if required.
+ # Example: http://user@pass:example.com/some_path
+ # To enable SSL include https in the URL.
+ # Example: https://example.com/some_path
+ # If this is set to :ENV, Net::HTTP will use the environment http_proxy*
+ # variables if they are set. If set to nil then no proxy will be used,
+ # even if the environment variables are set.
+ #
  # level: [:trace | :debug | :info | :warn | :error | :fatal]
  # Override the log level for this appender.
  # Default: SemanticLogger.default_level
@@ -85,6 +95,7 @@ module SemanticLogger
  ssl: {},
  username: nil,
  password: nil,
+ proxy_url: :ENV,
  open_timeout: 2.0,
  read_timeout: 1.0,
  continue_timeout: 1.0,
@@ -92,6 +103,7 @@ module SemanticLogger
  &block)

  @url = url
+ @proxy_url = proxy_url
  @ssl_options = ssl
  @username = username
  @password = password
@@ -129,6 +141,9 @@ module SemanticLogger
  else
  @port ||= HTTP.http_default_port
  end
+
+ @proxy_uri = URI.parse(@proxy_url) if @proxy_url && @proxy_url != :ENV
+
  @http = nil

  super(**args, &block)
@@ -144,7 +159,11 @@ module SemanticLogger
  nil
  end

- @http = Net::HTTP.new(server, port)
+ @http = if @proxy_uri
+ Net::HTTP.new(server, port, @proxy_uri.host, @proxy_uri.port, @proxy_uri.user, @proxy_uri.password)
+ else
+ Net::HTTP.new(server, port, @proxy_url)
+ end

  if @ssl_options
  @http.methods.grep(/\A(\w+)=\z/) do |meth|
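A minimal usage sketch of the new proxy_url option. The :http appender name and the forwarding of extra keyword arguments by SemanticLogger.add_appender follow the existing API; the URLs below are placeholders:

    require "semantic_logger"

    # Route the HTTP(S) appender through an explicit proxy.
    # proxy_url: :ENV (the default) honors the http_proxy environment variables instead,
    # and proxy_url: nil disables proxying even when those variables are set.
    SemanticLogger.add_appender(
      appender:  :http,
      url:       "https://logs.example.org/ingest",
      proxy_url: "http://user:password@proxy.example.org:3128"
    )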
@@ -23,8 +23,8 @@ module SemanticLogger
  module Appender
  class Kafka < SemanticLogger::Subscriber
  attr_accessor :seed_brokers, :client_id, :connect_timeout, :socket_timeout,
- :ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key,
- :delivery_threshold, :delivery_interval,
+ :ssl_ca_cert, :ssl_client_cert, :ssl_client_cert_key, :ssl_ca_certs_from_system,
+ :delivery_threshold, :delivery_interval, :required_acks,
  :topic, :partition, :partition_key, :key

  # Send log messages to Kafka in JSON format.
@@ -79,6 +79,9 @@ module SemanticLogger
  # Must be used in combination with ssl_client_cert.
  # Default: nil
  #
+ # ssl_ca_certs_from_system: [boolean]
+ # Delegate SSL CA cert to the system certs
+ #
  # delivery_threshold: [Integer]
  # Number of messages between triggering a delivery of messages to Apache Kafka.
  # Default: 100
@@ -87,6 +90,10 @@ module SemanticLogger
  # Number of seconds between triggering a delivery of messages to Apache Kafka.
  # Default: 5
  #
+ # required_acks: [Integer]
+ # Number of replicas that must acknowledge receipt of each log message to the topic
+ # Default: 1
+ #
  # Semantic Logger Parameters:
  #
  # level: [:trace | :debug | :info | :warn | :error | :fatal]
@@ -116,24 +123,26 @@ module SemanticLogger
  # Send metrics only events to kafka.
  # Default: true
  def initialize(seed_brokers:, client_id: "semantic-logger", connect_timeout: nil, socket_timeout: nil,
- ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil,
+ ssl_ca_cert: nil, ssl_client_cert: nil, ssl_client_cert_key: nil, ssl_ca_certs_from_system: false,
  topic: "log_messages", partition: nil, partition_key: nil, key: nil,
- delivery_threshold: 100, delivery_interval: 10,
+ delivery_threshold: 100, delivery_interval: 10, required_acks: 1,
  metrics: true, **args, &block)

- @seed_brokers = seed_brokers
- @client_id = client_id
- @connect_timeout = connect_timeout
- @socket_timeout = socket_timeout
- @ssl_ca_cert = ssl_ca_cert
- @ssl_client_cert = ssl_client_cert
- @ssl_client_cert_key = ssl_client_cert_key
- @topic = topic
- @partition = partition
- @partition_key = partition_key
- @key = key
- @delivery_threshold = delivery_threshold
- @delivery_interval = delivery_interval
+ @seed_brokers = seed_brokers
+ @client_id = client_id
+ @connect_timeout = connect_timeout
+ @socket_timeout = socket_timeout
+ @ssl_ca_cert = ssl_ca_cert
+ @ssl_client_cert = ssl_client_cert
+ @ssl_client_cert_key = ssl_client_cert_key
+ @ssl_ca_certs_from_system = ssl_ca_certs_from_system
+ @topic = topic
+ @partition = partition
+ @partition_key = partition_key
+ @key = key
+ @delivery_threshold = delivery_threshold
+ @delivery_interval = delivery_interval
+ @required_acks = required_acks

  super(metrics: metrics, **args, &block)
  reopen
@@ -141,19 +150,21 @@ module SemanticLogger

  def reopen
  @kafka = ::Kafka.new(
- seed_brokers: seed_brokers,
- client_id: client_id,
- connect_timeout: connect_timeout,
- socket_timeout: socket_timeout,
- ssl_ca_cert: ssl_ca_cert,
- ssl_client_cert: ssl_client_cert,
- ssl_client_cert_key: ssl_client_cert_key,
- logger: logger
+ seed_brokers: seed_brokers,
+ client_id: client_id,
+ connect_timeout: connect_timeout,
+ socket_timeout: socket_timeout,
+ ssl_ca_cert: ssl_ca_cert,
+ ssl_client_cert: ssl_client_cert,
+ ssl_client_cert_key: ssl_client_cert_key,
+ ssl_ca_certs_from_system: ssl_ca_certs_from_system,
+ logger: logger
  )

  @producer = @kafka.async_producer(
  delivery_threshold: delivery_threshold,
- delivery_interval: delivery_interval
+ delivery_interval: delivery_interval,
+ required_acks: required_acks
  )
  end
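A hedged usage sketch of the two new Kafka options; the broker addresses are placeholders, and the required_acks value of -1 (all in-sync replicas) follows ruby-kafka's convention rather than the default of 1 shown above:

    require "semantic_logger"

    SemanticLogger.add_appender(
      appender:                 :kafka,
      seed_brokers:             ["kafka://broker1:9092", "kafka://broker2:9092"],
      ssl_ca_certs_from_system: true, # trust the system CA store instead of passing ssl_ca_cert
      required_acks:            -1    # wait for all in-sync replicas to acknowledge each batch
    )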

@@ -40,7 +40,7 @@ module SemanticLogger
  def initialize(level: :error, **args, &block)
  # Replace the Sentry Ruby logger so that we can identify its log
  # messages and not forward them to Sentry
- ::Sentry.init { |config| config.logger = SemanticLogger[::Sentry] }
+ ::Sentry.init { |config| config.logger = SemanticLogger[::Sentry] } unless ::Sentry.initialized?
  super(level: level, **args, &block)
  end
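With this guard, an application that configures Sentry itself keeps its own settings. A sketch, assuming the sentry-ruby gem and the :sentry_ruby appender name; the DSN is a placeholder:

    require "sentry-ruby"
    require "semantic_logger"

    # Initialize Sentry first; the appender detects this and skips its own Sentry.init.
    Sentry.init do |config|
      config.dsn    = "https://examplePublicKey@o0.ingest.sentry.io/0"
      config.logger = SemanticLogger[Sentry]
    end

    SemanticLogger.add_appender(appender: :sentry_ruby, level: :error)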

@@ -186,7 +186,8 @@ module SemanticLogger
  # to:
  # `logger.tagged('first', 'more', 'other')`
  # - For better performance with clean tags, see `SemanticLogger.tagged`.
- def tagged(*tags, &block)
+ def tagged(*tags)
+ block = -> { yield(self) }
  # Allow named tags to be passed into the logger
  # Rails::Rack::Logger passes logs as an array with a single argument
  if tags.size == 1 && !tags.first.is_a?(Array)
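For context, typical tagged usage is unchanged by the block now being captured as a lambda; a small sketch with illustrative names:

    require "semantic_logger"

    logger = SemanticLogger["Checkout"]

    # Everything logged inside the block carries the supplied tags.
    logger.tagged("order-1234", "checkout") do
      logger.info("Payment authorized")
    end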
@@ -327,14 +328,14 @@ module SemanticLogger

  log = Log.new(name, level, index)
  should_log =
- if payload.nil? && exception.nil? && message.is_a?(Hash)
- # Everything as keyword arguments.
+ if exception.nil? && payload.nil? && message.is_a?(Hash)
+ # All arguments as a hash in the message.
  log.assign(**log.extract_arguments(message))
  elsif exception.nil? && message && payload && payload.is_a?(Hash)
- # Message with keyword arguments as the rest.
- log.assign(message: message, **log.extract_arguments(payload))
+ # Message supplied along with a hash with the remaining arguments.
+ log.assign(**log.extract_arguments(payload, message))
  else
- # No keyword arguments.
+ # All fields supplied directly.
  log.assign(message: message, payload: payload, exception: exception)
  end

@@ -62,7 +62,12 @@ module SemanticLogger

  def flatten_log
  flattened = @parsed.map do |key, value|
- "#{key}=#{value.to_json}"
+ case value
+ when Hash, Array
+ "#{key}=#{value.to_s.to_json}"
+ else
+ "#{key}=#{value.to_json}"
+ end
  end

  flattened.join(" ")
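The effect of the change is that structured values are emitted as a single quoted string rather than raw JSON, which keeps each key=value pair parseable as logfmt. A standalone sketch of the same case statement; the sample hash is illustrative, and the inspect format of the Hash depends on the Ruby version:

    require "json"

    parsed = { duration: 1.5, payload: { user_id: 42 } }

    line = parsed.map do |key, value|
      case value
      when Hash, Array
        "#{key}=#{value.to_s.to_json}"   # the whole structure becomes one quoted string
      else
        "#{key}=#{value.to_json}"
      end
    end.join(" ")

    # => 'duration=1.5 payload="{:user_id=>42}"' on Ruby 3.3 and earlier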
@@ -23,7 +23,7 @@ module SemanticLogger
  #
  # Example:
  # # Log via udp to a remote syslog server on host: `server1` and port `8514`, using the CEE format.
- # SemanticLogger.add_appender(appender: :syslog, formatter: syslog_cee, url: 'udp://server1:8514')
+ # SemanticLogger.add_appender(appender: :syslog, formatter: :syslog_cee, url: 'udp://server1:8514')
  def initialize(facility: ::Syslog::LOG_USER, level_map: SemanticLogger::Formatters::Syslog::LevelMap.new, max_size: Integer)
  @facility = facility
  @level_map = level_map.is_a?(SemanticLogger::Formatters::Syslog::LevelMap) ? level_map : SemanticLogger::Formatters::Syslog::LevelMap.new(level_map)
@@ -1,15 +1,16 @@
  module SemanticLogger
  module Formatters
- autoload :Base, "semantic_logger/formatters/base"
- autoload :Color, "semantic_logger/formatters/color"
- autoload :Default, "semantic_logger/formatters/default"
- autoload :Json, "semantic_logger/formatters/json"
- autoload :Raw, "semantic_logger/formatters/raw"
- autoload :OneLine, "semantic_logger/formatters/one_line"
- autoload :Signalfx, "semantic_logger/formatters/signalfx"
- autoload :Syslog, "semantic_logger/formatters/syslog"
- autoload :Fluentd, "semantic_logger/formatters/fluentd"
- autoload :Logfmt, "semantic_logger/formatters/logfmt"
+ autoload :Base, "semantic_logger/formatters/base"
+ autoload :Color, "semantic_logger/formatters/color"
+ autoload :Default, "semantic_logger/formatters/default"
+ autoload :Json, "semantic_logger/formatters/json"
+ autoload :Raw, "semantic_logger/formatters/raw"
+ autoload :OneLine, "semantic_logger/formatters/one_line"
+ autoload :Signalfx, "semantic_logger/formatters/signalfx"
+ autoload :Syslog, "semantic_logger/formatters/syslog"
+ autoload :Fluentd, "semantic_logger/formatters/fluentd"
+ autoload :Logfmt, "semantic_logger/formatters/logfmt"
+ autoload :SyslogCee, "semantic_logger/formatters/syslog_cee"

  # Return formatter that responds to call.
  #
@@ -140,14 +140,25 @@ module SemanticLogger
  end

  # Extract the arguments from a Hash Payload
- def extract_arguments(payload)
+ def extract_arguments(payload, message = nil)
  raise(ArgumentError, "payload must be a Hash") unless payload.is_a?(Hash)

+ message = nil if message == ""
  return payload if payload.key?(:payload)

- args = {}
- payload.each_key { |key| args[key] = payload.delete(key) if NON_PAYLOAD_KEYS.include?(key) }
- args[:payload] = payload unless payload.empty?
+ new_payload = {}
+ args = {}
+ payload.each_pair do |key, value|
+ # Supplied message takes precedence
+ if (key == :message) && !message.nil?
+ new_payload[key] = value
+ next
+ end
+
+ NON_PAYLOAD_KEYS.include?(key) ? args[key] = value : new_payload[key] = value
+ end
+ args[:payload] = new_payload unless new_payload.empty?
+ args[:message] = message if message
  args
  end
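A sketch of how the message-plus-hash form now splits arguments, assuming :duration and :metric are among the recognized NON_PAYLOAD_KEYS; the field names and values are placeholders:

    require "semantic_logger"

    logger = SemanticLogger["Orders"]

    # :duration and :metric are lifted onto the log record itself; :order_id stays in the payload.
    # The explicitly supplied message wins, so the payload's :message key remains payload data.
    logger.info("Order created", duration: 23.5, metric: "orders/create", order_id: 1234, message: "kept in payload")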

@@ -253,9 +253,9 @@ module SemanticLogger
  # When the log_level_signal is raised on this process, the global default log level
  # rotates through the following log levels in the following order, starting
  # from the current global default level:
- # :warn, :info, :debug, :trace
+ # :fatal, :error, :warn, :info, :debug, :trace
  #
- # If the current level is :trace it wraps around back to :warn
+ # If the current level is :trace it wraps around back to :fatal
  #
  # 2. Logging a Ruby thread dump
  #
@@ -279,10 +279,11 @@ module SemanticLogger
  def self.add_signal_handler(log_level_signal = "USR2", thread_dump_signal = "TTIN", gc_log_microseconds = 100_000)
  if log_level_signal
  Signal.trap(log_level_signal) do
- index = default_level == :trace ? LEVELS.find_index(:error) : LEVELS.find_index(default_level)
- new_level = LEVELS[index - 1]
- self["SemanticLogger"].warn "Changed global default log level to #{new_level.inspect}"
+ current_level_index = LEVELS.find_index(default_level)
+ new_level_index = current_level_index == 0 ? LEVELS.size - 1 : current_level_index - 1
+ new_level = LEVELS[new_level_index]
  self.default_level = new_level
+ self["SemanticLogger"].warn "Changed global default log level to #{new_level.inspect}"
  end
  end
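A brief sketch of exercising the rotation from within the same process; the signal names are the defaults shown above:

    require "semantic_logger"

    SemanticLogger.add_signal_handler   # USR2 rotates the level, TTIN logs a thread dump

    # Each USR2 steps the global default level toward more verbose logging, wrapping from
    # :trace back around to :fatal, e.g. :info -> :debug -> :trace -> :fatal -> :error -> ...
    Process.kill("USR2", Process.pid)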

@@ -1,14 +1,28 @@
  module SemanticLogger
- # Thread that submits and processes log requests
+ # The SyncProcessor performs logging in the current thread.
+ #
+ # Appenders are designed to only be used by one thread at a time, so all calls
+ # are mutex protected in case SyncProcessor is being used in a multi-threaded environment.
  class SyncProcessor
- extend Forwardable
+ def add(*args, &block)
+ @mutex.synchronize { @appenders.add(*args, &block) }
+ end
+
+ def log(*args, &block)
+ @mutex.synchronize { @appenders.log(*args, &block) }
+ end
+
+ def flush
+ @mutex.synchronize { @appenders.flush }
+ end
+
+ def close
+ @mutex.synchronize { @appenders.close }
+ end

- # Forward methods that can be called directly
- def_delegator :@appenders, :add
- def_delegator :@appenders, :log
- def_delegator :@appenders, :flush
- def_delegator :@appenders, :close
- def_delegator :@appenders, :reopen
+ def reopen(*args)
+ @mutex.synchronize { @appenders.reopen(*args) }
+ end

  # Allow the internal logger to be overridden from its default of $stderr
  # Can be replaced with another Ruby logger or Rails logger, but never to
@@ -33,6 +47,7 @@ module SemanticLogger
  attr_reader :appenders

  def initialize(appenders = nil)
+ @mutex = Mutex.new
  @appenders = appenders || Appenders.new(self.class.logger.dup)
  end
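The SyncProcessor is normally selected by switching Semantic Logger into synchronous mode; a sketch, assuming the SemanticLogger.sync! entry point is available in this release and is called before any appenders are added:

    require "semantic_logger"

    SemanticLogger.sync!                 # log in the calling thread via SyncProcessor
    SemanticLogger.add_appender(io: $stdout, formatter: :color)

    SemanticLogger["App"].info("written synchronously")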

@@ -1,3 +1,3 @@
  module SemanticLogger
- VERSION = "4.10.0".freeze
+ VERSION = "4.12.0".freeze
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: semantic_logger
  version: !ruby/object:Gem::Version
- version: 4.10.0
+ version: 4.12.0
  platform: ruby
  authors:
  - Reid Morrison
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2022-02-05 00:00:00.000000000 Z
+ date: 2022-10-30 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: concurrent-ruby
@@ -111,7 +111,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements: []
- rubygems_version: 3.3.3
+ rubygems_version: 3.2.33
  signing_key:
  specification_version: 4
  summary: Feature rich logging framework, and replacement for existing Ruby & Rails