semantic_logger 4.6.1 → 4.7.0

This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in the public registry.
Files changed (51)
  1. checksums.yaml +4 -4
  2. data/Rakefile +7 -7
  3. data/lib/semantic_logger.rb +23 -22
  4. data/lib/semantic_logger/appender.rb +32 -33
  5. data/lib/semantic_logger/appender/async.rb +9 -8
  6. data/lib/semantic_logger/appender/async_batch.rb +4 -2
  7. data/lib/semantic_logger/appender/bugsnag.rb +7 -7
  8. data/lib/semantic_logger/appender/elasticsearch.rb +10 -10
  9. data/lib/semantic_logger/appender/elasticsearch_http.rb +4 -4
  10. data/lib/semantic_logger/appender/file.rb +2 -1
  11. data/lib/semantic_logger/appender/graylog.rb +12 -10
  12. data/lib/semantic_logger/appender/honeybadger.rb +3 -3
  13. data/lib/semantic_logger/appender/http.rb +20 -18
  14. data/lib/semantic_logger/appender/kafka.rb +5 -5
  15. data/lib/semantic_logger/appender/mongodb.rb +6 -6
  16. data/lib/semantic_logger/appender/new_relic.rb +2 -2
  17. data/lib/semantic_logger/appender/rabbitmq.rb +5 -5
  18. data/lib/semantic_logger/appender/sentry.rb +7 -7
  19. data/lib/semantic_logger/appender/splunk.rb +5 -5
  20. data/lib/semantic_logger/appender/splunk_http.rb +2 -2
  21. data/lib/semantic_logger/appender/syslog.rb +12 -12
  22. data/lib/semantic_logger/appender/tcp.rb +5 -5
  23. data/lib/semantic_logger/appender/udp.rb +2 -2
  24. data/lib/semantic_logger/appenders.rb +11 -11
  25. data/lib/semantic_logger/base.rb +16 -14
  26. data/lib/semantic_logger/formatters.rb +11 -11
  27. data/lib/semantic_logger/formatters/base.rb +8 -3
  28. data/lib/semantic_logger/formatters/color.rb +1 -1
  29. data/lib/semantic_logger/formatters/default.rb +18 -5
  30. data/lib/semantic_logger/formatters/fluentd.rb +3 -3
  31. data/lib/semantic_logger/formatters/json.rb +1 -1
  32. data/lib/semantic_logger/formatters/raw.rb +30 -6
  33. data/lib/semantic_logger/formatters/signalfx.rb +10 -9
  34. data/lib/semantic_logger/formatters/syslog.rb +3 -3
  35. data/lib/semantic_logger/formatters/syslog_cee.rb +3 -3
  36. data/lib/semantic_logger/jruby/garbage_collection_logger.rb +4 -2
  37. data/lib/semantic_logger/levels.rb +9 -7
  38. data/lib/semantic_logger/log.rb +17 -11
  39. data/lib/semantic_logger/logger.rb +6 -6
  40. data/lib/semantic_logger/metric/new_relic.rb +3 -3
  41. data/lib/semantic_logger/metric/signalfx.rb +3 -3
  42. data/lib/semantic_logger/metric/statsd.rb +7 -7
  43. data/lib/semantic_logger/processor.rb +7 -5
  44. data/lib/semantic_logger/reporters/minitest.rb +4 -4
  45. data/lib/semantic_logger/semantic_logger.rb +25 -10
  46. data/lib/semantic_logger/subscriber.rb +6 -5
  47. data/lib/semantic_logger/sync.rb +12 -0
  48. data/lib/semantic_logger/sync_processor.rb +44 -0
  49. data/lib/semantic_logger/utils.rb +6 -6
  50. data/lib/semantic_logger/version.rb +1 -1
  51. metadata +4 -2
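
The most notable additions in this release are the two new files data/lib/semantic_logger/sync.rb and data/lib/semantic_logger/sync_processor.rb (items 47 and 48), together with the matching SyncProcessor autoload in semantic_logger.rb; most of the remaining churn is a style pass from single to double quotes. A minimal sketch of how the synchronous mode is presumably enabled, assuming SemanticLogger.sync! is the public entry point (the diff lists the new files but not their contents):

require "semantic_logger"

# Assumption: sync! routes logging through the new SyncProcessor so messages are
# written in the calling thread instead of the background appender thread.
SemanticLogger.sync!
SemanticLogger.default_level = :info
SemanticLogger.add_appender(io: $stdout, formatter: :color)

SemanticLogger["MyApp"].info("Written synchronously, in the calling thread")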
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 22f95d79aaeb38befe2e1c242063464be93f05d16eed7cf17540256c1064f7b2
-  data.tar.gz: 2ae7b4e00a4415c88d66afd552c4fa03e68dcf2d354994bfe98621d0e08bcdd0
+  metadata.gz: 0d3b45427759ee16967074aada7eb8dade063d1705172fd8a082de9bb12e1aaf
+  data.tar.gz: 4db9de710bf8b713a61903a16bde0e436f603de1fa017a410cfebaac00f8a245
 SHA512:
-  metadata.gz: bc5bd0ce964de270119c5ef3bcab46c8178eb7120c5ad76afd99c0f5c1acbe4d7011eaf88c4d983e943d1e25f09baffd803121cba5a580be168ac874f09e539e
-  data.tar.gz: 0e64868d3f9363007114fce5c10ced420282edd098816df92e60da755c380434fe42075b5be3ebcb8ca03ff2b8746681a30317e33b88563bd2e3af5579ad9b4f
+  metadata.gz: 9a7e25591485879924fe5f438a6d0266bd5b590dfcd45cc7a6810e3b679a94eef066e23b3ebc1fe51f2f91f3dbbbdbb681ebeef8292188e7917872202cd178b6
+  data.tar.gz: 1e74f739dc87c03a41cd1e1a3df7d1d62c36ea175ee93817b21fc7f3039ddde1c4a25dfff8caecfa20a182f2e7c222c76f52d5dbf7a0dc10dfce009206d55726
data/Rakefile CHANGED
@@ -1,22 +1,22 @@
-require 'rake/clean'
-require 'rake/testtask'
+require "rake/clean"
+require "rake/testtask"
 
-$LOAD_PATH.unshift File.expand_path('lib', __dir__)
-require 'semantic_logger/version'
+$LOAD_PATH.unshift File.expand_path("lib", __dir__)
+require "semantic_logger/version"
 
 task :gem do
-  system 'gem build semantic_logger.gemspec'
+  system "gem build semantic_logger.gemspec"
 end
 
 task publish: :gem do
   system "git tag -a v#{SemanticLogger::VERSION} -m 'Tagging #{SemanticLogger::VERSION}'"
-  system 'git push --tags'
+  system "git push --tags"
   system "gem push semantic_logger-#{SemanticLogger::VERSION}.gem"
   system "rm semantic_logger-#{SemanticLogger::VERSION}.gem"
 end
 
 Rake::TestTask.new(:test) do |t|
-  t.pattern = 'test/**/*_test.rb'
+  t.pattern = "test/**/*_test.rb"
   t.verbose = true
   t.warning = false
 end
data/lib/semantic_logger.rb CHANGED
@@ -1,43 +1,44 @@
-require 'semantic_logger/core_ext/thread'
-require 'semantic_logger/version'
+require "semantic_logger/core_ext/thread"
+require "semantic_logger/version"
 
 # @formatter:off
 module SemanticLogger
-  autoload :AnsiColors, 'semantic_logger/ansi_colors'
-  autoload :Appender, 'semantic_logger/appender'
-  autoload :Appenders, 'semantic_logger/appenders'
-  autoload :Base, 'semantic_logger/base'
-  autoload :DebugAsTraceLogger, 'semantic_logger/debug_as_trace_logger'
-  autoload :Formatters, 'semantic_logger/formatters'
-  autoload :Levels, 'semantic_logger/levels'
-  autoload :Log, 'semantic_logger/log'
-  autoload :Logger, 'semantic_logger/logger'
-  autoload :Loggable, 'semantic_logger/loggable'
-  autoload :Processor, 'semantic_logger/processor'
-  autoload :Subscriber, 'semantic_logger/subscriber'
-  autoload :Utils, 'semantic_logger/utils'
+  autoload :AnsiColors, "semantic_logger/ansi_colors"
+  autoload :Appender, "semantic_logger/appender"
+  autoload :Appenders, "semantic_logger/appenders"
+  autoload :Base, "semantic_logger/base"
+  autoload :DebugAsTraceLogger, "semantic_logger/debug_as_trace_logger"
+  autoload :Formatters, "semantic_logger/formatters"
+  autoload :Levels, "semantic_logger/levels"
+  autoload :Log, "semantic_logger/log"
+  autoload :Logger, "semantic_logger/logger"
+  autoload :Loggable, "semantic_logger/loggable"
+  autoload :Processor, "semantic_logger/processor"
+  autoload :Subscriber, "semantic_logger/subscriber"
+  autoload :SyncProcessor, "semantic_logger/sync_processor"
+  autoload :Utils, "semantic_logger/utils"
 
   module Concerns
-    autoload :Compatibility, 'semantic_logger/concerns/compatibility'
+    autoload :Compatibility, "semantic_logger/concerns/compatibility"
   end
 
   module Metric
-    autoload :NewRelic, 'semantic_logger/metric/new_relic'
-    autoload :Signalfx, 'semantic_logger/metric/signalfx'
-    autoload :Statsd, 'semantic_logger/metric/statsd'
+    autoload :NewRelic, "semantic_logger/metric/new_relic"
+    autoload :Signalfx, "semantic_logger/metric/signalfx"
+    autoload :Statsd, "semantic_logger/metric/statsd"
   end
 
   module Reporters
-    autoload :Minitest, 'semantic_logger/reporters/minitest'
+    autoload :Minitest, "semantic_logger/reporters/minitest"
   end
 
   if defined?(JRuby)
     module JRuby
-      autoload :GarbageCollectionLogger, 'semantic_logger/jruby/garbage_collection_logger'
+      autoload :GarbageCollectionLogger, "semantic_logger/jruby/garbage_collection_logger"
     end
   end
 end
-require 'semantic_logger/semantic_logger'
+require "semantic_logger/semantic_logger"
 # @formatter:on
 
 # Flush all appenders at exit, waiting for outstanding messages on the queue
data/lib/semantic_logger/appender.rb CHANGED
@@ -1,47 +1,46 @@
 module SemanticLogger
   module Appender
     # @formatter:off
-    autoload :Async, 'semantic_logger/appender/async'
-    autoload :AsyncBatch, 'semantic_logger/appender/async_batch'
-    autoload :Bugsnag, 'semantic_logger/appender/bugsnag'
-    autoload :Elasticsearch, 'semantic_logger/appender/elasticsearch'
-    autoload :ElasticsearchHttp, 'semantic_logger/appender/elasticsearch_http'
-    autoload :File, 'semantic_logger/appender/file'
-    autoload :Graylog, 'semantic_logger/appender/graylog'
-    autoload :Honeybadger, 'semantic_logger/appender/honeybadger'
-    autoload :Kafka, 'semantic_logger/appender/kafka'
-    autoload :Sentry, 'semantic_logger/appender/sentry'
-    autoload :Http, 'semantic_logger/appender/http'
-    autoload :MongoDB, 'semantic_logger/appender/mongodb'
-    autoload :NewRelic, 'semantic_logger/appender/new_relic'
-    autoload :Rabbitmq, 'semantic_logger/appender/rabbitmq'
-    autoload :Splunk, 'semantic_logger/appender/splunk'
-    autoload :SplunkHttp, 'semantic_logger/appender/splunk_http'
-    autoload :Syslog, 'semantic_logger/appender/syslog'
-    autoload :Tcp, 'semantic_logger/appender/tcp'
-    autoload :Udp, 'semantic_logger/appender/udp'
-    autoload :Wrapper, 'semantic_logger/appender/wrapper'
+    autoload :Async, "semantic_logger/appender/async"
+    autoload :AsyncBatch, "semantic_logger/appender/async_batch"
+    autoload :Bugsnag, "semantic_logger/appender/bugsnag"
+    autoload :Elasticsearch, "semantic_logger/appender/elasticsearch"
+    autoload :ElasticsearchHttp, "semantic_logger/appender/elasticsearch_http"
+    autoload :File, "semantic_logger/appender/file"
+    autoload :Graylog, "semantic_logger/appender/graylog"
+    autoload :Honeybadger, "semantic_logger/appender/honeybadger"
+    autoload :Kafka, "semantic_logger/appender/kafka"
+    autoload :Sentry, "semantic_logger/appender/sentry"
+    autoload :Http, "semantic_logger/appender/http"
+    autoload :MongoDB, "semantic_logger/appender/mongodb"
+    autoload :NewRelic, "semantic_logger/appender/new_relic"
+    autoload :Rabbitmq, "semantic_logger/appender/rabbitmq"
+    autoload :Splunk, "semantic_logger/appender/splunk"
+    autoload :SplunkHttp, "semantic_logger/appender/splunk_http"
+    autoload :Syslog, "semantic_logger/appender/syslog"
+    autoload :Tcp, "semantic_logger/appender/tcp"
+    autoload :Udp, "semantic_logger/appender/udp"
+    autoload :Wrapper, "semantic_logger/appender/wrapper"
     # @formatter:on
 
     # Returns [SemanticLogger::Subscriber] appender for the supplied options
     def self.factory(async: false, batch: nil,
-                     max_queue_size: 10_000, lag_check_interval: 1_000, lag_threshold_s: 30,
-                     batch_size: 300, batch_seconds: 5,
-                     **args,
-                     &block
-    )
+                     max_queue_size: 10_000, lag_check_interval: 1_000, lag_threshold_s: 30,
+                     batch_size: 300, batch_seconds: 5,
+                     **args,
+                     &block)
      appender = build(**args, &block)
 
      # If appender implements #batch, then it should use the batch proxy by default.
-      batch = true if batch.nil? && appender.respond_to?(:batch)
+      batch = true if batch.nil? && appender.respond_to?(:batch)
 
      if batch == true
        Appender::AsyncBatch.new(
-          appender: appender,
-          max_queue_size: max_queue_size,
-          lag_threshold_s: lag_threshold_s,
-          batch_size: batch_size,
-          batch_seconds: batch_seconds
+          appender: appender,
+          max_queue_size: max_queue_size,
+          lag_threshold_s: lag_threshold_s,
+          batch_size: batch_size,
+          batch_seconds: batch_seconds
        )
      elsif async == true
        Appender::Async.new(
@@ -71,14 +70,14 @@ module SemanticLogger
        end
      elsif metric
        if metric.is_a?(Symbol)
-          SemanticLogger::Utils.constantize_symbol(metric, 'SemanticLogger::Metric').new(**args)
+          SemanticLogger::Utils.constantize_symbol(metric, "SemanticLogger::Metric").new(**args)
        elsif metric.is_a?(Subscriber)
          metric
        else
          raise(ArgumentError, "Parameter :metric must be either a Symbol or an object derived from SemanticLogger::Subscriber, not: #{appender.inspect}")
        end
      else
-        raise(ArgumentError, 'To create an appender it must supply one of the following: :io, :file_name, :appender, :metric, or :logger')
+        raise(ArgumentError, "To create an appender it must supply one of the following: :io, :file_name, :appender, :metric, or :logger")
      end
    end
 
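The factory signature above mostly changes shape (the trailing &block parenthesis and argument alignment), but it is a convenient summary of the async and batching knobs. A hedged example of passing them in, assuming SemanticLogger.add_appender forwards these options to this factory (add_appender itself is not part of this diff):

SemanticLogger.add_appender(
  io:              $stdout,
  async:           true,     # wrap the appender in Appender::Async
  max_queue_size:  10_000,   # defaults taken from the signature above
  lag_threshold_s: 30
)
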
data/lib/semantic_logger/appender/async.rb CHANGED
@@ -1,4 +1,4 @@
-require 'forwardable'
+require "forwardable"
 
 module SemanticLogger
   module Appender
@@ -73,6 +73,7 @@ module SemanticLogger
      # Starts the worker thread if not running.
      def thread
        return @thread if @thread&.alive?
+
        @thread = Thread.new { process }
      end
 
@@ -115,21 +116,21 @@ module SemanticLogger
        # This thread is designed to never go down unless the main thread terminates
        # or the appender is closed.
        Thread.current.name = logger.name
-        logger.trace 'Async: Appender thread active'
+        logger.trace "Async: Appender thread active"
        begin
          process_messages
-        rescue StandardError => exception
+        rescue StandardError => e
          # This block may be called after the file handles have been released by Ruby
          begin
-            logger.error('Async: Restarting due to exception', exception)
+            logger.error("Async: Restarting due to exception", e)
          rescue StandardError
            nil
          end
          retry
-        rescue Exception => exception
+        rescue Exception => e
          # This block may be called after the file handles have been released by Ruby
          begin
-            logger.error('Async: Stopping due to fatal exception', exception)
+            logger.error("Async: Stopping due to fatal exception", e)
          rescue StandardError
            nil
          end
@@ -137,7 +138,7 @@ module SemanticLogger
        @thread = nil
        # This block may be called after the file handles have been released by Ruby
        begin
-          logger.trace('Async: Thread has stopped')
+          logger.trace("Async: Thread has stopped")
        rescue StandardError
          nil
        end
@@ -159,7 +160,7 @@ module SemanticLogger
            break unless process_message(message)
          end
        end
-        logger.trace 'Async: Queue Closed'
+        logger.trace "Async: Queue Closed"
      end
 
      # Returns false when message processing should be stopped
data/lib/semantic_logger/appender/async_batch.rb CHANGED
@@ -1,4 +1,4 @@
-require 'concurrent'
+require "concurrent"
 
 module SemanticLogger
   module Appender
@@ -42,7 +42,9 @@ module SemanticLogger
        lag_threshold_s: lag_threshold_s
      )
 
-      raise(ArgumentError, "#{appender.class.name} does not support batching. It must implement #batch") unless appender.respond_to?(:batch)
+      unless appender.respond_to?(:batch)
+        raise(ArgumentError, "#{appender.class.name} does not support batching. It must implement #batch")
+      end
    end
 
    # Add log message for processing.
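
The rewritten guard above makes the constraint explicit: batching only works when the appender implements #batch. A hedged example of opting in, with the batch sizing defaults taken from the factory signature earlier (again assuming add_appender forwards these options unchanged):

SemanticLogger.add_appender(
  appender:      :elasticsearch,
  url:           "http://localhost:9200",
  batch:         true,
  batch_size:    300,  # flush once 300 messages are queued
  batch_seconds: 5     # or after 5 seconds, whichever comes first
)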
data/lib/semantic_logger/appender/bugsnag.rb CHANGED
@@ -1,7 +1,7 @@
 begin
-  require 'bugsnag'
+  require "bugsnag"
 rescue LoadError
-  raise LoadError.new('Gem bugsnag is required for logging purposes. Please add the gem "bugsnag" to your Gemfile.')
+  raise LoadError, 'Gem bugsnag is required for logging purposes. Please add the gem "bugsnag" to your Gemfile.'
 end
 
 # Send log messages to Bugsnag
@@ -30,7 +30,7 @@ module SemanticLogger
    #   Proc: Only include log messages where the supplied Proc returns true
    #         The Proc must return true or false.
    def initialize(level: :error, **args, &block)
-      raise 'Bugsnag only supports :info, :warn, or :error log levels' unless %i[info warn error fatal].include?(level)
+      raise "Bugsnag only supports :info, :warn, or :error log levels" unless %i[info warn error fatal].include?(level)
 
      # Replace the Bugsnag logger so that we can identify its log messages and not forward them to Bugsnag
      ::Bugsnag.configure { |config| config.logger = SemanticLogger[Bugsnag] }
@@ -51,7 +51,7 @@ module SemanticLogger
    # Send an error notification to Bugsnag
    def log(log)
      # Ignore logs coming from Bugsnag itself
-      return false if log.name == 'Bugsnag'
+      return false if log.name == "Bugsnag"
 
      # Send error messages as Runtime exceptions
      exception =
@@ -77,11 +77,11 @@ module SemanticLogger
    def log_level(log)
      case log.level
      when :error, :fatal
-        'error'
+        "error"
      when :warn
-        'warning'
+        "warning"
      else
-        'info'
+        "info"
      end
    end
  end
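
Apart from the quote style, the Bugsnag appender is unchanged: it still only accepts :info, :warn, :error, or :fatal, and maps those onto Bugsnag's severity strings in log_level above. For reference, typical wiring looks like:

SemanticLogger.add_appender(appender: :bugsnag, level: :error)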
data/lib/semantic_logger/appender/elasticsearch.rb CHANGED
@@ -1,10 +1,10 @@
 begin
-  require 'elasticsearch'
+  require "elasticsearch"
 rescue LoadError
-  raise LoadError.new('Gem elasticsearch is required for logging to Elasticsearch. Please add the gem "elasticsearch" to your Gemfile.')
+  raise LoadError, 'Gem elasticsearch is required for logging to Elasticsearch. Please add the gem "elasticsearch" to your Gemfile.'
 end
 
-require 'date'
+require "date"
 
 # Forward all log messages to Elasticsearch.
 #
@@ -123,10 +123,10 @@ module SemanticLogger
    #   send_get_body_as [String]
    #     Specify the HTTP method to use for GET requests with a body.
    #     Default: 'GET'
-    def initialize(url: 'http://localhost:9200',
-                   index: 'semantic_logger',
-                   date_pattern: '%Y.%m.%d',
-                   type: 'log',
+    def initialize(url: "http://localhost:9200",
+                   index: "semantic_logger",
+                   date_pattern: "%Y.%m.%d",
+                   type: "log",
                    level: nil,
                    formatter: nil,
                    filter: nil,
@@ -174,15 +174,15 @@ module SemanticLogger
 
    def write_to_elasticsearch(messages)
      bulk_result = @client.bulk(body: messages)
-      return unless bulk_result['errors']
+      return unless bulk_result["errors"]
 
-      failed = bulk_result['items'].reject { |x| x['status'] == 201 }
+      failed = bulk_result["items"].reject { |x| x["status"] == 201 }
      logger.error("ElasticSearch: Write failed. Messages discarded. : #{failed}")
    end
 
    def bulk_index(log)
      expanded_index_name = log.time.strftime("#{index}-#{date_pattern}")
-      {'index' => {'_index' => expanded_index_name, '_type' => type}}
+      {"index" => {"_index" => expanded_index_name, "_type" => type}}
    end
 
    def default_formatter
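
The initializer defaults shown above combine into daily index names such as semantic_logger-2020.03.10, since bulk_index runs "#{index}-#{date_pattern}" through strftime. A hedged example that overrides only the index name:

SemanticLogger.add_appender(
  appender:     :elasticsearch,
  url:          "http://localhost:9200",
  index:        "my_app",
  date_pattern: "%Y.%m.%d"
)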
data/lib/semantic_logger/appender/elasticsearch_http.rb CHANGED
@@ -1,4 +1,4 @@
-require 'date'
+require "date"
 # Forward all log messages to Elasticsearch one at a time via a HTTP post.
 #
 # Note:
@@ -50,9 +50,9 @@ module SemanticLogger
    #   application: [String]
    #     Name of this application to appear in log messages.
    #     Default: SemanticLogger.application
-    def initialize(index: 'semantic_logger',
-                   type: 'log',
-                   url: 'http://localhost:9200',
+    def initialize(index: "semantic_logger",
+                   type: "log",
+                   url: "http://localhost:9200",
                    **http_args,
                    &block)
 
data/lib/semantic_logger/appender/file.rb CHANGED
@@ -64,7 +64,8 @@ module SemanticLogger
        @log = io
      else
        @file_name = file_name
-        raise 'SemanticLogging::Appender::File missing mandatory parameter :file_name or :io' unless file_name
+        raise "SemanticLogging::Appender::File missing mandatory parameter :file_name or :io" unless file_name
+
        reopen
      end
 
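As the guard above states, the File appender needs either :file_name or :io. The usual form, for reference:

SemanticLogger.add_appender(file_name: "log/development.log")
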
data/lib/semantic_logger/appender/graylog.rb CHANGED
@@ -1,8 +1,8 @@
-require 'uri'
+require "uri"
 begin
-  require 'gelf'
+  require "gelf"
 rescue LoadError
-  raise LoadError.new('Gem gelf is required for logging to Graylog. Please add the gem "gelf" to your Gemfile.')
+  raise LoadError, 'Gem gelf is required for logging to Graylog. Please add the gem "gelf" to your Gemfile.'
 end
 
 # Forward log entries to a Graylog server.
@@ -82,8 +82,8 @@ module SemanticLogger
    #   application: [String]
    #     Name of this application to appear in log messages.
    #     Default: SemanticLogger.application
-    def initialize(url: 'udp://localhost:12201',
-                   max_size: 'WAN',
+    def initialize(url: "udp://localhost:12201",
+                   max_size: "WAN",
                    gelf_options: {},
                    level_map: LevelMap.new,
                    **args,
@@ -105,7 +105,9 @@ module SemanticLogger
      @port = uri.port
      @protocol = uri.scheme.to_sym
 
-      raise(ArgumentError, "Invalid protocol value: #{@protocol}. Must be :udp or :tcp") unless %i[udp tcp].include?(@protocol)
+      unless %i[udp tcp].include?(@protocol)
+        raise(ArgumentError, "Invalid protocol value: #{@protocol}. Must be :udp or :tcp")
+      end
 
      gelf_options[:protocol] ||= (@protocol == :tcp ? GELF::Protocol::TCP : GELF::Protocol::UDP)
      gelf_options[:facility] ||= application
@@ -120,11 +122,11 @@ module SemanticLogger
 
      h[:short_message] = h.delete(:message)
      if h[:short_message].nil?
-        h[:short_message] = log.exception.nil? ? '<no-exception-message>' : log.exception.message
+        h[:short_message] = log.exception.nil? ? "<no-exception-message>" : log.exception.message
      end
-      h[:level] = logger.level_map[log.level]
-      h[:level_str] = log.level.to_s
-      h[:duration_str] = h.delete(:duration)
+      h[:level] = logger.level_map[log.level]
+      h[:level_str] = log.level.to_s
+      h[:duration_str] = h.delete(:duration)
      h
    end
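
The Graylog changes are again mostly cosmetic: the protocol check is split over multiple lines, and :short_message falls back to the exception message when no message is given. A hedged example using the initializer defaults shown above:

SemanticLogger.add_appender(
  appender: :graylog,
  url:      "udp://localhost:12201",
  max_size: "WAN"
)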