logstash-logger-p 0.26.1

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between those versions as they appear in their respective public registries.
Files changed (94)
  1. checksums.yaml +7 -0
  2. data/.gitignore +21 -0
  3. data/.rspec +3 -0
  4. data/.rubocop.yml +1156 -0
  5. data/.travis.yml +26 -0
  6. data/Appraisals +23 -0
  7. data/CHANGELOG.md +199 -0
  8. data/Gemfile +6 -0
  9. data/LICENSE.txt +22 -0
  10. data/README.md +880 -0
  11. data/Rakefile +23 -0
  12. data/gemfiles/rails_3.2.gemfile +9 -0
  13. data/gemfiles/rails_4.0.gemfile +9 -0
  14. data/gemfiles/rails_4.1.gemfile +9 -0
  15. data/gemfiles/rails_4.2.gemfile +9 -0
  16. data/gemfiles/rails_5.0.gemfile +9 -0
  17. data/gemfiles/rails_5.1.gemfile +9 -0
  18. data/lib/logstash-logger/buffer.rb +336 -0
  19. data/lib/logstash-logger/configuration.rb +29 -0
  20. data/lib/logstash-logger/device/aws_stream.rb +94 -0
  21. data/lib/logstash-logger/device/balancer.rb +40 -0
  22. data/lib/logstash-logger/device/base.rb +73 -0
  23. data/lib/logstash-logger/device/connectable.rb +131 -0
  24. data/lib/logstash-logger/device/file.rb +23 -0
  25. data/lib/logstash-logger/device/firehose.rb +42 -0
  26. data/lib/logstash-logger/device/io.rb +11 -0
  27. data/lib/logstash-logger/device/kafka.rb +57 -0
  28. data/lib/logstash-logger/device/kinesis.rb +44 -0
  29. data/lib/logstash-logger/device/multi_delegator.rb +36 -0
  30. data/lib/logstash-logger/device/redis.rb +76 -0
  31. data/lib/logstash-logger/device/socket.rb +21 -0
  32. data/lib/logstash-logger/device/stderr.rb +13 -0
  33. data/lib/logstash-logger/device/stdout.rb +14 -0
  34. data/lib/logstash-logger/device/tcp.rb +86 -0
  35. data/lib/logstash-logger/device/udp.rb +12 -0
  36. data/lib/logstash-logger/device/unix.rb +18 -0
  37. data/lib/logstash-logger/device.rb +67 -0
  38. data/lib/logstash-logger/formatter/base.rb +73 -0
  39. data/lib/logstash-logger/formatter/cee.rb +11 -0
  40. data/lib/logstash-logger/formatter/cee_syslog.rb +22 -0
  41. data/lib/logstash-logger/formatter/json.rb +11 -0
  42. data/lib/logstash-logger/formatter/json_lines.rb +11 -0
  43. data/lib/logstash-logger/formatter/logstash_event.rb +6 -0
  44. data/lib/logstash-logger/formatter.rb +51 -0
  45. data/lib/logstash-logger/logger.rb +106 -0
  46. data/lib/logstash-logger/multi_logger.rb +153 -0
  47. data/lib/logstash-logger/railtie.rb +51 -0
  48. data/lib/logstash-logger/silenced_logging.rb +83 -0
  49. data/lib/logstash-logger/tagged_logging.rb +40 -0
  50. data/lib/logstash-logger/version.rb +3 -0
  51. data/lib/logstash-logger.rb +11 -0
  52. data/logstash-logger.gemspec +39 -0
  53. data/samples/example.crt +16 -0
  54. data/samples/example.key +15 -0
  55. data/samples/file.conf +11 -0
  56. data/samples/redis.conf +12 -0
  57. data/samples/ssl.conf +15 -0
  58. data/samples/syslog.conf +10 -0
  59. data/samples/tcp.conf +11 -0
  60. data/samples/udp.conf +11 -0
  61. data/samples/unix.conf +11 -0
  62. data/spec/configuration_spec.rb +27 -0
  63. data/spec/constructor_spec.rb +30 -0
  64. data/spec/device/balancer_spec.rb +31 -0
  65. data/spec/device/connectable_spec.rb +74 -0
  66. data/spec/device/file_spec.rb +15 -0
  67. data/spec/device/firehose_spec.rb +41 -0
  68. data/spec/device/io_spec.rb +13 -0
  69. data/spec/device/kafka_spec.rb +32 -0
  70. data/spec/device/kinesis_spec.rb +41 -0
  71. data/spec/device/multi_delegator_spec.rb +31 -0
  72. data/spec/device/redis_spec.rb +52 -0
  73. data/spec/device/socket_spec.rb +15 -0
  74. data/spec/device/stderr_spec.rb +16 -0
  75. data/spec/device/stdout_spec.rb +31 -0
  76. data/spec/device/tcp_spec.rb +120 -0
  77. data/spec/device/udp_spec.rb +9 -0
  78. data/spec/device/unix_spec.rb +23 -0
  79. data/spec/device_spec.rb +97 -0
  80. data/spec/formatter/base_spec.rb +125 -0
  81. data/spec/formatter/cee_spec.rb +15 -0
  82. data/spec/formatter/cee_syslog_spec.rb +43 -0
  83. data/spec/formatter/json_lines_spec.rb +14 -0
  84. data/spec/formatter/json_spec.rb +10 -0
  85. data/spec/formatter/logstash_event_spec.rb +10 -0
  86. data/spec/formatter_spec.rb +79 -0
  87. data/spec/logger_spec.rb +128 -0
  88. data/spec/multi_logger_spec.rb +59 -0
  89. data/spec/rails_spec.rb +91 -0
  90. data/spec/silenced_logging_spec.rb +31 -0
  91. data/spec/spec_helper.rb +111 -0
  92. data/spec/syslog_spec.rb +32 -0
  93. data/spec/tagged_logging_spec.rb +32 -0
  94. metadata +335 -0
data/Rakefile ADDED
@@ -0,0 +1,23 @@
+ #!/usr/bin/env rake
+ require "bundler/gem_tasks"
+ require "bundler/setup"
+ require 'rspec/core/rake_task'
+
+ desc "Run all specs with default options"
+ RSpec::Core::RakeTask.new(:spec) do |t|
+   t.verbose = false
+ end
+
+ desc "Run specs with TCP socket"
+ RSpec::Core::RakeTask.new("spec:tcp") do |t|
+   ENV['TYPE'] = 'tcp'
+   t.verbose = false
+ end
+
+ desc "Run specs with UDP socket"
+ RSpec::Core::RakeTask.new("spec:udp") do |t|
+   ENV['TYPE'] = 'udp'
+   t.verbose = false
+ end
+
+ task :default => ["spec:tcp", "spec:udp"]
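The TYPE environment variable set by the spec:tcp and spec:udp tasks is presumably read by the spec suite (see data/spec/spec_helper.rb in the file list) to choose the socket type under test; the default task chains both, so a plain rake run exercises the suite once over TCP and once over UDP, while rake spec runs it with the default options.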
data/gemfiles/rails_3.2.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", :require => false, :group => :test
+ gem "codeclimate-test-reporter", :group => :test, :require => nil
+ gem "rails", "~> 3.2.18"
+
+ gemspec :path => "../"
data/gemfiles/rails_4.0.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", require: false, group: :test
+ gem "codeclimate-test-reporter", group: :test, require: nil
+ gem "rails", "~> 4.0.0"
+
+ gemspec path: "../"
data/gemfiles/rails_4.1.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", require: false, group: :test
+ gem "codeclimate-test-reporter", group: :test, require: nil
+ gem "rails", "~> 4.1.1"
+
+ gemspec path: "../"
data/gemfiles/rails_4.2.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", require: false, group: :test
+ gem "codeclimate-test-reporter", group: :test, require: nil
+ gem "rails", "~> 4.2.0"
+
+ gemspec path: "../"
data/gemfiles/rails_5.0.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", require: false, group: :test
+ gem "codeclimate-test-reporter", group: :test, require: nil
+ gem "rails", "~> 5.0.0"
+
+ gemspec path: "../"
data/gemfiles/rails_5.1.gemfile ADDED
@@ -0,0 +1,9 @@
+ # This file was generated by Appraisal
+
+ source "https://rubygems.org"
+
+ gem "codecov", require: false, group: :test
+ gem "codeclimate-test-reporter", group: :test, require: nil
+ gem "rails", "~> 5.1.0"
+
+ gemspec path: "../"
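These six gemfiles are generated from data/Appraisals by the Appraisal gem, one per supported Rails series. A typical workflow (standard Appraisal usage, not something recorded in this diff) is to run bundle exec appraisal install to resolve them, then point Bundler at one of them, e.g. BUNDLE_GEMFILE=gemfiles/rails_5.1.gemfile bundle exec rake spec, to test against that Rails version.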
data/lib/logstash-logger/buffer.rb ADDED
@@ -0,0 +1,336 @@
+ # Forked from https://github.com/jordansissel/ruby-stud/blob/master/lib/stud/buffer.rb
+
+ module LogStashLogger
+
+   # @author {Alex Dean}[http://github.com/alexdean]
+   #
+   # Implements a generic framework for accepting events which are later flushed
+   # in batches. Flushing occurs whenever +:max_items+ or +:max_interval+ (seconds)
+   # has been reached.
+   #
+   # Including class must implement +flush+, which will be called with all
+   # accumulated items either when the output buffer fills (+:max_items+) or
+   # when a fixed amount of time (+:max_interval+) passes.
+   #
+   # == batch_receive and flush
+   # General receive/flush can be implemented in one of two ways.
+   #
+   # === batch_receive(event) / flush(events)
+   # +flush+ will receive an array of events which were passed to +buffer_receive+.
+   #
+   #   batch_receive('one')
+   #   batch_receive('two')
+   #
+   # will cause a flush invocation like
+   #
+   #   flush(['one', 'two'])
+   #
+   # === batch_receive(event, group) / flush(events, group)
+   # flush() will receive an array of events, plus a grouping key.
+   #
+   #   batch_receive('one', :server => 'a')
+   #   batch_receive('two', :server => 'b')
+   #   batch_receive('three', :server => 'a')
+   #   batch_receive('four', :server => 'b')
+   #
+   # will result in the following flush calls
+   #
+   #   flush(['one', 'three'], {:server => 'a'})
+   #   flush(['two', 'four'], {:server => 'b'})
+   #
+   # Grouping keys can be anything which are valid Hash keys. (They don't have to
+   # be hashes themselves.) Strings or Fixnums work fine. Use anything which you'd
+   # like to receive in your +flush+ method to help enable different handling for
+   # various groups of events.
+   #
+   # == on_flush_error
+   # Including class may implement +on_flush_error+, which will be called with an
+   # Exception instance whenever buffer_flush encounters an error.
+   #
+   # * +buffer_flush+ will automatically re-try failed flushes, so +on_flush_error+
+   #   should not try to implement retry behavior.
+   # * Exceptions occurring within +on_flush_error+ are not handled by
+   #   +buffer_flush+.
+   #
+   # == on_full_buffer_receive
+   # Including class may implement +on_full_buffer_receive+, which will be called
+   # whenever +buffer_receive+ is called while the buffer is full.
+   #
+   # +on_full_buffer_receive+ will receive a Hash like <code>{:pending => 30,
+   # :outgoing => 20}</code> which describes the internal state of the module at
+   # the moment.
+   #
+   # == final flush
+   # Including class should call <code>buffer_flush(:final => true)</code>
+   # during a teardown/shutdown routine (after the last call to buffer_receive)
+   # to ensure that all accumulated messages are flushed.
+   module Buffer
+
+     public
+     # Initialize the buffer.
+     #
+     # Call directly from your constructor if you wish to set some non-default
+     # options. Otherwise buffer_initialize will be called automatically during the
+     # first buffer_receive call.
+     #
+     # Options:
+     # * :max_items, Max number of items to buffer before flushing. Default 50.
+     # * :max_interval, Max number of seconds to wait between flushes. Default 5.
+     # * :logger, A logger to write log messages to. No default. Optional.
+     # * :autoflush, Whether to immediately flush all inbound messages. Default true.
+     # * :drop_messages_on_flush_error, Whether to drop messages when there is a flush error. Default false.
+     # * :drop_messages_on_full_buffer, Whether to drop messages when the buffer is full. Default false.
+     #
+     # @param [Hash] options
+     def buffer_initialize(options={})
+       if ! self.class.method_defined?(:flush)
+         raise ArgumentError, "Any class including Stud::Buffer must define a flush() method."
+       end
+
+       @buffer_config = {
+         :max_items => options[:max_items] || 50,
+         :max_interval => options[:max_interval] || 5,
+         :logger => options[:logger] || nil,
+         :autoflush => options.fetch(:autoflush, true),
+         :has_on_flush_error => self.class.method_defined?(:on_flush_error),
+         :has_on_full_buffer_receive => self.class.method_defined?(:on_full_buffer_receive),
+         :drop_messages_on_flush_error => options.fetch(:drop_messages_on_flush_error, false),
+         :drop_messages_on_full_buffer => options.fetch(:drop_messages_on_full_buffer, false),
+         :flush_at_exit => options.fetch(:flush_at_exit, false)
+       }
+
+       if @buffer_config[:flush_at_exit]
+         at_exit { buffer_flush(final: true) }
+       end
+
+       reset_buffer
+     end
+
+     def reset_buffer
+       reset_flush_timer_thread
+
+       @buffer_state = {
+         # items accepted from including class
+         :pending_items => {},
+         :pending_count => 0,
+
+         # guard access to pending_items & pending_count
+         :pending_mutex => pending_mutex,
+
+         # items which are currently being flushed
+         :outgoing_items => {},
+         :outgoing_count => 0,
+
+         # ensure only 1 flush is operating at once
+         :flush_mutex => flush_mutex,
+
+         # data for timed flushes
+         :last_flush => Time.now,
+         :timer => flush_timer_thread
+       }
+
+       # events we've accumulated
+       buffer_clear_pending
+     end
+
+     # Determine if +:max_items+ has been reached.
+     #
+     # buffer_receive calls will block while <code>buffer_full? == true</code>.
+     #
+     # @return [bool] Is the buffer full?
+     def buffer_full?
+       @buffer_state[:pending_count] + @buffer_state[:outgoing_count] >= @buffer_config[:max_items]
+     end
+
+     # Save an event for later delivery
+     #
+     # Events are grouped by the (optional) group parameter you provide.
+     # Groups of events, plus the group name, are later passed to +flush+.
+     #
+     # This call will block if +:max_items+ has been reached.
+     #
+     # @see Stud::Buffer The overview has more information on grouping and flushing.
+     #
+     # @param event An item to buffer for flushing later.
+     # @param group Optional grouping key. All events with the same key will be
+     #   passed to +flush+ together, along with the grouping key itself.
+     def buffer_receive(event, group=nil)
+       buffer_initialize if ! @buffer_state
+
+       # block if we've accumulated too many events
+       while buffer_full? do
+         on_full_buffer_receive(
+           :pending => @buffer_state[:pending_count],
+           :outgoing => @buffer_state[:outgoing_count]
+         ) if @buffer_config[:has_on_full_buffer_receive]
+
+         if @buffer_config[:drop_messages_on_full_buffer]
+           reset_buffer
+         else
+           sleep 0.1
+         end
+       end
+
+       @buffer_state[:pending_mutex].synchronize do
+         @buffer_state[:pending_items][group] << event
+         @buffer_state[:pending_count] += 1
+       end
+
+       if @buffer_config[:autoflush]
+         buffer_flush(force: true)
+       end
+     end
+
+     # Try to flush events.
+     #
+     # Returns immediately if flushing is not necessary/possible at the moment:
+     # * :max_items have not been accumulated
+     # * :max_interval seconds have not elapased since the last flush
+     # * another flush is in progress
+     #
+     # <code>buffer_flush(:force => true)</code> will cause a flush to occur even
+     # if +:max_items+ or +:max_interval+ have not been reached. A forced flush
+     # will still return immediately (without flushing) if another flush is
+     # currently in progress.
+     #
+     # <code>buffer_flush(:final => true)</code> is identical to <code>buffer_flush(:force => true)</code>,
+     # except that if another flush is already in progress, <code>buffer_flush(:final => true)</code>
+     # will block/wait for the other flush to finish before proceeding.
+     #
+     # @param [Hash] options Optional. May be <code>{:force => true}</code> or <code>{:final => true}</code>.
+     # @return [Fixnum] The number of items successfully passed to +flush+.
+     def buffer_flush(options={})
+       force = options[:force] || options[:final]
+       final = options[:final]
+
+       # final flush will wait for lock, so we are sure to flush out all buffered events
+       if options[:final]
+         @buffer_state[:flush_mutex].lock
+       elsif ! @buffer_state[:flush_mutex].try_lock # failed to get lock, another flush already in progress
+         return 0
+       end
+
+       items_flushed = 0
+
+       begin
+         time_since_last_flush = (Time.now - @buffer_state[:last_flush])
+
+         return 0 if @buffer_state[:pending_count] == 0
+         return 0 if (!force) &&
+           (@buffer_state[:pending_count] < @buffer_config[:max_items]) &&
+           (time_since_last_flush < @buffer_config[:max_interval])
+
+         @buffer_state[:pending_mutex].synchronize do
+           @buffer_state[:outgoing_items] = @buffer_state[:pending_items]
+           @buffer_state[:outgoing_count] = @buffer_state[:pending_count]
+           buffer_clear_pending
+         end
+
+         @buffer_config[:logger].debug do
+           debug_output = {
+             :outgoing_count => @buffer_state[:outgoing_count],
+             :time_since_last_flush => time_since_last_flush,
+             :outgoing_events => @buffer_state[:outgoing_items],
+             :batch_timeout => @buffer_config[:max_interval],
+             :force => force,
+             :final => final
+           }
+           "Flushing output: #{debug_output}"
+         end if @buffer_config[:logger]
+
+         @buffer_state[:outgoing_items].each do |group, events|
+           begin
+             if group.nil?
+               flush(events,final)
+             else
+               flush(events, group, final)
+             end
+
+             @buffer_state[:outgoing_items].delete(group)
+             events_size = events.size
+             @buffer_state[:outgoing_count] -= events_size
+             items_flushed += events_size
+             @buffer_state[:last_flush] = Time.now
+
+           rescue => e
+
+             @buffer_config[:logger].warn do
+               warn_output = {
+                 :outgoing_count => @buffer_state[:outgoing_count],
+                 :exception => e.class.name,
+                 :backtrace => e.backtrace
+               }
+               "Failed to flush outgoing items: #{warn_output}"
+             end if @buffer_config[:logger]
+
+             if @buffer_config[:has_on_flush_error]
+               on_flush_error e
+             end
+
+             if @buffer_config[:drop_messages_on_flush_error]
+               reset_buffer
+             else
+               cancel_flush
+             end
+
+           end
+         end
+
+       ensure
+         @buffer_state[:flush_mutex].unlock
+       end
+
+       return items_flushed
+     end
+
+     private
+
+     def pending_mutex
+       @pending_mutex ||= Mutex.new
+     end
+
+     def flush_mutex
+       @flush_mutex ||= Mutex.new
+     end
+
+     def flush_timer_thread
+       @flush_timer_thread ||=
+         Thread.new do
+           loop do
+             sleep(@buffer_config[:max_interval])
+             begin
+               buffer_flush(:force => true)
+             rescue
+             end
+           end
+         end
+     end
+
+     def reset_flush_timer_thread
+       unless @flush_timer_thread.nil?
+         @flush_timer_thread.kill
+         @flush_timer_thread = nil
+       end
+     end
+
+     def buffer_clear_pending
+       @buffer_state[:pending_items] = Hash.new { |h, k| h[k] = [] }
+       @buffer_state[:pending_count] = 0
+     end
+
+     def buffer_clear_outgoing
+       @buffer_state[:outgoing_items] = Hash.new { |h, k| h[k] = [] }
+       @buffer_state[:outgoing_count] = 0
+     end
+
+     def cancel_flush
+       @buffer_state[:pending_mutex].synchronize do
+         @buffer_state[:outgoing_items].each do |group, items|
+           @buffer_state[:pending_items][group].concat items
+         end
+         @buffer_state[:pending_count] += @buffer_state[:outgoing_count]
+       end
+       buffer_clear_outgoing
+     end
+   end
+ end
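The module above only defines the buffering half of the contract; the including class supplies flush. A minimal sketch of that contract, based solely on the documentation in this file (the Sink class and its use of $stdout are illustrative, not part of the gem):

    require 'logstash-logger'

    class Sink
      include LogStashLogger::Buffer

      def initialize
        # Buffer up to 100 items or 2 seconds, whichever comes first.
        buffer_initialize max_items: 100, max_interval: 2, autoflush: false
      end

      def log(message)
        buffer_receive(message)  # accumulate; blocks while the buffer is full
        buffer_flush             # no-op until :max_items or :max_interval is reached
      end

      # Called by Buffer with the accumulated events (no grouping key used here).
      def flush(messages, final = false)
        messages.each { |m| $stdout.puts(m) }
      end

      def shutdown
        buffer_flush(final: true) # waits for any in-flight flush, then drains
      end
    end

    sink = Sink.new
    sink.log('one')
    sink.log('two')
    sink.shutdown # typically results in flush(['one', 'two'], true)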
data/lib/logstash-logger/configuration.rb ADDED
@@ -0,0 +1,29 @@
+ module LogStashLogger
+   class << self
+     def configure(&block)
+       @configuration = Configuration.new(&block) if block_given? || @configuration.nil?
+       @configuration
+     end
+
+     alias :configuration :configure
+   end
+
+   class Configuration
+     attr_accessor :customize_event_block
+     attr_accessor :max_message_size
+     attr_accessor :default_error_logger
+
+     def initialize(*args)
+       @customize_event_block = nil
+       @default_error_logger = Logger.new($stderr)
+
+       yield self if block_given?
+       self
+     end
+
+     def customize_event(&block)
+       @customize_event_block = block
+     end
+
+   end
+ end
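A short sketch of this block-based configuration API; the 'service' field written here is just an example, and the stored customize_event block is applied to each event elsewhere in the gem before it is serialized:

    require 'logstash-logger'

    LogStashLogger.configure do |config|
      # Route device-level errors somewhere other than the $stderr default.
      config.default_error_logger = Logger.new($stdout)

      # Add or rewrite fields on every event before it is formatted.
      config.customize_event do |event|
        event['service'] = 'my-app'
      end
    end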
data/lib/logstash-logger/device/aws_stream.rb ADDED
@@ -0,0 +1,94 @@
+ begin
+   require 'aws-sdk-core'
+ rescue LoadError
+   require 'aws-sdk'
+ end
+
+ module LogStashLogger
+   module Device
+     class AwsStream < Connectable
+
+       DEFAULT_STREAM = 'logstash'
+
+       @stream_class = nil
+       @recoverable_error_codes = []
+
+       class << self
+         attr_accessor :stream_class, :recoverable_error_codes
+       end
+
+       attr_accessor :aws_region, :stream
+
+       def initialize(opts)
+         super
+         @access_key_id = opts[:aws_access_key_id]
+         @secret_access_key = opts[:aws_secret_access_key]
+         @aws_region = opts[:aws_region]
+         @stream = opts[:stream] || DEFAULT_STREAM
+       end
+
+       def transform_message(message)
+         fail NotImplementedError
+       end
+
+       def put_records(records)
+         fail NotImplementedError
+       end
+
+       def is_successful_response(resp)
+         fail NotImplementedError
+       end
+
+       def get_response_records(resp)
+         fail NotImplementedError
+       end
+
+       def connect
+         client_opts = {}
+         client_opts[:credentials] = Aws::Credentials.new(@access_key_id, @secret_access_key) unless @access_key_id == nil || @secret_access_key == nil
+         client_opts[:region] = @aws_region unless @aws_region == nil
+         @io = self.class.stream_class.new(client_opts)
+       end
+
+       def with_connection
+         connect unless connected?
+         yield
+       rescue => e
+         log_error(e)
+         log_warning("giving up")
+         close(flush: false)
+       end
+
+       def write_batch(messages, group = nil)
+         records = messages.map{ |m| transform_message(m) }
+
+         with_connection do
+           resp = put_records(records)
+
+           # Put any failed records back into the buffer
+           if !is_successful_response(resp)
+             get_response_records(resp).each_with_index do |record, index|
+               if self.class.recoverable_error_codes.include?(record.error_code)
+                 log_warning("Failed to post record using #{self.class.stream_class.name} with error: #{record.error_code} #{record.error_message}")
+                 log_warning("Retrying")
+                 write(records[index][:data])
+               elsif !record.error_code.nil? && record.error_code != ''
+                 log_error("Failed to post record using #{self.class.stream_class.name} with error: #{record.error_code} #{record.error_message}")
+               end
+             end
+           end
+         end
+       end
+
+       def write_one(message)
+         write_batch([message])
+       end
+
+       def close!
+         @io = nil
+       end
+
+     end
+   end
+ end
+
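AwsStream is a template: concrete devices supply the stream client class plus the four NotImplementedError methods. The gem's own Kinesis and Firehose devices (also in this diff) do exactly that; the sketch below is an illustrative reimplementation along Kinesis lines, not a copy of either file, and the class name and recoverable error code are assumptions:

    require 'logstash-logger'
    require 'aws-sdk'        # v2; newer SDKs split this into aws-sdk-kinesis
    require 'securerandom'

    module LogStashLogger
      module Device
        class MyKinesisStream < AwsStream
          @stream_class = Aws::Kinesis::Client
          @recoverable_error_codes = ['ProvisionedThroughputExceededException']

          # Shape each formatted log line into the record hash the client expects.
          def transform_message(message)
            { data: message, partition_key: SecureRandom.uuid }
          end

          # Send one batch; the base class re-queues recoverable per-record errors.
          def put_records(records)
            @io.put_records(records: records, stream_name: @stream)
          end

          def is_successful_response(resp)
            resp.failed_record_count == 0
          end

          def get_response_records(resp)
            resp.records
          end
        end
      end
    end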
data/lib/logstash-logger/device/balancer.rb ADDED
@@ -0,0 +1,40 @@
+ module LogStashLogger
+   module Device
+     class Balancer < Base
+       attr_reader :devices
+
+       def initialize(opts)
+         @io = self
+         @devices = create_devices(opts[:outputs])
+         self.class.delegate_to_all(:close, :close!, :flush)
+         self.class.delegate_to_one(:write)
+       end
+
+       private
+
+       def create_devices(opts)
+         opts.map { |device_opts| Device.new(device_opts) }
+       end
+
+       def self.delegate_to_all(*methods)
+         methods.each do |m|
+           define_method(m) do |*args|
+             devices.each { |device| device.send(m, *args) }
+           end
+         end
+       end
+
+       def self.delegate_to_one(*methods)
+         methods.each do |m|
+           define_method(m) do |*args|
+             select_device.send(m, *args)
+           end
+         end
+       end
+
+       def select_device
+         devices.sample
+       end
+     end
+   end
+ end
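A balancer is normally built indirectly through LogStashLogger.new with a list of output configurations, each of which is handed to Device.new via the :outputs option above. A sketch assuming the usual hash-style device options (hosts and ports are arbitrary examples):

    require 'logstash-logger'

    logger = LogStashLogger.new(
      type: :balancer,
      outputs: [
        { type: :udp, host: 'localhost', port: 5228 },
        { type: :tcp, host: 'localhost', port: 5229 }
      ]
    )

    # write is delegated to one randomly sampled output (select_device);
    # close, close! and flush fan out to every output.
    logger.info 'goes to exactly one of the two sockets'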
data/lib/logstash-logger/device/base.rb ADDED
@@ -0,0 +1,73 @@
+ module LogStashLogger
+   module Device
+     class Base
+       attr_reader :io
+       attr_accessor :sync
+       attr_accessor :error_logger
+
+       def initialize(opts={})
+         @sync = opts[:sync]
+         @error_logger = opts.fetch(:error_logger, LogStashLogger.configuration.default_error_logger)
+       end
+
+       def to_io
+         @io
+       end
+
+       def write(message)
+         write_one(message) unless message.nil?
+       end
+
+       def write_one(message)
+         @io.write(message)
+       rescue => e
+         if unrecoverable_error?(e)
+           log_error(e)
+           log_warning("unrecoverable error, aborting write")
+         else
+           raise
+         end
+       end
+
+       def write_batch(messages, group = nil)
+         messages.each do |message|
+           write_one(message)
+         end
+       end
+
+       def flush
+         @io && @io.flush
+       end
+
+       def reset
+         close
+       end
+
+       def close(opts = {})
+         close!
+       rescue => e
+         log_error(e)
+       end
+
+       def close!
+         @io && @io.close
+       ensure
+         @io = nil
+       end
+
+       def unrecoverable_error?(e)
+         e.is_a?(JSON::GeneratorError)
+       end
+
+       private
+
+       def log_error(e)
+         error_logger.error "[#{self.class}] #{e.class} - #{e.message}"
+       end
+
+       def log_warning(message)
+         error_logger.warn("[#{self.class}] #{message}")
+       end
+     end
+   end
+ end
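Because Base drives everything through @io, a custom device can be as small as assigning an IO-like object; write, flush, and close then come for free. A hypothetical in-memory device as a sketch (the Memory class is not part of the gem):

    require 'logstash-logger'
    require 'stringio'

    module LogStashLogger
      module Device
        class Memory < Base
          def initialize(opts = {})
            super                 # picks up :sync and :error_logger handling
            @io = StringIO.new    # Base#write_one, #flush and #close! all use @io
          end

          def contents
            @io.string
          end
        end
      end
    end

    device = LogStashLogger::Device::Memory.new
    device.write "hello\n"   # Base#write -> write_one -> @io.write
    device.contents          # => "hello\n"
    device.close             # Base#close -> close!; failures go to error_logger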