logstash-logger-p 0.26.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (94)
  1. checksums.yaml +7 -0
  2. data/.gitignore +21 -0
  3. data/.rspec +3 -0
  4. data/.rubocop.yml +1156 -0
  5. data/.travis.yml +26 -0
  6. data/Appraisals +23 -0
  7. data/CHANGELOG.md +199 -0
  8. data/Gemfile +6 -0
  9. data/LICENSE.txt +22 -0
  10. data/README.md +880 -0
  11. data/Rakefile +23 -0
  12. data/gemfiles/rails_3.2.gemfile +9 -0
  13. data/gemfiles/rails_4.0.gemfile +9 -0
  14. data/gemfiles/rails_4.1.gemfile +9 -0
  15. data/gemfiles/rails_4.2.gemfile +9 -0
  16. data/gemfiles/rails_5.0.gemfile +9 -0
  17. data/gemfiles/rails_5.1.gemfile +9 -0
  18. data/lib/logstash-logger/buffer.rb +336 -0
  19. data/lib/logstash-logger/configuration.rb +29 -0
  20. data/lib/logstash-logger/device/aws_stream.rb +94 -0
  21. data/lib/logstash-logger/device/balancer.rb +40 -0
  22. data/lib/logstash-logger/device/base.rb +73 -0
  23. data/lib/logstash-logger/device/connectable.rb +131 -0
  24. data/lib/logstash-logger/device/file.rb +23 -0
  25. data/lib/logstash-logger/device/firehose.rb +42 -0
  26. data/lib/logstash-logger/device/io.rb +11 -0
  27. data/lib/logstash-logger/device/kafka.rb +57 -0
  28. data/lib/logstash-logger/device/kinesis.rb +44 -0
  29. data/lib/logstash-logger/device/multi_delegator.rb +36 -0
  30. data/lib/logstash-logger/device/redis.rb +76 -0
  31. data/lib/logstash-logger/device/socket.rb +21 -0
  32. data/lib/logstash-logger/device/stderr.rb +13 -0
  33. data/lib/logstash-logger/device/stdout.rb +14 -0
  34. data/lib/logstash-logger/device/tcp.rb +86 -0
  35. data/lib/logstash-logger/device/udp.rb +12 -0
  36. data/lib/logstash-logger/device/unix.rb +18 -0
  37. data/lib/logstash-logger/device.rb +67 -0
  38. data/lib/logstash-logger/formatter/base.rb +73 -0
  39. data/lib/logstash-logger/formatter/cee.rb +11 -0
  40. data/lib/logstash-logger/formatter/cee_syslog.rb +22 -0
  41. data/lib/logstash-logger/formatter/json.rb +11 -0
  42. data/lib/logstash-logger/formatter/json_lines.rb +11 -0
  43. data/lib/logstash-logger/formatter/logstash_event.rb +6 -0
  44. data/lib/logstash-logger/formatter.rb +51 -0
  45. data/lib/logstash-logger/logger.rb +106 -0
  46. data/lib/logstash-logger/multi_logger.rb +153 -0
  47. data/lib/logstash-logger/railtie.rb +51 -0
  48. data/lib/logstash-logger/silenced_logging.rb +83 -0
  49. data/lib/logstash-logger/tagged_logging.rb +40 -0
  50. data/lib/logstash-logger/version.rb +3 -0
  51. data/lib/logstash-logger.rb +11 -0
  52. data/logstash-logger.gemspec +39 -0
  53. data/samples/example.crt +16 -0
  54. data/samples/example.key +15 -0
  55. data/samples/file.conf +11 -0
  56. data/samples/redis.conf +12 -0
  57. data/samples/ssl.conf +15 -0
  58. data/samples/syslog.conf +10 -0
  59. data/samples/tcp.conf +11 -0
  60. data/samples/udp.conf +11 -0
  61. data/samples/unix.conf +11 -0
  62. data/spec/configuration_spec.rb +27 -0
  63. data/spec/constructor_spec.rb +30 -0
  64. data/spec/device/balancer_spec.rb +31 -0
  65. data/spec/device/connectable_spec.rb +74 -0
  66. data/spec/device/file_spec.rb +15 -0
  67. data/spec/device/firehose_spec.rb +41 -0
  68. data/spec/device/io_spec.rb +13 -0
  69. data/spec/device/kafka_spec.rb +32 -0
  70. data/spec/device/kinesis_spec.rb +41 -0
  71. data/spec/device/multi_delegator_spec.rb +31 -0
  72. data/spec/device/redis_spec.rb +52 -0
  73. data/spec/device/socket_spec.rb +15 -0
  74. data/spec/device/stderr_spec.rb +16 -0
  75. data/spec/device/stdout_spec.rb +31 -0
  76. data/spec/device/tcp_spec.rb +120 -0
  77. data/spec/device/udp_spec.rb +9 -0
  78. data/spec/device/unix_spec.rb +23 -0
  79. data/spec/device_spec.rb +97 -0
  80. data/spec/formatter/base_spec.rb +125 -0
  81. data/spec/formatter/cee_spec.rb +15 -0
  82. data/spec/formatter/cee_syslog_spec.rb +43 -0
  83. data/spec/formatter/json_lines_spec.rb +14 -0
  84. data/spec/formatter/json_spec.rb +10 -0
  85. data/spec/formatter/logstash_event_spec.rb +10 -0
  86. data/spec/formatter_spec.rb +79 -0
  87. data/spec/logger_spec.rb +128 -0
  88. data/spec/multi_logger_spec.rb +59 -0
  89. data/spec/rails_spec.rb +91 -0
  90. data/spec/silenced_logging_spec.rb +31 -0
  91. data/spec/spec_helper.rb +111 -0
  92. data/spec/syslog_spec.rb +32 -0
  93. data/spec/tagged_logging_spec.rb +32 -0
  94. metadata +335 -0
@@ -0,0 +1,131 @@
+ require 'logstash-logger/buffer'
+
+ module LogStashLogger
+   module Device
+     class Connectable < Base
+       include LogStashLogger::Buffer
+
+       attr_accessor :buffer_logger
+
+       def initialize(opts = {})
+         super
+
+         if opts[:batch_events]
+           warn "The :batch_events option is deprecated. Please use :buffer_max_items instead"
+         end
+
+         if opts[:batch_timeout]
+           warn "The :batch_timeout option is deprecated. Please use :buffer_max_interval instead"
+         end
+
+         @buffer_group = nil
+         @buffer_max_items = opts[:batch_events] || opts[:buffer_max_items]
+         @buffer_max_interval = opts[:batch_timeout] || opts[:buffer_max_interval]
+         @drop_messages_on_flush_error =
+           if opts.key?(:drop_messages_on_flush_error)
+             opts.delete(:drop_messages_on_flush_error)
+           else
+             false
+           end
+
+         @drop_messages_on_full_buffer =
+           if opts.key?(:drop_messages_on_full_buffer)
+             opts.delete(:drop_messages_on_full_buffer)
+           else
+             true
+           end
+
+         @buffer_flush_at_exit =
+           if opts.key?(:buffer_flush_at_exit)
+             opts.delete(:buffer_flush_at_exit)
+           else
+             true
+           end
+
+         @buffer_logger = opts[:buffer_logger]
+
+         buffer_initialize(
+           max_items: @buffer_max_items,
+           max_interval: @buffer_max_interval,
+           logger: buffer_logger,
+           autoflush: @sync,
+           drop_messages_on_flush_error: @drop_messages_on_flush_error,
+           drop_messages_on_full_buffer: @drop_messages_on_full_buffer,
+           flush_at_exit: @buffer_flush_at_exit
+         )
+       end
+
+       def write(message)
+         buffer_receive(message, @buffer_group) unless message.nil?
+       end
+
+       def flush(*args)
+         if args.empty?
+           buffer_flush
+         else
+           messages, group = *args
+           write_batch(messages, group)
+         end
+       end
+
+       def on_full_buffer_receive(data)
+         log_warning("Buffer Full - #{data}")
+       end
+
+       def close(opts = {})
+         if opts.fetch(:flush, true)
+           buffer_flush(final: true)
+         end
+
+         super
+       end
+
+       def to_io
+         with_connection do
+           super
+         end
+       end
+
+       def connected?
+         !!@io
+       end
+
+       def write_one(message)
+         with_connection do
+           super
+         end
+       end
+
+       def write_batch(messages, group = nil)
+         with_connection do
+           super
+         end
+       end
+
+       # Implemented by subclasses
+       def connect
+         fail NotImplementedError
+       end
+
+       def reset
+         reset_buffer
+         close(flush: false)
+       end
+
+       def reconnect
+         close(flush: false)
+         connect
+       end
+
+       # Ensure the block is executed with a valid connection
+       def with_connection(&block)
+         connect unless connected?
+         yield
+       rescue => e
+         log_error(e)
+         close(flush: false)
+         raise
+       end
+     end
+   end
+ end
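
For illustration only (not part of the package diff): a minimal sketch of how the buffering options handled in this constructor might be supplied through a concrete subclass, here the UDP device shown later in this diff. The host and port values are illustrative.

  require 'logstash-logger'

  # Hypothetical example; any Connectable subclass accepts these options.
  device = LogStashLogger::Device::UDP.new(
    host: 'localhost',                  # illustrative
    port: 5228,                         # illustrative
    buffer_max_items: 50,               # replaces the deprecated :batch_events
    buffer_max_interval: 5,             # replaces the deprecated :batch_timeout
    drop_messages_on_full_buffer: true, # default, per the constructor above
    buffer_flush_at_exit: true          # default, per the constructor above
  )

  device.write('{"message":"hello"}')   # buffered via buffer_receive
  device.flush                          # with no args, flushes the whole buffer
  device.close                          # runs buffer_flush(final: true) before closing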
@@ -0,0 +1,23 @@
+ require 'fileutils'
+
+ module LogStashLogger
+   module Device
+     class File < Base
+       def initialize(opts)
+         super
+         @path = opts[:path] || fail(ArgumentError, "Path is required")
+         open
+       end
+
+       def open
+         unless ::File.exist? ::File.dirname @path
+           ::FileUtils.mkdir_p ::File.dirname @path
+         end
+
+         @io = ::File.open @path, ::File::WRONLY | ::File::APPEND | ::File::CREAT
+         @io.binmode
+         @io.sync = self.sync
+       end
+     end
+   end
+ end
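
For illustration only (not part of the package diff): a sketch of the file device above, assuming the gem is loaded. The parent directory is created if missing and the file is opened in append mode; write comes from the Base device, which is not shown in this diff.

  require 'logstash-logger'

  file_device = LogStashLogger::Device::File.new(path: 'log/app.log') # illustrative path
  file_device.write("a line of log output\n")                         # appended to log/app.log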
@@ -0,0 +1,42 @@
+ begin
+   require 'aws-sdk-firehose'
+ rescue LoadError
+   require 'aws-sdk'
+ end
+
+ require 'logstash-logger/device/aws_stream'
+
+ module LogStashLogger
+   module Device
+     class Firehose < AwsStream
+       @stream_class = ::Aws::Firehose::Client
+       @recoverable_error_codes = [
+         "ServiceUnavailable",
+         "InternalFailure",
+         "ServiceUnavailableException"
+       ].freeze
+
+       def transform_message(message)
+         {
+           data: message
+         }
+       end
+
+       def put_records(records)
+         @io.put_record_batch({
+           records: records,
+           delivery_stream_name: @stream
+         })
+       end
+
+       def is_successful_response(resp)
+         resp.failed_put_count == 0
+       end
+
+       def get_response_records(resp)
+         resp.request_responses
+       end
+     end
+   end
+ end
@@ -0,0 +1,11 @@
+ module LogStashLogger
+   module Device
+     class IO < Base
+       def initialize(opts)
+         super
+         @io = opts[:io] || fail(ArgumentError, 'IO is required')
+         @io.sync = sync unless sync.nil?
+       end
+     end
+   end
+ end
@@ -0,0 +1,57 @@
+ require 'poseidon'
+
+ module LogStashLogger
+   module Device
+     class Kafka < Connectable
+       DEFAULT_HOST = 'localhost'
+       DEFAULT_PORT = 9092
+       DEFAULT_TOPIC = 'logstash'
+       DEFAULT_PRODUCER = 'logstash-logger'
+       DEFAULT_BACKOFF = 1
+
+       attr_accessor :hosts, :topic, :producer, :backoff
+
+       def initialize(opts)
+         super
+         host = opts[:host] || DEFAULT_HOST
+         port = opts[:port] || DEFAULT_PORT
+         @hosts = opts[:hosts] || host.split(',').map { |h| "#{h}:#{port}" }
+         @topic = opts[:path] || DEFAULT_TOPIC
+         @producer = opts[:producer] || DEFAULT_PRODUCER
+         @backoff = opts[:backoff] || DEFAULT_BACKOFF
+         @buffer_group = @topic
+       end
+
+       def connect
+         @io = ::Poseidon::Producer.new(@hosts, @producer)
+       end
+
+       def with_connection
+         connect unless connected?
+         yield
+       rescue ::Poseidon::Errors::ChecksumError, Poseidon::Errors::UnableToFetchMetadata => e
+         log_error(e)
+         log_warning("reconnect/retry")
+         sleep backoff if backoff
+         reconnect
+         retry
+       rescue => e
+         log_error(e)
+         log_warning("giving up")
+         close(flush: false)
+       end
+
+       def write_batch(messages, topic = nil)
+         topic ||= @topic
+         with_connection do
+           @io.send_messages messages.map { |message| Poseidon::MessageToSend.new(topic, message) }
+         end
+       end
+
+       def write_one(message, topic = nil)
+         write_batch([message], topic)
+       end
+     end
+   end
+ end
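
For illustration only (not part of the package diff): a hedged usage sketch based on the options read in the constructor above (:hosts or :host/:port, :path as the topic, :producer, :backoff). The broker addresses are illustrative and a reachable Kafka cluster is assumed.

  require 'logstash-logger'

  kafka = LogStashLogger::Device::Kafka.new(
    hosts: ['broker1:9092', 'broker2:9092'], # illustrative brokers
    path: 'app_logs',                        # becomes @topic and the buffer group
    producer: 'my-app',                      # Poseidon producer id
    backoff: 2                               # seconds to sleep before reconnect/retry
  )

  kafka.write_one('{"message":"hello"}')     # delegates to write_batch([message])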
@@ -0,0 +1,44 @@
+ begin
+   require 'aws-sdk-kinesis'
+ rescue LoadError
+   require 'aws-sdk'
+ end
+
+ require 'logstash-logger/device/aws_stream'
+
+ module LogStashLogger
+   module Device
+     class Kinesis < AwsStream
+       @stream_class = ::Aws::Kinesis::Client
+       @recoverable_error_codes = [
+         "ServiceUnavailable",
+         "Throttling",
+         "RequestExpired",
+         "ProvisionedThroughputExceededException"
+       ].freeze
+
+       def transform_message(message)
+         {
+           data: message,
+           partition_key: SecureRandom.uuid
+         }
+       end
+
+       def put_records(records)
+         @io.put_records({
+           records: records,
+           stream_name: @stream
+         })
+       end
+
+       def is_successful_response(resp)
+         resp.failed_record_count == 0
+       end
+
+       def get_response_records(resp)
+         resp.records
+       end
+     end
+   end
+ end
@@ -0,0 +1,36 @@
+ # Container to allow writes to multiple devices
+
+ # Code originally from:
+ # http://stackoverflow.com/a/6410202
+
+ module LogStashLogger
+   module Device
+     class MultiDelegator < Base
+       attr_reader :devices
+
+       def initialize(opts)
+         @io = self
+         @devices = create_devices(opts)
+         self.class.delegate(:write, :close, :close!, :flush)
+       end
+
+       private
+
+       def create_devices(opts)
+         output_configurations = opts.delete(:outputs)
+         output_configurations.map do |device_opts|
+           device_opts = opts.merge(device_opts)
+           Device.new(device_opts)
+         end
+       end
+
+       def self.delegate(*methods)
+         methods.each do |m|
+           define_method(m) do |*args|
+             @devices.each { |device| device.send(m, *args) }
+           end
+         end
+       end
+     end
+   end
+ end
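
For illustration only (not part of the package diff): a sketch of how :outputs is consumed above. Top-level options are merged into each output configuration, and every delegated call fans out to all devices. The UDP host and port are illustrative.

  require 'logstash-logger'

  multi = LogStashLogger::Device::MultiDelegator.new(
    sync: true,                                     # merged into both outputs below
    outputs: [
      { type: :stdout },
      { type: :udp, host: 'localhost', port: 5228 } # illustrative values
    ]
  )

  multi.write('{"message":"hello"}')                # written to $stdout and the UDP socket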
@@ -0,0 +1,76 @@
+ require 'redis'
+
+ module LogStashLogger
+   module Device
+     class Redis < Connectable
+       DEFAULT_LIST = 'logstash'
+
+       attr_accessor :list
+
+       def initialize(opts)
+         super
+         @list = opts.delete(:list) || DEFAULT_LIST
+         @buffer_group = @list
+
+         normalize_path(opts)
+         delete_unknown_keywords(opts)
+
+         @redis_options = opts
+       end
+
+       def connect
+         @io = ::Redis.new(@redis_options)
+       end
+
+       def reconnect
+         @io.client.reconnect
+       rescue => e
+         log_error(e)
+       end
+
+       def with_connection
+         connect unless connected?
+         yield
+       rescue ::Redis::InheritedError
+         reconnect
+         retry
+       rescue => e
+         log_error(e)
+         close(flush: false)
+         raise
+       end
+
+       def write_batch(messages, list = nil)
+         list ||= @list
+         with_connection do
+           @io.rpush(list, messages)
+         end
+       end
+
+       def write_one(message, list = nil)
+         write_batch(message, list)
+       end
+
+       def close!
+         @io && @io.quit
+       end
+
+       private
+
+       def normalize_path(opts)
+         path = opts.fetch(:path, nil)
+         if path
+           opts[:db] = path.gsub("/", "").to_i unless path.empty?
+           opts.delete(:path)
+         end
+       end
+
+       def delete_unknown_keywords(opts)
+         # Redis client versions >= 5 continuously log warnings for unknown
+         # keyword arguments. The only such option currently passed through
+         # is :sync, so drop it before handing opts to Redis.new.
+         opts.delete(:sync)
+       end
+     end
+   end
+ end
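
For illustration only (not part of the package diff): a sketch of the Redis device above, assuming a local Redis server. :list selects the list to RPUSH to, a URI-style :path such as "/2" is normalized into db: 2, and the remaining options are handed to Redis.new.

  require 'logstash-logger'

  redis = LogStashLogger::Device::Redis.new(
    host: 'localhost', # illustrative; passed through to the redis client
    port: 6379,
    path: '/2',        # normalize_path turns this into db: 2
    list: 'app_logs'   # defaults to 'logstash' when omitted
  )

  redis.write_one('{"message":"hello"}') # RPUSH onto the 'app_logs' list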
@@ -0,0 +1,21 @@
+ require 'socket'
+
+ module LogStashLogger
+   module Device
+     class Socket < Connectable
+       DEFAULT_HOST = '0.0.0.0'
+
+       attr_reader :host, :port
+
+       def initialize(opts)
+         super
+         @port = opts[:port] || fail(ArgumentError, "Port is required")
+         @host = opts[:host] || DEFAULT_HOST
+       end
+
+       def unrecoverable_error?(e)
+         e.is_a?(Errno::EMSGSIZE) || super
+       end
+     end
+   end
+ end
@@ -0,0 +1,13 @@
+ module LogStashLogger
+   module Device
+     class Stderr < IO
+       def initialize(opts={})
+         super({io: $stderr}.merge(opts))
+       end
+
+       def close!
+         # no-op
+       end
+     end
+   end
+ end
@@ -0,0 +1,14 @@
+ module LogStashLogger
+   module Device
+     class Stdout < IO
+       def initialize(opts={})
+         super({io: $stdout}.merge(opts))
+       end
+
+       def close!
+         # no-op
+         # Calling $stdout.close would be a bad idea
+       end
+     end
+   end
+ end
@@ -0,0 +1,86 @@
+ require 'openssl'
+
+ module LogStashLogger
+   module Device
+     class TCP < Socket
+       attr_reader :ssl_certificate
+
+       def initialize(opts)
+         super
+
+         @ssl_certificate = opts[:ssl_certificate]
+         @ssl_context = opts[:ssl_context]
+         @use_ssl = !!(@ssl_certificate || opts[:ssl_context])
+         @use_ssl = opts[:ssl_enable] if opts.has_key? :ssl_enable
+         if opts.has_key?(:use_ssl)
+           @use_ssl = opts[:use_ssl]
+           warn "[LogStashLogger] The use_ssl option is deprecated. Use ssl_enable instead."
+         end
+         @verify_hostname = opts.fetch(:verify_hostname, true)
+       end
+
+       def ssl_context
+         return unless use_ssl?
+         @ssl_context || certificate_context
+       end
+
+       def use_ssl?
+         @use_ssl
+       end
+
+       def connect
+         if use_ssl?
+           io.connect
+           verify_hostname!
+         end
+         io
+       end
+
+       def io
+         @io ||= if use_ssl?
+           ssl_io
+         else
+           tcp_io
+         end
+       end
+
+       protected
+
+       def tcp_io
+         TCPSocket.new(@host, @port).tap do |socket|
+           socket.sync = sync unless sync.nil?
+         end
+       end
+
+       def ssl_io
+         ssl_context ?
+           OpenSSL::SSL::SSLSocket.new(tcp_io, ssl_context) :
+           OpenSSL::SSL::SSLSocket.new(tcp_io)
+       end
+
+       def certificate_context
+         return unless @ssl_certificate
+         @certificate_context ||= OpenSSL::SSL::SSLContext.new.tap do |ctx|
+           ctx.set_params(cert: @ssl_certificate)
+         end
+       end
+
+       def verify_hostname?
+         return false unless ssl_context
+         !!@verify_hostname
+       end
+
+       def verify_hostname!
+         @io.post_connection_check(hostname) if verify_hostname?
+       end
+
+       def hostname
+         if String === @verify_hostname
+           @verify_hostname
+         else
+           @host
+         end
+       end
+     end
+   end
+ end
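
For illustration only (not part of the package diff): a sketch of the SSL-related options handled above. Supplying :ssl_certificate or :ssl_context enables SSL (a prebuilt :ssl_context takes precedence), and :verify_hostname controls the post_connection_check, with a string value overriding @host. Host, port, and context settings are illustrative.

  require 'logstash-logger'
  require 'openssl'

  ssl_context = OpenSSL::SSL::SSLContext.new # configure to match your server
  tcp = LogStashLogger::Device::TCP.new(
    host: 'logs.example.com',            # illustrative
    port: 5229,                          # illustrative
    ssl_context: ssl_context,            # used instead of building one from :ssl_certificate
    verify_hostname: 'logs.example.com'  # checked via post_connection_check after connect
  )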
@@ -0,0 +1,12 @@
+ module LogStashLogger
+   module Device
+     class UDP < Socket
+       def connect
+         @io = UDPSocket.new.tap do |socket|
+           socket.connect(@host, @port)
+           socket.sync = sync unless sync.nil?
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,18 @@
+ require 'socket'
+
+ module LogStashLogger
+   module Device
+     class Unix < Connectable
+       def initialize(opts={})
+         super
+         @path = opts[:path] || fail(ArgumentError, "Path is required")
+       end
+
+       def connect
+         @io = ::UNIXSocket.new(@path).tap do |socket|
+           socket.sync = sync unless sync.nil?
+         end
+       end
+     end
+   end
+ end
@@ -0,0 +1,67 @@
+ require 'logstash-logger/device/base'
+
+ module LogStashLogger
+   module Device
+     DEFAULT_TYPE = :udp
+
+     autoload :Base, 'logstash-logger/device/base'
+     autoload :Connectable, 'logstash-logger/device/connectable'
+     autoload :Socket, 'logstash-logger/device/socket'
+     autoload :UDP, 'logstash-logger/device/udp'
+     autoload :TCP, 'logstash-logger/device/tcp'
+     autoload :Unix, 'logstash-logger/device/unix'
+     autoload :Redis, 'logstash-logger/device/redis'
+     autoload :Kafka, 'logstash-logger/device/kafka'
+     autoload :Kinesis, 'logstash-logger/device/kinesis'
+     autoload :Firehose, 'logstash-logger/device/firehose'
+     autoload :File, 'logstash-logger/device/file'
+     autoload :IO, 'logstash-logger/device/io'
+     autoload :Stdout, 'logstash-logger/device/stdout'
+     autoload :Stderr, 'logstash-logger/device/stderr'
+     autoload :Balancer, 'logstash-logger/device/balancer'
+     autoload :MultiDelegator, 'logstash-logger/device/multi_delegator'
+
+     def self.new(opts)
+       opts = opts.dup
+       build_device(opts)
+     end
+
+     def self.build_device(opts)
+       if parsed_uri_opts = parse_uri_config(opts)
+         opts.delete(:uri)
+         opts.merge!(parsed_uri_opts)
+       end
+
+       type = opts.delete(:type) || DEFAULT_TYPE
+
+       device_klass_for(type).new(opts)
+     end
+
+     def self.parse_uri_config(opts)
+       if uri = opts[:uri]
+         require 'uri'
+         parsed = ::URI.parse(uri)
+         {type: parsed.scheme, host: parsed.host, port: parsed.port, path: parsed.path}
+       end
+     end
+
+     def self.device_klass_for(type)
+       case type.to_sym
+       when :udp then UDP
+       when :tcp then TCP
+       when :unix then Unix
+       when :file then File
+       when :redis then Redis
+       when :kafka then Kafka
+       when :kinesis then Kinesis
+       when :firehose then Firehose
+       when :io then IO
+       when :stdout then Stdout
+       when :stderr then Stderr
+       when :multi_delegator then MultiDelegator
+       when :balancer then Balancer
+       else fail ArgumentError, 'Invalid device type'
+       end
+     end
+   end
+ end
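
For illustration only (not part of the package diff): the factory above accepts either explicit options or a :uri that parse_uri_config splits into type/host/port/path, so the two calls below build equivalent UDP devices. Host and port are illustrative.

  require 'logstash-logger'

  udp_from_uri  = LogStashLogger::Device.new(uri: 'udp://localhost:5228')
  udp_from_opts = LogStashLogger::Device.new(type: :udp, host: 'localhost', port: 5228)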