logstash-output-redis 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,15 @@
1
+ ---
2
+ !binary "U0hBMQ==":
3
+ metadata.gz: !binary |-
4
+ NDBiYmFiMWM1NDNkNzY2Mjc3ODY2ZGZjNjM5ZmFlNmE3YTEyN2NlMQ==
5
+ data.tar.gz: !binary |-
6
+ NTc0MDBkMWY4MGExN2VhODU2NDFmNzZmYWQ2ODc4NGIyNTVlMDQ2OA==
7
+ SHA512:
8
+ metadata.gz: !binary |-
9
+ NDRmYTg3ZDJiYWFlODhhNDU2NzkyNzc1ZDE0Njk3M2ZkMjY5ODI1ZjM3MzNh
10
+ NjFkNDQxYzQwOTlhNWQyNWZlNTMxYzM3ZmM3NTVlYTI3ZmIyNDhkZWE0ZjQ3
11
+ NjYzMjI2NGJmM2YzMjQ5NzY0ZTQ1MDVjM2RlYzRjNjUzMTlkYWY=
12
+ data.tar.gz: !binary |-
13
+ NDkyODEyMjVmYjQ4NWE1MzY1YTFhZjUxZTQwZDQyODIzZTQzYjQwYjQyOWQx
14
+ NGVhMDczNzdmM2IzYmJiZjM3NWM0ZGU4OTQxYjZiNzE3MjkwYTRiMmUwNWE2
15
+ N2ZiN2NmMWQ1OTczNGQxYzFhMjAyZWMwZTA0NDBmNTYxNzI5MDU=
data/.gitignore ADDED
@@ -0,0 +1,4 @@
1
+ *.gem
2
+ Gemfile.lock
3
+ .bundle
4
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
1
# Development dependencies for building and publishing this plugin.
# SECURITY FIX: fetch gems over HTTPS; the original used plain HTTP, which
# allows a man-in-the-middle to serve tampered gems.
source 'https://rubygems.org'
gem 'rake'
gem 'gem_publisher'
gem 'archive-tar-minitar'
data/Rakefile ADDED
@@ -0,0 +1,6 @@
1
# List of vendor files to download; left empty here and consumed by the
# "vendor" task in rakelib/vendor.rake (which iterates @files).
@files=[]

# Default task: just print the list of available rake tasks.
task :default do
  system("rake -T")
end
6
+
@@ -0,0 +1,252 @@
1
+ # encoding: utf-8
2
+ require "logstash/outputs/base"
3
+ require "logstash/namespace"
4
+ require "stud/buffer"
5
+
6
+ # This output will send events to a Redis queue using RPUSH.
7
+ # The RPUSH command is supported in Redis v0.0.7+. Using
8
+ # PUBLISH to a channel requires at least v1.3.8+.
9
+ # While you may be able to make these Redis versions work,
10
+ # the best performance and stability will be found in more
11
+ # recent stable versions. Versions 2.6.0+ are recommended.
12
+ #
13
+ # For more information about Redis, see <http://redis.io/>
14
+ #
15
class LogStash::Outputs::Redis < LogStash::Outputs::Base

  include Stud::Buffer

  config_name "redis"
  milestone 2

  # Name is used for logging in case there are multiple instances.
  # TODO: delete
  config :name, :validate => :string, :default => 'default',
    :deprecated => true

  # The hostname(s) of your Redis server(s). Ports may be specified on any
  # hostname, which will override the global port config.
  #
  # For example:
  #
  #     "127.0.0.1"
  #     ["127.0.0.1", "127.0.0.2"]
  #     ["127.0.0.1:6380", "127.0.0.1"]
  config :host, :validate => :array, :default => ["127.0.0.1"]

  # Shuffle the host list during Logstash startup.
  config :shuffle_hosts, :validate => :boolean, :default => true

  # The default port to connect on. Can be overridden on any hostname.
  config :port, :validate => :number, :default => 6379

  # The Redis database number.
  config :db, :validate => :number, :default => 0

  # Redis initial connection timeout in seconds.
  config :timeout, :validate => :number, :default => 5

  # Password to authenticate with. There is no authentication by default.
  config :password, :validate => :password

  # The name of the Redis queue (we'll use RPUSH on this). Dynamic names are
  # valid here, for example "logstash-%{type}"
  # TODO: delete
  config :queue, :validate => :string, :deprecated => true

  # The name of a Redis list or channel. Dynamic names are
  # valid here, for example "logstash-%{type}".
  # TODO set required true
  config :key, :validate => :string, :required => false

  # Either list or channel. If `redis_type` is list, then we will set
  # RPUSH to key. If `redis_type` is channel, then we will PUBLISH to `key`.
  # TODO set required true
  config :data_type, :validate => [ "list", "channel" ], :required => false

  # Set to true if you want Redis to batch up values and send 1 RPUSH command
  # instead of one command per value to push on the list. Note that this only
  # works with `data_type="list"` mode right now.
  #
  # If true, we send an RPUSH every "batch_events" events or
  # "batch_timeout" seconds (whichever comes first).
  # Only supported for `data_type` is "list".
  config :batch, :validate => :boolean, :default => false

  # If batch is set to true, the number of events we queue up for an RPUSH.
  config :batch_events, :validate => :number, :default => 50

  # If batch is set to true, the maximum amount of time between RPUSH commands
  # when there are pending events to flush.
  config :batch_timeout, :validate => :number, :default => 5

  # Interval for reconnecting to failed Redis connections
  config :reconnect_interval, :validate => :number, :default => 1

  # In case Redis `data_type` is "list" and has more than @congestion_threshold items,
  # block until someone consumes them and reduces congestion, otherwise if there are
  # no consumers Redis will run out of memory, unless it was configured with OOM protection.
  # But even with OOM protection, a single Redis list can block all other users of Redis,
  # until Redis CPU consumption reaches the max allowed RAM size.
  # A default value of 0 means that this limit is disabled.
  # Only supported for `list` Redis `data_type`.
  config :congestion_threshold, :validate => :number, :default => 0

  # How often to check for congestion. Default is one second.
  # Zero means to check on every event.
  config :congestion_interval, :validate => :number, :default => 1

  # Validates the deprecated/new option combinations, sets up batching and the
  # per-key congestion bookkeeping. Called once by Logstash at plugin startup.
  def register
    require 'redis'

    # TODO remove after setting key and data_type to true
    if @queue
      if @key or @data_type
        raise RuntimeError.new(
          "Cannot specify queue parameter and key or data_type"
        )
      end
      @key = @queue
      @data_type = 'list'
    end

    if not @key or not @data_type
      raise RuntimeError.new(
        "Must define queue, or key and data_type parameters"
      )
    end
    # end TODO

    if @batch
      if @data_type != "list"
        raise RuntimeError.new(
          "batch is not supported with data_type #{@data_type}"
        )
      end
      buffer_initialize(
        :max_items => @batch_events,
        :max_interval => @batch_timeout,
        :logger => @logger
      )
    end

    @redis = nil
    @host.shuffle! if @shuffle_hosts
    @host_idx = 0

    # Default each key's "last checked" time far enough in the past that the
    # very first event for a key always triggers a congestion check.
    @congestion_check_times = Hash.new { |h,k| h[k] = Time.now.to_i - @congestion_interval }
  end # def register

  # Serialize one event to JSON and RPUSH/PUBLISH it, reconnecting and
  # retrying forever on Redis errors. In batch mode events are handed to
  # Stud::Buffer instead and flushed via #flush.
  def receive(event)
    return unless output?(event)

    if @batch and @data_type == 'list' # Don't use batched method for pubsub.
      # Stud::Buffer
      buffer_receive(event.to_json, event.sprintf(@key))
      return
    end

    key = event.sprintf(@key)
    # TODO(sissel): We really should not drop an event, but historically
    # we have dropped events that fail to be converted to json.
    # TODO(sissel): Find a way to continue passing events through even
    # if they fail to convert properly.
    begin
      payload = event.to_json
    rescue Encoding::UndefinedConversionError, ArgumentError
      # BUGFIX: removed a stray `puts "FAILUREENCODING"` debug statement that
      # wrote directly to stdout; the logger call below already reports this.
      @logger.error("Failed to convert event to JSON. Invalid UTF-8, maybe?",
                    :event => event.inspect)
      return
    end

    begin
      @redis ||= connect
      if @data_type == 'list'
        congestion_check(key)
        @redis.rpush(key, payload)
      else
        @redis.publish(key, payload)
      end
    rescue => e
      @logger.warn("Failed to send event to Redis", :event => event,
                   :identity => identity, :exception => e,
                   :backtrace => e.backtrace)
      sleep @reconnect_interval
      @redis = nil
      retry
    end
  end # def receive

  # Block while `key` holds more than @congestion_threshold items, polling
  # LLEN every @congestion_interval seconds. No-op when the threshold is 0.
  def congestion_check(key)
    return if @congestion_threshold == 0
    # Check congestion only if enough time has passed since the last check.
    if (Time.now.to_i - @congestion_check_times[key]) >= @congestion_interval
      # Don't push events to a Redis key which has reached @congestion_threshold.
      while @redis.llen(key) > @congestion_threshold
        @logger.warn? and @logger.warn("Redis key size has hit a congestion threshold #{@congestion_threshold} suspending output for #{@congestion_interval} seconds")
        sleep @congestion_interval
      end
      # BUGFIX: record the time in the per-key hash. The original assigned an
      # unused scalar (@congestion_check_time), so the throttle interval never
      # advanced and every subsequent event re-ran the LLEN check.
      @congestion_check_times[key] = Time.now.to_i
    end
  end

  # called from Stud::Buffer#buffer_flush when there are events to flush
  def flush(events, key, teardown=false)
    @redis ||= connect
    # we should not block due to congestion on teardown
    # to support this Stud::Buffer#buffer_flush should pass here the :final boolean value.
    congestion_check(key) unless teardown
    @redis.rpush(key, events)
  end

  # called from Stud::Buffer#buffer_flush when an error occurs
  def on_flush_error(e)
    @logger.warn("Failed to send backlog of events to Redis",
      :identity => identity,
      :exception => e,
      :backtrace => e.backtrace
    )
    @redis = connect
  end

  # Flush any buffered batch and close the pubsub connection on shutdown.
  def teardown
    if @batch
      buffer_flush(:final => true)
    end
    if @data_type == 'channel' and @redis
      @redis.quit
      @redis = nil
    end
  end

  private

  # Connect to the next host in the (possibly shuffled) round-robin list.
  # A "host:port" entry overrides the global @port setting.
  def connect
    @current_host, @current_port = @host[@host_idx].split(':')
    @host_idx = @host_idx + 1 >= @host.length ? 0 : @host_idx + 1

    @current_port ||= @port

    params = {
      :host => @current_host,
      :port => @current_port,
      :timeout => @timeout,
      :db => @db
    }
    # Logged before the password is merged in, so credentials never reach the
    # debug log.
    @logger.debug(params)

    params[:password] = @password.value if @password

    Redis.new(params)
  end # def connect

  # A string used to identify a Redis instance in log messages
  def identity
    @name || "redis://#{@password}@#{@current_host}:#{@current_port}/#{@db} #{@data_type}:#{@key}"
  end

end
@@ -0,0 +1,29 @@
1
# Gem packaging definition for the logstash-output-redis plugin.
Gem::Specification.new do |s|

  s.name = 'logstash-output-redis'
  s.version = '0.1.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "This output will send events to a Redis queue using RPUSH"
  s.description = "This output will send events to a Redis queue using RPUSH. Requires Redis Server 2.6.0+"
  s.authors = ["Elasticsearch"]
  s.email = 'richard.pijnenburg@elasticsearch.com'
  s.homepage = "http://logstash.net/"
  s.require_paths = ["lib"]

  # Files: everything tracked by git plus any downloaded vendor artifacts.
  # NOTE(review): $\ (the output record separator) is nil by default, so this
  # split falls back to splitting on whitespace and would break for tracked
  # filenames containing spaces — $/ was probably intended; confirm.
  s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "group" => "output" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'

  s.add_runtime_dependency 'redis'
  s.add_runtime_dependency 'stud'

end
29
+
@@ -0,0 +1,9 @@
1
require "gem_publisher"

# Locates the first *.gemspec in the project root (one directory above this
# rakelib file) and publishes it to RubyGems.org only when the version has
# changed since the last published release.
desc "Publish gem to RubyGems.org"
task :publish_gem do |t|
  gemspec_pattern = File.expand_path('../*.gemspec', File.dirname(__FILE__))
  gem_file = Dir.glob(gemspec_pattern).first
  published = GemPublisher.publish_if_updated(gem_file, :rubygems)
  puts "Published #{published}" if published
end
9
+
@@ -0,0 +1,169 @@
1
+ require "net/http"
2
+ require "uri"
3
+ require "digest/sha1"
4
+
5
# Build a path inside the vendor/ directory from the given path segments.
def vendor(*args)
  File.join("vendor", *args)
end
8
+
9
# Rake directory task: creates the "vendor/" output directory when a file
# task (see file_fetch below) lists it as a prerequisite.
# NOTE(review): the prerequisite ["vendor"] resolves to the "vendor" task
# defined at the bottom of this file, not to a directory — confirm intended.
directory "vendor/" => ["vendor"] do |task, args|
  mkdir task.name
end
12
+
13
# Download `url` to `output` and verify the downloaded bytes against the
# expected SHA1 hexdigest. Aborts via Kernel#fail on a checksum mismatch.
def fetch(url, sha1, output)
  puts "Downloading #{url}"
  downloaded_sha1 = download(url, output)
  unless downloaded_sha1 == sha1
    fail "SHA1 does not match (expected '#{sha1}' but got '#{downloaded_sha1}')"
  end
end # def fetch
22
+
23
# Ensure a vendored copy of `url` exists under vendor/ with the expected
# SHA1, downloading it when it is missing or its checksum does not match.
# Returns the relative output path (e.g. "vendor/foo.tar.gz").
def file_fetch(url, sha1)
  filename = File.basename( URI(url).path )
  # BUGFIX: restored the interpolation — this line was corrupted to the
  # literal string "vendor/#(unknown)", producing the same bogus output path
  # for every download.
  output = "vendor/#{filename}"
  task output => [ "vendor/" ] do
    begin
      actual_sha1 = file_sha1(output)
      if actual_sha1 != sha1
        # Local copy is stale or corrupt; re-download it.
        fetch(url, sha1, output)
      end
    rescue Errno::ENOENT
      # No local copy yet; download it.
      fetch(url, sha1, output)
    end
  end.invoke

  return output
end
39
+
40
# Compute the SHA1 hexdigest of the file at `path`, reading in 16 KB chunks.
# Raises Errno::ENOENT when the file does not exist (file_fetch relies on
# this to detect a missing vendor copy).
def file_sha1(path)
  digest = Digest::SHA1.new
  # BUGFIX: open in binary mode; text mode ("r") can perform newline and
  # encoding translation on some platforms, yielding a wrong digest for
  # binary artifacts.
  fd = File.new(path, "rb")
  # IO#read returns nil at EOF, ending the loop.
  while chunk = fd.read(16384)
    digest << chunk
  end
  return digest.hexdigest
ensure
  fd.close if fd
end
54
+
55
# Stream `url` to `output` via a ".tmp" file that is renamed on success,
# printing a progress percentage when stdout is a TTY.
# Returns the SHA1 hexdigest of the downloaded bytes; cleans up the temp
# file on failure.
def download(url, output)
  uri = URI(url)
  digest = Digest::SHA1.new
  tmp = "#{output}.tmp"
  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
    request = Net::HTTP::Get.new(uri.path)
    http.request(request) do |response|
      # BUGFIX: the original guard was inverted AND compared Integers against
      # the String returned by Net::HTTPResponse#code, so it could never
      # fire. Fail on anything other than 200/301.
      fail "HTTP fetch failed for #{url}. #{response}" unless [200, 301].include?(response.code.to_i)
      size = (response["content-length"].to_i || -1).to_f
      count = 0
      # BUGFIX: binary mode, so archive payloads are not subject to newline
      # or encoding translation on platforms where "w" is text mode.
      File.open(tmp, "wb") do |fd|
        response.read_body do |chunk|
          fd.write(chunk)
          digest << chunk
          # Progress display only when we know the size and have a terminal.
          if size > 0 && $stdout.tty?
            count += chunk.bytesize
            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
          end
        end
      end
      $stdout.write("\r      \r") if $stdout.tty?
    end
  end

  File.rename(tmp, output)

  return digest.hexdigest
rescue SocketError => e
  puts "Failure while downloading #{url}: #{e}"
  raise
ensure
  File.unlink(tmp) if File.exist?(tmp)
end # def download
88
+
89
# Extract entries from `tarball` (a .tar.gz). The block maps each tar entry
# to its output path, or nil to skip the entry. Existing files with identical
# size and mode are left untouched. Deletes the tarball when done.
def untar(tarball, &block)
  require "archive/tar/minitar"
  tgz = Zlib::GzipReader.new(File.open(tarball))
  # Pull out typesdb
  tar = Archive::Tar::Minitar::Input.open(tgz)
  tar.each do |entry|
    path = block.call(entry)
    next if path.nil?
    parent = File.dirname(path)

    mkdir_p parent unless File.directory?(parent)

    # Skip this file if the output file is the same size
    if entry.directory?
      mkdir path unless File.directory?(path)
    else
      entry_mode = entry.instance_eval { @mode } & 0777
      # BUGFIX: File.exists? is deprecated (removed in Ruby 3.2); File.exist?
      # is the supported spelling.
      if File.exist?(path)
        stat = File.stat(path)
        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
        # expose headers in the entry.
        entry_size = entry.instance_eval { @size }
        # If file sizes are same, skip writing.
        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
      end
      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
      # BUGFIX: binary mode so extracted payloads are not newline-translated.
      File.open(path, "wb") do |fd|
        # eof? check lets us skip empty files. Necessary because the API provided by
        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
        # IO object. Something about empty files in this EntryStream causes
        # IO.copy_stream to throw "can't convert nil into String" on JRuby
        # TODO(sissel): File a bug about this.
        while !entry.eof?
          chunk = entry.read(16384)
          fd.write(chunk)
        end
        #IO.copy_stream(entry, fd)
      end
      File.chmod(entry_mode, path)
    end
  end
  tar.close
  File.unlink(tarball) if File.file?(tarball)
end # def untar
133
+
134
# Decompress a plain .gz file in place: writes the decompressed content next
# to it (without the .gz suffix) and deletes the original. On failure the
# partial output is removed and the error re-raised.
def ungz(file)
  # BUGFIX: ensure zlib is loaded; this file never required it and relied on
  # another library pulling it in.
  require "zlib"

  # BUGFIX: strip only a trailing ".gz" — gsub removed the substring anywhere
  # in the path, mangling names like "vendor/a.gz.data".
  outpath = file.sub(/\.gz\z/, '')
  tgz = Zlib::GzipReader.new(File.open(file))
  begin
    File.open(outpath, "wb") do |out|
      IO::copy_stream(tgz, out)
    end
    File.unlink(file)
  rescue
    File.unlink(outpath) if File.file?(outpath)
    raise
  ensure
    # BUGFIX: close the reader on the error path too; the original only
    # closed it after a successful extraction.
    tgz.close
  end
end
149
+
150
# Downloads every entry in @files (populated by the plugin's Rakefile) and
# post-processes archives: .tar.gz files are selectively extracted into
# vendor/, bare .gz files are decompressed in place.
desc "Process any vendor files required for this plugin"
task "vendor" do |task, args|

  @files.each do |file|
    download = file_fetch(file['url'], file['sha1'])
    if download =~ /.tar.gz/
      # Archive basename without extension, used to strip the leading
      # directory from entry names when matching against file['files'].
      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
      untar(download) do |entry|
        if !file['files'].nil?
          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
          out = entry.full_name.split("/").last
        end
        # FIXME(review): when file['files'] is nil, `out` is never assigned
        # and this raises NameError; presumably entry.full_name was intended
        # as the fallback — confirm against other plugins' vendor.rake.
        File.join('vendor', out)
      end
    elsif download =~ /.gz/
      ungz(download)
    end
  end

end
@@ -0,0 +1,128 @@
1
+ require "spec_helper"
2
+ require "logstash/outputs/redis"
3
+ require "logstash/json"
4
+ require "redis"
5
+
6
+ describe LogStash::Outputs::Redis, :redis => true do
7
+
8
+
9
+ describe "ship lots of events to a list" do
10
+ key = 10.times.collect { rand(10).to_s }.join("")
11
+ event_count = 10000 + rand(500)
12
+
13
+ config <<-CONFIG
14
+ input {
15
+ generator {
16
+ message => "hello world"
17
+ count => #{event_count}
18
+ type => "generator"
19
+ }
20
+ }
21
+ output {
22
+ redis {
23
+ host => "127.0.0.1"
24
+ key => "#{key}"
25
+ data_type => list
26
+ }
27
+ }
28
+ CONFIG
29
+
30
+ agent do
31
+ # Query redis directly and inspect the goodness.
32
+ redis = Redis.new(:host => "127.0.0.1")
33
+
34
+ # The list should contain the number of elements our agent pushed up.
35
+ insist { redis.llen(key) } == event_count
36
+
37
+ # Now check all events for order and correctness.
38
+ event_count.times do |value|
39
+ id, element = redis.blpop(key, 0)
40
+ event = LogStash::Event.new(LogStash::Json.load(element))
41
+ insist { event["sequence"] } == value
42
+ insist { event["message"] } == "hello world"
43
+ end
44
+
45
+ # The list should now be empty
46
+ insist { redis.llen(key) } == 0
47
+ end # agent
48
+ end
49
+
50
+ describe "batch mode" do
51
+ key = 10.times.collect { rand(10).to_s }.join("")
52
+ event_count = 200000
53
+
54
+ config <<-CONFIG
55
+ input {
56
+ generator {
57
+ message => "hello world"
58
+ count => #{event_count}
59
+ type => "generator"
60
+ }
61
+ }
62
+ output {
63
+ redis {
64
+ host => "127.0.0.1"
65
+ key => "#{key}"
66
+ data_type => list
67
+ batch => true
68
+ batch_timeout => 5
69
+ timeout => 5
70
+ }
71
+ }
72
+ CONFIG
73
+
74
+ agent do
75
+ # we have to wait for teardown to execute & flush the last batch.
76
+ # otherwise we might start doing assertions before everything has been
77
+ # sent out to redis.
78
+ sleep 2
79
+
80
+ redis = Redis.new(:host => "127.0.0.1")
81
+
82
+ # The list should contain the number of elements our agent pushed up.
83
+ insist { redis.llen(key) } == event_count
84
+
85
+ # Now check all events for order and correctness.
86
+ event_count.times do |value|
87
+ id, element = redis.blpop(key, 0)
88
+ event = LogStash::Event.new(LogStash::Json.load(element))
89
+ insist { event["sequence"] } == value
90
+ insist { event["message"] } == "hello world"
91
+ end
92
+
93
+ # The list should now be empty
94
+ insist { redis.llen(key) } == 0
95
+ end # agent
96
+ end
97
+
98
+ describe "converts US-ASCII to utf-8 without failures" do
99
+ key = 10.times.collect { rand(10).to_s }.join("")
100
+
101
+ config <<-CONFIG
102
+ input {
103
+ generator {
104
+ charset => "US-ASCII"
105
+ message => "\xAD\u0000"
106
+ count => 1
107
+ type => "generator"
108
+ }
109
+ }
110
+ output {
111
+ redis {
112
+ host => "127.0.0.1"
113
+ key => "#{key}"
114
+ data_type => list
115
+ }
116
+ }
117
+ CONFIG
118
+
119
+ agent do
120
+ # Query redis directly and inspect the goodness.
121
+ redis = Redis.new(:host => "127.0.0.1")
122
+
123
+ # The list should contain no elements.
124
+ insist { redis.llen(key) } == 1
125
+ end # agent
126
+ end
127
+ end
128
+
metadata ADDED
@@ -0,0 +1,103 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: logstash-output-redis
3
+ version: !ruby/object:Gem::Version
4
+ version: 0.1.0
5
+ platform: ruby
6
+ authors:
7
+ - Elasticsearch
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2014-11-03 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ name: logstash
15
+ requirement: !ruby/object:Gem::Requirement
16
+ requirements:
17
+ - - ! '>='
18
+ - !ruby/object:Gem::Version
19
+ version: 1.4.0
20
+ - - <
21
+ - !ruby/object:Gem::Version
22
+ version: 2.0.0
23
+ type: :runtime
24
+ prerelease: false
25
+ version_requirements: !ruby/object:Gem::Requirement
26
+ requirements:
27
+ - - ! '>='
28
+ - !ruby/object:Gem::Version
29
+ version: 1.4.0
30
+ - - <
31
+ - !ruby/object:Gem::Version
32
+ version: 2.0.0
33
+ - !ruby/object:Gem::Dependency
34
+ name: redis
35
+ requirement: !ruby/object:Gem::Requirement
36
+ requirements:
37
+ - - ! '>='
38
+ - !ruby/object:Gem::Version
39
+ version: '0'
40
+ type: :runtime
41
+ prerelease: false
42
+ version_requirements: !ruby/object:Gem::Requirement
43
+ requirements:
44
+ - - ! '>='
45
+ - !ruby/object:Gem::Version
46
+ version: '0'
47
+ - !ruby/object:Gem::Dependency
48
+ name: stud
49
+ requirement: !ruby/object:Gem::Requirement
50
+ requirements:
51
+ - - ! '>='
52
+ - !ruby/object:Gem::Version
53
+ version: '0'
54
+ type: :runtime
55
+ prerelease: false
56
+ version_requirements: !ruby/object:Gem::Requirement
57
+ requirements:
58
+ - - ! '>='
59
+ - !ruby/object:Gem::Version
60
+ version: '0'
61
+ description: This output will send events to a Redis queue using RPUSH. Requires Redis
62
+ Server 2.6.0+
63
+ email: richard.pijnenburg@elasticsearch.com
64
+ executables: []
65
+ extensions: []
66
+ extra_rdoc_files: []
67
+ files:
68
+ - .gitignore
69
+ - Gemfile
70
+ - Rakefile
71
+ - lib/logstash/outputs/redis.rb
72
+ - logstash-output-redis.gemspec
73
+ - rakelib/publish.rake
74
+ - rakelib/vendor.rake
75
+ - spec/outputs/redis_spec.rb
76
+ homepage: http://logstash.net/
77
+ licenses:
78
+ - Apache License (2.0)
79
+ metadata:
80
+ logstash_plugin: 'true'
81
+ group: output
82
+ post_install_message:
83
+ rdoc_options: []
84
+ require_paths:
85
+ - lib
86
+ required_ruby_version: !ruby/object:Gem::Requirement
87
+ requirements:
88
+ - - ! '>='
89
+ - !ruby/object:Gem::Version
90
+ version: '0'
91
+ required_rubygems_version: !ruby/object:Gem::Requirement
92
+ requirements:
93
+ - - ! '>='
94
+ - !ruby/object:Gem::Version
95
+ version: '0'
96
+ requirements: []
97
+ rubyforge_project:
98
+ rubygems_version: 2.4.1
99
+ signing_key:
100
+ specification_version: 4
101
+ summary: This output will send events to a Redis queue using RPUSH
102
+ test_files:
103
+ - spec/outputs/redis_spec.rb