logstash-input-redis 0.1.0

This diff shows the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the packages as they appear in their respective public registries.
checksums.yaml ADDED
@@ -0,0 +1,15 @@
+ ---
+ !binary "U0hBMQ==":
+   metadata.gz: !binary |-
+     YjRkY2M1Y2NlMGJjYjMyM2E3ODZhN2U4MWRiZDJhYWIzZjhmM2IzMA==
+   data.tar.gz: !binary |-
+     MWU2MGNlNDI0YTc3YjA4OWQyNzhlYzBkMTkzY2FkNTJlMTZkNDI0Yw==
+ SHA512:
+   metadata.gz: !binary |-
+     ODgxMjRkYTc5MzMyOTNiMWVlNzRmZGUyMDRhNzc2NzA4ZmIwNzAzNDllNzVh
+     NzA5ZTU2Mzk1NmE5ZDc2ZmUzODgzZWI2OTBiYTQ4NmQzNGU5ZmE2ODJlY2Vj
+     YjE1ODAxNTI1NTk3MGE3MzEyNTVlNGE5OGIxMjRlNGMyZDJlNmQ=
+   data.tar.gz: !binary |-
+     YmRhZGMzZmUxODJiNDBlM2FkZDA2ZGJjZTI4NGQzNzdhNjk3NDJmMThhOTVj
+     NmZiZmQ0NjJjNThkMzc3MmQzM2RiZmQ3ZWUzYTVkMmE3NDA3ZjVhNGMxMTFm
+     N2MwYTBlMDJjMzFlNGM1MzQ0MmE5MGU5ODMwOTQyNWU4NmVlYmY=
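
The !binary scalars above are base64-wrapped hex digests of the two archives inside the .gem file (metadata.gz and data.tar.gz). A minimal Ruby verification sketch, assuming the gem has been unpacked (e.g. with tar) so that metadata.gz sits in the current directory; the expected value is the SHA1 entry copied from the YAML above, and nothing in this snippet ships with the gem:

    require "base64"
    require "digest"

    # Illustrative check, not part of the package: the SHA1 entry for
    # metadata.gz above decodes to a plain 40-character hex digest string.
    expected = Base64.decode64("YjRkY2M1Y2NlMGJjYjMyM2E3ODZhN2U4MWRiZDJhYWIzZjhmM2IzMA==")
    actual   = Digest::SHA1.file("metadata.gz").hexdigest
    puts(actual == expected ? "metadata.gz OK" : "metadata.gz mismatch")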
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ Gemfile.lock
+ .bundle
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'http://rubygems.org'
+ gem 'rake'
+ gem 'gem_publisher'
+ gem 'archive-tar-minitar'
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ @files=[]
+
+ task :default do
+   system("rake -T")
+ end
+
data/lib/logstash/inputs/redis.rb ADDED
@@ -0,0 +1,266 @@
+ # encoding: utf-8
+ require "logstash/inputs/base"
+ require "logstash/inputs/threadable"
+ require "logstash/namespace"
+
+ # This input will read events from a Redis instance; it supports both Redis channels and lists.
+ # The list command (BLPOP) used by Logstash is supported in Redis v1.3.1+, and
+ # the channel commands used by Logstash are found in Redis v1.3.8+.
+ # While you may be able to make these Redis versions work, the best performance
+ # and stability will be found in more recent stable versions. Versions 2.6.0+
+ # are recommended.
+ #
+ # For more information about Redis, see <http://redis.io/>
+ #
+ # `batch_count` note: If you use the `batch_count` setting, you *must* use a Redis version 2.6.0 or
+ # newer. Anything older does not support the operations used by batching.
+ #
+ class LogStash::Inputs::Redis < LogStash::Inputs::Threadable
+   config_name "redis"
+   milestone 2
+
+   default :codec, "json"
+
+   # The `name` configuration is used for logging in case there are multiple instances.
+   # This feature has no real function and will be removed in future versions.
+   config :name, :validate => :string, :default => "default", :deprecated => true
+
+   # The hostname of your Redis server.
+   config :host, :validate => :string, :default => "127.0.0.1"
+
+   # The port to connect on.
+   config :port, :validate => :number, :default => 6379
+
+   # The Redis database number.
+   config :db, :validate => :number, :default => 0
+
+   # Initial connection timeout in seconds.
+   config :timeout, :validate => :number, :default => 5
+
+   # Password to authenticate with. There is no authentication by default.
+   config :password, :validate => :password
+
+   # The name of the Redis queue (we'll use BLPOP against this).
+   # TODO: remove soon.
+   config :queue, :validate => :string, :deprecated => true
+
+   # The name of a Redis list or channel.
+   # TODO: change required to true
+   config :key, :validate => :string, :required => false
+
+   # Specify either list or channel. If `redis\_type` is `list`, then we will BLPOP the
+   # key. If `redis\_type` is `channel`, then we will SUBSCRIBE to the key.
+   # If `redis\_type` is `pattern_channel`, then we will PSUBSCRIBE to the key.
+   # TODO: change required to true
+   config :data_type, :validate => [ "list", "channel", "pattern_channel" ], :required => false
+
+   # The number of events to return from Redis using EVAL.
+   config :batch_count, :validate => :number, :default => 1
+
+   public
+   def register
+     require 'redis'
+     @redis = nil
+     @redis_url = "redis://#{@password}@#{@host}:#{@port}/#{@db}"
+
+     # TODO remove after setting key and data_type to true
+     if @queue
+       if @key or @data_type
+         raise RuntimeError.new(
+           "Cannot specify queue parameter and key or data_type"
+         )
+       end
+       @key = @queue
+       @data_type = 'list'
+     end
+
+     if not @key or not @data_type
+       raise RuntimeError.new(
+         "Must define queue, or key and data_type parameters"
+       )
+     end
+     # end TODO
+
+     @logger.info("Registering Redis", :identity => identity)
+   end # def register
+
+   # A string used to identify a Redis instance in log messages
+   # TODO(sissel): Use instance variables for this once the @name config
+   # option is removed.
+   private
+   def identity
+     @name || "#{@redis_url} #{@data_type}:#{@key}"
+   end
+
+   private
+   def connect
+     redis = Redis.new(
+       :host => @host,
+       :port => @port,
+       :timeout => @timeout,
+       :db => @db,
+       :password => @password.nil? ? nil : @password.value
+     )
+     load_batch_script(redis) if @data_type == 'list' && (@batch_count > 1)
+     return redis
+   end # def connect
+
+   private
+   def load_batch_script(redis)
+     #A Redis Lua EVAL script to fetch a count of keys
+     #in case count is bigger than current items in queue whole queue will be returned without extra nil values
+     redis_script = <<EOF
+       local i = tonumber(ARGV[1])
+       local res = {}
+       local length = redis.call('llen',KEYS[1])
+       if length < i then i = length end
+       while (i > 0) do
+         local item = redis.call("lpop", KEYS[1])
+         if (not item) then
+           break
+         end
+         table.insert(res, item)
+         i = i-1
+       end
+       return res
+ EOF
+     @redis_script_sha = redis.script(:load, redis_script)
+   end
+
+   private
+   def queue_event(msg, output_queue)
+     begin
+       @codec.decode(msg) do |event|
+         decorate(event)
+         output_queue << event
+       end
+     rescue => e # parse or event creation error
+       @logger.error("Failed to create event", :message => msg, :exception => e,
+                     :backtrace => e.backtrace);
+     end
+   end
+
+   private
+   def list_listener(redis, output_queue)
+
+     # blpop returns the 'key' read from as well as the item result
+     # we only care about the result (2nd item in the list).
+     item = redis.blpop(@key, 0)[1]
+
+     # blpop failed or .. something?
+     # TODO(sissel): handle the error
+     return if item.nil?
+     queue_event(item, output_queue)
+
+     # If @batch_count is 1, there's no need to continue.
+     return if @batch_count == 1
+
+     begin
+       redis.evalsha(@redis_script_sha, [@key], [@batch_count-1]).each do |item|
+         queue_event(item, output_queue)
+       end
+
+       # Below is a commented-out implementation of 'batch fetch'
+       # using pipelined LPOP calls. This in practice has been observed to
+       # perform exactly the same in terms of event throughput as
+       # the evalsha method. Given that the EVALSHA implementation uses
+       # one call to Redis instead of N (where N == @batch_count) calls,
+       # I decided to go with the 'evalsha' method of fetching N items
+       # from Redis in bulk.
+       #redis.pipelined do
+         #error, item = redis.lpop(@key)
+         #(@batch_count-1).times { redis.lpop(@key) }
+       #end.each do |item|
+         #queue_event(item, output_queue) if item
+       #end
+       # --- End commented out implementation of 'batch fetch'
+     rescue Redis::CommandError => e
+       if e.to_s =~ /NOSCRIPT/ then
+         @logger.warn("Redis may have been restarted, reloading Redis batch EVAL script", :exception => e);
+         load_batch_script(redis)
+         retry
+       else
+         raise e
+       end
+     end
+   end
+
+   private
+   def channel_listener(redis, output_queue)
+     redis.subscribe @key do |on|
+       on.subscribe do |channel, count|
+         @logger.info("Subscribed", :channel => channel, :count => count)
+       end
+
+       on.message do |channel, message|
+         queue_event message, output_queue
+       end
+
+       on.unsubscribe do |channel, count|
+         @logger.info("Unsubscribed", :channel => channel, :count => count)
+       end
+     end
+   end
+
+   private
+   def pattern_channel_listener(redis, output_queue)
+     redis.psubscribe @key do |on|
+       on.psubscribe do |channel, count|
+         @logger.info("Subscribed", :channel => channel, :count => count)
+       end
+
+       on.pmessage do |ch, event, message|
+         queue_event message, output_queue
+       end
+
+       on.punsubscribe do |channel, count|
+         @logger.info("Unsubscribed", :channel => channel, :count => count)
+       end
+     end
+   end
+
+   # Since both listeners have the same basic loop, we've abstracted the outer
+   # loop.
+   private
+   def listener_loop(listener, output_queue)
+     while !finished?
+       begin
+         @redis ||= connect
+         self.send listener, @redis, output_queue
+       rescue Redis::CannotConnectError => e
+         @logger.warn("Redis connection problem", :exception => e)
+         sleep 1
+         @redis = connect
+       rescue => e # Redis error
+         @logger.warn("Failed to get event from Redis", :name => @name,
+                      :exception => e, :backtrace => e.backtrace)
+         raise e
+       end
+     end # while !finished?
+   end # listener_loop
+
+   public
+   def run(output_queue)
+     if @data_type == 'list'
+       listener_loop :list_listener, output_queue
+     elsif @data_type == 'channel'
+       listener_loop :channel_listener, output_queue
+     else
+       listener_loop :pattern_channel_listener, output_queue
+     end
+   end # def run
+
+   public
+   def teardown
+     if @data_type == 'channel' and @redis
+       @redis.unsubscribe
+       @redis.quit
+       @redis = nil
+     end
+     if @data_type == 'pattern_channel' and @redis
+       @redis.punsubscribe
+       @redis.quit
+       @redis = nil
+     end
+   end
+ end # class LogStash::Inputs::Redis
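
For reference, a minimal pipeline configuration that exercises this input in list mode might look like the following; the key name and batch_count value are placeholders, and batch_count > 1 assumes Redis 2.6.0+ as noted in the plugin's comments (this snippet is illustrative, not shipped with the gem):

    input {
      redis {
        host        => "127.0.0.1"
        port        => 6379
        data_type   => "list"     # or "channel" / "pattern_channel"
        key         => "logstash" # placeholder list or channel name
        batch_count => 10         # requires Redis >= 2.6.0
      }
    }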
data/logstash-input-redis.gemspec ADDED
@@ -0,0 +1,29 @@
+ Gem::Specification.new do |s|
+
+   s.name = 'logstash-input-redis'
+   s.version = '0.1.0'
+   s.licenses = ['Apache License (2.0)']
+   s.summary = "This input will read events from a Redis instance"
+   s.description = "This input will read events from a Redis instance; it supports both Redis channels and lists."
+   s.authors = ["Elasticsearch"]
+   s.email = 'richard.pijnenburg@elasticsearch.com'
+   s.homepage = "http://logstash.net/"
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "group" => "input" }
+
+   # Gem dependencies
+   s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
+
+   s.add_runtime_dependency 'logstash-codec-json'
+   s.add_runtime_dependency 'redis'
+
+ end
+
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+ require "gem_publisher"
+
+ desc "Publish gem to RubyGems.org"
+ task :publish_gem do |t|
+   gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
+   gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+   puts "Published #{gem}" if gem
+ end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+ require "net/http"
+ require "uri"
+ require "digest/sha1"
+
+ def vendor(*args)
+   return File.join("vendor", *args)
+ end
+
+ directory "vendor/" => ["vendor"] do |task, args|
+   mkdir task.name
+ end
+
+ def fetch(url, sha1, output)
+
+   puts "Downloading #{url}"
+   actual_sha1 = download(url, output)
+
+   if actual_sha1 != sha1
+     fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+   end
+ end # def fetch
+
+ def file_fetch(url, sha1)
+   filename = File.basename( URI(url).path )
+   output = "vendor/#{filename}"
+   task output => [ "vendor/" ] do
+     begin
+       actual_sha1 = file_sha1(output)
+       if actual_sha1 != sha1
+         fetch(url, sha1, output)
+       end
+     rescue Errno::ENOENT
+       fetch(url, sha1, output)
+     end
+   end.invoke
+
+   return output
+ end
+
+ def file_sha1(path)
+   digest = Digest::SHA1.new
+   fd = File.new(path, "r")
+   while true
+     begin
+       digest << fd.sysread(16384)
+     rescue EOFError
+       break
+     end
+   end
+   return digest.hexdigest
+ ensure
+   fd.close if fd
+ end
+
+ def download(url, output)
+   uri = URI(url)
+   digest = Digest::SHA1.new
+   tmp = "#{output}.tmp"
+   Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+     request = Net::HTTP::Get.new(uri.path)
+     http.request(request) do |response|
+       fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
+       size = (response["content-length"].to_i || -1).to_f
+       count = 0
+       File.open(tmp, "w") do |fd|
+         response.read_body do |chunk|
+           fd.write(chunk)
+           digest << chunk
+           if size > 0 && $stdout.tty?
+             count += chunk.bytesize
+             $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
+           end
+         end
+       end
+       $stdout.write("\r \r") if $stdout.tty?
+     end
+   end
+
+   File.rename(tmp, output)
+
+   return digest.hexdigest
+ rescue SocketError => e
+   puts "Failure while downloading #{url}: #{e}"
+   raise
+ ensure
+   File.unlink(tmp) if File.exist?(tmp)
+ end # def download
+
+ def untar(tarball, &block)
+   require "archive/tar/minitar"
+   tgz = Zlib::GzipReader.new(File.open(tarball))
+   # Pull out typesdb
+   tar = Archive::Tar::Minitar::Input.open(tgz)
+   tar.each do |entry|
+     path = block.call(entry)
+     next if path.nil?
+     parent = File.dirname(path)
+
+     mkdir_p parent unless File.directory?(parent)
+
+     # Skip this file if the output file is the same size
+     if entry.directory?
+       mkdir path unless File.directory?(path)
+     else
+       entry_mode = entry.instance_eval { @mode } & 0777
+       if File.exists?(path)
+         stat = File.stat(path)
+         # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+         # expose headers in the entry.
+         entry_size = entry.instance_eval { @size }
+         # If file sizes are same, skip writing.
+         next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+       end
+       puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+       File.open(path, "w") do |fd|
+         # eof? check lets us skip empty files. Necessary because the API provided by
+         # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+         # IO object. Something about empty files in this EntryStream causes
+         # IO.copy_stream to throw "can't convert nil into String" on JRuby
+         # TODO(sissel): File a bug about this.
+         while !entry.eof?
+           chunk = entry.read(16384)
+           fd.write(chunk)
+         end
+         #IO.copy_stream(entry, fd)
+       end
+       File.chmod(entry_mode, path)
+     end
+   end
+   tar.close
+   File.unlink(tarball) if File.file?(tarball)
+ end # def untar
+
+ def ungz(file)
+
+   outpath = file.gsub('.gz', '')
+   tgz = Zlib::GzipReader.new(File.open(file))
+   begin
+     File.open(outpath, "w") do |out|
+       IO::copy_stream(tgz, out)
+     end
+     File.unlink(file)
+   rescue
+     File.unlink(outpath) if File.file?(outpath)
+     raise
+   end
+   tgz.close
+ end
+
+ desc "Process any vendor files required for this plugin"
+ task "vendor" do |task, args|
+
+   @files.each do |file|
+     download = file_fetch(file['url'], file['sha1'])
+     if download =~ /.tar.gz/
+       prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+       untar(download) do |entry|
+         if !file['files'].nil?
+           next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+           out = entry.full_name.split("/").last
+         end
+         File.join('vendor', out)
+       end
+     elsif download =~ /.gz/
+       ungz(download)
+     end
+   end
+
+ end
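
The "vendor" task above iterates @files, which the Rakefile initializes to an empty array, so this particular plugin vendors nothing. A hypothetical entry, matching the 'url', 'sha1', and optional 'files' keys the task reads, could look like this in the Rakefile (URL, digest, and path are placeholders, not real artifacts):

    # Illustrative only; would replace `@files=[]` in the Rakefile.
    @files = [
      {
        'url'   => 'http://example.com/some-dependency-1.0.tar.gz',  # placeholder download URL
        'sha1'  => '0000000000000000000000000000000000000000',       # placeholder SHA1 of the tarball
        'files' => ['/lib/some-dependency.rb']                       # optional list of entries to extract
      }
    ]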
data/spec/inputs/redis_spec.rb ADDED
@@ -0,0 +1,63 @@
+ require "spec_helper"
+ require "redis"
+
+ def populate(key, event_count)
+   require "logstash/event"
+   redis = Redis.new(:host => "localhost")
+   event_count.times do |value|
+     event = LogStash::Event.new("sequence" => value)
+     Stud::try(10.times) do
+       redis.rpush(key, event.to_json)
+     end
+   end
+ end
+
+ def process(pipeline, queue, event_count)
+   sequence = 0
+   Thread.new { pipeline.run }
+   event_count.times do |i|
+     event = queue.pop
+     insist { event["sequence"] } == i
+   end
+   pipeline.shutdown
+ end # process
+
+ describe "inputs/redis", :redis => true do
+
+
+   describe "read events from a list" do
+     key = 10.times.collect { rand(10).to_s }.join("")
+     event_count = 1000 + rand(50)
+     config <<-CONFIG
+       input {
+         redis {
+           type => "blah"
+           key => "#{key}"
+           data_type => "list"
+         }
+       }
+     CONFIG
+
+     before(:each) { populate(key, event_count) }
+
+     input { |pipeline, queue| process(pipeline, queue, event_count) }
+   end
+
+   describe "read events from a list with batch_count=5" do
+     key = 10.times.collect { rand(10).to_s }.join("")
+     event_count = 1000 + rand(50)
+     config <<-CONFIG
+       input {
+         redis {
+           type => "blah"
+           key => "#{key}"
+           data_type => "list"
+           batch_count => #{rand(20)+1}
+         }
+       }
+     CONFIG
+
+     before(:each) { populate(key, event_count) }
+     input { |pipeline, queue| process(pipeline, queue, event_count) }
+   end
+ end
metadata ADDED
@@ -0,0 +1,103 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-input-redis
+ version: !ruby/object:Gem::Version
+   version: 0.1.0
+ platform: ruby
+ authors:
+ - Elasticsearch
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2014-11-03 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: logstash
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: 1.4.0
+     - - <
+       - !ruby/object:Gem::Version
+         version: 2.0.0
+ - !ruby/object:Gem::Dependency
+   name: logstash-codec-json
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: redis
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ! '>='
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This input will read events from a Redis instance; it supports both Redis
+   channels and lists.
+ email: richard.pijnenburg@elasticsearch.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - .gitignore
+ - Gemfile
+ - Rakefile
+ - lib/logstash/inputs/redis.rb
+ - logstash-input-redis.gemspec
+ - rakelib/publish.rake
+ - rakelib/vendor.rake
+ - spec/inputs/redis_spec.rb
+ homepage: http://logstash.net/
+ licenses:
+ - Apache License (2.0)
+ metadata:
+   logstash_plugin: 'true'
+   group: input
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ! '>='
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubyforge_project:
+ rubygems_version: 2.4.1
+ signing_key:
+ specification_version: 4
+ summary: This input will read events from a Redis instance
+ test_files:
+ - spec/inputs/redis_spec.rb