logstash-output-kinesis-leprechaun-fork 0.1.4
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +15 -0
- data/.gitignore +4 -0
- data/CONTRIBUTORS +18 -0
- data/Gemfile +3 -0
- data/LICENSE +13 -0
- data/README.md +86 -0
- data/Rakefile +7 -0
- data/lib/logstash/outputs/kinesis.rb +197 -0
- data/logstash-output-kinesis.gemspec +29 -0
- data/spec/outputs/kinesis_spec.rb +5 -0
- metadata +134 -0
checksums.yaml
ADDED
@@ -0,0 +1,15 @@
|
|
1
|
+
---
|
2
|
+
!binary "U0hBMQ==":
|
3
|
+
metadata.gz: !binary |-
|
4
|
+
NzFjOGFmMDNkMWUyNGYyOTMxMGNkNzMxZDg1ODZjZDMzYzMyNjExOQ==
|
5
|
+
data.tar.gz: !binary |-
|
6
|
+
N2U0YmRmMmNjN2FlYjFlMDQ4Mzg5NGI1NzhiMmE1OWQ1MmE3YmZkZg==
|
7
|
+
SHA512:
|
8
|
+
metadata.gz: !binary |-
|
9
|
+
YmQ4NjA4N2Q2YzVkODFiNTNkOTYxOWNmMTk5Y2ViYTJkMGJjOWMwOGZlNWY4
|
10
|
+
ZTY2MzU3YzVhMDQ5M2Y5ZTdmY2E1ODAxNmRiZTU1ODEyODFhNDJkNmE4ZjI3
|
11
|
+
OTgxN2UzZTk0ZWUxOTRiMzFhMTgwYzEwODQ5MjkwYjNhZjA2YmM=
|
12
|
+
data.tar.gz: !binary |-
|
13
|
+
MWFlOTBiMjA4ZGJmYjc2MzYxM2EyYzExZmRiYWM2Y2MyNzE0NDFlODM2OTNj
|
14
|
+
ZTIyMjUzNjFmYzI3YmIyNTVjMzBiM2M3Zjk2ZWEzYWFhMmFlNGM3MDE4MjMw
|
15
|
+
ODkyZWUyYjk3NTA1YmJhZThlNDA4N2E3NzQ2OWQ5NzIwMmIyYjI=
|
data/.gitignore
ADDED
data/CONTRIBUTORS
ADDED
@@ -0,0 +1,18 @@
|
|
1
|
+
The following is a list of people who have contributed ideas, code, bug
|
2
|
+
reports, or in general have helped logstash along its way.
|
3
|
+
|
4
|
+
Contributors:
|
5
|
+
* Aaron Broad (AaronTheApe)
|
6
|
+
* James Turnbull (jamtur01)
|
7
|
+
* John Price (awesometown)
|
8
|
+
* Jordan Sissel (jordansissel)
|
9
|
+
* Kurt Hurtado (kurtado)
|
10
|
+
* Pier-Hugues Pellerin (ph)
|
11
|
+
* Richard Pijnenburg (electrical)
|
12
|
+
* Sean Laurent (organicveggie)
|
13
|
+
* Toby Collier (tobyw4n)
|
14
|
+
|
15
|
+
Note: If you've sent us patches, bug reports, or otherwise contributed to
|
16
|
+
Logstash, and you aren't on the list above and want to be, please let us know
|
17
|
+
and we'll make sure you're here. Contributions from folks like you are what make
|
18
|
+
open source awesome.
|
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,13 @@
|
|
1
|
+
Copyright (c) 2012-2015 Elasticsearch <http://www.elasticsearch.org>
|
2
|
+
|
3
|
+
Licensed under the Apache License, Version 2.0 (the "License");
|
4
|
+
you may not use this file except in compliance with the License.
|
5
|
+
You may obtain a copy of the License at
|
6
|
+
|
7
|
+
http://www.apache.org/licenses/LICENSE-2.0
|
8
|
+
|
9
|
+
Unless required by applicable law or agreed to in writing, software
|
10
|
+
distributed under the License is distributed on an "AS IS" BASIS,
|
11
|
+
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
12
|
+
See the License for the specific language governing permissions and
|
13
|
+
limitations under the License.
|
data/README.md
ADDED
@@ -0,0 +1,86 @@
|
|
1
|
+
# Logstash Plugin
|
2
|
+
|
3
|
+
This is a plugin for [Logstash](https://github.com/elasticsearch/logstash).
|
4
|
+
|
5
|
+
It is fully free and fully open source. The license is Apache 2.0, meaning you are pretty much free to use it however you want in whatever way.
|
6
|
+
|
7
|
+
## Documentation
|
8
|
+
|
9
|
+
Logstash provides infrastructure to automatically generate documentation for this plugin. We use the asciidoc format to write documentation so any comments in the source code will be first converted into asciidoc and then into html. All plugin documentation is placed under one [central location](http://www.elasticsearch.org/guide/en/logstash/current/).
|
10
|
+
|
11
|
+
- For formatting code or config example, you can use the asciidoc `[source,ruby]` directive
|
12
|
+
- For more asciidoc formatting tips, see the excellent reference here https://github.com/elasticsearch/docs#asciidoc-guide
|
13
|
+
|
14
|
+
## Need Help?
|
15
|
+
|
16
|
+
Need help? Try #logstash on freenode IRC or the logstash-users@googlegroups.com mailing list.
|
17
|
+
|
18
|
+
## Developing
|
19
|
+
|
20
|
+
### 1. Plugin Development and Testing
|
21
|
+
|
22
|
+
#### Code
|
23
|
+
- To get started, you'll need JRuby with the Bundler gem installed.
|
24
|
+
|
25
|
+
- Create a new plugin or clone an existing one from the GitHub [logstash-plugins](https://github.com/logstash-plugins) organization. We also provide [example plugins](https://github.com/logstash-plugins?query=example).
|
26
|
+
|
27
|
+
- Install dependencies
|
28
|
+
```sh
|
29
|
+
bundle install
|
30
|
+
```
|
31
|
+
|
32
|
+
#### Test
|
33
|
+
|
34
|
+
- Update your dependencies
|
35
|
+
|
36
|
+
```sh
|
37
|
+
bundle install
|
38
|
+
```
|
39
|
+
|
40
|
+
- Run tests
|
41
|
+
|
42
|
+
```sh
|
43
|
+
bundle exec rspec
|
44
|
+
```
|
45
|
+
|
46
|
+
### 2. Running your unpublished Plugin in Logstash
|
47
|
+
|
48
|
+
#### 2.1 Run in a local Logstash clone
|
49
|
+
|
50
|
+
- Edit Logstash `Gemfile` and add the local plugin path, for example:
|
51
|
+
```ruby
|
52
|
+
gem "logstash-filter-awesome", :path => "/your/local/logstash-filter-awesome"
|
53
|
+
```
|
54
|
+
- Install plugin
|
55
|
+
```sh
|
56
|
+
bin/plugin install --no-verify
|
57
|
+
```
|
58
|
+
- Run Logstash with your plugin
|
59
|
+
```sh
|
60
|
+
bin/logstash -e 'filter {awesome {}}'
|
61
|
+
```
|
62
|
+
At this point any modifications to the plugin code will be applied to this local Logstash setup. After modifying the plugin, simply rerun Logstash.
|
63
|
+
|
64
|
+
#### 2.2 Run in an installed Logstash
|
65
|
+
|
66
|
+
You can use the same **2.1** method to run your plugin in an installed Logstash by editing its `Gemfile` and pointing the `:path` to your local plugin development directory or you can build the gem and install it using:
|
67
|
+
|
68
|
+
- Build your plugin gem
|
69
|
+
```sh
|
70
|
+
gem build logstash-filter-awesome.gemspec
|
71
|
+
```
|
72
|
+
- Install the plugin from the Logstash home
|
73
|
+
```sh
|
74
|
+
bin/plugin install /your/local/plugin/logstash-filter-awesome.gem
|
75
|
+
```
|
76
|
+
- Start Logstash and proceed to test the plugin
|
77
|
+
|
78
|
+
## Contributing
|
79
|
+
|
80
|
+
All contributions are welcome: ideas, patches, documentation, bug reports, complaints, and even something you drew up on a napkin.
|
81
|
+
|
82
|
+
Programming is not a required skill. Whatever you've seen about open source and maintainers or community members saying "send patches or die" - you will not see that here.
|
83
|
+
|
84
|
+
It is more important to the community that you are able to contribute.
|
85
|
+
|
86
|
+
For more information about contributing, see the [CONTRIBUTING](https://github.com/elasticsearch/logstash/blob/master/CONTRIBUTING.md) file.
|
data/Rakefile
ADDED
@@ -0,0 +1,197 @@
|
|
1
|
+
# encoding: utf-8
|
2
|
+
require "logstash/outputs/base"
|
3
|
+
require "logstash/namespace"
|
4
|
+
require "logstash/plugin_mixins/aws_config"
|
5
|
+
require "stud/buffer"
|
6
|
+
require "digest/sha2"
|
7
|
+
|
8
|
+
# Push events to an Amazon Web Services Kinesis stream.
#
# Amazon Kinesis is a fully managed, cloud-based service for real-time data processing
# over large, distributed data streams.
#
# To use this plugin, you *must*:
#
#  * Have an AWS account
#  * Set up a Kinesis stream
#  * Create an identity that has access to publish messages to the stream.
#
# The "consumer" identity must have the following permissions on the stream:
#
#  * kinesis:PutRecords
#
# Typically, you should setup an IAM policy, create a user and apply the IAM policy to the user.
# A sample policy is as follows:
# [source,ruby]
#      {
#        "Statement": [
#          {
#            "Sid": "Stmt1347986764948",
#            "Action": [
#              "kinesis:PutRecords"
#            ],
#            "Effect": "Allow",
#            "Resource": [
#              "arn:aws:kinesis:us-east-1:111122223333:stream/Logstash"
#            ]
#          }
#        ]
#      }
#
# See http://aws.amazon.com/iam/ for more details on setting up AWS identities.
#
# #### Usage:
# This is an example of logstash config:
# [source,ruby]
#     output {
#       kinesis {
#         access_key_id => "crazy_key"                  (required)
#         secret_access_key => "monkey_access_key"      (required)
#         region => "eu-west-1"                         (required)
#         stream_name => "my_stream"                    (required)
#         event_partition_keys => ["message","@uuid"]   (optional)
#         batch_events => 100                           (optional, batch size)
#         batch_timeout => 5                            (optional)
#       }
#     }
#
class LogStash::Outputs::Kinesis < LogStash::Outputs::Base
  include LogStash::PluginMixins::AwsConfig
  include Stud::Buffer

  config_name "kinesis"
  milestone 1

  # Name of Kinesis stream to push messages into. Note that this is just the name of the stream, not the URL or ARN.
  config :stream_name, :validate => :string, :required => true

  # Ordered list of event field names; the first field present and non-empty on an
  # event supplies its Kinesis partition key.
  config :event_partition_keys, :validate => :array, :default => ["message", "@uuid"]

  # Set to true if you want send messages to Kinesis in batches with `put_records`
  # from the amazon sdk
  config :batch, :validate => :boolean, :default => true

  # If `batch` is set to true, the number of events we queue up for a `put_records`.
  # AWS caps a single PutRecords request at 500 records.
  config :batch_events, :validate => :number, :default => 100

  # If `batch` is set to true, the maximum amount of time between `put_records` commands when there are pending events to flush.
  config :batch_timeout, :validate => :number, :default => 5

  public
  # Returns the regional Kinesis endpoint used by the AWS config mixin.
  def aws_service_endpoint(region)
    return {
      :kinesis_endpoint => "kinesis.#{region}.amazonaws.com"
    }
  end

  public
  # Validates batch settings and sets up the Kinesis client plus the Stud buffer.
  # Raises RuntimeError for out-of-range batch_events; NotImplementedError when
  # batch mode is disabled (only batch writes are supported).
  def register
    require "aws-sdk-resources"

    @kinesis = Aws::Kinesis::Client.new(
      region: @region,
      access_key_id: @access_key_id,
      secret_access_key: @secret_access_key
    )

    if @batch
      if @batch_events > 500
        # AWS PutRecords rejects requests with more than 500 records.
        raise RuntimeError.new(
          "AWS only allows a batch_events parameter of 500 or less"
        )
      elsif @batch_events <= 1
        raise RuntimeError.new(
          "batch_events parameter must be greater than 1 (or its not a batch)"
        )
      end
      buffer_initialize(
        :max_items => @batch_events,
        :max_interval => @batch_timeout,
        :logger => @logger
      )
    else
      raise NotImplementedError, 'Currently only batch write is supported'
    end
  end # def register

  public
  # Queues a single event for the next batched put_records call.
  # The partition key is taken from the first configured field that is present
  # and non-empty on the event; otherwise it stays "".
  def receive(event)
    if @batch
      partition_key = ""

      @event_partition_keys.each do |partition_key_name|
        if not event[partition_key_name].nil? and event[partition_key_name].to_s.length > 0
          # debug, not info: this fires for every single event
          @logger.debug("Found field named #{partition_key_name}")
          partition_key = event[partition_key_name].to_s
          break
        end
        @logger.debug("No field named #{partition_key_name}")
      end

      buffer_receive(
        {
          data: event.to_json,
          partition_key: partition_key
        }
      )
    else
      raise NotImplementedError, 'Currently only batch write is supported'
    end
  end # def receive

  # Return a list of events that failed when writing to kinesis.
  # `responses` is the page collection returned by Aws::Kinesis::Client#put_records;
  # `events` is the record array passed in, in the same order AWS reports them.
  def get_failed_records(responses, events)
    # Iterate over response records
    response_record_index = 0
    failed_events = []

    # Check each page in the response
    if responses.is_a?(Array)
      responses.each do |response_page|
        # Check each record in each page
        response_page.data.records.each do |response_record|
          # Collect all failed records (error_code is nil on success)
          if not response_record.error_code.nil?
            @logger.info("put_records: #{response_record.error_message} (#{response_record.error_code})")
            failed_events.push(events[response_record_index])
          end
          response_record_index += 1
        end
      end
    end

    return failed_events
  end

  # called from Stud::Buffer#buffer_flush when there are events to flush.
  # Retries only the failed subset of records with exponential + random backoff
  # until every record is accepted. NOTE(review): the retry loop is unbounded —
  # a persistently failing stream blocks the pipeline here.
  def flush(events, teardown=false)
    # Initial backoff time
    backoff = 0.01

    # A retry loop
    loop do
      responses = @kinesis.put_records(
        records: events,
        stream_name: @stream_name
      )

      failed_events = get_failed_records(responses, events)

      # Stop once every record was accepted
      break if failed_events.count == 0

      @logger.warn("Failed #{failed_events.count} records. Retrying in #{backoff}.")
      events = failed_events

      # Exponential + random backoff
      sleep(backoff)
      backoff *= 1.95 + rand() / 10
    end
  end

  public
  # Flushes any buffered events, then marks the output as finished.
  def teardown
    buffer_flush(:final => true)
    finished
  end # def teardown
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
# Gem specification for the logstash-output-kinesis fork.
# Installed into Logstash with: $LS_HOME/bin/plugin install <gem>
Gem::Specification.new do |s|
  s.name = 'logstash-output-kinesis-leprechaun-fork'
  s.version = '0.1.4'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Push events to an Amazon Web Services Kinesis stream."
  s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
  s.authors = ["Elasticsearch", "Laurence MacGuire"]
  s.email = 'leprechaun@gmail.com'
  s.homepage = "https://github.com/leprechaun/logstash-output-kinesis"
  s.require_paths = ["lib"]

  # Files: everything tracked by git, plus any vendored artifacts
  s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Gem dependencies
  s.add_runtime_dependency "logstash-core", '>= 1.4.0', '< 2.0.0'
  s.add_runtime_dependency 'logstash-mixin-aws'

  s.add_runtime_dependency 'aws-sdk-resources', '~> 2'
  s.add_runtime_dependency 'stud'

  s.add_development_dependency 'logstash-devutils'
end
|
metadata
ADDED
@@ -0,0 +1,134 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: logstash-output-kinesis-leprechaun-fork
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 0.1.4
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Elasticsearch
|
8
|
+
- Laurence MacGuire
|
9
|
+
autorequire:
|
10
|
+
bindir: bin
|
11
|
+
cert_chain: []
|
12
|
+
date: 2016-04-28 00:00:00.000000000 Z
|
13
|
+
dependencies:
|
14
|
+
- !ruby/object:Gem::Dependency
|
15
|
+
name: logstash-core
|
16
|
+
requirement: !ruby/object:Gem::Requirement
|
17
|
+
requirements:
|
18
|
+
- - ! '>='
|
19
|
+
- !ruby/object:Gem::Version
|
20
|
+
version: 1.4.0
|
21
|
+
- - <
|
22
|
+
- !ruby/object:Gem::Version
|
23
|
+
version: 2.0.0
|
24
|
+
type: :runtime
|
25
|
+
prerelease: false
|
26
|
+
version_requirements: !ruby/object:Gem::Requirement
|
27
|
+
requirements:
|
28
|
+
- - ! '>='
|
29
|
+
- !ruby/object:Gem::Version
|
30
|
+
version: 1.4.0
|
31
|
+
- - <
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: 2.0.0
|
34
|
+
- !ruby/object:Gem::Dependency
|
35
|
+
name: logstash-mixin-aws
|
36
|
+
requirement: !ruby/object:Gem::Requirement
|
37
|
+
requirements:
|
38
|
+
- - ! '>='
|
39
|
+
- !ruby/object:Gem::Version
|
40
|
+
version: '0'
|
41
|
+
type: :runtime
|
42
|
+
prerelease: false
|
43
|
+
version_requirements: !ruby/object:Gem::Requirement
|
44
|
+
requirements:
|
45
|
+
- - ! '>='
|
46
|
+
- !ruby/object:Gem::Version
|
47
|
+
version: '0'
|
48
|
+
- !ruby/object:Gem::Dependency
|
49
|
+
name: aws-sdk-resources
|
50
|
+
requirement: !ruby/object:Gem::Requirement
|
51
|
+
requirements:
|
52
|
+
- - ~>
|
53
|
+
- !ruby/object:Gem::Version
|
54
|
+
version: '2'
|
55
|
+
type: :runtime
|
56
|
+
prerelease: false
|
57
|
+
version_requirements: !ruby/object:Gem::Requirement
|
58
|
+
requirements:
|
59
|
+
- - ~>
|
60
|
+
- !ruby/object:Gem::Version
|
61
|
+
version: '2'
|
62
|
+
- !ruby/object:Gem::Dependency
|
63
|
+
name: stud
|
64
|
+
requirement: !ruby/object:Gem::Requirement
|
65
|
+
requirements:
|
66
|
+
- - ! '>='
|
67
|
+
- !ruby/object:Gem::Version
|
68
|
+
version: '0'
|
69
|
+
type: :runtime
|
70
|
+
prerelease: false
|
71
|
+
version_requirements: !ruby/object:Gem::Requirement
|
72
|
+
requirements:
|
73
|
+
- - ! '>='
|
74
|
+
- !ruby/object:Gem::Version
|
75
|
+
version: '0'
|
76
|
+
- !ruby/object:Gem::Dependency
|
77
|
+
name: logstash-devutils
|
78
|
+
requirement: !ruby/object:Gem::Requirement
|
79
|
+
requirements:
|
80
|
+
- - ! '>='
|
81
|
+
- !ruby/object:Gem::Version
|
82
|
+
version: '0'
|
83
|
+
type: :development
|
84
|
+
prerelease: false
|
85
|
+
version_requirements: !ruby/object:Gem::Requirement
|
86
|
+
requirements:
|
87
|
+
- - ! '>='
|
88
|
+
- !ruby/object:Gem::Version
|
89
|
+
version: '0'
|
90
|
+
description: This gem is a logstash plugin required to be installed on top of the
|
91
|
+
Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not
|
92
|
+
a stand-alone program
|
93
|
+
email: leprechaun@gmail.com
|
94
|
+
executables: []
|
95
|
+
extensions: []
|
96
|
+
extra_rdoc_files: []
|
97
|
+
files:
|
98
|
+
- .gitignore
|
99
|
+
- CONTRIBUTORS
|
100
|
+
- Gemfile
|
101
|
+
- LICENSE
|
102
|
+
- README.md
|
103
|
+
- Rakefile
|
104
|
+
- lib/logstash/outputs/kinesis.rb
|
105
|
+
- logstash-output-kinesis.gemspec
|
106
|
+
- spec/outputs/kinesis_spec.rb
|
107
|
+
homepage: https://github.com/leprechaun/logstash-output-kinesis
|
108
|
+
licenses:
|
109
|
+
- Apache License (2.0)
|
110
|
+
metadata:
|
111
|
+
logstash_plugin: 'true'
|
112
|
+
logstash_group: output
|
113
|
+
post_install_message:
|
114
|
+
rdoc_options: []
|
115
|
+
require_paths:
|
116
|
+
- lib
|
117
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
118
|
+
requirements:
|
119
|
+
- - ! '>='
|
120
|
+
- !ruby/object:Gem::Version
|
121
|
+
version: '0'
|
122
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
123
|
+
requirements:
|
124
|
+
- - ! '>='
|
125
|
+
- !ruby/object:Gem::Version
|
126
|
+
version: '0'
|
127
|
+
requirements: []
|
128
|
+
rubyforge_project:
|
129
|
+
rubygems_version: 2.4.8
|
130
|
+
signing_key:
|
131
|
+
specification_version: 4
|
132
|
+
summary: Push events to an Amazon Web Services Kinesis stream.
|
133
|
+
test_files:
|
134
|
+
- spec/outputs/kinesis_spec.rb
|