logstash-output-kinesis-jdk11 5.1.3-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (46) hide show
  1. checksums.yaml +7 -0
  2. data/.editorconfig +11 -0
  3. data/.gemrelease +3 -0
  4. data/.gitattributes +1 -0
  5. data/.github_changelog_generator +1 -0
  6. data/.gitignore +7 -0
  7. data/.rspec +2 -0
  8. data/.ruby-gemset +1 -0
  9. data/.ruby-version +1 -0
  10. data/.travis.yml +18 -0
  11. data/CHANGELOG.md +58 -0
  12. data/CODE_OF_CONDUCT.md +46 -0
  13. data/CONTRIBUTING.md +36 -0
  14. data/Gemfile +2 -0
  15. data/LICENSE +13 -0
  16. data/README.md +230 -0
  17. data/Rakefile +8 -0
  18. data/build.gradle +24 -0
  19. data/lib/logstash/outputs/kinesis.rb +228 -0
  20. data/lib/logstash-output-kinesis/version.rb +3 -0
  21. data/lib/logstash-output-kinesis_jars.rb +5 -0
  22. data/logstash-output-kinesis.gemspec +34 -0
  23. data/spec/outputs/kinesis_spec.rb +96 -0
  24. data/spec/spec_helper.rb +91 -0
  25. data/vendor/jar-dependencies/runtime-jars/amazon-kinesis-producer-0.12.6.jar +0 -0
  26. data/vendor/jar-dependencies/runtime-jars/aws-java-sdk-core-1.11.128.jar +0 -0
  27. data/vendor/jar-dependencies/runtime-jars/aws-java-sdk-sts-1.11.128.jar +0 -0
  28. data/vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar +0 -0
  29. data/vendor/jar-dependencies/runtime-jars/commons-io-2.4.jar +0 -0
  30. data/vendor/jar-dependencies/runtime-jars/commons-lang-2.6.jar +0 -0
  31. data/vendor/jar-dependencies/runtime-jars/commons-logging-1.2.jar +0 -0
  32. data/vendor/jar-dependencies/runtime-jars/guava-18.0.jar +0 -0
  33. data/vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar +0 -0
  34. data/vendor/jar-dependencies/runtime-jars/httpcore-4.4.4.jar +0 -0
  35. data/vendor/jar-dependencies/runtime-jars/ion-java-1.0.2.jar +0 -0
  36. data/vendor/jar-dependencies/runtime-jars/jackson-annotations-2.6.0.jar +0 -0
  37. data/vendor/jar-dependencies/runtime-jars/jackson-core-2.6.6.jar +0 -0
  38. data/vendor/jar-dependencies/runtime-jars/jackson-databind-2.6.6.jar +0 -0
  39. data/vendor/jar-dependencies/runtime-jars/jackson-dataformat-cbor-2.6.6.jar +0 -0
  40. data/vendor/jar-dependencies/runtime-jars/jaxb-api-2.4.0-b180830.0359.jar +0 -0
  41. data/vendor/jar-dependencies/runtime-jars/jmespath-java-1.11.128.jar +0 -0
  42. data/vendor/jar-dependencies/runtime-jars/joda-time-2.8.1.jar +0 -0
  43. data/vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.6.2.jar +0 -0
  44. data/vendor/jar-dependencies/runtime-jars/protobuf-java-2.6.1.jar +0 -0
  45. data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.13.jar +0 -0
  46. metadata +150 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
1
+ ---
2
+ SHA256:
3
+ metadata.gz: 5b96b2a50ecc741fc1aa6bbdb845dea9ed009d7909f4997f2f63bcd9f0c26a28
4
+ data.tar.gz: fecef040769c7b6a9a044f532f7d66b210ff703c27df75312f5cbf5373cff13c
5
+ SHA512:
6
+ metadata.gz: 88062243a4f64d87def45db846cf4988fda458b2596a0e3960a01f7267aae44b7fbb9fb9988e25e3b666b9153ea0b06abdd8ca24d3a10988957b4f9665071c7c
7
+ data.tar.gz: d175865467feca1014fa1fb5c6e120f367ceab5c7a02ca4c04b1debad311cbb6a9f422db00488262adc7939af414d13f6135ab49367982c1f3067c6bcff5fc45
data/.editorconfig ADDED
@@ -0,0 +1,11 @@
1
+ # EditorConfig is awesome: http://EditorConfig.org
2
+
3
+ root = true
4
+
5
+ [*]
6
+ end_of_line = lf
7
+ insert_final_newline = true
8
+
9
+ [*.{rb,gemspec}]
10
+ indent_style = space
11
+ indent_size = 2
data/.gemrelease ADDED
@@ -0,0 +1,3 @@
1
+ bump:
2
+ tag: true
3
+ release: false
data/.gitattributes ADDED
@@ -0,0 +1 @@
1
+ vendor/jar-dependencies/runtime-jars/*.jar filter=lfs diff=lfs merge=lfs -text
@@ -0,0 +1 @@
1
+ since-tag=v1.5.0
data/.gitignore ADDED
@@ -0,0 +1,7 @@
1
+ /.gradle/
2
+ /*.gem
3
+ .Gemfile.lock
4
+ .Gemfile.bak
5
+ /.bundle
6
+ .DS_Store
7
+ /Gemfile.lock
data/.rspec ADDED
@@ -0,0 +1,2 @@
1
+ --color
2
+ --require spec_helper
data/.ruby-gemset ADDED
@@ -0,0 +1 @@
1
+ logstash-output-kinesis
data/.ruby-version ADDED
@@ -0,0 +1 @@
1
+ jruby-9.3.4.0
data/.travis.yml ADDED
@@ -0,0 +1,18 @@
1
+ language: ruby
2
+ jdk:
3
+ - oraclejdk8
4
+ before_install:
5
+ - mkdir -p $HOME/bin
6
+ - wget https://github.com/github/git-lfs/releases/download/v1.1.2/git-lfs-linux-amd64-1.1.2.tar.gz
7
+ - tar xvfz git-lfs-linux-amd64-1.1.2.tar.gz
8
+ - mv git-lfs-1.1.2/git-lfs $HOME/bin/git-lfs
9
+ - export PATH=$PATH:$HOME/bin/
10
+ - git lfs pull
11
+ deploy:
12
+ provider: rubygems
13
+ api_key:
14
+ secure: KLegBk+GpcGl4JJvWO2lre8xG/43RUjPlWVVCmvz0hmN33+d78WMRErIgGta9FgXw6KE7VPymXiFrS8EAdY5IHe1Oxt/PuvC1/FiP/oU7IPp1UcmqOxPdiOJtqFy7DqjfNc/nvmvUYf1JKKNMBAjSFgzzccQFB418wUPb1dokkl/QgS2azP3aF9PfKWiZd4iWUVxY30TB2W1+qvjT6ix6idU7aQjTwzFIvEs6WYZuIdl0fB2oF5no8oyw6NVfaxA89Jv2d8CGVlIOp6Q/ZXzNCviM/PYkKVuCmObgG19q4v8L2M0y+2GcLEORbwmwFSLBj6rjLg6WpZXCx+QppA7AeuQZ0rvZ+Jb4Eaqu+dQTd57We1dbcGWgonlisuqkJv0cv7Qy+hupJ5mil/ofTUoHapwhTpsdVch/XsV8rV3wu012j1eeTgp4EjbBcgHYnmc6XJGHLigVAEv//OQEl7eUpM7zZbmpaYmtjJsFveVP/WS8/rAwjytSD58mcVyNzHTtjVgr58ejUc61TvZFErkPhfUjtj+xuRyJKQXmS5Y70itRDBtu3VFNoimXg2Onz3bWmUQSQ7kL3/ctvXUoT8Rmza712bF1ZRblbKLi+sPFXo2++v4wxxOH89aqjdYcnjP+tBz3KCN9jVGJ9c8nUI08Lv29dJreQJl3N1Cjnce2cA=
15
+ gem: logstash-output-kinesis
16
+ on:
17
+ tags: true
18
+ repo: samcday/logstash-output-kinesis
data/CHANGELOG.md ADDED
@@ -0,0 +1,58 @@
1
+ # Change Log
2
+
3
+ ## [5.1.0](https://github.com/samcday/logstash-output-kinesis/tree/5.1.0) (2017-09-01)
4
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v5.0.0...5.1.0)
5
+
6
+ **Closed issues:**
7
+
8
+ - Is it possible send data via a proxy server to kinesis. [\#23](https://github.com/samcday/logstash-output-kinesis/issues/23)
9
+ - logstash-output-kinesis does not work inside alpine linux [\#21](https://github.com/samcday/logstash-output-kinesis/issues/21)
10
+
11
+ **Merged pull requests:**
12
+
13
+ - Support sprintf-style stream names [\#22](https://github.com/samcday/logstash-output-kinesis/pull/22) ([sonya](https://github.com/sonya))
14
+ - Plugin causes x-pack monitoring pipeline to fail [\#20](https://github.com/samcday/logstash-output-kinesis/pull/20) ([jjensn](https://github.com/jjensn))
15
+
16
+ ## [v5.0.0](https://github.com/samcday/logstash-output-kinesis/tree/v5.0.0) (2017-06-10)
17
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v1.6.0...v5.0.0)
18
+
19
+ **Closed issues:**
20
+
21
+ - Is it compatible with Logstash 5.x? [\#17](https://github.com/samcday/logstash-output-kinesis/issues/17)
22
+ - Error Writing to Kinesis on EMR spinup [\#16](https://github.com/samcday/logstash-output-kinesis/issues/16)
23
+ - Installing kinesis output plugin on a amazon linux box... [\#14](https://github.com/samcday/logstash-output-kinesis/issues/14)
24
+ - Added to logstash-plugins? [\#13](https://github.com/samcday/logstash-output-kinesis/issues/13)
25
+ - bundle error [\#12](https://github.com/samcday/logstash-output-kinesis/issues/12)
26
+ - Support to produce UTF-8 JSON [\#11](https://github.com/samcday/logstash-output-kinesis/issues/11)
27
+ - Kinesis is too busy? [\#10](https://github.com/samcday/logstash-output-kinesis/issues/10)
28
+ - jar dependencies conflict with elasticsearch output plugin [\#9](https://github.com/samcday/logstash-output-kinesis/issues/9)
29
+ - Invalid Signature Exception [\#8](https://github.com/samcday/logstash-output-kinesis/issues/8)
30
+ - Logstash & Kinesis losing logs [\#7](https://github.com/samcday/logstash-output-kinesis/issues/7)
31
+ - Not all records make it into Kinesis unless I pass the --debug flag to logstash [\#6](https://github.com/samcday/logstash-output-kinesis/issues/6)
32
+
33
+ **Merged pull requests:**
34
+
35
+ - Support logstash 5 [\#18](https://github.com/samcday/logstash-output-kinesis/pull/18) ([gregsterin](https://github.com/gregsterin))
36
+
37
+ ## [v1.6.0](https://github.com/samcday/logstash-output-kinesis/tree/v1.6.0) (2015-12-05)
38
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v2.1.0...v1.6.0)
39
+
40
+ ## [v2.1.0](https://github.com/samcday/logstash-output-kinesis/tree/v2.1.0) (2015-12-05)
41
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v2.0.1...v2.1.0)
42
+
43
+ ## [v2.0.1](https://github.com/samcday/logstash-output-kinesis/tree/v2.0.1) (2015-11-06)
44
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v1.5.1...v2.0.1)
45
+
46
+ ## [v1.5.1](https://github.com/samcday/logstash-output-kinesis/tree/v1.5.1) (2015-11-06)
47
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v2.0.0...v1.5.1)
48
+
49
+ **Closed issues:**
50
+
51
+ - Dropping lots of messages [\#5](https://github.com/samcday/logstash-output-kinesis/issues/5)
52
+
53
+ ## [v2.0.0](https://github.com/samcday/logstash-output-kinesis/tree/v2.0.0) (2015-10-31)
54
+ [Full Changelog](https://github.com/samcday/logstash-output-kinesis/compare/v1.5.0...v2.0.0)
55
+
56
+
57
+
58
+ \* *This Change Log was automatically generated by [github_changelog_generator](https://github.com/skywinder/Github-Changelog-Generator)*
@@ -0,0 +1,46 @@
1
+ # Contributor Covenant Code of Conduct
2
+
3
+ ## Our Pledge
4
+
5
+ In the interest of fostering an open and welcoming environment, we as contributors and maintainers pledge to making participation in our project and our community a harassment-free experience for everyone, regardless of age, body size, disability, ethnicity, gender identity and expression, level of experience, nationality, personal appearance, race, religion, or sexual identity and orientation.
6
+
7
+ ## Our Standards
8
+
9
+ Examples of behavior that contributes to creating a positive environment include:
10
+
11
+ * Using welcoming and inclusive language
12
+ * Being respectful of differing viewpoints and experiences
13
+ * Gracefully accepting constructive criticism
14
+ * Focusing on what is best for the community
15
+ * Showing empathy towards other community members
16
+
17
+ Examples of unacceptable behavior by participants include:
18
+
19
+ * The use of sexualized language or imagery and unwelcome sexual attention or advances
20
+ * Trolling, insulting/derogatory comments, and personal or political attacks
21
+ * Public or private harassment
22
+ * Publishing others' private information, such as a physical or electronic address, without explicit permission
23
+ * Other conduct which could reasonably be considered inappropriate in a professional setting
24
+
25
+ ## Our Responsibilities
26
+
27
+ Project maintainers are responsible for clarifying the standards of acceptable behavior and are expected to take appropriate and fair corrective action in response to any instances of unacceptable behavior.
28
+
29
+ Project maintainers have the right and responsibility to remove, edit, or reject comments, commits, code, wiki edits, issues, and other contributions that are not aligned to this Code of Conduct, or to ban temporarily or permanently any contributor for other behaviors that they deem inappropriate, threatening, offensive, or harmful.
30
+
31
+ ## Scope
32
+
33
+ This Code of Conduct applies both within project spaces and in public spaces when an individual is representing the project or its community. Examples of representing a project or community include using an official project e-mail address, posting via an official social media account, or acting as an appointed representative at an online or offline event. Representation of a project may be further defined and clarified by project maintainers.
34
+
35
+ ## Enforcement
36
+
37
+ Instances of abusive, harassing, or otherwise unacceptable behavior may be reported by contacting the project team at me@samcday.com. The project team will review and investigate all complaints, and will respond in a way that it deems appropriate to the circumstances. The project team is obligated to maintain confidentiality with regard to the reporter of an incident. Further details of specific enforcement policies may be posted separately.
38
+
39
+ Project maintainers who do not follow or enforce the Code of Conduct in good faith may face temporary or permanent repercussions as determined by other members of the project's leadership.
40
+
41
+ ## Attribution
42
+
43
+ This Code of Conduct is adapted from the [Contributor Covenant][homepage], version 1.4, available at [http://contributor-covenant.org/version/1/4][version]
44
+
45
+ [homepage]: http://contributor-covenant.org
46
+ [version]: http://contributor-covenant.org/version/1/4/
data/CONTRIBUTING.md ADDED
@@ -0,0 +1,36 @@
1
+ ## TL;DR
2
+
3
+ Raising an issue is great, raising a PR is better, raising a PR with tests is *bestest*.
4
+
5
+ ## Developing
6
+
7
+ You'll need [Git LFS](https://git-lfs.github.com/) to properly clone this repo.
8
+
9
+ Ensure you have JRuby 9.1.x installed. [rvm](https://rvm.io/) is your friend :)
10
+
11
+ ```sh
12
+ rvm use --install .
13
+ gem install bundler && bundle install
14
+ bundle exec rake
15
+ ```
16
+
17
+ ### Running tests
18
+
19
+ ```
20
+ rake
21
+ ```
22
+
23
+ ### Building gem
24
+
25
+ ```
26
+ gem build logstash-output-kinesis
27
+ ```
28
+
29
+ ### Testing locally built gem
30
+ ```
31
+ bin/logstash-plugin install --local /path/to/logstash-output-kinesis-5.1.1-java.gem
32
+ ```
33
+
34
+ ### Updating KPL
35
+
36
+ Change the dependency version in `build.gradle`, and then run `gradle copylibs`. Make sure to check in all the updated JARs!
data/Gemfile ADDED
@@ -0,0 +1,2 @@
1
+ source 'https://rubygems.org'
2
+ gemspec
data/LICENSE ADDED
@@ -0,0 +1,13 @@
1
+ Copyright (c) 2012–2015 Sam Day <http://www.samcday.com.au>
2
+
3
+ Licensed under the Apache License, Version 2.0 (the "License");
4
+ you may not use this file except in compliance with the License.
5
+ You may obtain a copy of the License at
6
+
7
+ http://www.apache.org/licenses/LICENSE-2.0
8
+
9
+ Unless required by applicable law or agreed to in writing, software
10
+ distributed under the License is distributed on an "AS IS" BASIS,
11
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
12
+ See the License for the specific language governing permissions and
13
+ limitations under the License.
data/README.md ADDED
@@ -0,0 +1,230 @@
1
+ # Kinesis Output Plugin
2
+
3
+ [![Build Status][badge-travis]][travis]
4
+ [![Gem info][badge-gem]][rubygems]
5
+
6
+ This is a plugin for [Logstash](https://github.com/elasticsearch/logstash). It will send log records to a [Kinesis stream](https://aws.amazon.com/kinesis/), using the [Kinesis Producer Library (KPL)](https://docs.aws.amazon.com/kinesis/latest/dev/developing-producers-with-kpl.html).
7
+
8
+ **This version is intended for use with Logstash 5.x.** For plugin versions compatible with older versions of Logstash:
9
+
10
+ * [Logstash 1.5.x](https://github.com/samcday/logstash-output-kinesis/tree/1.5)
11
+ * [Logstash 2.x](https://github.com/samcday/logstash-output-kinesis/tree/2.x)
12
+
13
+
14
+ ## Configuration
15
+
16
+ Minimum required configuration to get this plugin chugging along:
17
+
18
+ ```nginx
19
+ output {
20
+ kinesis {
21
+ stream_name => "logs-stream"
22
+ region => "ap-southeast-2"
23
+ }
24
+ }
25
+ ```
26
+
27
+ This plugin accepts a wide range of configuration options, most of which come from the underlying KPL library itself. [View the full list of KPL configuration options here.][kpldoc]
28
+
29
+ Please note that configuration options are snake_cased instead of camelCased. So, where [KinesisProducerConfiguration][kpldoc] offers a `setMetricsLevel` option, this plugin accepts a `metrics_level` option.
30
+
31
+ ### Dynamic stream name
32
+
33
+ You can dictate the name of the stream to send a record to, based on data in the record itself.
34
+
35
+ ```nginx
36
+ output {
37
+ kinesis {
38
+ stream_name => "%{myfield}-%{myotherfield}"
39
+ }
40
+ }
41
+ ```
42
+
43
+ ### Metrics
44
+
45
+ The underlying KPL library defaults to sending CloudWatch metrics to give insight into what it's actually doing at runtime. It's highly recommended you ensure these metrics are flowing through, and use them to monitor the health of your log shipping.
46
+
47
+ If for some reason you want to switch them off, you can easily do so:
48
+
49
+ ```nginx
50
+ output {
51
+ kinesis {
52
+ # ...
53
+
54
+ metrics_level => "none"
55
+ }
56
+ }
57
+ ```
58
+
59
+ If you choose to keep metrics enabled, ensure the AWS credentials you provide to this plugin are able to write to Kinesis *and* write to CloudWatch.
60
+
61
+ ### Authentication
62
+
63
+ By default, this plugin will use the AWS SDK [DefaultAWSCredentialsProviderChain](https://docs.aws.amazon.com/AWSJavaSDK/latest/javadoc/com/amazonaws/auth/DefaultAWSCredentialsProviderChain.html) to obtain credentials for communication with the Kinesis stream (and CloudWatch, if metrics are enabled). The following places will be checked for credentials:
64
+
65
+ * `AWS_ACCESS_KEY_ID` / `AWS_SECRET_KEY` environment variables available to the Logstash process
66
+ * `~/.aws/credentials` credentials file
67
+ * Instance profile (if Logstash is running in an EC2 instance)
68
+
69
+ If you want to provide credentials directly in the config file, you can do so:
70
+
71
+ ```nginx
72
+ output {
73
+ kinesis {
74
+ # ...
75
+
76
+ access_key => "AKIAIDFAKECREDENTIAL"
77
+ secret_key => "KX0ofakeLcredentialsGrightJherepOlolPkQk"
78
+
79
+ # You can provide specific credentials for CloudWatch metrics:
80
+ metrics_access_key => "AKIAIDFAKECREDENTIAL"
81
+ metrics_secret_key => "KX0ofakeLcredentialsGrightJherepOlolPkQk"
82
+ }
83
+ }
84
+ ```
85
+
86
+ If `access_key` and `secret_key` are provided, they will be used for communicating with Kinesis *and* CloudWatch. If `metrics_access_key` and `metrics_secret_key` are provided, they will be used for communication with CloudWatch. If only the metrics credentials were provided, Kinesis would use the default credentials provider (explained above) and CloudWatch would use the specific credentials. Confused? Good!
87
+
88
+ #### Using STS
89
+
90
+ You can also configure this plugin to use [AWS STS](https://docs.aws.amazon.com/STS/latest/APIReference/Welcome.html) to "assume" a role that has access to Kinesis and CloudWatch. If you use this in combination with EC2 instance profiles (which the defaults credentials provider explained above uses) then you can actually configure your Logstash to write to Kinesis and CloudWatch without any hardcoded credentials.
91
+
92
+ ```nginx
93
+ output {
94
+ kinesis {
95
+ # ...
96
+
97
+ role_arn => "arn:aws:iam::123456789:role/my-kinesis-producer-role"
98
+
99
+ # You can also provide a specific role to assume for CloudWatch metrics:
100
+ metrics_role_arn => "arn:aws:iam::123456789:role/my-metrics-role"
101
+ }
102
+ }
103
+ ```
104
+
105
+ You can combine `role_arn` / `metrics_role_arn` with the explicit AWS credentials config explained earlier, too.
106
+
107
+ All this stuff can be mixed too - if you wanted to use hardcoded credentials for Kinesis, but then assume a role via STS for accessing CloudWatch, you can do that. Vice versa would work too - assume a role for accessing Kinesis and then providing hardcoded credentials for CloudWatch. Make things as arbitrarily complicated for yourself as you like ;)
108
+
109
+ ### Building a partition key
110
+
111
+ Kinesis demands a [partition key](https://docs.aws.amazon.com/kinesis/latest/dev/key-concepts.html#partition-key) be provided for each record. By default, this plugin will provide a very boring partition key of `-`. However, you can configure it to compute a partition key from fields in your log events.
112
+
113
+ ```nginx
114
+ output {
115
+ kinesis {
116
+ # ...
117
+ event_partition_keys => ["[field1]", "[field2]"]
118
+ }
119
+ }
120
+ ```
121
+
122
+ #### Randomised partition keys
123
+
124
+ If you don't care about the ordering of your logs in the Kinesis stream, you might want to use a random partition key. This way, your log stream will be more or less uniformly spread across all available shards in the Kinesis stream.
125
+
126
+ ```nginx
127
+ output {
128
+ kinesis {
129
+ randomized_partition_key => true
130
+ }
131
+ }
132
+ ```
133
+
134
+ ### Record Aggregation
135
+
136
+ The [Amazon KPL library can aggregate](https://docs.aws.amazon.com/kinesis/latest/dev/kinesis-kpl-concepts.html#d0e3423) your records when writing to the Kinesis stream. **This behaviour is configured to be enabled by default.**
137
+
138
+ If you are using an older version of the Amazon KCL library to consume your records, or not using KCL at all, your consumer application(s) will probably not behave correctly. See [the matrix on this page](https://docs.aws.amazon.com/kinesis/latest/dev/kinesis-kpl-integration.html) for more info, and read [more about de-aggregating records here](https://docs.aws.amazon.com/kinesis/latest/dev/kinesis-kpl-consumer-deaggregation.html).
139
+
140
+ If you wish to simply disable record aggregation, that's easy:
141
+
142
+ ```nginx
143
+ output {
144
+ kinesis {
145
+ aggregation_enabled => false
146
+ }
147
+ }
148
+ ```
149
+
150
+ ### Backpressure
151
+
152
+ This plugin will enforce backpressure if the records passing through Logstash's pipeline are not making it up to Kinesis fast enough. When this happens, Logstash will stop accepting records for input or filtering, and a warning will be emitted in the Logstash logs.
153
+
154
+ By default, the threshold for blocking is 1000 pending records. If you want to throw more memory / CPU cycles at buffering lots of stuff before it makes it to Kinesis, you can control the high-watermark:
155
+
156
+ ```nginx
157
+ output {
158
+ kinesis {
159
+ max_pending_records => 10000
160
+ }
161
+ }
162
+ ```
163
+
164
+ ### Logging configuration
165
+
166
+ The underlying KPL uses SLF4J for logging and binding for Log4j (used by Logstash) is included in the plugin package. Thus the logging levels can be controlled with the `log4j2.properties` file provided by Logstash.
167
+
168
+ As the KPL might be too noisy with `INFO` level, you might want to dial it down by adding the following configuration in `log4j2.properties`:
169
+
170
+ ```
171
+ ...
172
+ logger.kinesis.name = com.amazonaws.services.kinesis
173
+ logger.kinesis.level = WARN
174
+ logger.kinesis.additivity = false
175
+ logger.kinesis.appenderRef.console.ref = console
176
+ ...
177
+
178
+ ```
179
+
180
+ ## Known Issues
181
+
182
+ ### Alpine Linux is not supported
183
+
184
+ This Logstash plugin uses the KPL daemon under the covers. This daemon is linked against, and specifically requires, glibc. See awslabs/amazon-kinesis-producer#86.
185
+
186
+ ### Noisy shutdown
187
+
188
+ During shutdown of Logstash, you might get noisy warnings like this:
189
+
190
+ ```
191
+ [pool-1-thread-6] WARN com.amazonaws.services.kinesis.producer.Daemon - Exception during updateCredentials
192
+ java.lang.InterruptedException: sleep interrupted
193
+ at java.lang.Thread.sleep(Native Method)
194
+ at com.amazonaws.services.kinesis.producer.Daemon$5.run(Daemon.java:316)
195
+ at java.util.concurrent.Executors$RunnableAdapter.call(Executors.java:471)
196
+ at java.util.concurrent.FutureTask$Sync.innerRun(FutureTask.java:334)
197
+ at java.util.concurrent.FutureTask.run(FutureTask.java:166)
198
+ at java.util.concurrent.ThreadPoolExecutor.runWorker(ThreadPoolExecutor.java:1145)
199
+ at java.util.concurrent.ThreadPoolExecutor$Worker.run(ThreadPoolExecutor.java:615)
200
+ at java.lang.Thread.run(Thread.java:724)
201
+ ```
202
+
203
+ This is caused by awslabs/amazon-kinesis-producer#10.
204
+
205
+ ### Noisy warnings about `Error during socket read`
206
+
207
+ While your Logstash instance is running, you may occasionally get a warning on stderr that looks like this:
208
+
209
+ ```
210
+ [2015-10-20 06:31:08.441640] [0x00007f36c9402700] [error] [io_service_socket.h:229] Error during socket read: End of file; 0 bytes read so far (kinesis.us-west-1.amazonaws.com:443)
211
+ ```
212
+
213
+ This is being tracked in awslabs/amazon-kinesis-producer#17. This log message seems to just be noise - your logs should still be delivering to Kinesis fine (but of course, you should independently verify this!).
214
+
215
+
216
+ ## Contributions
217
+
218
+ Are more than welcome. See [CONTRIBUTING.md](CONTRIBUTING.md)
219
+
220
+
221
+ ## License
222
+
223
+ [Apache License 2.0](LICENSE)
224
+
225
+ [travis]: https://travis-ci.org/samcday/logstash-output-kinesis
226
+ [rubygems]: https://rubygems.org/gems/logstash-output-kinesis
227
+ [kpldoc]: https://github.com/awslabs/amazon-kinesis-producer/blob/v0.12.5/java/amazon-kinesis-producer/src/main/java/com/amazonaws/services/kinesis/producer/KinesisProducerConfiguration.java#L38
228
+
229
+ [badge-travis]: https://img.shields.io/travis/samcday/logstash-output-kinesis.svg?style=flat-square
230
+ [badge-gem]: https://img.shields.io/gem/v/logstash-output-kinesis.svg?style=flat-square
data/Rakefile ADDED
@@ -0,0 +1,8 @@
1
+ require "logstash/devutils/rake"
2
+ require "rspec/core/rake_task"
3
+
4
+ RSpec::Core::RakeTask.new(:spec) do |t|
5
+ t.pattern = Dir.glob('spec/**/*_spec.rb')
6
+ end
7
+
8
+ task :default => :spec
data/build.gradle ADDED
@@ -0,0 +1,24 @@
1
+ apply plugin: 'java'
2
+
3
+ repositories {
4
+ mavenLocal()
5
+ mavenCentral()
6
+ }
7
+
8
+ configurations {
9
+ customConfig.extendsFrom implementation
10
+ }
11
+
12
+ dependencies {
13
+ implementation 'com.amazonaws:amazon-kinesis-producer:0.14.12'
14
+ implementation 'com.amazonaws:aws-java-sdk-sts:1.12.200'
15
+ implementation ('org.apache.logging.log4j:log4j-slf4j-impl:2.6.2' ) {
16
+ exclude group: 'org.apache.logging.log4j', module: 'log4j-api'
17
+ exclude group: 'org.slf4j', module: 'slf4j-api'
18
+ }
19
+ }
20
+
21
+ task copyLibs(type: Copy) {
22
+ from configurations.customConfig
23
+ into 'vendor/jar-dependencies/runtime-jars'
24
+ }
@@ -0,0 +1,228 @@
1
+ # encoding: utf-8
2
+
3
+ require "java"
4
+ require "logstash/outputs/base"
5
+ require "logstash/namespace"
6
+ require "securerandom"
7
+ require "logstash-output-kinesis_jars"
8
+
9
+ # Sends log events to a Kinesis stream. This output plugin uses the official Amazon KPL.
10
+ # Most of the configuration options in this plugin are simply passed on to
11
+ # link:https://github.com/awslabs/amazon-kinesis-producer/blob/v0.12.5/java/amazon-kinesis-producer/src/main/java/com/amazonaws/services/kinesis/producer/KinesisProducerConfiguration.java#L38[KinesisProducerConfiguration]
12
+ class LogStash::Outputs::Kinesis < LogStash::Outputs::Base
13
+ config_name "kinesis"
14
+
15
+ default :codec, 'json'
16
+
17
+ # The name of the stream to send data to.
18
+ config :stream_name, :validate => :string, :required => true
19
+ # A list of event data keys to use when constructing a partition key
20
+ config :event_partition_keys, :validate => :array, :default => []
21
+ # If true, a random partition key will be assigned to each log record
22
+ config :randomized_partition_key, :validate => :boolean, :default => false
23
+ # If the number of records pending being written to Kinesis exceeds this number, then block
24
+ # Logstash processing until they're all written.
25
+ config :max_pending_records, :validate => :number, :default => 1000
26
+
27
+ # An AWS access key to use for authentication to Kinesis and CloudWatch
28
+ config :access_key, :validate => :string
29
+ # An AWS secret key to use for authentication to Kinesis and CloudWatch
30
+ config :secret_key, :validate => :string
31
+ # If provided, STS will be used to assume this role and use it to authenticate to Kinesis and CloudWatch
32
+ config :role_arn, :validate => :string
33
+
34
+ # If provided, use this AWS access key for authentication to CloudWatch
35
+ config :metrics_access_key, :validate => :string
36
+ # If provided, use this AWS secret key for authentication to CloudWatch
37
+ config :metrics_secret_key, :validate => :string
38
+ # If provided, STS will be used to assume this role and use it to authenticate to CloudWatch
39
+ config :metrics_role_arn, :validate => :string
40
+
41
+ config :sts_proxy_host, :validate => :string
42
+ config :sts_proxy_port, :validate => :number
43
+
44
+ config :aggregation_enabled, :validate => :boolean, :default => true
45
+ config :aggregation_max_count, :validate => :number, :default => 4294967295
46
+ config :aggregation_max_size, :validate => :number, :default => 51200
47
+ config :cloudwatch_endpoint, :validate => :string, :default => nil
48
+ config :cloudwatch_port, :validate => :number, :default => 443
49
+ config :collection_max_count, :validate => :number, :default => 500
50
+ config :collection_max_size, :validate => :number, :default => 5242880
51
+ config :connect_timeout, :validate => :number, :default => 6000
52
+ config :credentials_refresh_delay, :validate => :number, :default => 5000
53
+ config :enable_core_dumps, :validate => :boolean, :default => false
54
+ config :fail_if_throttled, :validate => :boolean, :default => false
55
+ config :kinesis_endpoint, :validate => :string, :default => nil
56
+ config :kinesis_port, :validate => :number, :default => 443
57
+ config :log_level, :validate => ["info", "warning", "error"], :default => "info"
58
+ config :max_connections, :validate => :number, :default => 4
59
+ config :metrics_granularity, :validate => ["global", "stream", "shard"], :default => "shard"
60
+ config :metrics_level, :validate => ["none", "summary", "detailed"], :default => "detailed"
61
+ config :metrics_namespace, :validate => :string, :default => "KinesisProducerLibrary"
62
+ config :metrics_upload_delay, :validate => :number, :default => 60000
63
+ config :min_connections, :validate => :number, :default => 1
64
+ config :native_executable, :validate => :string, :default => nil
65
+ config :rate_limit, :validate => :number, :default => 150
66
+ config :record_max_buffered_time, :validate => :number, :default => 100
67
+ config :record_ttl, :validate => :number, :default => 30000
68
+ config :region, :validate => :string, :required => true
69
+ config :request_timeout, :validate => :number, :default => 6000
70
+ config :temp_directory, :validate => :string, :default => nil
71
+ config :verify_certificate, :validate => :boolean, :default => true
72
+
73
+ KPL = com.amazonaws.services.kinesis.producer
74
+ AWSAuth = com.amazonaws.auth
75
+ ByteBuffer = java.nio.ByteBuffer
76
+
77
+ public
78
+ def register
79
+ @metrics_access_key ||= @access_key
80
+ @metrics_secret_key ||= @secret_key
81
+
82
+ @producer = KPL.KinesisProducer::new(create_kpl_config)
83
+ @codec.on_event(&method(:send_record))
84
+ end
85
+
86
+ public
87
+ def receive(event)
88
+ return unless output?(event)
89
+
90
+ if @randomized_partition_key
91
+ event.set("[@metadata][partition_key]", SecureRandom.uuid)
92
+ else
93
+ # Haha - gawd. If I don't put an empty string in the array, then calling .join()
94
+ # on it later will result in a US-ASCII string if the array is empty. Ruby is awesome.
95
+ partition_key_parts = [""]
96
+
97
+ @event_partition_keys.each do |partition_key_name|
98
+ if not event.get(partition_key_name).nil? and event.get(partition_key_name).length > 0
99
+ partition_key_parts << event.get(partition_key_name).to_s
100
+ break
101
+ end
102
+ end
103
+
104
+ event.set("[@metadata][partition_key]", (partition_key_parts * "-").to_s[/.+/m] || "-")
105
+ end
106
+
107
+ begin
108
+ @codec.encode(event)
109
+ rescue => e
110
+ @logger.warn("Error encoding event", :exception => e, :event => event)
111
+ end
112
+ end
113
+
114
+ public
115
+ def close
116
+ @producer.flushSync()
117
+ @producer.destroy()
118
+ end
119
+
120
+ def create_kpl_config
121
+ config = KPL.KinesisProducerConfiguration::new()
122
+
123
+ credentials_provider = create_credentials_provider
124
+ metrics_credentials_provider = create_metrics_credentials_provider
125
+
126
+ config.setAggregationEnabled(@aggregation_enabled)
127
+ config.setAggregationMaxCount(@aggregation_max_count)
128
+ config.setAggregationMaxSize(@aggregation_max_size)
129
+ config.setCloudwatchEndpoint(@cloudwatch_endpoint) if !@cloudwatch_endpoint.nil?
130
+ config.setCloudwatchPort(@cloudwatch_port)
131
+ config.setCollectionMaxCount(@collection_max_count)
132
+ config.setCollectionMaxSize(@collection_max_size)
133
+ config.setConnectTimeout(@connect_timeout)
134
+ config.setCredentialsProvider(credentials_provider)
135
+ config.setCredentialsRefreshDelay(@credentials_refresh_delay)
136
+ config.setEnableCoreDumps(@enable_core_dumps)
137
+ config.setFailIfThrottled(@fail_if_throttled)
138
+ config.setLogLevel(@log_level)
139
+ config.setKinesisEndpoint(@kinesis_endpoint) if !@kinesis_endpoint.nil?
140
+ config.setKinesisPort(@kinesis_port)
141
+ config.setMaxConnections(@max_connections)
142
+ config.setMetricsCredentialsProvider(metrics_credentials_provider)
143
+ config.setMetricsGranularity(@metrics_granularity)
144
+ config.setMetricsLevel(@metrics_level)
145
+ config.setMetricsNamespace(@metrics_namespace)
146
+ config.setMetricsUploadDelay(@metrics_upload_delay)
147
+ config.setMinConnections(@min_connections)
148
+ config.setNativeExecutable(@native_executable) if !@native_executable.nil?
149
+ config.setRateLimit(@rate_limit)
150
+ config.setRecordMaxBufferedTime(@record_max_buffered_time)
151
+ config.setRecordTtl(@record_ttl)
152
+ config.setRegion(@region)
153
+ config.setRequestTimeout(@request_timeout)
154
+ config.setTempDirectory(@temp_directory) if !@temp_directory.nil?
155
+ config.setVerifyCertificate(@verify_certificate)
156
+
157
+ config
158
+ end
159
+
160
+ def create_sts_provider(base_provider, arn)
161
+ client_config = com.amazonaws.ClientConfiguration.new()
162
+ if @sts_proxy_host
163
+ client_config.setProxyHost(@sts_proxy_host)
164
+ end
165
+ if @sts_proxy_port
166
+ client_config.setProxyPort(@sts_proxy_port)
167
+ end
168
+ provider = AWSAuth.STSAssumeRoleSessionCredentialsProvider.new(
169
+ base_provider, arn, "logstash-output-kinesis", client_config)
170
+ provider
171
+ end
172
+
173
+ def create_credentials_provider
174
+ provider = AWSAuth.DefaultAWSCredentialsProviderChain.new()
175
+ if @access_key and @secret_key
176
+ provider = BasicKinesisCredentialsProvider.new(AWSAuth.BasicAWSCredentials.new(@access_key, @secret_key))
177
+ end
178
+ if @role_arn
179
+ provider = create_sts_provider(provider, @role_arn)
180
+ end
181
+ provider
182
+ end
183
+
184
+ def create_metrics_credentials_provider
185
+ provider = AWSAuth.DefaultAWSCredentialsProviderChain.new()
186
+ if @metrics_access_key and @metrics_secret_key
187
+ provider = BasicKinesisCredentialsProvider.new(AWSAuth.BasicAWSCredentials.new(@metrics_access_key, @metrics_secret_key))
188
+ end
189
+ if @metrics_role_arn
190
+ provider = create_sts_provider(provider, @metrics_role_arn)
191
+ end
192
+ provider
193
+ end
194
+
195
+ def send_record(event, payload)
196
+ begin
197
+ event_blob = ByteBuffer::wrap(payload.to_java_bytes)
198
+ @producer.addUserRecord(event.sprintf(@stream_name), event.get("[@metadata][partition_key]"), event_blob)
199
+ rescue => e
200
+ @logger.warn("Error writing event to Kinesis", :exception => e)
201
+ end
202
+
203
+ num = @producer.getOutstandingRecordsCount()
204
+ if num > @max_pending_records
205
+ @logger.warn("Kinesis is too busy - blocking until things have cleared up")
206
+ @producer.flushSync()
207
+ @logger.info("Okay - I've stopped blocking now")
208
+ end
209
+ end
210
+ end
211
+
212
+ class BasicKinesisCredentialsProvider
213
+ java_implements 'com.amazonaws.auth.AWSCredentialsProvider'
214
+
215
+ def initialize(credentials)
216
+ @credentials = credentials
217
+ end
218
+
219
+ java_signature 'com.amazonaws.auth.AWSCredentials getCredentials()'
220
+ def getCredentials
221
+ @credentials
222
+ end
223
+
224
+ java_signature 'void refresh()'
225
+ def refresh
226
+ # Noop.
227
+ end
228
+ end
@@ -0,0 +1,3 @@
1
+ module LogstashOutputKinesis
2
+ VERSION = "5.1.3"
3
+ end
@@ -0,0 +1,5 @@
1
+ # encoding: utf-8
2
+ require 'logstash/environment'
3
+
4
+ root_dir = File.expand_path(File.join(File.dirname(__FILE__), ".."))
5
+ LogStash::Environment.load_runtime_jars! File.join(root_dir, "vendor")
@@ -0,0 +1,34 @@
1
+ lib = File.expand_path("../lib", __FILE__)
2
+ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
3
+ require "logstash-output-kinesis/version"
4
+
5
+ Gem::Specification.new do |s|
6
+ s.name = "logstash-output-kinesis-jdk11"
7
+ s.version = LogstashOutputKinesis::VERSION
8
+ s.licenses = ["Apache License (2.0)"]
9
+ s.summary = "This output plugin sends records to Kinesis using the Kinesis Producer Library (KPL)"
10
+ s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install logstash-output-kinesis. This gem is not a stand-alone program"
11
+ s.authors = ["Sam Day"]
12
+ s.email = "me@samcday.com.au"
13
+ s.homepage = "https://www.github.com/samcday/logstash-output-kinesis"
14
+ s.require_paths = ["lib"]
15
+
16
+ # Files
17
+ s.files = `git ls-files`.split($\)
18
+ # Tests
19
+ s.test_files = s.files.grep(%r{^(test|spec|features)/})
20
+
21
+ # Special flag to let us know this is actually a logstash plugin
22
+ s.metadata = {
23
+ "logstash_plugin" => "true",
24
+ "logstash_group" => "output"
25
+ }
26
+
27
+ s.platform = "java"
28
+
29
+ # Gem dependencies
30
+ s.add_runtime_dependency "logstash-core", ">= 5.0.0"
31
+ s.add_runtime_dependency "logstash-codec-plain", "< 6.0.0"
32
+ s.add_runtime_dependency "logstash-codec-json", "< 6.0.0"
33
+ s.add_development_dependency "logstash-devutils"
34
+ end
@@ -0,0 +1,96 @@
1
+ require "logstash/devutils/rspec/spec_helper"
2
+ require "logstash/outputs/kinesis"
3
+ require "logstash/codecs/plain"
4
+ require "logstash/event"
5
+ require "json"
6
+
7
+ describe LogStash::Outputs::Kinesis do
8
+ let(:config) {{
9
+ "stream_name" => "test",
10
+ "region" => "ap-southeast-2",
11
+ "metrics_level" => "none"
12
+ }}
13
+ let(:sample_event) {
14
+ LogStash::Event.new({
15
+ "message" => "hello",
16
+ 'stream_name' => 'my_stream',
17
+ "field1" => "foo",
18
+ "field2" => "bar"
19
+ })
20
+ }
21
+
22
+ KPL = com.amazonaws.services.kinesis.producer
23
+
24
+ context 'when initializing' do
25
+ it "should register" do
26
+ output = LogStash::Plugin.lookup("output", "kinesis").new(config)
27
+ expect {output.register}.to_not raise_error
28
+ end
29
+
30
+ it 'should populate config with default values' do
31
+ output = LogStash::Outputs::Kinesis.new(config)
32
+ insist { output.randomized_partition_key } == false
33
+ insist { output.event_partition_keys } == []
34
+ end
35
+ end
36
+
37
+ context "when receiving message" do
38
+ it "sends record to Kinesis" do
39
+ expect_any_instance_of(KPL::KinesisProducer).to receive(:addUserRecord)
40
+
41
+ output = LogStash::Outputs::Kinesis.new (config)
42
+ output.register
43
+ output.receive(sample_event)
44
+ output.close
45
+ end
46
+
47
+ it "should support Event#sprintf placeholders in stream_name" do
48
+ expect_any_instance_of(KPL::KinesisProducer).to receive(:addUserRecord)
49
+ .with("my_stream", anything, anything)
50
+
51
+ output = LogStash::Outputs::Kinesis.new(config.merge({
52
+ "stream_name" => "%{stream_name}",
53
+ }))
54
+ output.register
55
+ output.receive(sample_event)
56
+ output.close
57
+ end
58
+
59
+ it "should support blank partition keys" do
60
+ expect_any_instance_of(KPL::KinesisProducer).to receive(:addUserRecord)
61
+ .with(anything, "-", anything)
62
+
63
+ output = LogStash::Outputs::Kinesis.new(config)
64
+ output.register
65
+ output.receive(sample_event)
66
+ output.close
67
+ end
68
+
69
+ it "should support randomized partition keys" do
70
+ expect_any_instance_of(KPL::KinesisProducer).to receive(:addUserRecord)
71
+ .with(anything, /[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+-[0-9a-f]+/, anything)
72
+
73
+ output = LogStash::Outputs::Kinesis.new(config.merge({
74
+ "randomized_partition_key" => true
75
+ }))
76
+ output.register
77
+ output.receive(sample_event)
78
+ output.close
79
+ end
80
+
81
+ it "should support fixed partition keys" do
82
+ # the partition key ends up being an empty string plus the first field
83
+ # we choose joined by a hyphen. this is a holdover from earlier versions
84
+ expect_any_instance_of(KPL::KinesisProducer).to receive(:addUserRecord)
85
+ .with(anything, "-foo", anything)
86
+
87
+ output = LogStash::Outputs::Kinesis.new(config.merge({
88
+ "event_partition_keys" => ["[field1]", "[field2]"]
89
+ }))
90
+ output.register
91
+ output.receive(sample_event)
92
+ output.close
93
+ end
94
+ end
95
+
96
+ end
@@ -0,0 +1,91 @@
1
+ # This file was generated by the `rspec --init` command. Conventionally, all
2
+ # specs live under a `spec` directory, which RSpec adds to the `$LOAD_PATH`.
3
+ # The generated `.rspec` file contains `--require spec_helper` which will cause this
4
+ # file to always be loaded, without a need to explicitly require it in any files.
5
+ #
6
+ # Given that it is always loaded, you are encouraged to keep this file as
7
+ # light-weight as possible. Requiring heavyweight dependencies from this file
8
+ # will add to the boot time of your test suite on EVERY test run, even for an
9
+ # individual file that may not need all of that loaded. Instead, consider making
10
+ # a separate helper file that requires the additional dependencies and performs
11
+ # the additional setup, and require it from the spec files that actually need it.
12
+ #
13
+ # The `.rspec` file also contains a few flags that are not defaults but that
14
+ # users commonly want.
15
+ #
16
+ # See http://rubydoc.info/gems/rspec-core/RSpec/Core/Configuration
17
+ RSpec.configure do |config|
18
+ # rspec-expectations config goes here. You can use an alternate
19
+ # assertion/expectation library such as wrong or the stdlib/minitest
20
+ # assertions if you prefer.
21
+ config.expect_with :rspec do |expectations|
22
+ # This option will default to `true` in RSpec 4. It makes the `description`
23
+ # and `failure_message` of custom matchers include text for helper methods
24
+ # defined using `chain`, e.g.:
25
+ # be_bigger_than(2).and_smaller_than(4).description
26
+ # # => "be bigger than 2 and smaller than 4"
27
+ # ...rather than:
28
+ # # => "be bigger than 2"
29
+ expectations.include_chain_clauses_in_custom_matcher_descriptions = true
30
+ end
31
+
32
+ # rspec-mocks config goes here. You can use an alternate test double
33
+ # library (such as bogus or mocha) by changing the `mock_with` option here.
34
+ config.mock_with :rspec do |mocks|
35
+ # Prevents you from mocking or stubbing a method that does not exist on
36
+ # a real object. This is generally recommended, and will default to
37
+ # `true` in RSpec 4.
38
+
39
+ # ... except this breaks mocking on a Java class it seems.
40
+ mocks.verify_partial_doubles = false
41
+ end
42
+
43
+ # The settings below are suggested to provide a good initial experience
44
+ # with RSpec, but feel free to customize to your heart's content.
45
+ =begin
46
+ # These two settings work together to allow you to limit a spec run
47
+ # to individual examples or groups you care about by tagging them with
48
+ # `:focus` metadata. When nothing is tagged with `:focus`, all examples
49
+ # get run.
50
+ config.filter_run :focus
51
+ config.run_all_when_everything_filtered = true
52
+
53
+ # Limits the available syntax to the non-monkey patched syntax that is recommended.
54
+ # For more details, see:
55
+ # - http://myronmars.to/n/dev-blog/2012/06/rspecs-new-expectation-syntax
56
+ # - http://teaisaweso.me/blog/2013/05/27/rspecs-new-message-expectation-syntax/
57
+ # - http://myronmars.to/n/dev-blog/2014/05/notable-changes-in-rspec-3#new__config_option_to_disable_rspeccore_monkey_patching
58
+ config.disable_monkey_patching!
59
+
60
+ # This setting enables warnings. It's recommended, but in some cases may
61
+ # be too noisy due to issues in dependencies.
62
+ config.warnings = true
63
+
64
+ # Many RSpec users commonly either run the entire suite or an individual
65
+ # file, and it's useful to allow more verbose output when running an
66
+ # individual spec file.
67
+ if config.files_to_run.one?
68
+ # Use the documentation formatter for detailed output,
69
+ # unless a formatter has already been configured
70
+ # (e.g. via a command-line flag).
71
+ config.default_formatter = 'doc'
72
+ end
73
+
74
+ # Print the 10 slowest examples and example groups at the
75
+ # end of the spec run, to help surface which specs are running
76
+ # particularly slow.
77
+ config.profile_examples = 10
78
+
79
+ # Run specs in random order to surface order dependencies. If you find an
80
+ # order dependency and want to debug it, you can fix the order by providing
81
+ # the seed, which is printed after each run.
82
+ # --seed 1234
83
+ config.order = :random
84
+
85
+ # Seed global randomization in this process using the `--seed` CLI option.
86
+ # Setting this allows you to use `--seed` to deterministically reproduce
87
+ # test failures related to randomization by passing the same `--seed` value
88
+ # as the one that triggered the failure.
89
+ Kernel.srand config.seed
90
+ =end
91
+ end
metadata ADDED
@@ -0,0 +1,150 @@
1
+ --- !ruby/object:Gem::Specification
2
+ name: logstash-output-kinesis-jdk11
3
+ version: !ruby/object:Gem::Version
4
+ version: 5.1.3
5
+ platform: java
6
+ authors:
7
+ - Sam Day
8
+ autorequire:
9
+ bindir: bin
10
+ cert_chain: []
11
+ date: 2022-04-26 00:00:00.000000000 Z
12
+ dependencies:
13
+ - !ruby/object:Gem::Dependency
14
+ requirement: !ruby/object:Gem::Requirement
15
+ requirements:
16
+ - - ">="
17
+ - !ruby/object:Gem::Version
18
+ version: 5.0.0
19
+ name: logstash-core
20
+ prerelease: false
21
+ type: :runtime
22
+ version_requirements: !ruby/object:Gem::Requirement
23
+ requirements:
24
+ - - ">="
25
+ - !ruby/object:Gem::Version
26
+ version: 5.0.0
27
+ - !ruby/object:Gem::Dependency
28
+ requirement: !ruby/object:Gem::Requirement
29
+ requirements:
30
+ - - "<"
31
+ - !ruby/object:Gem::Version
32
+ version: 6.0.0
33
+ name: logstash-codec-plain
34
+ prerelease: false
35
+ type: :runtime
36
+ version_requirements: !ruby/object:Gem::Requirement
37
+ requirements:
38
+ - - "<"
39
+ - !ruby/object:Gem::Version
40
+ version: 6.0.0
41
+ - !ruby/object:Gem::Dependency
42
+ requirement: !ruby/object:Gem::Requirement
43
+ requirements:
44
+ - - "<"
45
+ - !ruby/object:Gem::Version
46
+ version: 6.0.0
47
+ name: logstash-codec-json
48
+ prerelease: false
49
+ type: :runtime
50
+ version_requirements: !ruby/object:Gem::Requirement
51
+ requirements:
52
+ - - "<"
53
+ - !ruby/object:Gem::Version
54
+ version: 6.0.0
55
+ - !ruby/object:Gem::Dependency
56
+ requirement: !ruby/object:Gem::Requirement
57
+ requirements:
58
+ - - ">="
59
+ - !ruby/object:Gem::Version
60
+ version: '0'
61
+ name: logstash-devutils
62
+ prerelease: false
63
+ type: :development
64
+ version_requirements: !ruby/object:Gem::Requirement
65
+ requirements:
66
+ - - ">="
67
+ - !ruby/object:Gem::Version
68
+ version: '0'
69
+ description: This gem is a logstash plugin required to be installed on top of the
70
+ Logstash core pipeline using $LS_HOME/bin/plugin install logstash-output-kinesis.
71
+ This gem is not a stand-alone program
72
+ email: me@samcday.com.au
73
+ executables: []
74
+ extensions: []
75
+ extra_rdoc_files: []
76
+ files:
77
+ - ".editorconfig"
78
+ - ".gemrelease"
79
+ - ".gitattributes"
80
+ - ".github_changelog_generator"
81
+ - ".gitignore"
82
+ - ".rspec"
83
+ - ".ruby-gemset"
84
+ - ".ruby-version"
85
+ - ".travis.yml"
86
+ - CHANGELOG.md
87
+ - CODE_OF_CONDUCT.md
88
+ - CONTRIBUTING.md
89
+ - Gemfile
90
+ - LICENSE
91
+ - README.md
92
+ - Rakefile
93
+ - build.gradle
94
+ - lib/logstash-output-kinesis/version.rb
95
+ - lib/logstash-output-kinesis_jars.rb
96
+ - lib/logstash/outputs/kinesis.rb
97
+ - logstash-output-kinesis.gemspec
98
+ - spec/outputs/kinesis_spec.rb
99
+ - spec/spec_helper.rb
100
+ - vendor/jar-dependencies/runtime-jars/amazon-kinesis-producer-0.12.6.jar
101
+ - vendor/jar-dependencies/runtime-jars/aws-java-sdk-core-1.11.128.jar
102
+ - vendor/jar-dependencies/runtime-jars/aws-java-sdk-sts-1.11.128.jar
103
+ - vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar
104
+ - vendor/jar-dependencies/runtime-jars/commons-io-2.4.jar
105
+ - vendor/jar-dependencies/runtime-jars/commons-lang-2.6.jar
106
+ - vendor/jar-dependencies/runtime-jars/commons-logging-1.2.jar
107
+ - vendor/jar-dependencies/runtime-jars/guava-18.0.jar
108
+ - vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar
109
+ - vendor/jar-dependencies/runtime-jars/httpcore-4.4.4.jar
110
+ - vendor/jar-dependencies/runtime-jars/ion-java-1.0.2.jar
111
+ - vendor/jar-dependencies/runtime-jars/jackson-annotations-2.6.0.jar
112
+ - vendor/jar-dependencies/runtime-jars/jackson-core-2.6.6.jar
113
+ - vendor/jar-dependencies/runtime-jars/jackson-databind-2.6.6.jar
114
+ - vendor/jar-dependencies/runtime-jars/jackson-dataformat-cbor-2.6.6.jar
115
+ - vendor/jar-dependencies/runtime-jars/jaxb-api-2.4.0-b180830.0359.jar
116
+ - vendor/jar-dependencies/runtime-jars/jmespath-java-1.11.128.jar
117
+ - vendor/jar-dependencies/runtime-jars/joda-time-2.8.1.jar
118
+ - vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.6.2.jar
119
+ - vendor/jar-dependencies/runtime-jars/protobuf-java-2.6.1.jar
120
+ - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.13.jar
121
+ homepage: https://www.github.com/samcday/logstash-output-kinesis
122
+ licenses:
123
+ - Apache License (2.0)
124
+ metadata:
125
+ logstash_plugin: 'true'
126
+ logstash_group: output
127
+ post_install_message:
128
+ rdoc_options: []
129
+ require_paths:
130
+ - lib
131
+ required_ruby_version: !ruby/object:Gem::Requirement
132
+ requirements:
133
+ - - ">="
134
+ - !ruby/object:Gem::Version
135
+ version: '0'
136
+ required_rubygems_version: !ruby/object:Gem::Requirement
137
+ requirements:
138
+ - - ">="
139
+ - !ruby/object:Gem::Version
140
+ version: '0'
141
+ requirements: []
142
+ rubyforge_project:
143
+ rubygems_version: 2.7.6
144
+ signing_key:
145
+ specification_version: 4
146
+ summary: This output plugin sends records to Kinesis using the Kinesis Producer Library
147
+ (KPL)
148
+ test_files:
149
+ - spec/outputs/kinesis_spec.rb
150
+ - spec/spec_helper.rb