logstash-input-dis 1.1.3 → 1.1.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (44)
  1. checksums.yaml +4 -4
  2. data/README.md +1 -1
  3. data/lib/com/fasterxml/jackson/core/jackson-annotations/2.9.8/jackson-annotations-2.9.8.jar +0 -0
  4. data/lib/com/fasterxml/jackson/core/jackson-core/2.9.8/jackson-core-2.9.8.jar +0 -0
  5. data/lib/com/fasterxml/jackson/core/jackson-databind/2.9.8/jackson-databind-2.9.8.jar +0 -0
  6. data/lib/com/github/luben/zstd-jni/1.4.0-1/zstd-jni-1.4.0-1.jar +0 -0
  7. data/lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.9/huaweicloud-dis-kafka-adapter-common-1.2.9.jar +0 -0
  8. data/lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/{1.2.1/huaweicloud-dis-kafka-adapter-1.2.1.jar → 1.2.9/huaweicloud-dis-kafka-adapter-1.2.9.jar} +0 -0
  9. data/lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/{1.3.3/huaweicloud-sdk-java-dis-iface-1.3.3.jar → 1.3.6/huaweicloud-sdk-java-dis-iface-1.3.6.jar} +0 -0
  10. data/lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/{1.3.3/huaweicloud-sdk-java-dis-1.3.3.jar → 1.3.6/huaweicloud-sdk-java-dis-1.3.6.jar} +0 -0
  11. data/lib/commons-codec/commons-codec/1.11/commons-codec-1.11.jar +0 -0
  12. data/lib/logstash-input-dis_jars.rb +53 -4
  13. data/lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar +0 -0
  14. data/lib/org/apache/httpcomponents/httpclient/4.5.7/httpclient-4.5.7.jar +0 -0
  15. data/lib/org/apache/httpcomponents/httpmime/4.5.7/httpmime-4.5.7.jar +0 -0
  16. data/logstash-input-dis.gemspec +2 -2
  17. data/vendor/jar-dependencies/runtime-jars/commons-codec-1.11.jar +0 -0
  18. data/vendor/jar-dependencies/runtime-jars/httpclient-4.5.7.jar +0 -0
  19. data/vendor/jar-dependencies/runtime-jars/httpmime-4.5.7.jar +0 -0
  20. data/vendor/jar-dependencies/runtime-jars/{huaweicloud-dis-kafka-adapter-1.2.1.jar → huaweicloud-dis-kafka-adapter-1.2.9.jar} +0 -0
  21. data/vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.9.jar +0 -0
  22. data/vendor/jar-dependencies/runtime-jars/{huaweicloud-sdk-java-dis-1.3.3.jar → huaweicloud-sdk-java-dis-1.3.6.jar} +0 -0
  23. data/vendor/jar-dependencies/runtime-jars/{huaweicloud-sdk-java-dis-iface-1.3.3.jar → huaweicloud-sdk-java-dis-iface-1.3.6.jar} +0 -0
  24. data/vendor/jar-dependencies/runtime-jars/jackson-annotations-2.9.8.jar +0 -0
  25. data/vendor/jar-dependencies/runtime-jars/jackson-core-2.9.8.jar +0 -0
  26. data/vendor/jar-dependencies/runtime-jars/jackson-databind-2.9.8.jar +0 -0
  27. data/vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar +0 -0
  28. data/vendor/jar-dependencies/runtime-jars/zstd-jni-1.4.0-1.jar +0 -0
  29. metadata +27 -24
  30. data/lib/com/fasterxml/jackson/core/jackson-annotations/2.8.11/jackson-annotations-2.8.11.jar +0 -0
  31. data/lib/com/fasterxml/jackson/core/jackson-core/2.8.11/jackson-core-2.8.11.jar +0 -0
  32. data/lib/com/fasterxml/jackson/core/jackson-databind/2.8.11.3/jackson-databind-2.8.11.3.jar +0 -0
  33. data/lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.1/huaweicloud-dis-kafka-adapter-common-1.2.1.jar +0 -0
  34. data/lib/commons-codec/commons-codec/1.9/commons-codec-1.9.jar +0 -0
  35. data/lib/logstash/inputs/dis.rb +0 -205
  36. data/lib/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar +0 -0
  37. data/lib/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar +0 -0
  38. data/vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar +0 -0
  39. data/vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar +0 -0
  40. data/vendor/jar-dependencies/runtime-jars/httpmime-4.5.2.jar +0 -0
  41. data/vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.1.jar +0 -0
  42. data/vendor/jar-dependencies/runtime-jars/jackson-annotations-2.8.11.jar +0 -0
  43. data/vendor/jar-dependencies/runtime-jars/jackson-core-2.8.11.jar +0 -0
  44. data/vendor/jar-dependencies/runtime-jars/jackson-databind-2.8.11.3.jar +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: b1405b2ee6e1dd978e2954b3eb97b129fb96ade0
4
- data.tar.gz: 76006db276e07f84a0a76163a99877382e7ba26c
3
+ metadata.gz: d6f2a35b566544b0d1f13ba1d3125947b46a42ce
4
+ data.tar.gz: d58456cbeaf297ec22392c03a1676e014251da15
5
5
  SHA512:
6
- metadata.gz: 81739e648876455c17b35fdb4e7cd2c232730c051b9a554b96bd8ebcdafbbead1146f5d4ddc7c4b511a60f9dd84a9d84d3db3e374295358ac1eb53797d0222fd
7
- data.tar.gz: 6bd2efaf31d1a050da999844a97bffaef2505e0e39934880dd04a78e91e23df82ff93a0424a6204bcca6f33acb8695fd6a1ed6e1714849df4aac431d7dd42288
6
+ metadata.gz: ee11eb3a2221424c139bca80972d236bb12011596745eb6efe2dddc1197caeda89f2ec3b425a1a358cd17732a2b9036d09dcfe6166d7f4369d3a6ed8e4fe3c60
7
+ data.tar.gz: 71c21e1ed24d203db4e2b189cf4d8e57a1a7851060dd43f32976701fd6760fd6de9fbce4b4c7bfe963389c54fa37ec3532b0a1ec228992f14a3ff12dc9639d97
data/README.md CHANGED
@@ -7,7 +7,7 @@ This is a plugin for [Logstash](https://github.com/elastic/logstash).
7
7
  To get started using this plugin, you will need three things:
8
8
 
9
9
  1. JDK 1.8 +
10
- 2. JRuby with the Bundler gem installed, 9.0.0.0 +
10
+ 2. JRuby with the Bundler gem installed, 9.0.0.0 ~ 9.2.0.0
11
11
  3. Maven
12
12
  4. Logstash
13
13
 
@@ -1,5 +1,54 @@
1
- # encoding: utf-8
2
- require 'logstash/environment'
1
+ # this is a generated file, to avoid over-writing it just delete this comment
2
+ begin
3
+ require 'jar_dependencies'
4
+ rescue LoadError
5
+ require 'com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.9/huaweicloud-dis-kafka-adapter-common-1.2.9.jar'
6
+ require 'com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/1.3.6/huaweicloud-sdk-java-dis-iface-1.3.6.jar'
7
+ require 'org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3.jar'
8
+ require 'org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar'
9
+ require 'commons-io/commons-io/2.4/commons-io-2.4.jar'
10
+ require 'com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar'
11
+ require 'org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar'
12
+ require 'org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar'
13
+ require 'com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/1.2.9/huaweicloud-dis-kafka-adapter-1.2.9.jar'
14
+ require 'com/github/luben/zstd-jni/1.4.0-1/zstd-jni-1.4.0-1.jar'
15
+ require 'commons-logging/commons-logging/1.2/commons-logging-1.2.jar'
16
+ require 'com/fasterxml/jackson/core/jackson-annotations/2.9.8/jackson-annotations-2.9.8.jar'
17
+ require 'joda-time/joda-time/2.8.1/joda-time-2.8.1.jar'
18
+ require 'com/fasterxml/jackson/core/jackson-databind/2.9.8/jackson-databind-2.9.8.jar'
19
+ require 'org/apache/httpcomponents/httpmime/4.5.7/httpmime-4.5.7.jar'
20
+ require 'org/apache/httpcomponents/httpclient/4.5.7/httpclient-4.5.7.jar'
21
+ require 'commons-codec/commons-codec/1.11/commons-codec-1.11.jar'
22
+ require 'net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar'
23
+ require 'org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6.jar'
24
+ require 'com/fasterxml/jackson/core/jackson-core/2.9.8/jackson-core-2.9.8.jar'
25
+ require 'com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.6/huaweicloud-sdk-java-dis-1.3.6.jar'
26
+ require 'org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar'
27
+ require 'org/xerial/snappy/snappy-java/1.1.7.2/snappy-java-1.1.7.2.jar'
28
+ end
3
29
 
4
- root_dir = File.expand_path(File.join(File.dirname(__FILE__), ".."))
5
- LogStash::Environment.load_runtime_jars! File.join(root_dir, "vendor")
30
+ if defined? Jars
31
+ require_jar 'com.huaweicloud.dis', 'huaweicloud-dis-kafka-adapter-common', '1.2.9'
32
+ require_jar 'com.huaweicloud.dis', 'huaweicloud-sdk-java-dis-iface', '1.3.6'
33
+ require_jar 'org.apache.httpcomponents', 'httpasyncclient', '4.1.3'
34
+ require_jar 'org.apache.logging.log4j', 'log4j-api', '2.8.2'
35
+ require_jar 'commons-io', 'commons-io', '2.4'
36
+ require_jar 'com.google.protobuf', 'protobuf-java', '2.5.0'
37
+ require_jar 'org.slf4j', 'slf4j-api', '1.7.24'
38
+ require_jar 'org.apache.logging.log4j', 'log4j-slf4j-impl', '2.8.2'
39
+ require_jar 'com.huaweicloud.dis', 'huaweicloud-dis-kafka-adapter', '1.2.9'
40
+ require_jar 'com.github.luben', 'zstd-jni', '1.4.0-1'
41
+ require_jar 'commons-logging', 'commons-logging', '1.2'
42
+ require_jar 'com.fasterxml.jackson.core', 'jackson-annotations', '2.9.8'
43
+ require_jar 'joda-time', 'joda-time', '2.8.1'
44
+ require_jar 'com.fasterxml.jackson.core', 'jackson-databind', '2.9.8'
45
+ require_jar 'org.apache.httpcomponents', 'httpmime', '4.5.7'
46
+ require_jar 'org.apache.httpcomponents', 'httpclient', '4.5.7'
47
+ require_jar 'commons-codec', 'commons-codec', '1.11'
48
+ require_jar 'net.jpountz.lz4', 'lz4', '1.3.0'
49
+ require_jar 'org.apache.httpcomponents', 'httpcore-nio', '4.4.6'
50
+ require_jar 'com.fasterxml.jackson.core', 'jackson-core', '2.9.8'
51
+ require_jar 'com.huaweicloud.dis', 'huaweicloud-sdk-java-dis', '1.3.6'
52
+ require_jar 'org.apache.httpcomponents', 'httpcore', '4.4.4'
53
+ require_jar 'org.xerial.snappy', 'snappy-java', '1.1.7.2'
54
+ end
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-input-dis'
3
- s.version = '1.1.3'
3
+ s.version = '1.1.4'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = "Reads events from a DIS Stream"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -18,7 +18,7 @@ Gem::Specification.new do |s|
18
18
  # Special flag to let us know this is actually a logstash plugin
19
19
  s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'input'}
20
20
 
21
- s.requirements << "jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.1'"
21
+ s.requirements << "jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.9'"
22
22
  s.requirements << "jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'"
23
23
 
24
24
  s.add_development_dependency 'jar-dependencies', '~> 0.3.2'
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-dis
3
3
  version: !ruby/object:Gem::Version
4
- version: 1.1.3
4
+ version: 1.1.4
5
5
  platform: ruby
6
6
  authors:
7
7
  - Data Ingestion Service
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2019-06-10 00:00:00.000000000 Z
11
+ date: 2020-02-17 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -132,52 +132,55 @@ files:
132
132
  - LICENSE
133
133
  - NOTICE.TXT
134
134
  - README.md
135
- - lib/com/fasterxml/jackson/core/jackson-annotations/2.8.11/jackson-annotations-2.8.11.jar
136
- - lib/com/fasterxml/jackson/core/jackson-core/2.8.11/jackson-core-2.8.11.jar
137
- - lib/com/fasterxml/jackson/core/jackson-databind/2.8.11.3/jackson-databind-2.8.11.3.jar
135
+ - lib/com/fasterxml/jackson/core/jackson-annotations/2.9.8/jackson-annotations-2.9.8.jar
136
+ - lib/com/fasterxml/jackson/core/jackson-core/2.9.8/jackson-core-2.9.8.jar
137
+ - lib/com/fasterxml/jackson/core/jackson-databind/2.9.8/jackson-databind-2.9.8.jar
138
+ - lib/com/github/luben/zstd-jni/1.4.0-1/zstd-jni-1.4.0-1.jar
138
139
  - lib/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar
139
- - lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.1/huaweicloud-dis-kafka-adapter-common-1.2.1.jar
140
- - lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/1.2.1/huaweicloud-dis-kafka-adapter-1.2.1.jar
141
- - lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/1.3.3/huaweicloud-sdk-java-dis-iface-1.3.3.jar
142
- - lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.3/huaweicloud-sdk-java-dis-1.3.3.jar
143
- - lib/commons-codec/commons-codec/1.9/commons-codec-1.9.jar
140
+ - lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.9/huaweicloud-dis-kafka-adapter-common-1.2.9.jar
141
+ - lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/1.2.9/huaweicloud-dis-kafka-adapter-1.2.9.jar
142
+ - lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/1.3.6/huaweicloud-sdk-java-dis-iface-1.3.6.jar
143
+ - lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.6/huaweicloud-sdk-java-dis-1.3.6.jar
144
+ - lib/commons-codec/commons-codec/1.11/commons-codec-1.11.jar
144
145
  - lib/commons-io/commons-io/2.4/commons-io-2.4.jar
145
146
  - lib/commons-logging/commons-logging/1.2/commons-logging-1.2.jar
146
147
  - lib/joda-time/joda-time/2.8.1/joda-time-2.8.1.jar
147
148
  - lib/logstash-input-dis_jars.rb
148
- - lib/logstash/inputs/dis.rb
149
+ - lib/net/jpountz/lz4/lz4/1.3.0/lz4-1.3.0.jar
149
150
  - lib/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3.jar
150
- - lib/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar
151
+ - lib/org/apache/httpcomponents/httpclient/4.5.7/httpclient-4.5.7.jar
151
152
  - lib/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6.jar
152
153
  - lib/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar
153
- - lib/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar
154
+ - lib/org/apache/httpcomponents/httpmime/4.5.7/httpmime-4.5.7.jar
154
155
  - lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar
155
156
  - lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar
156
157
  - lib/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar
157
158
  - lib/org/xerial/snappy/snappy-java/1.1.7.2/snappy-java-1.1.7.2.jar
158
159
  - logstash-input-dis.gemspec
159
160
  - spec/unit/inputs/dis_spec.rb
160
- - vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar
161
+ - vendor/jar-dependencies/runtime-jars/commons-codec-1.11.jar
161
162
  - vendor/jar-dependencies/runtime-jars/commons-io-2.4.jar
162
163
  - vendor/jar-dependencies/runtime-jars/commons-logging-1.2.jar
163
164
  - vendor/jar-dependencies/runtime-jars/httpasyncclient-4.1.3.jar
164
- - vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar
165
+ - vendor/jar-dependencies/runtime-jars/httpclient-4.5.7.jar
165
166
  - vendor/jar-dependencies/runtime-jars/httpcore-4.4.4.jar
166
167
  - vendor/jar-dependencies/runtime-jars/httpcore-nio-4.4.6.jar
167
- - vendor/jar-dependencies/runtime-jars/httpmime-4.5.2.jar
168
- - vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-1.2.1.jar
169
- - vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.1.jar
170
- - vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-1.3.3.jar
171
- - vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-iface-1.3.3.jar
172
- - vendor/jar-dependencies/runtime-jars/jackson-annotations-2.8.11.jar
173
- - vendor/jar-dependencies/runtime-jars/jackson-core-2.8.11.jar
174
- - vendor/jar-dependencies/runtime-jars/jackson-databind-2.8.11.3.jar
168
+ - vendor/jar-dependencies/runtime-jars/httpmime-4.5.7.jar
169
+ - vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-1.2.9.jar
170
+ - vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.9.jar
171
+ - vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-1.3.6.jar
172
+ - vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-iface-1.3.6.jar
173
+ - vendor/jar-dependencies/runtime-jars/jackson-annotations-2.9.8.jar
174
+ - vendor/jar-dependencies/runtime-jars/jackson-core-2.9.8.jar
175
+ - vendor/jar-dependencies/runtime-jars/jackson-databind-2.9.8.jar
175
176
  - vendor/jar-dependencies/runtime-jars/joda-time-2.8.1.jar
176
177
  - vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar
177
178
  - vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar
179
+ - vendor/jar-dependencies/runtime-jars/lz4-1.3.0.jar
178
180
  - vendor/jar-dependencies/runtime-jars/protobuf-java-2.5.0.jar
179
181
  - vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.24.jar
180
182
  - vendor/jar-dependencies/runtime-jars/snappy-java-1.1.7.2.jar
183
+ - vendor/jar-dependencies/runtime-jars/zstd-jni-1.4.0-1.jar
181
184
  homepage: https://www.huaweicloud.com/product/dis.html
182
185
  licenses:
183
186
  - Apache License (2.0)
@@ -199,7 +202,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
199
202
  - !ruby/object:Gem::Version
200
203
  version: '0'
201
204
  requirements:
202
- - jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.1'
205
+ - jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.9'
203
206
  - jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'
204
207
  rubyforge_project:
205
208
  rubygems_version: 2.6.14.1
@@ -1,205 +0,0 @@
1
- require 'logstash/namespace'
2
- require 'logstash/inputs/base'
3
- require 'stud/interval'
4
- require 'java'
5
- require 'logstash-input-dis_jars.rb'
6
-
7
- # This input will read events from a DIS stream, using DIS Kafka Adapter.
8
- class LogStash::Inputs::Dis < LogStash::Inputs::Base
9
- config_name 'dis'
10
-
11
- default :codec, 'plain'
12
-
13
- config :default_trusted_jks_enabled, :validate => :boolean, :default => false
14
- config :security_token, :validate => :string
15
- config :exception_retries, :validate => :number, :default => 8
16
- config :records_retries, :validate => :number, :default => 20
17
- config :proxy_host, :validate => :string
18
- config :proxy_port, :validate => :number, :default => 80
19
- config :proxy_protocol, :validate => ["http", "https"], :default => "http"
20
- config :proxy_username, :validate => :string
21
- config :proxy_password, :validate => :string
22
- config :proxy_workstation, :validate => :string
23
- config :proxy_domain, :validate => :string
24
- config :proxy_non_proxy_hosts, :validate => :string
25
-
26
- # The frequency in milliseconds that the consumer offsets are committed to Kafka.
27
- config :auto_commit_interval_ms, :validate => :string, :default => "5000"
28
- # What to do when there is no initial offset in Kafka or if an offset is out of range:
29
- #
30
- # * earliest: automatically reset the offset to the earliest offset
31
- # * latest: automatically reset the offset to the latest offset
32
- # * none: throw exception to the consumer if no previous offset is found for the consumer's group
33
- # * anything else: throw exception to the consumer.
34
- config :auto_offset_reset, :validate => :string
35
- # The id string to pass to the server when making requests. The purpose of this
36
- # is to be able to track the source of requests beyond just ip/port by allowing
37
- # a logical application name to be included.
38
- config :client_id, :validate => :string, :default => "logstash"
39
- # Ideally you should have as many threads as the number of partitions for a perfect
40
- # balance — more threads than partitions means that some threads will be idle
41
- config :consumer_threads, :validate => :number, :default => 1
42
- # If true, periodically commit to Kafka the offsets of messages already returned by the consumer.
43
- # This committed offset will be used when the process fails as the position from
44
- # which the consumption will begin.
45
- config :enable_auto_commit, :validate => :string, :default => "true"
46
- # The identifier of the group this consumer belongs to. Consumer group is a single logical subscriber
47
- # that happens to be made up of multiple processors. Messages in a topic will be distributed to all
48
- # Logstash instances with the same `group_id`
49
- config :group_id, :validate => :string, :default => "logstash"
50
- # Java Class used to deserialize the record's key
51
- config :key_deserializer_class, :validate => :string, :default => "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer"
52
- # Java Class used to deserialize the record's value
53
- config :value_deserializer_class, :validate => :string, :default => "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer"
54
- # A list of streams to subscribe to, defaults to ["logstash"].
55
- config :streams, :validate => :array, :default => ["logstash"]
56
- # DIS Gateway endpoint
57
- config :endpoint, :validate => :string, :default => "https://dis.cn-north-1.myhuaweicloud.com"
58
- # The ProjectId of the specified region, it can be obtained from My Credential Page
59
- config :project_id, :validate => :string
60
- # Specifies use which region of DIS, now DIS only support cn-north-1
61
- config :region, :validate => :string, :default => "cn-north-1"
62
- # The Access Key ID for hwclouds, it can be obtained from My Credential Page
63
- config :ak, :validate => :string, :required => true
64
- # The Secret key ID is encrypted or not
65
- config :is_sk_encrypted, :default => false
66
- # The encrypt key used to encypt the Secret Key Id
67
- config :encrypt_key, :validate => :string
68
- # The Secret Key ID for hwclouds, it can be obtained from My Credential Page
69
- config :sk, :validate => :string, :required => true
70
- # A topic regex pattern to subscribe to.
71
- # The topics configuration will be ignored when using this configuration.
72
- config :topics_pattern, :validate => :string
73
- # Time kafka consumer will wait to receive new messages from topics
74
- config :poll_timeout_ms, :validate => :number, :default => 100
75
- # Option to add DIS metadata like stream, message size to the event.
76
- # This will add a field named `dis` to the logstash event containing the following attributes:
77
- # `stream`: The stream this message is associated with
78
- # `consumer_group`: The consumer group used to read in this event
79
- # `partition`: The partition this message is associated with
80
- # `offset`: The offset from the partition this message is associated with
81
- # `key`: A ByteBuffer containing the message key
82
- # `timestamp`: The timestamp of this message
83
- config :decorate_events, :validate => :boolean, :default => false
84
-
85
-
86
- public
87
- def register
88
- @runner_threads = []
89
- end # def register
90
-
91
- public
92
- def run(logstash_queue)
93
- @runner_consumers = consumer_threads.times.map { |i| create_consumer("#{client_id}-#{i}") }
94
- @runner_threads = @runner_consumers.map { |consumer| thread_runner(logstash_queue, consumer) }
95
- @runner_threads.each { |t| t.join }
96
- end # def run
97
-
98
- public
99
- def stop
100
- @runner_consumers.each { |c| c.wakeup }
101
- end
102
-
103
- public
104
- def kafka_consumers
105
- @runner_consumers
106
- end
107
-
108
- private
109
- def thread_runner(logstash_queue, consumer)
110
- Thread.new do
111
- begin
112
- unless @topics_pattern.nil?
113
- nooplistener = com.huaweicloud.dis.adapter.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
114
- pattern = java.util.regex.Pattern.compile(@topics_pattern)
115
- consumer.subscribe(pattern, nooplistener)
116
- else
117
- consumer.subscribe(streams);
118
- end
119
- codec_instance = @codec.clone
120
- while !stop?
121
- records = consumer.poll(poll_timeout_ms)
122
- for record in records do
123
- codec_instance.decode(record.value.to_s) do |event|
124
- decorate(event)
125
- if @decorate_events
126
- event.set("[@metadata][dis][topic]", record.topic)
127
- event.set("[@metadata][dis][consumer_group]", @group_id)
128
- event.set("[@metadata][dis][partition]", record.partition)
129
- event.set("[@metadata][dis][offset]", record.offset)
130
- event.set("[@metadata][dis][key]", record.key)
131
- event.set("[@metadata][dis][timestamp]", record.timestamp)
132
- end
133
- logstash_queue << event
134
- end
135
- end
136
- # Manual offset commit
137
- if @enable_auto_commit == "false"
138
- consumer.commitSync
139
- end
140
- end
141
- rescue org.apache.kafka.common.errors.WakeupException => e
142
- raise e if !stop?
143
- ensure
144
- consumer.close
145
- end
146
- end
147
- end
148
-
149
- private
150
- def create_consumer(client_id)
151
- begin
152
- props = java.util.Properties.new
153
- kafka = com.huaweicloud.dis.adapter.kafka.clients.consumer.ConsumerConfig
154
-
155
- props.put("IS_DEFAULT_TRUSTED_JKS_ENABLED", default_trusted_jks_enabled.to_s)
156
- props.put("security.token", security_token) unless security_token.nil?
157
- props.put("exception.retries", exception_retries.to_s)
158
- props.put("records.retries", records_retries.to_s)
159
- props.put("PROXY_HOST", proxy_host) unless proxy_host.nil?
160
- props.put("PROXY_PORT", proxy_port.to_s)
161
- props.put("PROXY_PROTOCOL", proxy_protocol)
162
- props.put("PROXY_USERNAME", proxy_username) unless proxy_username.nil?
163
- props.put("PROXY_PASSWORD", proxy_password) unless proxy_password.nil?
164
- props.put("PROXY_WORKSTATION", proxy_workstation) unless proxy_workstation.nil?
165
- props.put("PROXY_DOMAIN", proxy_domain) unless proxy_domain.nil?
166
- props.put("NON_PROXY_HOSTS", proxy_non_proxy_hosts) unless proxy_non_proxy_hosts.nil?
167
-
168
- props.put("auto.commit.interval.ms", auto_commit_interval_ms)
169
- props.put("auto.offset.reset", auto_offset_reset) unless auto_offset_reset.nil?
170
- props.put("client.id", client_id)
171
- props.put("enable.auto.commit", enable_auto_commit)
172
- props.put("group.id", group_id)
173
- props.put("key.deserializer", "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer")
174
- props.put("value.deserializer", "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer")
175
-
176
- # endpoint, project_id, region, ak, sk
177
- props.put("endpoint", endpoint)
178
- props.put("projectId", project_id)
179
- props.put("region", region)
180
- props.put("ak", ak)
181
- if is_sk_encrypted
182
- decrypted_sk = decrypt(@sk)
183
- props.put("sk", decrypted_sk)
184
- else
185
- props.put("sk", sk)
186
- end
187
-
188
- com.huaweicloud.dis.adapter.kafka.clients.consumer.DISKafkaConsumer.new(props)
189
- rescue => e
190
- logger.error("Unable to create DIS Kafka consumer from given configuration",
191
- :kafka_error_message => e,
192
- :cause => e.respond_to?(:getCause) ? e.getCause() : nil)
193
- throw e
194
- end
195
- end
196
-
197
- private
198
- def decrypt(encrypted_sk)
199
- com.huaweicloud.dis.util.encrypt.EncryptUtils.dec([@encrypt_key].to_java(java.lang.String), encrypted_sk)
200
- rescue => e
201
- logger.error("Unable to decrypt sk from given configuration",
202
- :decrypt_error_message => e,
203
- :cause => e.respond_to?(:getCause) ? e.getCause() : nil)
204
- end
205
- end #class LogStash::Inputs::Dis