logstash-input-dis 1.1.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +11 -0
- data/LICENSE +13 -0
- data/NOTICE.TXT +5 -0
- data/README.md +67 -0
- data/lib/com/fasterxml/jackson/core/jackson-annotations/2.8.11/jackson-annotations-2.8.11.jar +0 -0
- data/lib/com/fasterxml/jackson/core/jackson-core/2.8.11/jackson-core-2.8.11.jar +0 -0
- data/lib/com/fasterxml/jackson/core/jackson-databind/2.8.11.3/jackson-databind-2.8.11.3.jar +0 -0
- data/lib/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar +0 -0
- data/lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.1/huaweicloud-dis-kafka-adapter-common-1.2.1.jar +0 -0
- data/lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/1.2.1/huaweicloud-dis-kafka-adapter-1.2.1.jar +0 -0
- data/lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/1.3.3/huaweicloud-sdk-java-dis-iface-1.3.3.jar +0 -0
- data/lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.3/huaweicloud-sdk-java-dis-1.3.3.jar +0 -0
- data/lib/commons-codec/commons-codec/1.9/commons-codec-1.9.jar +0 -0
- data/lib/commons-io/commons-io/2.4/commons-io-2.4.jar +0 -0
- data/lib/commons-logging/commons-logging/1.2/commons-logging-1.2.jar +0 -0
- data/lib/joda-time/joda-time/2.8.1/joda-time-2.8.1.jar +0 -0
- data/lib/logstash-input-dis_jars.rb +5 -0
- data/lib/logstash/inputs/dis.rb +205 -0
- data/lib/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3.jar +0 -0
- data/lib/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar +0 -0
- data/lib/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6.jar +0 -0
- data/lib/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar +0 -0
- data/lib/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar +0 -0
- data/lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar +0 -0
- data/lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar +0 -0
- data/lib/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar +0 -0
- data/lib/org/xerial/snappy/snappy-java/1.1.7.2/snappy-java-1.1.7.2.jar +0 -0
- data/logstash-input-dis.gemspec +35 -0
- data/spec/unit/inputs/dis_spec.rb +39 -0
- data/vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/commons-io-2.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/commons-logging-1.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/httpasyncclient-4.1.3.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/httpcore-4.4.4.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/httpcore-nio-4.4.6.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/httpmime-4.5.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-1.2.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-1.3.3.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-iface-1.3.3.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/jackson-annotations-2.8.11.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/jackson-core-2.8.11.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/jackson-databind-2.8.11.3.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/joda-time-2.8.1.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/protobuf-java-2.5.0.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.24.jar +0 -0
- data/vendor/jar-dependencies/runtime-jars/snappy-java-1.1.7.2.jar +0 -0
- metadata +210 -0
checksums.yaml ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: b1405b2ee6e1dd978e2954b3eb97b129fb96ade0
+  data.tar.gz: 76006db276e07f84a0a76163a99877382e7ba26c
+SHA512:
+  metadata.gz: 81739e648876455c17b35fdb4e7cd2c232730c051b9a554b96bd8ebcdafbbead1146f5d4ddc7c4b511a60f9dd84a9d84d3db3e374295358ac1eb53797d0222fd
+  data.tar.gz: 6bd2efaf31d1a050da999844a97bffaef2505e0e39934880dd04a78e91e23df82ff93a0424a6204bcca6f33acb8695fd6a1ed6e1714849df4aac431d7dd42288
data/Gemfile ADDED
@@ -0,0 +1,11 @@
+source 'http://gems.ruby-china.com'
+
+gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "../../logstash"
+use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+
+if Dir.exist?(logstash_path) && use_logstash_source
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+end
data/LICENSE ADDED
@@ -0,0 +1,13 @@
+Copyright (c) 2012-2018 Elasticsearch <http://www.elastic.co>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
data/NOTICE.TXT ADDED
data/README.md ADDED
@@ -0,0 +1,67 @@
+# Logstash Input DIS
+
+This is a plugin for [Logstash](https://github.com/elastic/logstash).
+
+## Requirements
+
+To get started using this plugin, you will need four things:
+
+1. JDK 1.8 +
+2. JRuby with the Bundler gem installed, 9.0.0.0 +
+3. Maven
+4. Logstash
+
+## Installation
+### 0. Change the RubyGems mirror
+    gem sources --add https://gems.ruby-china.com/ --remove https://rubygems.org/
+
+### 1. Install JRuby
+### 2. Install the Bundler gem
+    gem install bundler
+
+### 3. Install dependencies
+    bundle install
+    rake install_jars
+    gem build logstash-input-dis.gemspec
+
+### 4. Edit Logstash's `Gemfile` and add the local plugin path
+    gem "logstash-input-dis", :path => "/your/local/logstash-input-dis"
+
+### 5. Install the plugin into Logstash
+    bin/logstash-plugin install --no-verify
+
+## Usage
+
+```properties
+input
+{
+    dis {
+        streams => ["YOU_DIS_STREAM_NAME"]
+        endpoint => "https://dis.cn-north-1.myhuaweicloud.com"
+        ak => "YOU_ACCESS_KEY_ID"
+        sk => "YOU_SECRET_KEY_ID"
+        region => "cn-north-1"
+        project_id => "YOU_PROJECT_ID"
+        group_id => "YOU_GROUP_ID"
+        decorate_events => true
+        auto_offset_reset => "earliest"
+    }
+}
+```
+
+## Configuration
+
+### Parameters
+
+| Name       | Description                                                                                              | Default                                   |
+| :--------- | :------------------------------------------------------------------------------------------------------ | :---------------------------------------- |
+| streams    | Name of the stream created on the DIS service.                                                           | -                                         |
+| ak         | The user's Access Key, obtainable from the "My Credentials" page of the Huawei Cloud console.            | -                                         |
+| sk         | The user's Secret Key, obtainable from the "My Credentials" page of the Huawei Cloud console.            | -                                         |
+| region     | Region of the DIS service that data is uploaded to.                                                      | cn-north-1                                |
+| project_id | Project ID of the user's region, obtainable from the "My Credentials" page of the Huawei Cloud console.  | -                                         |
+| endpoint   | Data API endpoint of the DIS service in the given region.                                                 | https://dis.cn-north-1.myhuaweicloud.com  |
+| group_id   | DIS app name, used to identify a consumer group; the value can be any string.                             | -                                         |
+
+## License
+[Apache License 2.0](https://www.apache.org/licenses/LICENSE-2.0.html)
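The pipeline settings in the README's Usage block map one-to-one onto the plugin options declared in lib/logstash/inputs/dis.rb. As a minimal sketch (not part of the package), assuming a JRuby environment with logstash-core on the load path and the vendored jars installed via `rake install_jars`, the same configuration can be exercised directly from Ruby; the placeholder credentials are the README's own:

```ruby
# Hypothetical smoke test, not shipped with the gem.
require "logstash/inputs/dis"

config = {
  "streams"    => ["YOU_DIS_STREAM_NAME"],
  "endpoint"   => "https://dis.cn-north-1.myhuaweicloud.com",
  "ak"         => "YOU_ACCESS_KEY_ID",   # placeholder credentials from the README
  "sk"         => "YOU_SECRET_KEY_ID",
  "region"     => "cn-north-1",
  "project_id" => "YOU_PROJECT_ID",
  "group_id"   => "YOU_GROUP_ID"
}

input = LogStash::Inputs::Dis.new(config)
input.register  # only initializes internal state; no DIS connection is opened yet
```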
data/lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.3/huaweicloud-sdk-java-dis-1.3.3.jar ADDED
Binary file
data/lib/logstash/inputs/dis.rb ADDED
@@ -0,0 +1,205 @@
+require 'logstash/namespace'
+require 'logstash/inputs/base'
+require 'stud/interval'
+require 'java'
+require 'logstash-input-dis_jars.rb'
+
+# This input will read events from a DIS stream, using DIS Kafka Adapter.
+class LogStash::Inputs::Dis < LogStash::Inputs::Base
+  config_name 'dis'
+
+  default :codec, 'plain'
+
+  config :default_trusted_jks_enabled, :validate => :boolean, :default => false
+  config :security_token, :validate => :string
+  config :exception_retries, :validate => :number, :default => 8
+  config :records_retries, :validate => :number, :default => 20
+  config :proxy_host, :validate => :string
+  config :proxy_port, :validate => :number, :default => 80
+  config :proxy_protocol, :validate => ["http", "https"], :default => "http"
+  config :proxy_username, :validate => :string
+  config :proxy_password, :validate => :string
+  config :proxy_workstation, :validate => :string
+  config :proxy_domain, :validate => :string
+  config :proxy_non_proxy_hosts, :validate => :string
+
+  # The frequency in milliseconds that the consumer offsets are committed to Kafka.
+  config :auto_commit_interval_ms, :validate => :string, :default => "5000"
+  # What to do when there is no initial offset in Kafka or if an offset is out of range:
+  #
+  # * earliest: automatically reset the offset to the earliest offset
+  # * latest: automatically reset the offset to the latest offset
+  # * none: throw exception to the consumer if no previous offset is found for the consumer's group
+  # * anything else: throw exception to the consumer.
+  config :auto_offset_reset, :validate => :string
+  # The id string to pass to the server when making requests. The purpose of this
+  # is to be able to track the source of requests beyond just ip/port by allowing
+  # a logical application name to be included.
+  config :client_id, :validate => :string, :default => "logstash"
+  # Ideally you should have as many threads as the number of partitions for a perfect
+  # balance — more threads than partitions means that some threads will be idle
+  config :consumer_threads, :validate => :number, :default => 1
+  # If true, periodically commit to Kafka the offsets of messages already returned by the consumer.
+  # This committed offset will be used when the process fails as the position from
+  # which the consumption will begin.
+  config :enable_auto_commit, :validate => :string, :default => "true"
+  # The identifier of the group this consumer belongs to. Consumer group is a single logical subscriber
+  # that happens to be made up of multiple processors. Messages in a topic will be distributed to all
+  # Logstash instances with the same `group_id`
+  config :group_id, :validate => :string, :default => "logstash"
+  # Java Class used to deserialize the record's key
+  config :key_deserializer_class, :validate => :string, :default => "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer"
+  # Java Class used to deserialize the record's value
+  config :value_deserializer_class, :validate => :string, :default => "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer"
+  # A list of streams to subscribe to, defaults to ["logstash"].
+  config :streams, :validate => :array, :default => ["logstash"]
+  # DIS Gateway endpoint
+  config :endpoint, :validate => :string, :default => "https://dis.cn-north-1.myhuaweicloud.com"
+  # The ProjectId of the specified region, it can be obtained from My Credential Page
+  config :project_id, :validate => :string
+  # Specifies use which region of DIS, now DIS only support cn-north-1
+  config :region, :validate => :string, :default => "cn-north-1"
+  # The Access Key ID for hwclouds, it can be obtained from My Credential Page
+  config :ak, :validate => :string, :required => true
+  # The Secret key ID is encrypted or not
+  config :is_sk_encrypted, :default => false
+  # The encrypt key used to encypt the Secret Key Id
+  config :encrypt_key, :validate => :string
+  # The Secret Key ID for hwclouds, it can be obtained from My Credential Page
+  config :sk, :validate => :string, :required => true
+  # A topic regex pattern to subscribe to.
+  # The topics configuration will be ignored when using this configuration.
+  config :topics_pattern, :validate => :string
+  # Time kafka consumer will wait to receive new messages from topics
+  config :poll_timeout_ms, :validate => :number, :default => 100
+  # Option to add DIS metadata like stream, message size to the event.
+  # This will add a field named `dis` to the logstash event containing the following attributes:
+  # `stream`: The stream this message is associated with
+  # `consumer_group`: The consumer group used to read in this event
+  # `partition`: The partition this message is associated with
+  # `offset`: The offset from the partition this message is associated with
+  # `key`: A ByteBuffer containing the message key
+  # `timestamp`: The timestamp of this message
+  config :decorate_events, :validate => :boolean, :default => false
+
+
+  public
+  def register
+    @runner_threads = []
+  end # def register
+
+  public
+  def run(logstash_queue)
+    @runner_consumers = consumer_threads.times.map { |i| create_consumer("#{client_id}-#{i}") }
+    @runner_threads = @runner_consumers.map { |consumer| thread_runner(logstash_queue, consumer) }
+    @runner_threads.each { |t| t.join }
+  end # def run
+
+  public
+  def stop
+    @runner_consumers.each { |c| c.wakeup }
+  end
+
+  public
+  def kafka_consumers
+    @runner_consumers
+  end
+
+  private
+  def thread_runner(logstash_queue, consumer)
+    Thread.new do
+      begin
+        unless @topics_pattern.nil?
+          nooplistener = com.huaweicloud.dis.adapter.kafka.clients.consumer.internals.NoOpConsumerRebalanceListener.new
+          pattern = java.util.regex.Pattern.compile(@topics_pattern)
+          consumer.subscribe(pattern, nooplistener)
+        else
+          consumer.subscribe(streams);
+        end
+        codec_instance = @codec.clone
+        while !stop?
+          records = consumer.poll(poll_timeout_ms)
+          for record in records do
+            codec_instance.decode(record.value.to_s) do |event|
+              decorate(event)
+              if @decorate_events
+                event.set("[@metadata][dis][topic]", record.topic)
+                event.set("[@metadata][dis][consumer_group]", @group_id)
+                event.set("[@metadata][dis][partition]", record.partition)
+                event.set("[@metadata][dis][offset]", record.offset)
+                event.set("[@metadata][dis][key]", record.key)
+                event.set("[@metadata][dis][timestamp]", record.timestamp)
+              end
+              logstash_queue << event
+            end
+          end
+          # Manual offset commit
+          if @enable_auto_commit == "false"
+            consumer.commitSync
+          end
+        end
+      rescue org.apache.kafka.common.errors.WakeupException => e
+        raise e if !stop?
+      ensure
+        consumer.close
+      end
+    end
+  end
+
+  private
+  def create_consumer(client_id)
+    begin
+      props = java.util.Properties.new
+      kafka = com.huaweicloud.dis.adapter.kafka.clients.consumer.ConsumerConfig
+
+      props.put("IS_DEFAULT_TRUSTED_JKS_ENABLED", default_trusted_jks_enabled.to_s)
+      props.put("security.token", security_token) unless security_token.nil?
+      props.put("exception.retries", exception_retries.to_s)
+      props.put("records.retries", records_retries.to_s)
+      props.put("PROXY_HOST", proxy_host) unless proxy_host.nil?
+      props.put("PROXY_PORT", proxy_port.to_s)
+      props.put("PROXY_PROTOCOL", proxy_protocol)
+      props.put("PROXY_USERNAME", proxy_username) unless proxy_username.nil?
+      props.put("PROXY_PASSWORD", proxy_password) unless proxy_password.nil?
+      props.put("PROXY_WORKSTATION", proxy_workstation) unless proxy_workstation.nil?
+      props.put("PROXY_DOMAIN", proxy_domain) unless proxy_domain.nil?
+      props.put("NON_PROXY_HOSTS", proxy_non_proxy_hosts) unless proxy_non_proxy_hosts.nil?
+
+      props.put("auto.commit.interval.ms", auto_commit_interval_ms)
+      props.put("auto.offset.reset", auto_offset_reset) unless auto_offset_reset.nil?
+      props.put("client.id", client_id)
+      props.put("enable.auto.commit", enable_auto_commit)
+      props.put("group.id", group_id)
+      props.put("key.deserializer", "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer")
+      props.put("value.deserializer", "com.huaweicloud.dis.adapter.kafka.common.serialization.StringDeserializer")
+
+      # endpoint, project_id, region, ak, sk
+      props.put("endpoint", endpoint)
+      props.put("projectId", project_id)
+      props.put("region", region)
+      props.put("ak", ak)
+      if is_sk_encrypted
+        decrypted_sk = decrypt(@sk)
+        props.put("sk", decrypted_sk)
+      else
+        props.put("sk", sk)
+      end
+
+      com.huaweicloud.dis.adapter.kafka.clients.consumer.DISKafkaConsumer.new(props)
+    rescue => e
+      logger.error("Unable to create DIS Kafka consumer from given configuration",
+                   :kafka_error_message => e,
+                   :cause => e.respond_to?(:getCause) ? e.getCause() : nil)
+      throw e
+    end
+  end
+
+  private
+  def decrypt(encrypted_sk)
+    com.huaweicloud.dis.util.encrypt.EncryptUtils.dec([@encrypt_key].to_java(java.lang.String), encrypted_sk)
+  rescue => e
+    logger.error("Unable to decrypt sk from given configuration",
+                 :decrypt_error_message => e,
+                 :cause => e.respond_to?(:getCause) ? e.getCause() : nil)
+  end
+end #class LogStash::Inputs::Dis
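The plugin follows the standard Logstash input lifecycle: register initializes state, run builds one DISKafkaConsumer per `consumer_threads`, polls each in its own thread and pushes decoded events onto the pipeline queue, and stop wakes the consumers so the polling threads exit. A minimal harness sketch (not part of the gem) for driving that lifecycle outside a full pipeline, assuming valid credentials, a reachable DIS endpoint, and the vendored jars on the classpath:

```ruby
# Hypothetical harness, not shipped with the gem: exercise register/run/stop
# against a plain Ruby Queue standing in for the Logstash pipeline queue.
require "logstash/inputs/dis"

input = LogStash::Inputs::Dis.new(
  "streams"         => ["logstash"],
  "project_id"      => "YOU_PROJECT_ID",   # placeholders; real credentials are required
  "ak"              => "YOU_ACCESS_KEY_ID",
  "sk"              => "YOU_SECRET_KEY_ID",
  "decorate_events" => true
)

queue = Queue.new
input.register
runner = Thread.new { input.run(queue) }   # run blocks until the plugin is stopped

# Events arrive on the queue as the consumer threads poll; with decorate_events
# enabled, each event carries [@metadata][dis][topic], [partition], [offset], etc.
# event = queue.pop
# puts event.get("message"), event.get("[@metadata][dis][offset]")

input.do_stop   # logstash-core base-class helper: flips stop? and calls stop, waking the consumers
runner.join
```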
data/logstash-input-dis.gemspec ADDED
@@ -0,0 +1,35 @@
+Gem::Specification.new do |s|
+  s.name          = 'logstash-input-dis'
+  s.version       = '1.1.3'
+  s.licenses      = ['Apache License (2.0)']
+  s.summary       = "Reads events from a DIS Stream"
+  s.description   = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+  s.authors       = ['Data Ingestion Service']
+  s.email         = 'dis@huaweicloud.com'
+  s.homepage      = "https://www.huaweicloud.com/product/dis.html"
+  s.require_paths = ['lib']
+
+  # Files
+  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
+
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { 'logstash_plugin' => 'true', 'logstash_group' => 'input'}
+
+  s.requirements << "jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.1'"
+  s.requirements << "jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'"
+
+  s.add_development_dependency 'jar-dependencies', '~> 0.3.2'
+
+  # Gem dependencies
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_runtime_dependency 'logstash-codec-json'
+  s.add_runtime_dependency 'logstash-codec-plain'
+  s.add_runtime_dependency 'stud', '>= 0.0.22', '< 0.1.0'
+
+  s.add_development_dependency 'logstash-devutils'
+  s.add_development_dependency 'rspec-wait'
+end
data/spec/unit/inputs/dis_spec.rb ADDED
@@ -0,0 +1,39 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/inputs/dis"
+require "concurrent"
+
+class MockConsumer
+  def initialize
+    @wake = Concurrent::AtomicBoolean.new(false)
+  end
+
+  def subscribe(topics)
+  end
+
+  def poll(ms)
+    if @wake.value
+      raise org.apache.kafka.common.errors.WakeupException.new
+    else
+      10.times.map do
+        com.huaweicloud.dis.adapter.kafka.clients.consumer.ConsumerRecord.new("logstash", 0, 0, "key", "value")
+      end
+    end
+  end
+
+  def close
+  end
+
+  def wakeup
+    @wake.make_true
+  end
+end
+
+describe LogStash::Inputs::Dis do
+  let(:config) { { 'streams' => ['logstash'], 'project_id' => 'test_project_id', 'ak' => 'test_ak', 'sk' => 'test_sk' } }
+  subject { LogStash::Inputs::Dis.new(config) }
+
+  it "should register" do
+    expect {subject.register}.to_not raise_error
+  end
+end
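The shipped example only verifies that register succeeds. Since the Logstash config mixin applies option defaults when the plugin is constructed, a further example could assert them directly; this is a hedged sketch, not part of the gem, and assumes the per-option accessor methods that logstash-core generates for each `config` declaration:

```ruby
# Hypothetical additional example, not shipped with the gem.
it "applies option defaults at construction time" do
  expect(subject.decorate_events).to eq(false)
  expect(subject.region).to eq("cn-north-1")
  expect(subject.endpoint).to eq("https://dis.cn-north-1.myhuaweicloud.com")
end
```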
metadata ADDED
@@ -0,0 +1,210 @@
+--- !ruby/object:Gem::Specification
+name: logstash-input-dis
+version: !ruby/object:Gem::Version
+  version: 1.1.3
+platform: ruby
+authors:
+- Data Ingestion Service
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2019-06-10 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.3.2
+  name: jar-dependencies
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: 0.3.2
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+  name: logstash-core-plugin-api
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-codec-json
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-codec-plain
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 0.0.22
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 0.1.0
+  name: stud
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 0.0.22
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 0.1.0
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-devutils
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: rspec-wait
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+description: This gem is a Logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
+email: dis@huaweicloud.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- LICENSE
+- NOTICE.TXT
+- README.md
+- lib/com/fasterxml/jackson/core/jackson-annotations/2.8.11/jackson-annotations-2.8.11.jar
+- lib/com/fasterxml/jackson/core/jackson-core/2.8.11/jackson-core-2.8.11.jar
+- lib/com/fasterxml/jackson/core/jackson-databind/2.8.11.3/jackson-databind-2.8.11.3.jar
+- lib/com/google/protobuf/protobuf-java/2.5.0/protobuf-java-2.5.0.jar
+- lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter-common/1.2.1/huaweicloud-dis-kafka-adapter-common-1.2.1.jar
+- lib/com/huaweicloud/dis/huaweicloud-dis-kafka-adapter/1.2.1/huaweicloud-dis-kafka-adapter-1.2.1.jar
+- lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis-iface/1.3.3/huaweicloud-sdk-java-dis-iface-1.3.3.jar
+- lib/com/huaweicloud/dis/huaweicloud-sdk-java-dis/1.3.3/huaweicloud-sdk-java-dis-1.3.3.jar
+- lib/commons-codec/commons-codec/1.9/commons-codec-1.9.jar
+- lib/commons-io/commons-io/2.4/commons-io-2.4.jar
+- lib/commons-logging/commons-logging/1.2/commons-logging-1.2.jar
+- lib/joda-time/joda-time/2.8.1/joda-time-2.8.1.jar
+- lib/logstash-input-dis_jars.rb
+- lib/logstash/inputs/dis.rb
+- lib/org/apache/httpcomponents/httpasyncclient/4.1.3/httpasyncclient-4.1.3.jar
+- lib/org/apache/httpcomponents/httpclient/4.5.2/httpclient-4.5.2.jar
+- lib/org/apache/httpcomponents/httpcore-nio/4.4.6/httpcore-nio-4.4.6.jar
+- lib/org/apache/httpcomponents/httpcore/4.4.4/httpcore-4.4.4.jar
+- lib/org/apache/httpcomponents/httpmime/4.5.2/httpmime-4.5.2.jar
+- lib/org/apache/logging/log4j/log4j-api/2.8.2/log4j-api-2.8.2.jar
+- lib/org/apache/logging/log4j/log4j-slf4j-impl/2.8.2/log4j-slf4j-impl-2.8.2.jar
+- lib/org/slf4j/slf4j-api/1.7.24/slf4j-api-1.7.24.jar
+- lib/org/xerial/snappy/snappy-java/1.1.7.2/snappy-java-1.1.7.2.jar
+- logstash-input-dis.gemspec
+- spec/unit/inputs/dis_spec.rb
+- vendor/jar-dependencies/runtime-jars/commons-codec-1.9.jar
+- vendor/jar-dependencies/runtime-jars/commons-io-2.4.jar
+- vendor/jar-dependencies/runtime-jars/commons-logging-1.2.jar
+- vendor/jar-dependencies/runtime-jars/httpasyncclient-4.1.3.jar
+- vendor/jar-dependencies/runtime-jars/httpclient-4.5.2.jar
+- vendor/jar-dependencies/runtime-jars/httpcore-4.4.4.jar
+- vendor/jar-dependencies/runtime-jars/httpcore-nio-4.4.6.jar
+- vendor/jar-dependencies/runtime-jars/httpmime-4.5.2.jar
+- vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-1.2.1.jar
+- vendor/jar-dependencies/runtime-jars/huaweicloud-dis-kafka-adapter-common-1.2.1.jar
+- vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-1.3.3.jar
+- vendor/jar-dependencies/runtime-jars/huaweicloud-sdk-java-dis-iface-1.3.3.jar
+- vendor/jar-dependencies/runtime-jars/jackson-annotations-2.8.11.jar
+- vendor/jar-dependencies/runtime-jars/jackson-core-2.8.11.jar
+- vendor/jar-dependencies/runtime-jars/jackson-databind-2.8.11.3.jar
+- vendor/jar-dependencies/runtime-jars/joda-time-2.8.1.jar
+- vendor/jar-dependencies/runtime-jars/log4j-api-2.8.2.jar
+- vendor/jar-dependencies/runtime-jars/log4j-slf4j-impl-2.8.2.jar
+- vendor/jar-dependencies/runtime-jars/protobuf-java-2.5.0.jar
+- vendor/jar-dependencies/runtime-jars/slf4j-api-1.7.24.jar
+- vendor/jar-dependencies/runtime-jars/snappy-java-1.1.7.2.jar
+homepage: https://www.huaweicloud.com/product/dis.html
+licenses:
+- Apache License (2.0)
+metadata:
+  logstash_plugin: 'true'
+  logstash_group: input
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements:
+- jar 'com.huaweicloud.dis:huaweicloud-dis-kafka-adapter', '1.2.1'
+- jar 'org.apache.logging.log4j:log4j-slf4j-impl', '2.8.2'
+rubyforge_project:
+rubygems_version: 2.6.14.1
+signing_key:
+specification_version: 4
+summary: Reads events from a DIS Stream
+test_files:
+- spec/unit/inputs/dis_spec.rb