logstash-input-tcp 0.1.0
- checksums.yaml +15 -0
- data/.gitignore +4 -0
- data/Gemfile +4 -0
- data/Rakefile +6 -0
- data/lib/logstash/inputs/tcp.rb +238 -0
- data/logstash-input-tcp.gemspec +30 -0
- data/rakelib/publish.rake +9 -0
- data/rakelib/vendor.rake +169 -0
- data/spec/inputs/tcp_spec.rb +280 -0
- metadata +130 -0
checksums.yaml
ADDED
@@ -0,0 +1,15 @@
---
!binary "U0hBMQ==":
  metadata.gz: !binary |-
    OWY1MTlhYjg1NDRjNzJjMzI1YmVhMzg2MjM1ZjFmNDQxZTEzNzVmYg==
  data.tar.gz: !binary |-
    OGZlMzJmN2RjNTVjZWFhNGJlMDMxOGUxZWMxODc5OTA5MzAzMTkyYw==
SHA512:
  metadata.gz: !binary |-
    ZTUyMTRmYThhN2ZmZTJlODcwMTkwNTVjMmNiYjgyN2ZjNzlmYzE2ZmQ3NzU0
    ZTllNWQ2NmFkNGQ4MmI5YjNkNGIwMTA5MzFkZDE3NjUxMGIzMDdjZjBiZmRi
    YTYxYmJhYmU2YzllOTRkNTg1NDc0YjI5MWI4ZmVmNzdhNDg3NTQ=
  data.tar.gz: !binary |-
    YmQ5YzgzOWI5OGM3YzcxNmY5NzQ1MTkzNWEwNWQ0NTBjYjM4ZjhhMjc0ZWUz
    OTU5MzM4YjAzOTk3YTI1ODEyZTYyZWVhOTk4OGYxM2Y5NTk2ZTFhNTljODll
    ZTQ4ZTFiNDBlZTA1MDYxMTFjNDI3ODQxNzA5ODk3ZDQ1ZmU1ZjM=
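The two top-level keys record SHA1 and SHA512 digests of the gem package's metadata.gz and data.tar.gz archives; the values appear to be base64-encoded hex digests. A minimal sketch (not part of the gem, and assuming a locally downloaded copy of logstash-input-tcp-0.1.0.gem) of how those digests could be recomputed for comparison:

# Sketch only: recompute the digests recorded in checksums.yaml.
# The local .gem path is an assumption.
require "base64"
require "digest"
require "rubygems/package"

gem_path = "logstash-input-tcp-0.1.0.gem"

tar = Gem::Package::TarReader.new(File.open(gem_path, "rb"))
tar.each do |entry|
  next unless %w[metadata.gz data.tar.gz].include?(entry.full_name)
  body = entry.read
  # checksums.yaml seems to store base64-encoded hex digests, so encode the same way
  puts "#{entry.full_name} SHA1:   #{Base64.strict_encode64(Digest::SHA1.hexdigest(body))}"
  puts "#{entry.full_name} SHA512: #{Base64.strict_encode64(Digest::SHA512.hexdigest(body))}"
end
tar.close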
data/.gitignore
ADDED
data/Gemfile
ADDED
data/Rakefile
ADDED
data/lib/logstash/inputs/tcp.rb
ADDED
@@ -0,0 +1,238 @@
# encoding: utf-8
require "logstash/inputs/base"
require "logstash/namespace"
require "logstash/util/socket_peer"

# Read events over a TCP socket.
#
# Like stdin and file inputs, each event is assumed to be one line of text.
#
# Can either accept connections from clients or connect to a server,
# depending on `mode`.
class LogStash::Inputs::Tcp < LogStash::Inputs::Base
  class Interrupted < StandardError; end
  config_name "tcp"
  milestone 2

  default :codec, "line"

  # When mode is `server`, the address to listen on.
  # When mode is `client`, the address to connect to.
  config :host, :validate => :string, :default => "0.0.0.0"

  # When mode is `server`, the port to listen on.
  # When mode is `client`, the port to connect to.
  config :port, :validate => :number, :required => true

  # The 'read' timeout in seconds. If a particular tcp connection is idle for
  # more than this timeout period, we will assume it is dead and close it.
  #
  # If you never want to timeout, use -1.
  config :data_timeout, :validate => :number, :default => -1

  # Mode to operate in. `server` listens for client connections,
  # `client` connects to a server.
  config :mode, :validate => ["server", "client"], :default => "server"

  # Enable SSL (must be set for other `ssl_` options to take effect).
  config :ssl_enable, :validate => :boolean, :default => false

  # Verify the identity of the other end of the SSL connection against the CA.
  # For input, sets the field `sslsubject` to that of the client certificate.
  config :ssl_verify, :validate => :boolean, :default => false

  # The SSL CA certificate, chainfile or CA path. The system CA path is automatically included.
  config :ssl_cacert, :validate => :path

  # SSL certificate path
  config :ssl_cert, :validate => :path

  # SSL key path
  config :ssl_key, :validate => :path

  # SSL key passphrase
  config :ssl_key_passphrase, :validate => :password, :default => nil

  def initialize(*args)
    super(*args)
  end # def initialize

  public
  def register
    require "socket"
    require "timeout"
    require "openssl"

    # monkey patch TCPSocket and SSLSocket to include socket peer
    TCPSocket.module_eval{include ::LogStash::Util::SocketPeer}
    OpenSSL::SSL::SSLSocket.module_eval{include ::LogStash::Util::SocketPeer}

    fix_streaming_codecs

    if @ssl_enable
      @ssl_context = OpenSSL::SSL::SSLContext.new
      @ssl_context.cert = OpenSSL::X509::Certificate.new(File.read(@ssl_cert))
      @ssl_context.key = OpenSSL::PKey::RSA.new(File.read(@ssl_key),@ssl_key_passphrase)
      if @ssl_verify
        @cert_store = OpenSSL::X509::Store.new
        # Load the system default certificate path to the store
        @cert_store.set_default_paths
        if File.directory?(@ssl_cacert)
          @cert_store.add_path(@ssl_cacert)
        else
          @cert_store.add_file(@ssl_cacert)
        end
        @ssl_context.cert_store = @cert_store
        @ssl_context.verify_mode = OpenSSL::SSL::VERIFY_PEER|OpenSSL::SSL::VERIFY_FAIL_IF_NO_PEER_CERT
      end
    end # @ssl_enable

    if server?
      @logger.info("Starting tcp input listener", :address => "#{@host}:#{@port}")
      begin
        @server_socket = TCPServer.new(@host, @port)
      rescue Errno::EADDRINUSE
        @logger.error("Could not start TCP server: Address in use", :host => @host, :port => @port)
        raise
      end
      if @ssl_enable
        @server_socket = OpenSSL::SSL::SSLServer.new(@server_socket, @ssl_context)
      end # @ssl_enable
    end
  end # def register

  private
  def handle_socket(socket, client_address, output_queue, codec)
    while true
      buf = nil
      # NOTE(petef): the timeout only hits after the line is read or socket dies
      # TODO(sissel): Why do we have a timeout here? What's the point?
      if @data_timeout == -1
        buf = read(socket)
      else
        Timeout::timeout(@data_timeout) do
          buf = read(socket)
        end
      end
      codec.decode(buf) do |event|
        event["host"] ||= client_address
        event["sslsubject"] ||= socket.peer_cert.subject if @ssl_enable && @ssl_verify
        decorate(event)
        output_queue << event
      end
    end # loop
  rescue EOFError
    @logger.debug? && @logger.debug("Connection closed", :client => socket.peer)
  rescue Errno::ECONNRESET
    @logger.debug? && @logger.debug("Connection reset by peer", :client => socket.peer)
  rescue => e
    @logger.error("An error occurred. Closing connection", :client => socket.peer, :exception => e, :backtrace => e.backtrace)
  ensure
    socket.close rescue nil

    codec.respond_to?(:flush) && codec.flush do |event|
      event["host"] ||= client_address
      event["sslsubject"] ||= socket.peer_cert.subject if @ssl_enable && @ssl_verify
      decorate(event)
      output_queue << event
    end
  end

  private
  def client_thread(output_queue, socket)
    Thread.new(output_queue, socket) do |q, s|
      begin
        @logger.debug? && @logger.debug("Accepted connection", :client => s.peer, :server => "#{@host}:#{@port}")
        handle_socket(s, s.peeraddr[3], q, @codec.clone)
      rescue Interrupted
        s.close rescue nil
      ensure
        @client_threads_lock.synchronize{@client_threads.delete(Thread.current)}
      end
    end
  end

  private
  def server?
    @mode == "server"
  end # def server?

  private
  def read(socket)
    return socket.sysread(16384)
  end # def readline

  public
  def run(output_queue)
    if server?
      run_server(output_queue)
    else
      run_client(output_queue)
    end
  end # def run

  def run_server(output_queue)
    @thread = Thread.current
    @client_threads = []
    @client_threads_lock = Mutex.new

    while true
      begin
        socket = @server_socket.accept
        # start a new thread for each connection.
        @client_threads_lock.synchronize{@client_threads << client_thread(output_queue, socket)}
      rescue OpenSSL::SSL::SSLError => ssle
        # NOTE(mrichar1): This doesn't return a useful error message for some reason
        @logger.error("SSL Error", :exception => ssle, :backtrace => ssle.backtrace)
      rescue IOError, LogStash::ShutdownSignal
        if @interrupted
          @server_socket.close rescue nil

          threads = @client_threads_lock.synchronize{@client_threads.dup}
          threads.each do |thread|
            thread.raise(LogStash::ShutdownSignal) if thread.alive?
          end

          # intended shutdown, get out of the loop
          break
        else
          # it was a genuine IOError, propagate it up
          raise
        end
      end
    end # loop
  rescue LogStash::ShutdownSignal
    # nothing to do
  ensure
    @server_socket.close rescue nil
  end # def run_server

  def run_client(output_queue)
    @thread = Thread.current
    while true
      client_socket = TCPSocket.new(@host, @port)
      if @ssl_enable
        client_socket = OpenSSL::SSL::SSLSocket.new(client_socket, @ssl_context)
        begin
          client_socket.connect
        rescue OpenSSL::SSL::SSLError => ssle
          @logger.error("SSL Error", :exception => ssle, :backtrace => ssle.backtrace)
          # NOTE(mrichar1): Hack to prevent hammering peer
          sleep(5)
          next
        end
      end
      @logger.debug("Opened connection", :client => "#{client_socket.peer}")
      handle_socket(client_socket, client_socket.peeraddr[3], output_queue, @codec.clone)
    end # loop
  ensure
    client_socket.close rescue nil
  end # def run

  public
  def teardown
    if server?
      @interrupted = true
    end
  end # def teardown
end # class LogStash::Inputs::Tcp
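For reference, a minimal client sketch (not shipped with the gem) that feeds this input while a pipeline is running with a tcp input in server mode and a json_lines codec; the port number and payload fields below are arbitrary examples:

# Sketch only: send newline-delimited JSON events to a running tcp input.
require "socket"
require "json"

socket = TCPSocket.new("127.0.0.1", 5000) # assumed: tcp { port => 5000 codec => json_lines }
3.times do |i|
  # json_lines expects one JSON document per newline-terminated line
  socket.puts(JSON.dump({ "message" => "hello", "idx" => i }))
end
socket.close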
data/logstash-input-tcp.gemspec
ADDED
@@ -0,0 +1,30 @@
Gem::Specification.new do |s|

  s.name = 'logstash-input-tcp'
  s.version = '0.1.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Read events over a TCP socket."
  s.description = "Read events over a TCP socket."
  s.authors = ["Elasticsearch"]
  s.email = 'richard.pijnenburg@elasticsearch.com'
  s.homepage = "http://logstash.net/"
  s.require_paths = ["lib"]

  # Files
  s.files = `git ls-files`.split($\)+::Dir.glob('vendor/*')

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "group" => "input" }

  # Gem dependencies
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'

  s.add_runtime_dependency 'logstash-codec-plain'
  s.add_runtime_dependency 'logstash-codec-line'
  s.add_runtime_dependency 'logstash-codec-json'
  s.add_runtime_dependency 'logstash-codec-json_lines'
end
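The "logstash_plugin" metadata flag above is what lets tooling recognize this gem as a Logstash plugin. A small sketch (an assumption about how one might inspect it, and requiring the gem to be installed locally):

# Sketch only: read the plugin flag from an installed gem's metadata.
require "rubygems"

spec = Gem::Specification.find_by_name("logstash-input-tcp")
if spec.metadata["logstash_plugin"] == "true"
  puts "#{spec.name} #{spec.version} is a logstash #{spec.metadata['group']} plugin"
end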
data/rakelib/publish.rake
ADDED
@@ -0,0 +1,9 @@
require "gem_publisher"

desc "Publish gem to RubyGems.org"
task :publish_gem do |t|
  gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
  gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
  puts "Published #{gem}" if gem
end
data/rakelib/vendor.rake
ADDED
@@ -0,0 +1,169 @@
require "net/http"
require "uri"
require "digest/sha1"

def vendor(*args)
  return File.join("vendor", *args)
end

directory "vendor/" => ["vendor"] do |task, args|
  mkdir task.name
end

def fetch(url, sha1, output)

  puts "Downloading #{url}"
  actual_sha1 = download(url, output)

  if actual_sha1 != sha1
    fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
  end
end # def fetch

def file_fetch(url, sha1)
  filename = File.basename( URI(url).path )
  output = "vendor/#{filename}"
  task output => [ "vendor/" ] do
    begin
      actual_sha1 = file_sha1(output)
      if actual_sha1 != sha1
        fetch(url, sha1, output)
      end
    rescue Errno::ENOENT
      fetch(url, sha1, output)
    end
  end.invoke

  return output
end

def file_sha1(path)
  digest = Digest::SHA1.new
  fd = File.new(path, "r")
  while true
    begin
      digest << fd.sysread(16384)
    rescue EOFError
      break
    end
  end
  return digest.hexdigest
ensure
  fd.close if fd
end

def download(url, output)
  uri = URI(url)
  digest = Digest::SHA1.new
  tmp = "#{output}.tmp"
  Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
    request = Net::HTTP::Get.new(uri.path)
    http.request(request) do |response|
      fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
      size = (response["content-length"].to_i || -1).to_f
      count = 0
      File.open(tmp, "w") do |fd|
        response.read_body do |chunk|
          fd.write(chunk)
          digest << chunk
          if size > 0 && $stdout.tty?
            count += chunk.bytesize
            $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
          end
        end
      end
      $stdout.write("\r      \r") if $stdout.tty?
    end
  end

  File.rename(tmp, output)

  return digest.hexdigest
rescue SocketError => e
  puts "Failure while downloading #{url}: #{e}"
  raise
ensure
  File.unlink(tmp) if File.exist?(tmp)
end # def download

def untar(tarball, &block)
  require "archive/tar/minitar"
  tgz = Zlib::GzipReader.new(File.open(tarball))
  # Pull out typesdb
  tar = Archive::Tar::Minitar::Input.open(tgz)
  tar.each do |entry|
    path = block.call(entry)
    next if path.nil?
    parent = File.dirname(path)

    mkdir_p parent unless File.directory?(parent)

    # Skip this file if the output file is the same size
    if entry.directory?
      mkdir path unless File.directory?(path)
    else
      entry_mode = entry.instance_eval { @mode } & 0777
      if File.exists?(path)
        stat = File.stat(path)
        # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
        # expose headers in the entry.
        entry_size = entry.instance_eval { @size }
        # If file sizes are same, skip writing.
        next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
      end
      puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
      File.open(path, "w") do |fd|
        # eof? check lets us skip empty files. Necessary because the API provided by
        # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
        # IO object. Something about empty files in this EntryStream causes
        # IO.copy_stream to throw "can't convert nil into String" on JRuby
        # TODO(sissel): File a bug about this.
        while !entry.eof?
          chunk = entry.read(16384)
          fd.write(chunk)
        end
        #IO.copy_stream(entry, fd)
      end
      File.chmod(entry_mode, path)
    end
  end
  tar.close
  File.unlink(tarball) if File.file?(tarball)
end # def untar

def ungz(file)

  outpath = file.gsub('.gz', '')
  tgz = Zlib::GzipReader.new(File.open(file))
  begin
    File.open(outpath, "w") do |out|
      IO::copy_stream(tgz, out)
    end
    File.unlink(file)
  rescue
    File.unlink(outpath) if File.file?(outpath)
    raise
  end
  tgz.close
end

desc "Process any vendor files required for this plugin"
task "vendor" do |task, args|

  @files.each do |file|
    download = file_fetch(file['url'], file['sha1'])
    if download =~ /.tar.gz/
      prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
      untar(download) do |entry|
        if !file['files'].nil?
          next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
          out = entry.full_name.split("/").last
        end
        File.join('vendor', out)
      end
    elsif download =~ /.gz/
      ungz(download)
    end
  end

end
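The "vendor" task above iterates an @files collection that is not defined anywhere in this file. A sketch of the hash structure the task appears to expect; the URL, SHA1, and file filter below are placeholder values, not anything shipped with this gem:

# Sketch only: the shape of @files consumed by the vendor task.
@files = [
  {
    'url'   => "https://example.org/downloads/example-data.tar.gz", # assumed
    'sha1'  => "0123456789abcdef0123456789abcdef01234567",          # assumed
    # optional: extract only these entries from the tarball
    'files' => ["/data/example.dat"]
  }
]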
data/spec/inputs/tcp_spec.rb
ADDED
@@ -0,0 +1,280 @@
# encoding: utf-8
require "spec_helper"
require "socket"
require "timeout"
require "logstash/json"
require "logstash/inputs/tcp"
require 'stud/try'

describe LogStash::Inputs::Tcp do


  context "codec (PR #1372)" do
    it "switches from plain to line" do
      require "logstash/codecs/plain"
      require "logstash/codecs/line"
      plugin = LogStash::Inputs::Tcp.new("codec" => LogStash::Codecs::Plain.new, "port" => 0)
      plugin.register
      insist { plugin.codec }.is_a?(LogStash::Codecs::Line)
    end
    it "switches from json to json_lines" do
      require "logstash/codecs/json"
      require "logstash/codecs/json_lines"
      plugin = LogStash::Inputs::Tcp.new("codec" => LogStash::Codecs::JSON.new, "port" => 0)
      plugin.register
      insist { plugin.codec }.is_a?(LogStash::Codecs::JSONLines)
    end
  end

  describe "read plain with unicode", :socket => true do
    event_count = 10
    port = 5511
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
      event_count.times do |i|
        # unicode smiley for testing unicode support!
        socket.puts("#{i} ☹")
      end
      socket.close

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < event_count}

      events = event_count.times.collect { queue.pop }
      event_count.times do |i|
        insist { events[i]["message"] } == "#{i} ☹"
      end
    end # input
  end

  describe "read events with plain codec and ISO-8859-1 charset" do
    port = 5513
    charset = "ISO-8859-1"
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
          codec => plain { charset => "#{charset}" }
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
      text = "\xA3" # the £ symbol in ISO-8859-1 aka Latin-1
      text.force_encoding("ISO-8859-1")
      socket.puts(text)
      socket.close

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < 1}

      event = queue.pop
      # Make sure the 0xA3 latin-1 code converts correctly to UTF-8.
      pending("charset conv broken") do
        insist { event["message"].size } == 1
        insist { event["message"].bytesize } == 2
        insist { event["message"] } == "£"
      end
    end # input
  end

  describe "read events with json codec" do
    port = 5514
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
          codec => json
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      data = {
        "hello" => "world",
        "foo" => [1,2,3],
        "baz" => { "1" => "2" },
        "host" => "example host"
      }

      socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
      socket.puts(LogStash::Json.dump(data))
      socket.close

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < 1}

      event = queue.pop
      insist { event["hello"] } == data["hello"]
      insist { event["foo"].to_a } == data["foo"] # to_a to cast Java ArrayList produced by JrJackson
      insist { event["baz"] } == data["baz"]

      # Make sure the tcp input, w/ json codec, uses the event's 'host' value,
      # if present, instead of providing its own
      insist { event["host"] } == data["host"]
    end # input
  end

  describe "read events with json codec (testing 'host' handling)" do
    port = 5514
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
          codec => json
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      data = {
        "hello" => "world"
      }

      socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
      socket.puts(LogStash::Json.dump(data))
      socket.close

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < 1}

      event = queue.pop
      insist { event["hello"] } == data["hello"]
      insist { event }.include?("host")
    end # input
  end

  describe "read events with json_lines codec" do
    port = 5515
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
          codec => json_lines
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      data = {
        "hello" => "world",
        "foo" => [1,2,3],
        "baz" => { "1" => "2" },
        "idx" => 0
      }

      socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
      (1..5).each do |idx|
        data["idx"] = idx
        socket.puts(LogStash::Json.dump(data) + "\n")
      end # do
      socket.close

      (1..5).each do |idx|
        event = queue.pop
        insist { event["hello"] } == data["hello"]
        insist { event["foo"].to_a } == data["foo"] # to_a to cast Java ArrayList produced by JrJackson
        insist { event["baz"] } == data["baz"]
        insist { event["idx"] } == idx
      end # do
    end # input
  end # describe

  describe "one message per connection" do
    event_count = 10
    port = 5516
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      event_count.times do |i|
        socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
        socket.puts("#{i}")
        socket.flush
        socket.close
      end

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < event_count}

      # since each message is sent on its own tcp connection & thread, exact receiving order cannot be garanteed
      events = event_count.times.collect{queue.pop}.sort_by{|event| event["message"]}

      event_count.times do |i|
        insist { events[i]["message"] } == "#{i}"
      end
    end # input
  end

  describe "connection threads are cleaned up when connection is closed" do
    event_count = 10
    port = 5517
    config <<-CONFIG
      input {
        tcp {
          port => #{port}
        }
      }
    CONFIG

    input do |pipeline, queue|
      Thread.new { pipeline.run }
      sleep 0.1 while !pipeline.ready?

      inputs = pipeline.instance_variable_get("@inputs")
      insist { inputs.size } == 1

      sockets = event_count.times.map do |i|
        socket = Stud::try(5.times) { TCPSocket.new("127.0.0.1", port) }
        socket.puts("#{i}")
        socket.flush
        socket
      end

      # wait till all events have been processed
      Timeout.timeout(1) {sleep 0.1 while queue.size < event_count}

      # we should have "event_count" pending threads since sockets were not closed yet
      client_threads = inputs[0].instance_variable_get("@client_threads")
      insist { client_threads.size } == event_count

      # close all sockets and make sure there is not more pending threads
      sockets.each{|socket| socket.close}
      Timeout.timeout(1) {sleep 0.1 while client_threads.size > 0}
      insist { client_threads.size } == 0 # this check is actually useless per previous line

    end # input
  end
end
metadata
ADDED
@@ -0,0 +1,130 @@
--- !ruby/object:Gem::Specification
name: logstash-input-tcp
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Elasticsearch
autorequire:
bindir: bin
cert_chain: []
date: 2014-10-30 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: logstash
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: 1.4.0
    - - <
      - !ruby/object:Gem::Version
        version: 2.0.0
- !ruby/object:Gem::Dependency
  name: logstash-codec-plain
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-codec-line
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-codec-json
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
- !ruby/object:Gem::Dependency
  name: logstash-codec-json_lines
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - ! '>='
      - !ruby/object:Gem::Version
        version: '0'
description: Read events over a TCP socket.
email: richard.pijnenburg@elasticsearch.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- .gitignore
- Gemfile
- Rakefile
- lib/logstash/inputs/tcp.rb
- logstash-input-tcp.gemspec
- rakelib/publish.rake
- rakelib/vendor.rake
- spec/inputs/tcp_spec.rb
homepage: http://logstash.net/
licenses:
- Apache License (2.0)
metadata:
  logstash_plugin: 'true'
  group: input
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ! '>='
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubyforge_project:
rubygems_version: 2.4.1
signing_key:
specification_version: 4
summary: Read events over a TCP socket.
test_files:
- spec/inputs/tcp_spec.rb