logstash-output-elasticsearch 0.1.1 → 0.1.5

checksums.yaml CHANGED
@@ -1,15 +1,15 @@
  ---
  !binary "U0hBMQ==":
  metadata.gz: !binary |-
- YTQ0NzIzNzc2MmVkMTYxM2Y5OTBhNTExZjQ4Nzc5ZGJlNDU5MDQwYg==
+ OTFjZmRiNDk2YjMzNGE4NzVmYTgyNGMzYzAxNGUxZjY3ZDEwMTE4MQ==
  data.tar.gz: !binary |-
- NGQxYTgwYzRlMjhkMDBkYWFjODM4ZDAyNjhmNWE2NDZjOWY1ZDliZA==
+ ZTcyNTg2NzA5ZDQ3NGRmZDM4NGE1ZmJlNzllOWI3MTAxMzgwNjllMQ==
  SHA512:
  metadata.gz: !binary |-
- NTJlMWQzNDU2OTMwZGMzZDMyNjAwYjE5M2M0NjQ2ZWQ5YmQ4OGRlMDIxODEy
- MDY0NTI4NTU2ZWE0NDdlMmU4MTY1NjAwM2IwODAxZTUwZGMxN2IyNDdhZWI3
- MWU5MGYyYmNjYWMyYzA5OTQzM2JjMzZhNTk3Njg4NThkZTJjYjE=
+ MmIxMGUyZjAxYTE5NmUzZGIzZWRmNDc0NmMyOWVlMTQ2YTQxMjA2ZmQzZDg2
+ M2FlMWVjMGRmNTQwMjdjZjM3NzY5Y2UxMTk1ODVlM2NhOTBlZmIwMzA4ZjE4
+ YWNjYjdlMzgxYmUyMTMwN2UwMzI2MWEzNjI3OWMyNWM4ZDRlYmE=
  data.tar.gz: !binary |-
- YTQ1YjU1MDNmOTliMzgwYTU4YmNjYTdlMmFjNjQyNTBiODcxMTk5NWU5ZGJh
- MjNkMDZlYTBlM2RkOTdjNTBjNDI5MDI4MDIzMTNjNzBjZjQxMzE1ZTk0YTc3
- Y2Q3MjgxNmE2NjFmOTQxMjdkYWE3ZWU3NjMwNzgwNzU3MjY0ZGU=
+ YjZmYWQzZjMxMGUwODFkZDMzYWY4OTQ2Zjc1NmZiMDRiODUwYWVmMmIyNDNl
+ YjMwMjIyMzg2YjA5M2MwNTdlNDdiMmZlOTA0OTZkNGRkZGNmYTFkYTFjNGUz
+ YTQ4YjQ1ZWFiZjQzZTU5Y2I5MzI1ZWJlZjVhNTlhMDU5NzZmNDI=
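For reference, the `!binary` scalars in checksums.yaml are Base64-encoded hex digests of the two gem payloads. A minimal Ruby sketch of checking one of them locally, assuming the gem has been unpacked so that metadata.gz sits in the current directory:

    require "base64"
    require "digest"

    # Decode the new SHA1 entry for metadata.gz shown above.
    expected = Base64.decode64("OTFjZmRiNDk2YjMzNGE4NzVmYTgyNGMzYzAxNGUxZjY3ZDEwMTE4MQ==")
    # => "91cfdb496b334a875fa824c3c014e1f67d101181"

    actual = Digest::SHA1.file("metadata.gz").hexdigest
    puts actual == expected ? "metadata.gz OK" : "metadata.gz MISMATCH"
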
data/.gitignore ADDED
@@ -0,0 +1,4 @@
+ *.gem
+ Gemfile.lock
+ .bundle
+ vendor
data/Gemfile ADDED
@@ -0,0 +1,4 @@
+ source 'http://rubygems.org'
+ gem 'rake'
+ gem 'gem_publisher'
+ gem 'archive-tar-minitar'
data/LICENSE ADDED
@@ -0,0 +1,13 @@
+ Copyright (c) 2012-2014 Elasticsearch <http://www.elasticsearch.org>
+
+ Licensed under the Apache License, Version 2.0 (the "License");
+ you may not use this file except in compliance with the License.
+ You may obtain a copy of the License at
+
+ http://www.apache.org/licenses/LICENSE-2.0
+
+ Unless required by applicable law or agreed to in writing, software
+ distributed under the License is distributed on an "AS IS" BASIS,
+ WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+ See the License for the specific language governing permissions and
+ limitations under the License.
data/Rakefile ADDED
@@ -0,0 +1,6 @@
+ @files=[]
+
+ task :default do
+ system("rake -T")
+ end
+
data/lib/logstash/outputs/elasticsearch.rb CHANGED
@@ -5,20 +5,21 @@ require "logstash/outputs/base"
  require "logstash/json"
  require "stud/buffer"
  require "socket" # for Socket.gethostname
+ require "uri" # for escaping user input
+ require 'logstash-output-elasticsearch_jars.rb'

  # This output lets you store logs in Elasticsearch and is the most recommended
  # output for Logstash. If you plan on using the Kibana web interface, you'll
  # need to use this output.
  #
  # *VERSION NOTE*: Your Elasticsearch cluster must be running Elasticsearch
- # %ELASTICSEARCH_VERSION%. If you use any other version of Elasticsearch,
- # you should set `protocol => http` in this plugin.
+ # 1.0.0 or later.
  #
  # If you want to set other Elasticsearch options that are not exposed directly
  # as configuration options, there are two methods:
  #
  # * Create an `elasticsearch.yml` file in the $PWD of the Logstash process
- # * Pass in es.* java properties (java -Des.node.foo= or ruby -J-Des.node.foo=)
+ # * Pass in es.* java properties (`java -Des.node.foo=` or `ruby -J-Des.node.foo=`)
  #
  # With the default `protocol` setting ("node"), this plugin will join your
  # Elasticsearch cluster as a client node, so it will show up in Elasticsearch's
@@ -28,10 +29,6 @@ require "socket" # for Socket.gethostname
  #
  # ## Operational Notes
  #
- # Template management requires Elasticsearch version 0.90.7 or later. If you
- # are using a version older than this, please upgrade. You will receive
- # more benefits than just template management!
- #
  # If using the default `protocol` setting ("node"), your firewalls might need
  # to permit port 9300 in *both* directions (from Logstash to Elasticsearch, and
  # Elasticsearch to Logstash)
@@ -41,32 +38,34 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  config_name "elasticsearch"
  milestone 3

- # The index to write events to. This can be dynamic using the %{foo} syntax.
+ # The index to write events to. This can be dynamic using the `%{foo}` syntax.
  # The default value will partition your indices by day so you can more easily
  # delete old data or only search specific date ranges.
  # Indexes may not contain uppercase characters.
  config :index, :validate => :string, :default => "logstash-%{+YYYY.MM.dd}"

  # The index type to write events to. Generally you should try to write only
- # similar events to the same 'type'. String expansion '%{foo}' works here.
+ # similar events to the same 'type'. String expansion `%{foo}` works here.
  config :index_type, :validate => :string

- # Starting in Logstash 1.3 (unless you set option "manage_template" to false)
+ # Starting in Logstash 1.3 (unless you set option `manage_template` to false)
  # a default mapping template for Elasticsearch will be applied, if you do not
  # already have one set to match the index pattern defined (default of
- # "logstash-%{+YYYY.MM.dd}"), minus any variables. For example, in this case
- # the template will be applied to all indices starting with logstash-*
+ # `logstash-%{+YYYY.MM.dd}`), minus any variables. For example, in this case
+ # the template will be applied to all indices starting with `logstash-*`
  #
  # If you have dynamic templating (e.g. creating indices based on field names)
- # then you should set "manage_template" to false and use the REST API to upload
+ # then you should set `manage_template` to false and use the REST API to upload
  # your templates manually.
  config :manage_template, :validate => :boolean, :default => true

  # This configuration option defines how the template is named inside Elasticsearch.
  # Note that if you have used the template management features and subsequently
  # change this, you will need to prune the old template manually, e.g.
- # curl -XDELETE <http://localhost:9200/_template/OldTemplateName?pretty>
- # where OldTemplateName is whatever the former setting was.
+ #
+ # `curl -XDELETE <http://localhost:9200/_template/OldTemplateName?pretty>`
+ #
+ # where `OldTemplateName` is whatever the former setting was.
  config :template_name, :validate => :string, :default => "logstash"

  # You can set the path to your own template here, if you so desire.
@@ -74,7 +73,7 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  config :template, :validate => :path

  # Overwrite the current template with whatever is configured
- # in the template and template_name directives.
+ # in the `template` and `template_name` directives.
  config :template_overwrite, :validate => :boolean, :default => false

  # The document ID for the index. Useful for overwriting existing entries in
@@ -88,7 +87,10 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  # The hostname or IP address of the host to use for Elasticsearch unicast discovery
  # This is only required if the normal multicast/cluster discovery stuff won't
  # work in your environment.
- config :host, :validate => :string
+ #
+ # `"127.0.0.1"`
+ # `["127.0.0.1:9300","127.0.0.2:9300"]`
+ config :host, :validate => :array

  # The port for Elasticsearch transport to use.
  #
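
With `host` now validated as an array, one output instance can be pointed at several Elasticsearch servers, optionally with a per-entry port. A minimal sketch of constructing the plugin programmatically, in the style of the spec file further down (host names are placeholders):

    require "logstash/outputs/elasticsearch"

    # Placeholder hosts; an entry without a port falls back to the `port` option.
    output = LogStash::Outputs::ElasticSearch.new(
      "protocol" => "http",
      "host"     => ["es01.example.org", "es02.example.org:9201"],
      "index"    => "logstash-%{+YYYY.MM.dd}"
    )
    output.register
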
@@ -173,24 +175,34 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  #
  # What does each action do?
  #
- # - index: indexes a document (an event from logstash).
+ # - index: indexes a document (an event from Logstash).
  # - delete: deletes a document by id
  #
  # For more details on actions, check out the [Elasticsearch bulk API documentation](http://www.elasticsearch.org/guide/en/elasticsearch/reference/current/docs-bulk.html)
  config :action, :validate => :string, :default => "index"

+ # Username and password (HTTP only)
+ config :user, :validate => :string
+ config :password, :validate => :password
+
+ # SSL Configurations (HTTP only)
+ #
+ # Enable SSL
+ config :ssl, :validate => :boolean, :default => false
+
+ # The .cer or .pem file to validate the server's certificate
+ config :cacert, :validate => :path
+
+ # The JKS truststore to validate the server's certificate
+ # Use either `:truststore` or `:cacert`
+ config :truststore, :validate => :path
+
+ # Set the truststore password
+ config :truststore_password, :validate => :password
+
  public
  def register
  client_settings = {}
- client_settings["cluster.name"] = @cluster if @cluster
- client_settings["network.host"] = @bind_host if @bind_host
- client_settings["transport.tcp.port"] = @bind_port if @bind_port
-
- if @node_name
- client_settings["node.name"] = @node_name
- else
- client_settings["node.name"] = "logstash-#{Socket.gethostname}-#{$$}-#{object_id}"
- end

  if @protocol.nil?
  @protocol = LogStash::Environment.jruby? ? "node" : "http"
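
The new `user`/`password` and `ssl`/`cacert`/`truststore` options apply to the HTTP protocol only; `register` rejects them for "node" and "transport". A hedged sketch of an HTTPS output with basic authentication, mirroring the spec examples below (host, credentials, and certificate path are placeholders):

    require "logstash/outputs/elasticsearch"

    # Placeholder values for illustration only.
    output = LogStash::Outputs::ElasticSearch.new(
      "protocol" => "http",      # switched to "https" internally once ssl => true
      "host"     => ["node01"],
      "user"     => "logstash_writer",
      "password" => "changeme",
      "ssl"      => true,
      "cacert"   => "/etc/pki/tls/certs/es-ca.pem"  # or "truststore"/"truststore_password"
    )
    output.register
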
@@ -199,10 +211,21 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  if ["node", "transport"].include?(@protocol)
  # Node or TransportClient; requires JRuby
  raise(LogStash::PluginLoadingError, "This configuration requires JRuby. If you are not using JRuby, you must set 'protocol' to 'http'. For example: output { elasticsearch { protocol => \"http\" } }") unless LogStash::Environment.jruby?
- LogStash::Environment.load_elasticsearch_jars!

- # setup log4j properties for Elasticsearch
- LogStash::Logger.setup_log4j(@logger)
+ client_settings["cluster.name"] = @cluster if @cluster
+ client_settings["network.host"] = @bind_host if @bind_host
+ client_settings["transport.tcp.port"] = @bind_port if @bind_port
+
+ if @node_name
+ client_settings["node.name"] = @node_name
+ else
+ client_settings["node.name"] = "logstash-#{Socket.gethostname}-#{$$}-#{object_id}"
+ end
+
+ @@plugins.each do |plugin|
+ name = plugin.name.split('-')[-1]
+ client_settings.merge!(LogStash::Outputs::ElasticSearch.const_get(name.capitalize).create_client_config(self))
+ end
  end

  require "logstash/outputs/elasticsearch/protocol"
@@ -216,49 +239,102 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base

  if @host.nil? && @protocol == "http"
  @logger.info("No 'host' set in elasticsearch output. Defaulting to localhost")
- @host = "localhost"
+ @host = ["localhost"]
  end

- options = {
- :host => @host,
- :port => @port,
+ if @ssl
+ if @protocol == "http"
+ @protocol = "https"
+ if @cacert && @truststore
+ raise(LogStash::ConfigurationError, "Use either \"cacert\" or \"truststore\" when configuring the CA certificate") if @truststore
+ end
+ ssl_options = {}
+ if @cacert then
+ @truststore, ssl_options[:truststore_password] = generate_jks @cacert
+ elsif @truststore
+ ssl_options[:truststore_password] = @truststore_password.value if @truststore_password
+ end
+ ssl_options[:truststore] = @truststore
+ client_settings[:ssl] = ssl_options
+ else
+ raise(LogStash::ConfigurationError, "SSL is not supported for '#{@protocol}'. Change the protocol to 'http' if you need SSL.")
+ end
+ end
+
+ common_options = {
+ :protocol => @protocol,
  :client_settings => client_settings
  }

+ if @user && @password
+ if @protocol =~ /http/
+ common_options[:user] = ::URI.escape(@user, "@:")
+ common_options[:password] = ::URI.escape(@password.value, "@:")
+ else
+ raise(LogStash::ConfigurationError, "User and password parameters are not supported for '#{@protocol}'. Change the protocol to 'http' if you need them.")
+ end
+ end

  client_class = case @protocol
  when "transport"
  LogStash::Outputs::Elasticsearch::Protocols::TransportClient
  when "node"
  LogStash::Outputs::Elasticsearch::Protocols::NodeClient
- when "http"
+ when /http/
  LogStash::Outputs::Elasticsearch::Protocols::HTTPClient
  end

  if @embedded
  raise(LogStash::ConfigurationError, "The 'embedded => true' setting is only valid for the elasticsearch output under JRuby. You are running #{RUBY_DESCRIPTION}") unless LogStash::Environment.jruby?
- LogStash::Environment.load_elasticsearch_jars!
+ # LogStash::Environment.load_elasticsearch_jars!

  # Default @host with embedded to localhost. This should help avoid
  # newbies tripping on ubuntu and other distros that have a default
  # firewall that blocks multicast.
- @host ||= "localhost"
+ @host ||= ["localhost"]

  # Start Elasticsearch local.
  start_local_elasticsearch
  end

- @client = client_class.new(options)
+ @client = Array.new
+
+ if protocol == "node" or @host.nil? # if @protocol is "node" or @host is not set
+ options = {
+ :host => @host,
+ :port => @port,
+ }.merge(common_options)
+ @client << client_class.new(options)
+ else # if @protocol in ["transport","http"]
+ @host.each do |host|
+ (_host,_port) = host.split ":"
+ options = {
+ :host => _host,
+ :port => _port || @port,
+ }.merge(common_options)
+ @logger.info "Create client to elasticsearch server on #{_host}:#{_port}"
+ @client << client_class.new(options)
+ end # @host.each
+ end
+
+ if @manage_template
+ for client in @client
+ begin
+ @logger.info("Automatic template management enabled", :manage_template => @manage_template.to_s)
+ client.template_install(@template_name, get_template, @template_overwrite)
+ break
+ rescue => e
+ @logger.error("Failed to install template: #{e.message}")
+ end
+ end # for @client loop
+ end # if @manage_templates

  @logger.info("New Elasticsearch output", :cluster => @cluster,
  :host => @host, :port => @port, :embedded => @embedded,
  :protocol => @protocol)

-
- if @manage_template
- @logger.info("Automatic template management enabled", :manage_template => @manage_template.to_s)
- @client.template_install(@template_name, get_template, @template_overwrite)
- end # if @manage_templates
+ @client_idx = 0
+ @current_client = @client[@client_idx]

  buffer_initialize(
  :max_items => @flush_size,
@@ -267,10 +343,17 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  )
  end # def register

+ protected
+ def shift_client
+ @client_idx = (@client_idx+1) % @client.length
+ @current_client = @client[@client_idx]
+ @logger.debug? and @logger.debug("Switched current elasticsearch client to ##{@client_idx} at #{@host[@client_idx]}")
+ end
+
  public
  def get_template
  if @template.nil?
- @template = LogStash::Environment.plugin_path("outputs/elasticsearch/elasticsearch-template.json")
+ @template = ::File.expand_path('elasticsearch/elasticsearch-template.json', ::File.dirname(__FILE__))
  if !File.exists?(@template)
  raise "You must specify 'template => ...' in your elasticsearch output (I looked for '#{@template}')"
  end
@@ -295,6 +378,29 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  @embedded_elasticsearch.start
  end # def start_local_elasticsearch

+ private
+ def generate_jks cert_path
+
+ require 'securerandom'
+ require 'tempfile'
+ require 'java'
+ import java.io.FileInputStream
+ import java.io.FileOutputStream
+ import java.security.KeyStore
+ import java.security.cert.CertificateFactory
+
+ jks = java.io.File.createTempFile("cert", ".jks")
+
+ ks = KeyStore.getInstance "JKS"
+ ks.load nil, nil
+ cf = CertificateFactory.getInstance "X.509"
+ cert = cf.generateCertificate FileInputStream.new(cert_path)
+ ks.setCertificateEntry "cacert", cert
+ pwd = SecureRandom.urlsafe_base64(9)
+ ks.store FileOutputStream.new(jks), pwd.to_java.toCharArray
+ [jks.path, pwd]
+ end
+
  public
  def receive(event)
  return unless output?(event)
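
`generate_jks` converts the PEM certificate given via `cacert` into a temporary JKS truststore with a random password, and that pair is what the SSL branch of `register` feeds into the client settings. A rough illustration of the resulting shape (file name and password below are invented; the temporary file is deleted again in `teardown`):

    # Shape of the settings produced when `cacert` is set; values are made up.
    client_settings = {
      :ssl => {
        :truststore          => "/tmp/cert20141107-1234-abcd.jks",  # from generate_jks
        :truststore_password => "q0stV3hxKzQ3"                      # SecureRandom.urlsafe_base64(9)
      }
    }
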
@@ -313,7 +419,18 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  end # def receive

  def flush(actions, teardown=false)
- @client.bulk(actions)
+ begin
+ @logger.debug? and @logger.debug "Sending bulk of actions to client[#{@client_idx}]: #{@host[@client_idx]}"
+ @current_client.bulk(actions)
+ rescue => e
+ @logger.error "Got error to send bulk of actions to elasticsearch server at #{@host[@client_idx]} : #{e.message}"
+ raise e
+ ensure
+ unless @protocol == "node"
+ @logger.debug? and @logger.debug "Shifting current elasticsearch client"
+ shift_client
+ end
+ end
  # TODO(sissel): Handle errors. Since bulk requests could mostly succeed
  # (aka partially fail), we need to figure out what documents need to be
  # retried.
@@ -322,7 +439,17 @@ class LogStash::Outputs::ElasticSearch < LogStash::Outputs::Base
  end # def flush

  def teardown
+ if @cacert # remove temporary jks store created from the cacert
+ File.delete(@truststore)
+ end
  buffer_flush(:final => true)
  end

+ @@plugins = Gem::Specification.find_all{|spec| spec.name =~ /logstash-output-elasticsearch-/ }
+
+ @@plugins.each do |plugin|
+ name = plugin.name.split('-')[-1]
+ require "logstash/outputs/elasticsearch/#{name}"
+ end
+
  end # class LogStash::Outputs::Elasticsearch
data/lib/logstash/outputs/elasticsearch/protocol.rb CHANGED
@@ -51,37 +51,33 @@ module LogStash::Outputs::Elasticsearch
  }

  def initialize(options={})
- require "ftw"
  super
  require "elasticsearch" # gem 'elasticsearch-ruby'
+ # manticore http transport
+ require "elasticsearch/transport/transport/http/manticore"
  @options = DEFAULT_OPTIONS.merge(options)
  @client = client
  end

  def build_client(options)
- client = Elasticsearch::Client.new(
- :host => [options[:host], options[:port]].join(":")
- )
-
- # Use FTW to do indexing requests, for now, until we
- # can identify and resolve performance problems of elasticsearch-ruby
- @bulk_url = "http://#{options[:host]}:#{options[:port]}/_bulk"
- @agent = FTW::Agent.new
-
- return client
- end
-
- if ENV["BULK"] == "esruby"
- def bulk(actions)
- bulk_esruby(actions)
- end
- else
- def bulk(actions)
- bulk_ftw(actions)
+ uri = "#{options[:protocol]}://#{options[:host]}:#{options[:port]}"
+
+ client_options = {
+ :host => [uri],
+ :transport_options => options[:client_settings]
+ }
+ client_options[:transport_class] = ::Elasticsearch::Transport::Transport::HTTP::Manticore
+ client_options[:ssl] = client_options[:transport_options].delete(:ssl)
+
+ if options[:user] && options[:password] then
+ token = Base64.strict_encode64(options[:user] + ":" + options[:password])
+ client_options[:headers] = { "Authorization" => "Basic #{token}" }
  end
+
+ Elasticsearch::Client.new client_options
  end

- def bulk_esruby(actions)
+ def bulk(actions)
  @client.bulk(:body => actions.collect do |action, args, source|
  if source
  next [ { action => args }, source ]
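
With the HTTP client now built on elasticsearch-ruby and the Manticore transport, basic authentication is a pre-computed `Authorization` header. A small Ruby sketch of the token construction used above (credentials are placeholders):

    require "base64"

    user     = "user"        # placeholder credentials
    password = "changeme"

    token   = Base64.strict_encode64("#{user}:#{password}")
    headers = { "Authorization" => "Basic #{token}" }
    # token == "dXNlcjpjaGFuZ2VtZQ=="
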
@@ -89,44 +85,7 @@ module LogStash::Outputs::Elasticsearch
  next { action => args }
  end
  end.flatten)
- end # def bulk_esruby
-
- # Avoid creating a new string for newline every time
- NEWLINE = "\n".freeze
- def bulk_ftw(actions)
- body = actions.collect do |action, args, source|
- header = { action => args }
- if source
- next [ LogStash::Json.dump(header), NEWLINE, LogStash::Json.dump(source), NEWLINE ]
- else
- next [ LogStash::Json.dump(header), NEWLINE ]
- end
- end.flatten.join("")
- begin
- response = @agent.post!(@bulk_url, :body => body)
- rescue EOFError
- @logger.warn("EOF while writing request or reading response header from elasticsearch", :host => @host, :port => @port)
- raise
- end
-
- # Consume the body for error checking
- # This will also free up the connection for reuse.
- response_body = ""
- begin
- response.read_body { |chunk| response_body += chunk }
- rescue EOFError
- @logger.warn("EOF while reading response body from elasticsearch",
- :url => @bulk_url)
- raise
- end
-
- if response.status != 200
- @logger.error("Error writing (bulk) to elasticsearch",
- :response => response, :response_body => response_body,
- :request_body => body)
- raise "Non-OK response code from Elasticsearch: #{response.status}"
- end
- end # def bulk_ftw
+ end # def bulk

  def template_exists?(name)
  @client.indices.get_template(:name => name)
@@ -181,16 +140,40 @@ module LogStash::Outputs::Elasticsearch
  end

  def hosts(options)
- if options[:port].to_s =~ /^\d+-\d+$/
- # port ranges are 'host[port1-port2]' according to
- # http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
- # However, it seems to only query the first port.
- # So generate our own list of unicast hosts to scan.
- range = Range.new(*options[:port].split("-"))
- return range.collect { |p| "#{options[:host]}:#{p}" }.join(",")
+ # http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
+ result = Array.new
+ if options[:host].class == Array
+ options[:host].each do |host|
+ if host.to_s =~ /^.+:.+$/
+ # For host in format: host:port, ignore options[:port]
+ result << host
+ else
+ if options[:port].to_s =~ /^\d+-\d+$/
+ # port ranges are 'host[port1-port2]'
+ result << Range.new(*options[:port].split("-")).collect { |p| "#{host}:#{p}" }
+ else
+ result << "#{host}:#{options[:port]}"
+ end
+ end
+ end
  else
- return "#{options[:host]}:#{options[:port]}"
+ if options[:host].to_s =~ /^.+:.+$/
+ # For host in format: host:port, ignore options[:port]
+ result << options[:host]
+ else
+ if options[:port].to_s =~ /^\d+-\d+$/
+ # port ranges are 'host[port1-port2]' according to
+ # http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
+ # However, it seems to only query the first port.
+ # So generate our own list of unicast hosts to scan.
+ range = Range.new(*options[:port].split("-"))
+ result << range.collect { |p| "#{options[:host]}:#{p}" }
+ else
+ result << "#{options[:host]}:#{options[:port]}"
+ end
+ end
  end
+ result.flatten.join(",")
  end # def hosts

  def build_client(options)
@@ -268,4 +251,3 @@ module LogStash::Outputs::Elasticsearch
  class Delete; end
  end
  end
-
data/logstash-output-elasticsearch.gemspec CHANGED
@@ -1,33 +1,37 @@
  Gem::Specification.new do |s|
-
+
  s.name = 'logstash-output-elasticsearch'
- s.version = '0.1.1'
+ s.version = '0.1.5'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Logstash Output to Elasticsearch"
  s.description = "Output events to elasticsearch"
  s.authors = ["Elasticsearch"]
- s.email = 'rubycoder@example.com'
+ s.email = 'richard.pijnenburg@elasticsearch.com'
  s.homepage = "http://logstash.net/"
  s.require_paths = ["lib"]
-
+
  # Files
  s.files = `git ls-files`.split($\)
-
+
  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})
-
+
  # Special flag to let us know this is actually a logstash plugin
- s.metadata = { "logstash_plugin" => "true" }
+ s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Jar dependencies
- s.requirements << "jar 'org.elasticsearch:elasticsearch', '1.2.2'"
+ s.requirements << "jar 'org.elasticsearch:elasticsearch', '1.3.1'"

  # Gem dependencies
- s.add_runtime_dependency 'elasticsearch'
+ s.add_runtime_dependency 'elasticsearch', ['~> 1.0.6']
  s.add_runtime_dependency 'stud'
  s.add_runtime_dependency 'cabin', ['>=0.6.0']
- s.add_runtime_dependency 'ftw', ['~> 0.0.39']
+ s.add_runtime_dependency 'ftw', ['~> 0.0.40']
  s.add_runtime_dependency 'logstash', '>= 1.4.0', '< 2.0.0'
- s.add_runtime_dependency 'jar-dependencies', ['~> 0.0.6']
-
+ s.add_runtime_dependency 'jar-dependencies'
+
+ if RUBY_PLATFORM == 'java'
+ gem.add_runtime_dependency "manticore"
+ end
  end
+
data/rakelib/publish.rake ADDED
@@ -0,0 +1,9 @@
+ require "gem_publisher"
+
+ desc "Publish gem to RubyGems.org"
+ task :publish_gem do |t|
+ gem_file = Dir.glob(File.expand_path('../*.gemspec',File.dirname(__FILE__))).first
+ gem = GemPublisher.publish_if_updated(gem_file, :rubygems)
+ puts "Published #{gem}" if gem
+ end
+
data/rakelib/vendor.rake ADDED
@@ -0,0 +1,169 @@
+ require "net/http"
+ require "uri"
+ require "digest/sha1"
+
+ def vendor(*args)
+ return File.join("vendor", *args)
+ end
+
+ directory "vendor/" => ["vendor"] do |task, args|
+ mkdir task.name
+ end
+
+ def fetch(url, sha1, output)
+
+ puts "Downloading #{url}"
+ actual_sha1 = download(url, output)
+
+ if actual_sha1 != sha1
+ fail "SHA1 does not match (expected '#{sha1}' but got '#{actual_sha1}')"
+ end
+ end # def fetch
+
+ def file_fetch(url, sha1)
+ filename = File.basename( URI(url).path )
+ output = "vendor/#{filename}"
+ task output => [ "vendor/" ] do
+ begin
+ actual_sha1 = file_sha1(output)
+ if actual_sha1 != sha1
+ fetch(url, sha1, output)
+ end
+ rescue Errno::ENOENT
+ fetch(url, sha1, output)
+ end
+ end.invoke
+
+ return output
+ end
+
+ def file_sha1(path)
+ digest = Digest::SHA1.new
+ fd = File.new(path, "r")
+ while true
+ begin
+ digest << fd.sysread(16384)
+ rescue EOFError
+ break
+ end
+ end
+ return digest.hexdigest
+ ensure
+ fd.close if fd
+ end
+
+ def download(url, output)
+ uri = URI(url)
+ digest = Digest::SHA1.new
+ tmp = "#{output}.tmp"
+ Net::HTTP.start(uri.host, uri.port, :use_ssl => (uri.scheme == "https")) do |http|
+ request = Net::HTTP::Get.new(uri.path)
+ http.request(request) do |response|
+ fail "HTTP fetch failed for #{url}. #{response}" if [200, 301].include?(response.code)
+ size = (response["content-length"].to_i || -1).to_f
+ count = 0
+ File.open(tmp, "w") do |fd|
+ response.read_body do |chunk|
+ fd.write(chunk)
+ digest << chunk
+ if size > 0 && $stdout.tty?
+ count += chunk.bytesize
+ $stdout.write(sprintf("\r%0.2f%%", count/size * 100))
+ end
+ end
+ end
+ $stdout.write("\r \r") if $stdout.tty?
+ end
+ end
+
+ File.rename(tmp, output)
+
+ return digest.hexdigest
+ rescue SocketError => e
+ puts "Failure while downloading #{url}: #{e}"
+ raise
+ ensure
+ File.unlink(tmp) if File.exist?(tmp)
+ end # def download
+
+ def untar(tarball, &block)
+ require "archive/tar/minitar"
+ tgz = Zlib::GzipReader.new(File.open(tarball))
+ # Pull out typesdb
+ tar = Archive::Tar::Minitar::Input.open(tgz)
+ tar.each do |entry|
+ path = block.call(entry)
+ next if path.nil?
+ parent = File.dirname(path)
+
+ mkdir_p parent unless File.directory?(parent)
+
+ # Skip this file if the output file is the same size
+ if entry.directory?
+ mkdir path unless File.directory?(path)
+ else
+ entry_mode = entry.instance_eval { @mode } & 0777
+ if File.exists?(path)
+ stat = File.stat(path)
+ # TODO(sissel): Submit a patch to archive-tar-minitar upstream to
+ # expose headers in the entry.
+ entry_size = entry.instance_eval { @size }
+ # If file sizes are same, skip writing.
+ next if stat.size == entry_size && (stat.mode & 0777) == entry_mode
+ end
+ puts "Extracting #{entry.full_name} from #{tarball} #{entry_mode.to_s(8)}"
+ File.open(path, "w") do |fd|
+ # eof? check lets us skip empty files. Necessary because the API provided by
+ # Archive::Tar::Minitar::Reader::EntryStream only mostly acts like an
+ # IO object. Something about empty files in this EntryStream causes
+ # IO.copy_stream to throw "can't convert nil into String" on JRuby
+ # TODO(sissel): File a bug about this.
+ while !entry.eof?
+ chunk = entry.read(16384)
+ fd.write(chunk)
+ end
+ #IO.copy_stream(entry, fd)
+ end
+ File.chmod(entry_mode, path)
+ end
+ end
+ tar.close
+ File.unlink(tarball) if File.file?(tarball)
+ end # def untar
+
+ def ungz(file)
+
+ outpath = file.gsub('.gz', '')
+ tgz = Zlib::GzipReader.new(File.open(file))
+ begin
+ File.open(outpath, "w") do |out|
+ IO::copy_stream(tgz, out)
+ end
+ File.unlink(file)
+ rescue
+ File.unlink(outpath) if File.file?(outpath)
+ raise
+ end
+ tgz.close
+ end
+
+ desc "Process any vendor files required for this plugin"
+ task "vendor" do |task, args|
+
+ @files.each do |file|
+ download = file_fetch(file['url'], file['sha1'])
+ if download =~ /.tar.gz/
+ prefix = download.gsub('.tar.gz', '').gsub('vendor/', '')
+ untar(download) do |entry|
+ if !file['files'].nil?
+ next unless file['files'].include?(entry.full_name.gsub(prefix, ''))
+ out = entry.full_name.split("/").last
+ end
+ File.join('vendor', out)
+ end
+ elsif download =~ /.gz/
+ ungz(download)
+ end
+ end
+
+ end
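
The `vendor` task iterates over `@files` (declared empty in the Rakefile above) and expects each entry to provide a `url`, a `sha1`, and optionally a `files` whitelist of paths to extract. This plugin ships no vendored artifacts, so the list stays empty; a hypothetical entry would look like:

    # Hypothetical entry; the URL and digest are placeholders, not a real artifact.
    @files = [
      {
        'url'   => 'http://example.org/downloads/some-dependency-1.0.tar.gz',
        'sha1'  => '0000000000000000000000000000000000000000',
        'files' => ['/lib/some-dependency.jar']   # optional whitelist, relative to the archive prefix
      }
    ]
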
data/spec/outputs/elasticsearch.rb CHANGED
@@ -1,10 +1,9 @@
- require "test_utils"
+ require "spec_helper"
  require "ftw"
  require "logstash/plugin"
  require "logstash/json"

  describe "outputs/elasticsearch" do
- extend LogStash::RSpec

  it "should register" do
  output = LogStash::Plugin.lookup("output", "elasticsearch").new("embedded" => "false", "protocol" => "transport", "manage_template" => "false")
@@ -346,4 +345,145 @@ describe "outputs/elasticsearch" do
  end
  end
  end
+
+ describe "elasticsearch protocol" do
+ # ElasticSearch related jars
+ #LogStash::Environment.load_elasticsearch_jars!
+ # Load elasticsearch protocol
+ require "logstash/outputs/elasticsearch/protocol"
+
+ describe "elasticsearch node client" do
+ # Test ElasticSearch Node Client
+ # Reference: http://www.elasticsearch.org/guide/reference/modules/discovery/zen/
+
+ it "should support hosts in both string and array" do
+ # Because we defined *hosts* method in NodeClient as private,
+ # we use *obj.send :method,[args...]* to call method *hosts*
+ client = LogStash::Outputs::Elasticsearch::Protocols::NodeClient.new
+
+ # Node client should support host in string
+ # Case 1: default :host in string
+ insist { client.send :hosts, :host => "host",:port => 9300 } == "host:9300"
+ # Case 2: :port =~ /^\d+_\d+$/
+ insist { client.send :hosts, :host => "host",:port => "9300-9302"} == "host:9300,host:9301,host:9302"
+ # Case 3: :host =~ /^.+:.+$/
+ insist { client.send :hosts, :host => "host:9303",:port => 9300 } == "host:9303"
+ # Case 4: :host =~ /^.+:.+$/ and :port =~ /^\d+_\d+$/
+ insist { client.send :hosts, :host => "host:9303",:port => "9300-9302"} == "host:9303"
+
+ # Node client should support host in array
+ # Case 5: :host in array with single item
+ insist { client.send :hosts, :host => ["host"],:port => 9300 } == ("host:9300")
+ # Case 6: :host in array with more than one items
+ insist { client.send :hosts, :host => ["host1","host2"],:port => 9300 } == "host1:9300,host2:9300"
+ # Case 7: :host in array with more than one items and :port =~ /^\d+_\d+$/
+ insist { client.send :hosts, :host => ["host1","host2"],:port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9300,host2:9301,host2:9302"
+ # Case 8: :host in array with more than one items and some :host =~ /^.+:.+$/
+ insist { client.send :hosts, :host => ["host1","host2:9303"],:port => 9300 } == "host1:9300,host2:9303"
+ # Case 9: :host in array with more than one items, :port =~ /^\d+_\d+$/ and some :host =~ /^.+:.+$/
+ insist { client.send :hosts, :host => ["host1","host2:9303"],:port => "9300-9302" } == "host1:9300,host1:9301,host1:9302,host2:9303"
+ end
+ end
+ end
+
+ describe "Authentication option" do
+ ["node", "transport"].each do |protocol|
+ context "with protocol => #{protocol}" do
+ subject do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "protocol" => protocol,
+ "node_name" => "logstash",
+ "cluster" => "elasticsearch",
+ "host" => "node01",
+ "user" => "test",
+ "password" => "test"
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end
+
+ it "should fail in register" do
+ expect {subject.register}.to raise_error
+ end
+ end
+ end
+ end
+
+ describe "SSL option" do
+ ["node", "transport"].each do |protocol|
+ context "with protocol => #{protocol}" do
+ subject do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "protocol" => protocol,
+ "node_name" => "logstash",
+ "cluster" => "elasticsearch",
+ "host" => "node01",
+ "ssl" => true
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end
+
+ it "should fail in register" do
+ expect {subject.register}.to raise_error
+ end
+ end
+ end
+ end
+
+ describe "send messages to ElasticSearch using HTTPS", :elasticsearch_secure => true do
+ subject do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "protocol" => "http",
+ "node_name" => "logstash",
+ "cluster" => "elasticsearch",
+ "host" => "node01",
+ "user" => "user",
+ "password" => "changeme",
+ "ssl" => true,
+ "cacert" => "/tmp/ca/certs/cacert.pem",
+ # or
+ #"truststore" => "/tmp/ca/truststore.jks",
+ #"truststore_password" => "testeteste"
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end
+
+ before :each do
+ subject.register
+ end
+
+ it "sends events to ES" do
+ expect {
+ subject.receive(LogStash::Event.new("message" => "sample message here"))
+ subject.buffer_flush(:final => true)
+ }.to_not raise_error
+ end
+ end
+
+ describe "connect using HTTP Authentication", :elasticsearch_secure => true do
+ subject do
+ require "logstash/outputs/elasticsearch"
+ settings = {
+ "protocol" => "http",
+ "cluster" => "elasticsearch",
+ "host" => "node01",
+ "user" => "user",
+ "password" => "changeme",
+ }
+ next LogStash::Outputs::ElasticSearch.new(settings)
+ end
+
+ before :each do
+ subject.register
+ end
+
+ it "sends events to ES" do
+ expect {
+ subject.receive(LogStash::Event.new("message" => "sample message here"))
+ subject.buffer_flush(:final => true)
+ }.to_not raise_error
+ end
+ end
  end
metadata CHANGED
@@ -1,29 +1,29 @@
  --- !ruby/object:Gem::Specification
  name: logstash-output-elasticsearch
  version: !ruby/object:Gem::Version
- version: 0.1.1
+ version: 0.1.5
  platform: ruby
  authors:
  - Elasticsearch
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2014-08-04 00:00:00.000000000 Z
+ date: 2014-11-07 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: elasticsearch
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ~>
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.0.6
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ! '>='
+ - - ~>
  - !ruby/object:Gem::Version
- version: '0'
+ version: 1.0.6
  - !ruby/object:Gem::Dependency
  name: stud
  requirement: !ruby/object:Gem::Requirement
@@ -58,14 +58,14 @@ dependencies:
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
- version: 0.0.39
+ version: 0.0.40
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - ~>
  - !ruby/object:Gem::Version
- version: 0.0.39
+ version: 0.0.40
  - !ruby/object:Gem::Dependency
  name: logstash
  requirement: !ruby/object:Gem::Requirement
@@ -90,32 +90,39 @@ dependencies:
  name: jar-dependencies
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - ! '>='
  - !ruby/object:Gem::Version
- version: 0.0.6
+ version: '0'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - ~>
+ - - ! '>='
  - !ruby/object:Gem::Version
- version: 0.0.6
+ version: '0'
  description: Output events to elasticsearch
- email: rubycoder@example.com
+ email: richard.pijnenburg@elasticsearch.com
  executables: []
  extensions: []
  extra_rdoc_files: []
  files:
+ - .gitignore
+ - Gemfile
+ - LICENSE
+ - Rakefile
  - lib/logstash/outputs/elasticsearch.rb
  - lib/logstash/outputs/elasticsearch/elasticsearch-template.json
  - lib/logstash/outputs/elasticsearch/protocol.rb
  - logstash-output-elasticsearch.gemspec
+ - rakelib/publish.rake
+ - rakelib/vendor.rake
  - spec/outputs/elasticsearch.rb
  homepage: http://logstash.net/
  licenses:
  - Apache License (2.0)
  metadata:
  logstash_plugin: 'true'
+ logstash_group: output
  post_install_message:
  rdoc_options: []
  require_paths:
@@ -131,12 +138,11 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  - !ruby/object:Gem::Version
  version: '0'
  requirements:
- - jar 'org.elasticsearch:elasticsearch', '1.2.2'
+ - jar 'org.elasticsearch:elasticsearch', '1.3.1'
  rubyforge_project:
- rubygems_version: 2.3.0
+ rubygems_version: 2.4.1
  signing_key:
  specification_version: 4
  summary: Logstash Output to Elasticsearch
  test_files:
  - spec/outputs/elasticsearch.rb
- has_rdoc: