logstash-core 6.0.0.rc1-java → 6.0.0.rc2-java

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 7f2f575a0b4346cfdb2ebd45a8f92c1f945af334576955f88db79b39e49d92aa
- data.tar.gz: e6054d7cbf6db8a5e88554647b09f92b4781809f06f44f62e42987842e0b74be
+ metadata.gz: be7703ae1e02681bf68ba3dfd591f85886b15f271b2bce6cc17e49dddcd88c4c
+ data.tar.gz: 7c49c07b0af3dd06f605acb08243911e65f1be78108ee6fdb1cc4723dca694a6
  SHA512:
- metadata.gz: 55addb91d39a5878c13ef3ea22e9e9d8dd8b543750e1248e6d1e6d165812169c7b2522a86640d85dcebeb949de6b48e9049f68c339bbe34798bba6694365e02c
- data.tar.gz: 70d378ebbd7e89728069139a7a010f977ebf7cd2067cbb4ca2573e0cb481e94c4f69733a2af4622ad805f0ace5588642d7655c68381e56c3b161bfa14ef57f3e
+ metadata.gz: dc9b5bc1db75cfb1236aef3072ac38f2d943fb24aa752fd9a525bec60391e06f000f77d7fd8ac48d00744a0ff249c5edd6e7bec1c87ee7475d471999e4e4c032
+ data.tar.gz: e856579b5da5a494d8493e6863a96f6ced47e7442ce822bc94144a3dcf39ef0f5d3d5a5f9f99fafceb0ca1d070da0975ed6d33682b8191eb653c1dbc22227fed
@@ -5,8 +5,8 @@
  gem.requirements << "jar org.apache.logging.log4j:log4j-slf4j-impl, 2.6.2"
  gem.requirements << "jar org.apache.logging.log4j:log4j-api, 2.6.2"
  gem.requirements << "jar org.apache.logging.log4j:log4j-core, 2.6.2"
- gem.requirements << "jar com.fasterxml.jackson.core:jackson-core, 2.7.3"
- gem.requirements << "jar com.fasterxml.jackson.core:jackson-databind, 2.7.3"
- gem.requirements << "jar com.fasterxml.jackson.core:jackson-annotations, 2.7.3"
- gem.requirements << "jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.7.3"
- gem.requirements << "jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.7.3"
+ gem.requirements << "jar com.fasterxml.jackson.core:jackson-core, 2.9.1"
+ gem.requirements << "jar com.fasterxml.jackson.core:jackson-databind, 2.9.1"
+ gem.requirements << "jar com.fasterxml.jackson.core:jackson-annotations, 2.9.1"
+ gem.requirements << "jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.9.1"
+ gem.requirements << "jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.9.1"
@@ -2,7 +2,11 @@
 
  # The version of logstash core gem.
  #
- # Note to authors: this should not include dashes because 'gem' barfs if
- # you include a dash in the version string.
-
- LOGSTASH_CORE_VERSION = "6.0.0-rc1"
+ # sourced from a copy of the master versions.yml file, see logstash-core/logstash-core.gemspec
+ if !defined?(ALL_VERSIONS)
+   require 'yaml'
+   ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
+ end
+ if !defined?(LOGSTASH_CORE_VERSION)
+   LOGSTASH_CORE_VERSION = ALL_VERSIONS.fetch("logstash-core")
+ end
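
For reference, both version constants are now read from the versions-gem-copy.yml file that the gemspec copies into the gem root (shown near the end of this diff), so resolving a constant amounts to a YAML lookup. A minimal sketch, assuming the copied file sits in the gem root; the file name and keys come from this diff, the rest is illustrative:

# sketch of the new version lookup against the gem-root copy of versions.yml
require 'yaml'
all_versions = YAML.load_file("versions-gem-copy.yml")
all_versions.fetch("logstash-core")  # => "6.0.0-rc2"
all_versions.fetch("logstash")       # => "6.0.0-rc2"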
@@ -2,25 +2,25 @@
  begin
  require 'jar_dependencies'
  rescue LoadError
- require 'com/fasterxml/jackson/core/jackson-databind/2.7.3/jackson-databind-2.7.3.jar'
  require 'org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar'
+ require 'com/fasterxml/jackson/core/jackson-databind/2.9.1/jackson-databind-2.9.1.jar'
  require 'org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar'
  require 'org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar'
- require 'com/fasterxml/jackson/core/jackson-annotations/2.7.3/jackson-annotations-2.7.3.jar'
+ require 'com/fasterxml/jackson/core/jackson-annotations/2.9.1/jackson-annotations-2.9.1.jar'
  require 'org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar'
- require 'com/fasterxml/jackson/module/jackson-module-afterburner/2.7.3/jackson-module-afterburner-2.7.3.jar'
- require 'com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.7.3/jackson-dataformat-cbor-2.7.3.jar'
- require 'com/fasterxml/jackson/core/jackson-core/2.7.3/jackson-core-2.7.3.jar'
+ require 'com/fasterxml/jackson/module/jackson-module-afterburner/2.9.1/jackson-module-afterburner-2.9.1.jar'
+ require 'com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.9.1/jackson-dataformat-cbor-2.9.1.jar'
+ require 'com/fasterxml/jackson/core/jackson-core/2.9.1/jackson-core-2.9.1.jar'
  end
 
  if defined? Jars
- require_jar( 'com.fasterxml.jackson.core', 'jackson-databind', '2.7.3' )
  require_jar( 'org.apache.logging.log4j', 'log4j-core', '2.6.2' )
+ require_jar( 'com.fasterxml.jackson.core', 'jackson-databind', '2.9.1' )
  require_jar( 'org.apache.logging.log4j', 'log4j-api', '2.6.2' )
  require_jar( 'org.slf4j', 'slf4j-api', '1.7.21' )
- require_jar( 'com.fasterxml.jackson.core', 'jackson-annotations', '2.7.3' )
+ require_jar( 'com.fasterxml.jackson.core', 'jackson-annotations', '2.9.1' )
  require_jar( 'org.apache.logging.log4j', 'log4j-slf4j-impl', '2.6.2' )
- require_jar( 'com.fasterxml.jackson.module', 'jackson-module-afterburner', '2.7.3' )
- require_jar( 'com.fasterxml.jackson.dataformat', 'jackson-dataformat-cbor', '2.7.3' )
- require_jar( 'com.fasterxml.jackson.core', 'jackson-core', '2.7.3' )
+ require_jar( 'com.fasterxml.jackson.module', 'jackson-module-afterburner', '2.9.1' )
+ require_jar( 'com.fasterxml.jackson.dataformat', 'jackson-dataformat-cbor', '2.9.1' )
+ require_jar( 'com.fasterxml.jackson.core', 'jackson-core', '2.9.1' )
  end
@@ -77,8 +77,9 @@ module LogStash; module Config; module AST
  # of the output/filter function
  definitions << "define_singleton_method :#{type}_func do |event|"
  definitions << " targeted_outputs = []" if type == "output"
- definitions << " events = [event]" if type == "filter"
- definitions << " @logger.debug? && @logger.debug(\"#{type} received\", \"event\" => event.to_hash)"
+ definitions << " events = event" if type == "filter"
+ definitions << " @logger.debug? && @logger.debug(\"#{type} received\", \"event\" => event.to_hash)" if type == "output"
+ definitions << " @logger.debug? && events.each { |e| @logger.debug(\"#{type} received\", \"event\" => e.to_hash)}" if type == "filter"
 
  sections.select { |s| s.plugin_type.text_value == type }.each do |s|
  definitions << s.compile.split("\n", -1).map { |e| " #{e}" }
@@ -65,6 +65,7 @@ module LogStash module Config
  pipeline_id = alt_name
  module_settings.set("pipeline.id", pipeline_id)
  LogStash::Modules::SettingsMerger.merge_cloud_settings(module_hash, module_settings)
+ LogStash::Modules::SettingsMerger.merge_kibana_auth!(module_hash)
  current_module.with_settings(module_hash)
  config_test = settings.get("config.test_and_exit")
  modul_setup = settings.get("modules_setup")
@@ -10,10 +10,15 @@ module LogStash module Instrument module PeriodicPoller
  end
 
  def collect
- _, pipeline = @agent.with_running_pipelines { |pipelines| pipelines.first }
- unless pipeline.nil?
- pipeline.collect_dlq_stats
+ pipelines = @agent.with_running_user_defined_pipelines {|pipelines| pipelines}
+ unless pipelines.nil?
+ pipelines.each {|_, pipeline|
+ unless pipeline.nil?
+ pipeline.collect_dlq_stats
+ end
+ }
  end
  end
  end
  end end end
+
@@ -11,9 +11,13 @@ module LogStash module Instrument module PeriodicPoller
  end
 
  def collect
- pipeline_id, pipeline = @agent.with_running_pipelines {|pipelines| pipelines.first }
- unless pipeline.nil?
- pipeline.collect_stats
+ pipelines = @agent.with_running_user_defined_pipelines {|pipelines| pipelines}
+ unless pipelines.nil?
+ pipelines.each {|_, pipeline|
+ unless pipeline.nil?
+ pipeline.collect_stats
+ end
+ }
  end
  end
  end
@@ -24,9 +24,11 @@ module LogStash module Modules class KibanaClient
  end
  end
 
- attr_reader :version
+ SCHEME_REGEX = /^https?$/
 
- def initialize(settings)
+ attr_reader :version, :endpoint
+
+ def initialize(settings, client = nil) # allow for test mock injection
  @settings = settings
 
  client_options = {
@@ -38,8 +40,8 @@ module LogStash module Modules class KibanaClient
  }
 
  ssl_options = {}
-
- if @settings["var.kibana.ssl.enabled"] == "true"
+ ssl_enabled = @settings["var.kibana.ssl.enabled"] == "true"
+ if ssl_enabled
  ssl_options[:verify] = @settings.fetch("var.kibana.ssl.verification_mode", "strict").to_sym
  ssl_options[:ca_file] = @settings.fetch("var.kibana.ssl.certificate_authority", nil)
  ssl_options[:client_cert] = @settings.fetch("var.kibana.ssl.certificate", nil)
@@ -48,9 +50,34 @@ module LogStash module Modules class KibanaClient
 
  client_options[:ssl] = ssl_options
 
- @client = Manticore::Client.new(client_options)
  @host = @settings.fetch("var.kibana.host", "localhost:5601")
- @scheme = @settings.fetch("var.kibana.scheme", "http")
+ implicit_scheme, colon_slash_slash, host = @host.partition("://")
+ explicit_scheme = @settings["var.kibana.scheme"]
+ @scheme = "http"
+ if !colon_slash_slash.empty?
+ if !explicit_scheme.nil? && implicit_scheme != explicit_scheme
+ # both are set and not the same - error
+ msg = sprintf("Detected differing Kibana host schemes as sourced from var.kibana.host: '%s' and var.kibana.scheme: '%s'", implicit_scheme, explicit_scheme)
+ raise ArgumentError.new(msg)
+ end
+ @scheme = implicit_scheme
+ @host = host
+ elsif !explicit_scheme.nil?
+ @scheme = explicit_scheme
+ end
+
+ if SCHEME_REGEX.match(@scheme).nil?
+ msg = sprintf("Kibana host scheme given is invalid, given value: '%s' - acceptable values: 'http', 'https'", @scheme)
+ raise ArgumentError.new(msg)
+ end
+
+ if ssl_enabled && @scheme != "https"
+ @scheme = "https"
+ end
+
+ @endpoint = "#{@scheme}://#{@host}"
+
+ @client = client || Manticore::Client.new(client_options)
  @http_options = {:headers => {'Content-Type' => 'application/json'}}
  username = @settings["var.kibana.username"]
  if username
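
For reference, the constructor additions above derive the effective Kibana endpoint from var.kibana.host, var.kibana.scheme and the SSL flag. A standalone sketch of that resolution with simplified stand-in names (not the class itself):

# sketch of the scheme/endpoint resolution added above; method and variable names are stand-ins
SCHEME_REGEX = /^https?$/
def resolve_kibana_endpoint(host_setting, explicit_scheme, ssl_enabled)
  implicit_scheme, separator, bare_host = host_setting.partition("://")
  scheme, host = "http", host_setting
  if !separator.empty?
    # the host already carries a scheme; it must not contradict var.kibana.scheme
    if explicit_scheme && implicit_scheme != explicit_scheme
      raise ArgumentError, "differing Kibana host schemes: '#{implicit_scheme}' vs '#{explicit_scheme}'"
    end
    scheme, host = implicit_scheme, bare_host
  elsif explicit_scheme
    scheme = explicit_scheme
  end
  raise ArgumentError, "invalid scheme '#{scheme}'" if SCHEME_REGEX.match(scheme).nil?
  scheme = "https" if ssl_enabled && scheme != "https"
  "#{scheme}://#{host}"
end
resolve_kibana_endpoint("https://foo.bar:4321", nil, false) # => "https://foo.bar:4321"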
@@ -77,7 +104,7 @@ module LogStash module Modules class KibanaClient
  end
 
  def version_parts
- @version.split(/\.|\-/)
+ @version.split(/[.-]/)
  end
 
  def host_settings
@@ -119,6 +146,6 @@ module LogStash module Modules class KibanaClient
  end
 
  def full_url(relative)
- "#{@scheme}://#{@host}/#{relative}"
+ "#{@endpoint}/#{relative}"
  end
  end end end
@@ -40,9 +40,10 @@ module LogStash module Modules module SettingsMerger
  settings_copy = LogStash::Util.deep_clone(module_settings)
  end
 
- module_settings["var.kibana.scheme"] = "https"
+ module_settings["var.kibana.scheme"] = cloud_id.kibana_scheme
  module_settings["var.kibana.host"] = cloud_id.kibana_host
- module_settings["var.elasticsearch.hosts"] = cloud_id.elasticsearch_host
+ # elasticsearch client does not use scheme, it URI parses the host setting
+ module_settings["var.elasticsearch.hosts"] = "#{cloud_id.elasticsearch_scheme}://#{cloud_id.elasticsearch_host}"
  unless cloud_auth.nil?
  module_settings["var.elasticsearch.username"] = cloud_auth.username
  module_settings["var.elasticsearch.password"] = cloud_auth.password
@@ -54,6 +55,11 @@ module LogStash module Modules module SettingsMerger
  end
  end
 
+ def merge_kibana_auth!(module_settings)
+ module_settings["var.kibana.username"] = module_settings["var.elasticsearch.username"] if module_settings["var.kibana.username"].nil?
+ module_settings["var.kibana.password"] = module_settings["var.elasticsearch.password"] if module_settings["var.kibana.password"].nil?
+ end
+
  def format_module_settings(settings_before, settings_after)
  output = []
  output << "-------- Module Settings ---------"
@@ -498,13 +498,9 @@ module LogStash; class Pipeline < BasePipeline
  end
 
  def filter_batch(batch)
- batch.each do |event|
- return if @force_shutdown.true?
-
- filter_func(event).each do |e|
- #these are both original and generated events
- batch.merge(e) unless e.cancelled?
- end
+ filter_func(batch.to_a).each do |e|
+ #these are both original and generated events
+ batch.merge(e) unless e.cancelled?
  end
  @filter_queue_client.add_filtered_metrics(batch)
  @events_filtered.increment(batch.size)
@@ -662,7 +658,7 @@ module LogStash; class Pipeline < BasePipeline
  def filter(event, &block)
  maybe_setup_out_plugins
  # filter_func returns all filtered events, including cancelled ones
- filter_func(event).each {|e| block.call(e)}
+ filter_func([event]).each {|e| block.call(e)}
  end
 
  # perform filters flush and yield flushed event to the passed block
@@ -3,8 +3,26 @@ require "logstash/namespace"
  require "base64"
 
  module LogStash module Util class CloudSettingId
- attr_reader :original, :decoded, :label, :elasticsearch_host, :kibana_host
 
+ def self.cloud_id_encode(*args)
+ Base64.urlsafe_encode64(args.join("$"))
+ end
+ DOT_SEPARATOR = "."
+ CLOUD_PORT = ":443"
+
+ attr_reader :original, :decoded, :label, :elasticsearch_host, :elasticsearch_scheme, :kibana_host, :kibana_scheme
+
+ # The constructor is expecting a 'cloud.id', a string in 2 variants.
+ # 1 part example: 'dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy'
+ # 2 part example: 'foobar:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy'
+ # The two part variant has a 'label' prepended with a colon separator. The label is not encoded.
+ # The 1 part (or second section of the 2 part variant) is base64 encoded.
+ # The original string before encoding has three segments separated by a dollar sign.
+ # e.g. 'us-east-1.aws.found.io$notareal$identifier'
+ # The first segment is the cloud base url, e.g. 'us-east-1.aws.found.io'
+ # The second segment is the elasticsearch host identifier, e.g. 'notareal'
+ # The third segment is the kibana host identifier, e.g. 'identifier'
+ # The 'cloud.id' value decoded into the #attr_reader ivars.
  def initialize(value)
  return if value.nil?
 
@@ -12,27 +30,43 @@ module LogStash module Util class CloudSettingId
  raise ArgumentError.new("Cloud Id must be String. Received: #{value.class}")
  end
  @original = value
- @label, sep, last = value.partition(":")
- if last.empty?
+ @label, colon, encoded = @original.partition(":")
+ if encoded.empty?
  @decoded = Base64.urlsafe_decode64(@label) rescue ""
  @label = ""
  else
- @decoded = Base64.urlsafe_decode64(last) rescue ""
+ @decoded = Base64.urlsafe_decode64(encoded) rescue ""
  end
+
+ @decoded = @decoded.encode(Encoding::UTF_8, :invalid => :replace, :undef => :replace)
+
  unless @decoded.count("$") == 2
- raise ArgumentError.new("Cloud Id does not decode. Received: \"#{@original}\".")
+ raise ArgumentError.new("Cloud Id does not decode. You may need to enable Kibana in the Cloud UI. Received: \"#{@decoded}\".")
  end
- parts = @decoded.split("$")
- if parts.any?(&:empty?)
- raise ArgumentError.new("Cloud Id, after decoding, is invalid. Format: '<part1>$<part2>$<part3>'. Received: \"#{@decoded}\".")
+
+ segments = @decoded.split("$")
+ if segments.any?(&:empty?)
+ raise ArgumentError.new("Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{@decoded}\".")
+ end
+ cloud_base = segments.shift
+ cloud_host = "#{DOT_SEPARATOR}#{cloud_base}#{CLOUD_PORT}"
+
+ @elasticsearch_host, @kibana_host = segments
+ if @elasticsearch_host == "undefined"
+ raise ArgumentError.new("Cloud Id, after decoding, elasticsearch segment is 'undefined', literally.")
+ end
+ @elasticsearch_scheme = "https"
+ @elasticsearch_host.concat(cloud_host)
+
+ if @kibana_host == "undefined"
+ raise ArgumentError.new("Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI.")
  end
- cloud_host, es_server, kb_server = parts
- @elasticsearch_host = sprintf("%s.%s:443", es_server, cloud_host)
- @kibana_host = sprintf("%s.%s:443", kb_server, cloud_host)
+ @kibana_scheme = "https"
+ @kibana_host.concat(cloud_host)
  end
 
  def to_s
- @original.to_s
+ @decoded.to_s
  end
 
  def inspect
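
The constructor comments above spell out the cloud.id layout; decoding the documented example value by hand follows the same rules. A minimal sketch, independent of the class:

# sketch: decoding the documented example cloud.id by hand
require "base64"
cloud_id = "foobar:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy"
label, _, encoded = cloud_id.partition(":")             # label => "foobar"
decoded = Base64.urlsafe_decode64(encoded)              # => "us-east-1.aws.found.io$notareal$identifier"
cloud_base, es_id, kibana_id = decoded.split("$")
elasticsearch_host = "#{es_id}.#{cloud_base}:443"       # => "notareal.us-east-1.aws.found.io:443"
kibana_host        = "#{kibana_id}.#{cloud_base}:443"   # => "identifier.us-east-1.aws.found.io:443"
# both endpoints are addressed over https, per the new *_scheme readers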
@@ -287,6 +287,12 @@ module LogStash; module Util
  # @cancelled[event] = true
  end
 
+ def to_a
+ events = []
+ each {|e| events << e}
+ events
+ end
+
  def each(&blk)
  # take care not to cause @originals or @generated to change during iteration
 
@@ -218,6 +218,12 @@ module LogStash; module Util
  # @cancelled[event] = true
  end
 
+ def to_a
+ events = []
+ each {|e| events << e}
+ events
+ end
+
  def each(&blk)
  # take care not to cause @originals or @generated to change during iteration
  @iterating = true
@@ -2,13 +2,11 @@
 
  # The version of the logstash package (not the logstash-core gem version).
  #
- # Note to authors: this should not include dashes because 'gem' barfs if
- # you include a dash in the version string.
-
- # TODO: (colin) the logstash-core gem uses it's own version number in logstash-core/lib/logstash-core/version.rb
- # there are some dependencies in logstash-core on the LOGSTASH_VERSION constant this is why
- # the logstash version is currently defined here in logstash-core/lib/logstash/version.rb but
- # eventually this file should be in the root logstash lib fir and dependencies in logstash-core should be
- # fixed.
-
- LOGSTASH_VERSION = "6.0.0-rc1"
+ # sourced from a copy of the master versions.yml file, see logstash-core/logstash-core.gemspec
+ if !defined?(ALL_VERSIONS)
+   require 'yaml'
+   ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
+ end
+ if !defined?(LOGSTASH_VERSION)
+   LOGSTASH_VERSION = ALL_VERSIONS.fetch("logstash")
+ end
@@ -1,6 +1,25 @@
  # -*- encoding: utf-8 -*-
  lib = File.expand_path('../lib', __FILE__)
  $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
+
+ project_versions_yaml_path = File.expand_path("../versions.yml", File.dirname(__FILE__))
+ if File.exist?(project_versions_yaml_path)
+ # we need to copy the project level versions.yml into the gem root
+ # to be able to package it into the gems file structure
+ # as the require 'logstash-core/version' loads the yaml file from within the gem root.
+ #
+ # we ignore the copy in git and we overwrite an existing file
+ # each time we build the logstash-core gem
+ original_lines = IO.readlines(project_versions_yaml_path)
+ original_lines << ""
+ original_lines << "# This is a copy the project level versions.yml into this gem's root and it is created when the gemspec is evaluated."
+ gem_versions_yaml_path = File.expand_path("./versions-gem-copy.yml", File.dirname(__FILE__))
+ File.open(gem_versions_yaml_path, 'w') do |new_file|
+ # create or overwrite
+ new_file.puts(original_lines)
+ end
+ end
+
  require 'logstash-core/version'
 
  Gem::Specification.new do |gem|
@@ -11,7 +30,10 @@ Gem::Specification.new do |gem|
  gem.homepage = "http://www.elastic.co/guide/en/logstash/current/index.html"
  gem.license = "Apache License (2.0)"
 
- gem.files = Dir.glob(["logstash-core.gemspec", "gemspec_jars.rb", "lib/**/*.rb", "spec/**/*.rb", "locales/*", "lib/logstash/api/init.ru", "lib/logstash-core/logstash-core.jar"])
+ gem.files = Dir.glob(
+ %w(versions-gem-copy.yml logstash-core.gemspec gemspec_jars.rb lib/**/*.rb spec/**/*.rb locales/*
+ lib/logstash/api/init.ru lib/logstash-core/logstash-core.jar)
+ )
  gem.test_files = gem.files.grep(%r{^(test|spec|features)/})
  gem.name = "logstash-core"
  gem.require_paths = ["lib"]
@@ -46,7 +68,7 @@ Gem::Specification.new do |gem|
  gem.add_runtime_dependency "rubyzip", "~> 1.2.1"
  gem.add_runtime_dependency "thread_safe", "~> 0.3.5" #(Apache 2.0 license)
 
- gem.add_runtime_dependency "jrjackson", "~> 0.4.2" #(Apache 2.0 license)
+ gem.add_runtime_dependency "jrjackson", "~> #{ALL_VERSIONS.fetch('jrjackson')}" #(Apache 2.0 license)
 
  gem.add_runtime_dependency "jar-dependencies"
  # as of Feb 3rd 2016, the ruby-maven gem is resolved to version 3.3.3 and that version
@@ -0,0 +1,60 @@
+ # encoding: utf-8
+ require "spec_helper"
+ require "logstash/modules/kibana_client"
+ module LogStash module Modules
+ KibanaTestResponse = Struct.new(:code, :body, :headers)
+ class KibanaTestClient
+ def http(method, endpoint, options)
+ self
+ end
+ def call
+ KibanaTestResponse.new(200, '{"version":{"number":"1.2.3","build_snapshot":false}}', {})
+ end
+ end
+ describe KibanaClient do
+ let(:settings) { Hash.new }
+ let(:test_client) { KibanaTestClient.new }
+ let(:kibana_host) { "https://foo.bar:4321" }
+ subject(:kibana_client) { described_class.new(settings, test_client) }
+
+ context "when supplied with conflicting scheme data" do
+ let(:settings) { {"var.kibana.scheme" => "http", "var.kibana.host" => kibana_host} }
+ it "a new instance will throw an error" do
+ expect{described_class.new(settings, test_client)}.to raise_error(ArgumentError, /Detected differing Kibana host schemes as sourced from var\.kibana\.host: 'https' and var\.kibana\.scheme: 'http'/)
+ end
+ end
+
+ context "when supplied with invalid schemes" do
+ ["httpd", "ftp", "telnet"].each do |uri_scheme|
+ it "a new instance will throw an error" do
+ re = /Kibana host scheme given is invalid, given value: '#{uri_scheme}' - acceptable values: 'http', 'https'/
+ expect{described_class.new({"var.kibana.scheme" => uri_scheme}, test_client)}.to raise_error(ArgumentError, re)
+ end
+ end
+ end
+
+ context "when supplied with the scheme in the host only" do
+ let(:settings) { {"var.kibana.host" => kibana_host} }
+ it "has a version and an endpoint" do
+ expect(kibana_client.version).to eq("1.2.3")
+ expect(kibana_client.endpoint).to eq("https://foo.bar:4321")
+ end
+ end
+
+ context "when supplied with the scheme in the scheme setting" do
+ let(:settings) { {"var.kibana.scheme" => "https", "var.kibana.host" => "foo.bar:4321"} }
+ it "has a version and an endpoint" do
+ expect(kibana_client.version).to eq("1.2.3")
+ expect(kibana_client.endpoint).to eq(kibana_host)
+ end
+ end
+
+ context "when supplied with a no scheme host setting and ssl is enabled" do
+ let(:settings) { {"var.kibana.ssl.enabled" => "true", "var.kibana.host" => "foo.bar:4321"} }
+ it "has a version and an endpoint" do
+ expect(kibana_client.version).to eq("1.2.3")
+ expect(kibana_client.endpoint).to eq(kibana_host)
+ end
+ end
+ end
+ end end
@@ -28,6 +28,36 @@ describe LogStash::Modules::SettingsMerger do
  end
  end
 
+ describe "#merge_kibana_auth" do
+
+ before do
+ described_class.merge_kibana_auth!(mod_settings)
+ end
+
+ context 'only elasticsearch username and password is set' do
+ let(:mod_settings) { {"name"=>"mod1", "var.input.tcp.port"=>2222, "var.elasticsearch.username"=>"rupert", "var.elasticsearch.password"=>"fotherington" } }
+ it "sets kibana username and password" do
+ expect(mod_settings["var.elasticsearch.username"]).to eq("rupert")
+ expect(mod_settings["var.elasticsearch.password"]).to eq("fotherington")
+ expect(mod_settings["var.kibana.username"]).to eq("rupert")
+ expect(mod_settings["var.kibana.password"]).to eq("fotherington")
+ end
+ end
+
+ context 'elasticsearch and kibana usernames and passwords are set' do
+ let(:mod_settings) { {"name"=>"mod1", "var.input.tcp.port"=>2222, "var.elasticsearch.username"=>"rupert", "var.elasticsearch.password"=>"fotherington",
+ "var.kibana.username"=>"davey", "var.kibana.password"=>"stott"} }
+
+ it "keeps existing kibana username and password" do
+ expect(mod_settings["var.elasticsearch.username"]).to eq("rupert")
+ expect(mod_settings["var.elasticsearch.password"]).to eq("fotherington")
+ expect(mod_settings["var.kibana.username"]).to eq("davey")
+ expect(mod_settings["var.kibana.password"]).to eq("stott")
+ end
+ end
+
+ end
+
  describe "#merge_cloud_settings" do
  let(:cloud_id) { LogStash::Util::CloudSettingId.new("label:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy") }
  let(:cloud_auth) { LogStash::Util::CloudSettingAuth.new("elastix:bigwhoppingfairytail") }
@@ -38,7 +68,7 @@ describe LogStash::Modules::SettingsMerger do
  {
  "var.kibana.scheme" => "https",
  "var.kibana.host" => "identifier.us-east-1.aws.found.io:443",
- "var.elasticsearch.hosts" => "notareal.us-east-1.aws.found.io:443",
+ "var.elasticsearch.hosts" => "https://notareal.us-east-1.aws.found.io:443",
  "var.elasticsearch.username" => "elastix",
  "var.kibana.username" => "elastix"
  }
@@ -63,7 +93,7 @@ describe LogStash::Modules::SettingsMerger do
  {
  "var.kibana.scheme" => "https",
  "var.kibana.host" => "identifier.us-east-1.aws.found.io:443",
- "var.elasticsearch.hosts" => "notareal.us-east-1.aws.found.io:443",
+ "var.elasticsearch.hosts" => "https://notareal.us-east-1.aws.found.io:443",
  }
  end
  let(:ls_settings) { SubstituteSettingsForRSpec.new({"cloud.id" => cloud_id}) }
@@ -254,6 +254,14 @@ describe LogStash::Pipeline do
  pipeline = mock_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
  pipeline.close
  end
+
+ it "should log each filtered event if config.debug is set to true" do
+ pipeline_settings_obj.set("config.debug", true)
+ pipeline = mock_pipeline_from_string(test_config_with_filters, pipeline_settings_obj)
+ expect(logger).to receive(:debug).with(/filter received/, anything)
+ pipeline.filter_func([LogStash::Event.new])
+ pipeline.close
+ end
  end
 
  context "when there is no command line -w N set" do
@@ -618,9 +626,9 @@ describe LogStash::Pipeline do
 
  it "should handle evaluating different config" do
  expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+ expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
  expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+ expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
  end
  end
 
@@ -700,9 +708,9 @@ describe LogStash::Pipeline do
  # in the current instance and was returning an array containing nil values for
  # the match.
  expect(pipeline1.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+ expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
  expect(pipeline2.output_func(LogStash::Event.new)).not_to include(nil)
- expect(pipeline1.filter_func(LogStash::Event.new)).not_to include(nil)
+ expect(pipeline1.filter_func([LogStash::Event.new])).not_to include(nil)
  end
  end
 
@@ -53,7 +53,7 @@ describe LogStash::Setting::Modules do
  context "when given a badly formatted encoded id" do
  it "should not raise an error" do
  encoded = Base64.urlsafe_encode64("foo$$bal")
- expect { subject.set(encoded) }.to raise_error(ArgumentError, /Cloud Id, after decoding, is invalid. Format: '<part1>\$<part2>\$<part3>'/)
+ expect { subject.set(encoded) }.to raise_error(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"foo$$bal\".")
  end
  end
 
@@ -0,0 +1,93 @@
+ # encoding: utf-8
+ require "spec_helper"
+ require "logstash/util/cloud_setting_id"
+
+ describe LogStash::Util::CloudSettingId do
+ let(:input) { "foobar:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy" }
+ subject { described_class.new(input) }
+
+ describe "when given unacceptable input" do
+ it "a nil input does not raise an exception" do
+ expect{described_class.new(nil)}.not_to raise_exception
+ end
+ it "when given a nil input, the accessors are all nil" do
+ cloud_id = described_class.new(nil)
+ expect(cloud_id.original).to be_nil
+ expect(cloud_id.decoded).to be_nil
+ expect(cloud_id.label).to be_nil
+ expect(cloud_id.elasticsearch_host).to be_nil
+ expect(cloud_id.kibana_host).to be_nil
+ expect(cloud_id.elasticsearch_scheme).to be_nil
+ expect(cloud_id.kibana_scheme).to be_nil
+ end
+
+ context "when a malformed value is given" do
+ let(:raw) {%w(first second)}
+ let(:input) { described_class.cloud_id_encode(*raw) }
+ it "raises an error" do
+ expect{subject}.to raise_exception(ArgumentError, "Cloud Id does not decode. You may need to enable Kibana in the Cloud UI. Received: \"#{raw[0]}$#{raw[1]}\".")
+ end
+ end
+
+ context "when at least one segment is empty" do
+ let(:raw) {["first", "", "third"]}
+ let(:input) { described_class.cloud_id_encode(*raw) }
+ it "raises an error" do
+ expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{raw[0]}$#{raw[1]}$#{raw[2]}\".")
+ end
+ end
+
+ context "when elasticsearch segment is undefined" do
+ let(:raw) {%w(us-east-1.aws.found.io undefined my-kibana)}
+ let(:input) { described_class.cloud_id_encode(*raw) }
+ it "raises an error" do
+ expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, elasticsearch segment is 'undefined', literally.")
+ end
+ end
+
+ context "when kibana segment is undefined" do
+ let(:raw) {%w(us-east-1.aws.found.io my-elastic-cluster undefined)}
+ let(:input) { described_class.cloud_id_encode(*raw) }
+ it "raises an error" do
+ expect{subject}.to raise_exception(ArgumentError, "Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI.")
+ end
+ end
+ end
+
+ describe "without a label" do
+ let(:input) { "dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy" }
+ it "#label is empty" do
+ expect(subject.label).to be_empty
+ end
+ it "#decode is set" do
+ expect(subject.decoded).to eq("us-east-1.aws.found.io$notareal$identifier")
+ end
+ end
+
+ describe "when given acceptable input, the accessors:" do
+ it '#original has a value' do
+ expect(subject.original).to eq(input)
+ end
+ it '#decoded has a value' do
+ expect(subject.decoded).to eq("us-east-1.aws.found.io$notareal$identifier")
+ end
+ it '#label has a value' do
+ expect(subject.label).to eq("foobar")
+ end
+ it '#elasticsearch_host has a value' do
+ expect(subject.elasticsearch_host).to eq("notareal.us-east-1.aws.found.io:443")
+ end
+ it '#elasticsearch_scheme has a value' do
+ expect(subject.elasticsearch_scheme).to eq("https")
+ end
+ it '#kibana_host has a value' do
+ expect(subject.kibana_host).to eq("identifier.us-east-1.aws.found.io:443")
+ end
+ it '#kibana_scheme has a value' do
+ expect(subject.kibana_scheme).to eq("https")
+ end
+ it '#to_s has a value of #decoded' do
+ expect(subject.to_s).to eq(subject.decoded)
+ end
+ end
+ end
@@ -0,0 +1,23 @@
+ ---
+ logstash: 6.0.0-rc2
+ logstash-core: 6.0.0-rc2
+ logstash-core-plugin-api: 2.1.16
+ jruby:
+ version: 9.1.13.0
+ sha1: 815bac27d5daa1459a4477d6d80584f007ce6a68
+
+ # jruby-runtime-override, if specified, will override the jruby version installed in vendor/jruby for logstash runtime only,
+ # not for the compile-time jars
+ #
+ #jruby-runtime-override:
+ # url: http://ci.jruby.org/snapshots/previous/jruby-bin-9.1.13.0-SNAPSHOT.tar.gz
+ # version: 9.1.13.0-SNAPSHOT
+ # sha1: IGNORE
+
+ # Note: this file is copied to the root of logstash-core because its gemspec needs it when
+ # bundler evaluates the gemspec via bin/logstash
+ # Ensure Jackson version here is kept in sync with version used by jrjackson gem
+ jrjackson: 0.4.4
+ jackson: 2.9.1
+
+ # This is a copy the project level versions.yml into this gem's root and it is created when the gemspec is evaluated.
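
This copied file is what the version constants and the gemspec read; for instance, the jrjackson dependency constraint earlier in the diff is derived from it roughly as follows (a sketch only, using the values above):

# sketch: deriving the gemspec's jrjackson constraint from the copied versions file
require 'yaml'
all_versions = YAML.load_file("versions-gem-copy.yml")
"~> #{all_versions.fetch('jrjackson')}"   # => "~> 0.4.4", matching the metadata below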
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-core
  version: !ruby/object:Gem::Version
- version: 6.0.0.rc1
+ version: 6.0.0.rc2
  platform: java
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-09-25 00:00:00.000000000 Z
+ date: 2017-10-29 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -251,7 +251,7 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.4.2
+ version: 0.4.4
  name: jrjackson
  prerelease: false
  type: :runtime
@@ -259,7 +259,7 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: 0.4.2
+ version: 0.4.4
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
  requirements:
@@ -556,6 +556,7 @@ files:
  - spec/logstash/legacy_ruby_event_spec.rb
  - spec/logstash/legacy_ruby_timestamp_spec.rb
  - spec/logstash/modules/cli_parser_spec.rb
+ - spec/logstash/modules/kibana_client_spec.rb
  - spec/logstash/modules/logstash_config_spec.rb
  - spec/logstash/modules/scaffold_spec.rb
  - spec/logstash/modules/settings_merger_spec.rb
@@ -593,6 +594,7 @@ files:
  - spec/logstash/util/buftok_spec.rb
  - spec/logstash/util/byte_value_spec.rb
  - spec/logstash/util/charset_spec.rb
+ - spec/logstash/util/cloud_setting_id_spec.rb
  - spec/logstash/util/duration_formatter_spec.rb
  - spec/logstash/util/java_version_spec.rb
  - spec/logstash/util/plugin_version_spec.rb
@@ -609,6 +611,7 @@ files:
  - spec/support/mocks_classes.rb
  - spec/support/shared_contexts.rb
  - spec/support/shared_examples.rb
+ - versions-gem-copy.yml
  homepage: http://www.elastic.co/guide/en/logstash/current/index.html
  licenses:
  - Apache License (2.0)
@@ -631,11 +634,11 @@ requirements:
  - jar org.apache.logging.log4j:log4j-slf4j-impl, 2.6.2
  - jar org.apache.logging.log4j:log4j-api, 2.6.2
  - jar org.apache.logging.log4j:log4j-core, 2.6.2
- - jar com.fasterxml.jackson.core:jackson-core, 2.7.3
- - jar com.fasterxml.jackson.core:jackson-databind, 2.7.3
- - jar com.fasterxml.jackson.core:jackson-annotations, 2.7.3
- - jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.7.3
- - jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.7.3
+ - jar com.fasterxml.jackson.core:jackson-core, 2.9.1
+ - jar com.fasterxml.jackson.core:jackson-databind, 2.9.1
+ - jar com.fasterxml.jackson.core:jackson-annotations, 2.9.1
+ - jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.9.1
+ - jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.9.1
  rubyforge_project:
  rubygems_version: 2.6.13
  signing_key:
@@ -695,6 +698,7 @@ test_files:
  - spec/logstash/legacy_ruby_event_spec.rb
  - spec/logstash/legacy_ruby_timestamp_spec.rb
  - spec/logstash/modules/cli_parser_spec.rb
+ - spec/logstash/modules/kibana_client_spec.rb
  - spec/logstash/modules/logstash_config_spec.rb
  - spec/logstash/modules/scaffold_spec.rb
  - spec/logstash/modules/settings_merger_spec.rb
@@ -732,6 +736,7 @@ test_files:
  - spec/logstash/util/buftok_spec.rb
  - spec/logstash/util/byte_value_spec.rb
  - spec/logstash/util/charset_spec.rb
+ - spec/logstash/util/cloud_setting_id_spec.rb
  - spec/logstash/util/duration_formatter_spec.rb
  - spec/logstash/util/java_version_spec.rb
  - spec/logstash/util/plugin_version_spec.rb