logstash-core 6.0.0.beta2-java → 6.0.0-java

Files changed (51)
  1. checksums.yaml +4 -4
  2. data/gemspec_jars.rb +5 -5
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/version.rb +8 -4
  5. data/lib/logstash-core_jars.rb +10 -10
  6. data/lib/logstash/agent.rb +3 -2
  7. data/lib/logstash/compiler/lscl.rb +15 -3
  8. data/lib/logstash/config/config_ast.rb +3 -2
  9. data/lib/logstash/config/modules_common.rb +1 -0
  10. data/lib/logstash/config/source/local.rb +2 -1
  11. data/lib/logstash/instrument/periodic_poller/dlq.rb +8 -3
  12. data/lib/logstash/instrument/periodic_poller/pq.rb +7 -3
  13. data/lib/logstash/logging/logger.rb +4 -1
  14. data/lib/logstash/modules/kibana_client.rb +35 -8
  15. data/lib/logstash/modules/logstash_config.rb +1 -1
  16. data/lib/logstash/modules/settings_merger.rb +8 -2
  17. data/lib/logstash/pipeline.rb +10 -19
  18. data/lib/logstash/pipeline_action/stop.rb +1 -0
  19. data/lib/logstash/runner.rb +4 -1
  20. data/lib/logstash/util/cloud_setting_id.rb +46 -12
  21. data/lib/logstash/util/modules_setting_array.rb +1 -1
  22. data/lib/logstash/util/password.rb +2 -4
  23. data/lib/logstash/util/wrapped_acked_queue.rb +6 -0
  24. data/lib/logstash/util/wrapped_synchronous_queue.rb +6 -0
  25. data/lib/logstash/version.rb +8 -10
  26. data/logstash-core.gemspec +25 -3
  27. data/spec/logstash/agent/converge_spec.rb +23 -10
  28. data/spec/logstash/agent_spec.rb +35 -15
  29. data/spec/logstash/api/modules/node_stats_spec.rb +5 -1
  30. data/spec/logstash/compiler/compiler_spec.rb +29 -0
  31. data/spec/logstash/config/source/local_spec.rb +3 -2
  32. data/spec/logstash/event_spec.rb +57 -0
  33. data/spec/logstash/modules/kibana_client_spec.rb +60 -0
  34. data/spec/logstash/modules/logstash_config_spec.rb +7 -1
  35. data/spec/logstash/modules/scaffold_spec.rb +1 -1
  36. data/spec/logstash/modules/settings_merger_spec.rb +32 -2
  37. data/spec/logstash/pipeline_action/create_spec.rb +4 -1
  38. data/spec/logstash/pipeline_action/reload_spec.rb +4 -1
  39. data/spec/logstash/pipeline_dlq_commit_spec.rb +3 -1
  40. data/spec/logstash/pipeline_pq_file_spec.rb +5 -7
  41. data/spec/logstash/pipeline_spec.rb +26 -38
  42. data/spec/logstash/runner_spec.rb +1 -5
  43. data/spec/logstash/settings/modules_spec.rb +13 -2
  44. data/spec/logstash/settings/writable_directory_spec.rb +13 -10
  45. data/spec/logstash/timestamp_spec.rb +2 -2
  46. data/spec/logstash/util/cloud_setting_id_spec.rb +93 -0
  47. data/spec/support/helpers.rb +1 -1
  48. data/spec/support/mocks_classes.rb +14 -0
  49. data/spec/support/shared_contexts.rb +9 -0
  50. data/versions-gem-copy.yml +23 -0
  51. metadata +19 -14
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 5c353e53b9ba28be27a573084a75d1dc12d1257e2192057121d5e1c41de195bc
-  data.tar.gz: 47790a7e594369b8d2c3d1ebfcde2d7fef023aaaf3afb0b747bcc75124c35547
+  metadata.gz: e8c1ff16e405816119bbed2fa145f555e22be2b2bd6db83ba9c072bb589cf58a
+  data.tar.gz: 64a79ecd4f95bdd7118fd5a0a15406eb6e992937fa0f8fac78287d82976b1fdb
 SHA512:
-  metadata.gz: d7acc940476d31aca243c82ea0bc693324e8cdbbeeb7c8abbd06210cd5ad82f143499653b0952696a027ae0b60557518164409a36ff7a4b3ff2d6e4c536dcbb6
-  data.tar.gz: 8848ec115ba9abfa1f3065e1ee22a63b50e7172e4aea049ebd6a37193e6dd06ee8f4b257863ab625939416a75512dffeaf7f1ad1cd9ef88d8c16db344b1a62a8
+  metadata.gz: 8d107250b5c4880960e33dc96dadccd4e54d0b431786d43fbecc5d901e181f6852ee3423a000eaf05eed290d4b061c1344650098a7ffa6b4ff4e1f1e380bde50
+  data.tar.gz: 7e06de56f50e30f957d341ba49fc87f4ea1126344ef48fe9c647232fb5704d1711888fcc17ca5fbabe7a2b6ac248ef0a2b2526ec9484c5a563a52d22ad9ccff5
data/gemspec_jars.rb CHANGED
@@ -5,8 +5,8 @@
   gem.requirements << "jar org.apache.logging.log4j:log4j-slf4j-impl, 2.6.2"
   gem.requirements << "jar org.apache.logging.log4j:log4j-api, 2.6.2"
   gem.requirements << "jar org.apache.logging.log4j:log4j-core, 2.6.2"
-  gem.requirements << "jar com.fasterxml.jackson.core:jackson-core, 2.7.3"
-  gem.requirements << "jar com.fasterxml.jackson.core:jackson-databind, 2.7.3"
-  gem.requirements << "jar com.fasterxml.jackson.core:jackson-annotations, 2.7.3"
-  gem.requirements << "jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.7.3"
-  gem.requirements << "jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.7.3"
+  gem.requirements << "jar com.fasterxml.jackson.core:jackson-core, 2.9.1"
+  gem.requirements << "jar com.fasterxml.jackson.core:jackson-databind, 2.9.1"
+  gem.requirements << "jar com.fasterxml.jackson.core:jackson-annotations, 2.9.1"
+  gem.requirements << "jar com.fasterxml.jackson.module:jackson-module-afterburner, 2.9.1"
+  gem.requirements << "jar com.fasterxml.jackson.dataformat:jackson-dataformat-cbor, 2.9.1"
data/lib/logstash-core/version.rb CHANGED
@@ -2,7 +2,11 @@
 
 # The version of logstash core gem.
 #
-# Note to authors: this should not include dashes because 'gem' barfs if
-# you include a dash in the version string.
-
-LOGSTASH_CORE_VERSION = "6.0.0-beta2"
+# sourced from a copy of the master versions.yml file, see logstash-core/logstash-core.gemspec
+if !defined?(ALL_VERSIONS)
+  require 'yaml'
+  ALL_VERSIONS = YAML.load_file(File.expand_path("../../versions-gem-copy.yml", File.dirname(__FILE__)))
+end
+if !defined?(LOGSTASH_CORE_VERSION)
+  LOGSTASH_CORE_VERSION = ALL_VERSIONS.fetch("logstash-core")
+end
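With this change the gem version is read from a bundled copy of the build's versions.yml instead of a hard-coded constant. A minimal sketch of the lookup, assuming a versions-gem-copy.yml that contains at least the logstash-core key (the real file may carry more entries):

    # versions-gem-copy.yml (assumed minimal content):
    #   ---
    #   logstash-core: 6.0.0
    require 'yaml'

    ALL_VERSIONS = YAML.load_file("versions-gem-copy.yml")
    LOGSTASH_CORE_VERSION = ALL_VERSIONS.fetch("logstash-core")  # => "6.0.0"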
data/lib/logstash-core_jars.rb CHANGED
@@ -2,25 +2,25 @@
 begin
   require 'jar_dependencies'
 rescue LoadError
-  require 'com/fasterxml/jackson/core/jackson-databind/2.7.3/jackson-databind-2.7.3.jar'
   require 'org/apache/logging/log4j/log4j-core/2.6.2/log4j-core-2.6.2.jar'
+  require 'com/fasterxml/jackson/core/jackson-databind/2.9.1/jackson-databind-2.9.1.jar'
   require 'org/apache/logging/log4j/log4j-api/2.6.2/log4j-api-2.6.2.jar'
   require 'org/slf4j/slf4j-api/1.7.21/slf4j-api-1.7.21.jar'
-  require 'com/fasterxml/jackson/core/jackson-annotations/2.7.3/jackson-annotations-2.7.3.jar'
+  require 'com/fasterxml/jackson/core/jackson-annotations/2.9.1/jackson-annotations-2.9.1.jar'
   require 'org/apache/logging/log4j/log4j-slf4j-impl/2.6.2/log4j-slf4j-impl-2.6.2.jar'
-  require 'com/fasterxml/jackson/module/jackson-module-afterburner/2.7.3/jackson-module-afterburner-2.7.3.jar'
-  require 'com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.7.3/jackson-dataformat-cbor-2.7.3.jar'
-  require 'com/fasterxml/jackson/core/jackson-core/2.7.3/jackson-core-2.7.3.jar'
+  require 'com/fasterxml/jackson/module/jackson-module-afterburner/2.9.1/jackson-module-afterburner-2.9.1.jar'
+  require 'com/fasterxml/jackson/dataformat/jackson-dataformat-cbor/2.9.1/jackson-dataformat-cbor-2.9.1.jar'
+  require 'com/fasterxml/jackson/core/jackson-core/2.9.1/jackson-core-2.9.1.jar'
 end
 
 if defined? Jars
-  require_jar( 'com.fasterxml.jackson.core', 'jackson-databind', '2.7.3' )
   require_jar( 'org.apache.logging.log4j', 'log4j-core', '2.6.2' )
+  require_jar( 'com.fasterxml.jackson.core', 'jackson-databind', '2.9.1' )
   require_jar( 'org.apache.logging.log4j', 'log4j-api', '2.6.2' )
   require_jar( 'org.slf4j', 'slf4j-api', '1.7.21' )
-  require_jar( 'com.fasterxml.jackson.core', 'jackson-annotations', '2.7.3' )
+  require_jar( 'com.fasterxml.jackson.core', 'jackson-annotations', '2.9.1' )
   require_jar( 'org.apache.logging.log4j', 'log4j-slf4j-impl', '2.6.2' )
-  require_jar( 'com.fasterxml.jackson.module', 'jackson-module-afterburner', '2.7.3' )
-  require_jar( 'com.fasterxml.jackson.dataformat', 'jackson-dataformat-cbor', '2.7.3' )
-  require_jar( 'com.fasterxml.jackson.core', 'jackson-core', '2.7.3' )
+  require_jar( 'com.fasterxml.jackson.module', 'jackson-module-afterburner', '2.9.1' )
+  require_jar( 'com.fasterxml.jackson.dataformat', 'jackson-dataformat-cbor', '2.9.1' )
+  require_jar( 'com.fasterxml.jackson.core', 'jackson-core', '2.9.1' )
 end
data/lib/logstash/agent.rb CHANGED
@@ -337,12 +337,13 @@ class LogStash::Agent
 
         unless action_result.successful?
           logger.error("Failed to execute action", :id => action.pipeline_id,
-                       :action_type => action_result.class, :message => action_result.message)
+                       :action_type => action_result.class, :message => action_result.message,
+                       :backtrace => action_result.backtrace)
         end
       rescue SystemExit => e
         converge_result.add(action, e)
       rescue Exception => e
-        logger.error("Failed to execute action", :action => action, :exception => e.class.name, :message => e.message)
+        logger.error("Failed to execute action", :action => action, :exception => e.class.name, :message => e.message, :backtrace => e.backtrace)
        converge_result.add(action, e)
       end
     end
data/lib/logstash/compiler/lscl.rb CHANGED
@@ -101,9 +101,21 @@ module LogStashCompilerLSCLGrammar; module LogStash; module Compiler; module LSC
       else
         [k,v]
       end
-    }.reduce({}) do |hash,kv|
-      k,v = kv
-      hash[k] = v
+    }.reduce({}) do |hash, kv|
+      k, v = kv
+      existing = hash[k]
+      if existing.nil?
+        hash[k] = v
+      elsif existing.kind_of?(::Hash)
+        # For legacy reasons, a config can contain multiple `AST::Attribute`s with the same name
+        # and a hash-type value (e.g., "match" in the grok filter), which are merged into a single
+        # hash value; e.g., `{"match" => {"baz" => "bar"}, "match" => {"foo" => "bulb"}}` is
+        # interpreted as `{"match" => {"baz" => "bar", "foo" => "blub"}}`.
+        # (NOTE: this bypasses `AST::Hash`'s ability to detect duplicate keys)
+        hash[k] = existing.merge(v)
+      else
+        hash[k] = existing + v
+      end
       hash
     end
 
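The merge behavior documented in the comment can be exercised with plain hashes standing in for the compiled attribute values (the keys and values here are illustrative, in the spirit of the grok example above):

    pairs = [
      ["match", {"baz" => "bar"}],
      ["match", {"foo" => "blub"}],
      ["tag_on_failure", ["_first"]],
      ["tag_on_failure", ["_second"]]
    ]
    merged = pairs.reduce({}) do |hash, (k, v)|
      existing = hash[k]
      if existing.nil?
        hash[k] = v
      elsif existing.kind_of?(::Hash)
        hash[k] = existing.merge(v)  # duplicate hash-valued attributes merge
      else
        hash[k] = existing + v       # duplicate array/string values concatenate
      end
      hash
    end
    # merged => {"match" => {"baz" => "bar", "foo" => "blub"},
    #            "tag_on_failure" => ["_first", "_second"]}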
data/lib/logstash/config/config_ast.rb CHANGED
@@ -77,8 +77,9 @@ module LogStash; module Config; module AST
     # of the output/filter function
     definitions << "define_singleton_method :#{type}_func do |event|"
     definitions << "  targeted_outputs = []" if type == "output"
-    definitions << "  events = [event]" if type == "filter"
-    definitions << "  @logger.debug? && @logger.debug(\"#{type} received\", \"event\" => event.to_hash)"
+    definitions << "  events = event" if type == "filter"
+    definitions << "  @logger.debug? && @logger.debug(\"#{type} received\", \"event\" => event.to_hash)" if type == "output"
+    definitions << "  @logger.debug? && events.each { |e| @logger.debug(\"#{type} received\", \"event\" => e.to_hash)}" if type == "filter"
 
     sections.select { |s| s.plugin_type.text_value == type }.each do |s|
       definitions << s.compile.split("\n", -1).map { |e| "  #{e}" }
data/lib/logstash/config/modules_common.rb CHANGED
@@ -65,6 +65,7 @@ module LogStash module Config
         pipeline_id = alt_name
         module_settings.set("pipeline.id", pipeline_id)
         LogStash::Modules::SettingsMerger.merge_cloud_settings(module_hash, module_settings)
+        LogStash::Modules::SettingsMerger.merge_kibana_auth!(module_hash)
         current_module.with_settings(module_hash)
         config_test = settings.get("config.test_and_exit")
         modul_setup = settings.get("modules_setup")
data/lib/logstash/config/source/local.rb CHANGED
@@ -71,7 +71,8 @@ module LogStash module Config module Source
         end
 
         config_string = ::File.read(file)
-
+        config_string.force_encoding("UTF-8")
+
         if config_string.valid_encoding?
           part = org.logstash.common.SourceWithMetadata.new("file", file, 0, 0, config_string)
           config_parts << part
data/lib/logstash/instrument/periodic_poller/dlq.rb CHANGED
@@ -10,10 +10,15 @@ module LogStash module Instrument module PeriodicPoller
     end
 
     def collect
-      _, pipeline = @agent.with_running_pipelines { |pipelines| pipelines.first }
-      unless pipeline.nil?
-        pipeline.collect_dlq_stats
+      pipelines = @agent.with_running_user_defined_pipelines {|pipelines| pipelines}
+      unless pipelines.nil?
+        pipelines.each {|_, pipeline|
+          unless pipeline.nil?
+            pipeline.collect_dlq_stats
+          end
+        }
       end
     end
   end
 end end end
+
data/lib/logstash/instrument/periodic_poller/pq.rb CHANGED
@@ -11,9 +11,13 @@ module LogStash module Instrument module PeriodicPoller
     end
 
     def collect
-      pipeline_id, pipeline = @agent.with_running_pipelines {|pipelines| pipelines.first }
-      unless pipeline.nil?
-        pipeline.collect_stats
+      pipelines = @agent.with_running_user_defined_pipelines {|pipelines| pipelines}
+      unless pipelines.nil?
+        pipelines.each {|_, pipeline|
+          unless pipeline.nil?
+            pipeline.collect_stats
+          end
+        }
       end
     end
   end
data/lib/logstash/logging/logger.rb CHANGED
@@ -73,7 +73,7 @@ module LogStash
       raise ArgumentError, "invalid level[#{level}] for logger[#{path}]"
     end
 
-    def self.initialize(config_location)
+    def self.reconfigure(config_location)
       @@config_mutex.synchronize do
         config_location_uri = URI.create(config_location)
         file_path = config_location_uri.path
@@ -92,6 +92,9 @@
       end
     end
 
+    # until dev_utils/rspec/spec_helper is changed, we need to have both methods
+    singleton_class.send(:alias_method, :initialize, :reconfigure)
+
     def self.get_logging_context
       return LoggerContext.getContext(false)
     end
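The rename retires the confusing self.initialize name, and the alias keeps the old entry point callable until the rspec helper mentioned in the comment catches up. A standalone illustration of the aliasing pattern (hypothetical class and method names):

    class Reloadable
      def self.reconfigure(location)
        "reconfigured from #{location}"
      end

      # the old name becomes an alias for the new method
      singleton_class.send(:alias_method, :reload, :reconfigure)
    end

    Reloadable.reconfigure("log4j2.properties")  # => "reconfigured from log4j2.properties"
    Reloadable.reload("log4j2.properties")       # same method under the old name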
data/lib/logstash/modules/kibana_client.rb CHANGED
@@ -24,9 +24,11 @@ module LogStash module Modules class KibanaClient
     end
   end
 
-  attr_reader :version
+  SCHEME_REGEX = /^https?$/
 
-  def initialize(settings)
+  attr_reader :version, :endpoint
+
+  def initialize(settings, client = nil) # allow for test mock injection
     @settings = settings
 
     client_options = {
@@ -38,8 +40,8 @@ module LogStash module Modules class KibanaClient
     }
 
     ssl_options = {}
-
-    if @settings["var.kibana.ssl.enabled"] == "true"
+    ssl_enabled = @settings["var.kibana.ssl.enabled"] == "true"
+    if ssl_enabled
       ssl_options[:verify] = @settings.fetch("var.kibana.ssl.verification_mode", "strict").to_sym
       ssl_options[:ca_file] = @settings.fetch("var.kibana.ssl.certificate_authority", nil)
       ssl_options[:client_cert] = @settings.fetch("var.kibana.ssl.certificate", nil)
@@ -48,9 +50,34 @@ module LogStash module Modules class KibanaClient
 
     client_options[:ssl] = ssl_options
 
-    @client = Manticore::Client.new(client_options)
     @host = @settings.fetch("var.kibana.host", "localhost:5601")
-    @scheme = @settings.fetch("var.kibana.scheme", "http")
+    implicit_scheme, colon_slash_slash, host = @host.partition("://")
+    explicit_scheme = @settings["var.kibana.scheme"]
+    @scheme = "http"
+    if !colon_slash_slash.empty?
+      if !explicit_scheme.nil? && implicit_scheme != explicit_scheme
+        # both are set and not the same - error
+        msg = sprintf("Detected differing Kibana host schemes as sourced from var.kibana.host: '%s' and var.kibana.scheme: '%s'", implicit_scheme, explicit_scheme)
+        raise ArgumentError.new(msg)
+      end
+      @scheme = implicit_scheme
+      @host = host
+    elsif !explicit_scheme.nil?
+      @scheme = explicit_scheme
+    end
+
+    if SCHEME_REGEX.match(@scheme).nil?
+      msg = sprintf("Kibana host scheme given is invalid, given value: '%s' - acceptable values: 'http', 'https'", @scheme)
+      raise ArgumentError.new(msg)
+    end
+
+    if ssl_enabled && @scheme != "https"
+      @scheme = "https"
+    end
+
+    @endpoint = "#{@scheme}://#{@host}"
+
+    @client = client || Manticore::Client.new(client_options)
     @http_options = {:headers => {'Content-Type' => 'application/json'}}
     username = @settings["var.kibana.username"]
     if username
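The scheme resolution above follows a precedence: a scheme embedded in var.kibana.host wins and must agree with var.kibana.scheme if both are given; otherwise var.kibana.scheme applies; otherwise http is assumed; and enabling SSL forces https. A free-standing sketch of the same logic (resolve_kibana_endpoint is a hypothetical helper, not part of the class):

    def resolve_kibana_endpoint(host_setting, explicit_scheme, ssl_enabled)
      implicit_scheme, sep, bare_host = host_setting.partition("://")
      scheme, host = "http", host_setting
      if !sep.empty?
        if !explicit_scheme.nil? && implicit_scheme != explicit_scheme
          raise ArgumentError, "differing schemes: '#{implicit_scheme}' vs '#{explicit_scheme}'"
        end
        scheme, host = implicit_scheme, bare_host
      elsif !explicit_scheme.nil?
        scheme = explicit_scheme
      end
      raise ArgumentError, "invalid scheme: '#{scheme}'" unless scheme =~ /\Ahttps?\z/
      scheme = "https" if ssl_enabled
      "#{scheme}://#{host}"
    end

    resolve_kibana_endpoint("https://kb.example:5601", nil, false)    # => "https://kb.example:5601"
    resolve_kibana_endpoint("kb.example:5601", nil, true)             # => "https://kb.example:5601"
    resolve_kibana_endpoint("http://kb.example:5601", "https", false) # raises ArgumentError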
@@ -77,7 +104,7 @@ module LogStash module Modules class KibanaClient
   end
 
   def version_parts
-    @version.split(/\.|\-/)
+    @version.split(/[.-]/)
   end
 
   def host_settings
@@ -119,6 +146,6 @@ module LogStash module Modules class KibanaClient
   end
 
   def full_url(relative)
-    "#{@scheme}://#{@host}/#{relative}"
+    "#{@endpoint}/#{relative}"
   end
 end end end
data/lib/logstash/modules/logstash_config.rb CHANGED
@@ -69,7 +69,7 @@ module LogStash module Modules class LogStashConfig
     password = @settings["var.elasticsearch.password"]
     lines = ["hosts => #{hosts}", "index => \"#{index}\""]
     lines.push(user ? "user => \"#{user}\"" : nil)
-    lines.push(password ? "password => \"#{password}\"" : nil)
+    lines.push(password ? "password => \"#{password.value}\"" : nil)
     lines.push(type_string ? "document_type => #{type_string}" : nil)
     lines.push("ssl => #{@settings.fetch('var.elasticsearch.ssl.enabled', false)}")
     if cacert = @settings["var.elasticsearch.ssl.certificate_authority"]
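The one-character fix matters because the password setting holds a LogStash::Util::Password wrapper whose to_s masks the secret, so interpolating the wrapper itself would render the mask into the generated config. An abbreviated sketch of the wrapper's behavior (modeled on logstash's util/password.rb, which this release also touches):

    class Password
      attr_reader :value

      def initialize(value)
        @value = value
      end

      def to_s
        "<password>"  # masked whenever the object is interpolated or logged
      end
    end

    password = Password.new("s3cret")
    "password => \"#{password}\""        # => password => "<password>"   (the bug)
    "password => \"#{password.value}\""  # => password => "s3cret"       (the fix)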
data/lib/logstash/modules/settings_merger.rb CHANGED
@@ -40,9 +40,10 @@ module LogStash module Modules module SettingsMerger
       settings_copy = LogStash::Util.deep_clone(module_settings)
     end
 
-    module_settings["var.kibana.scheme"] = "https"
+    module_settings["var.kibana.scheme"] = cloud_id.kibana_scheme
     module_settings["var.kibana.host"] = cloud_id.kibana_host
-    module_settings["var.elasticsearch.hosts"] = cloud_id.elasticsearch_host
+    # elasticsearch client does not use scheme, it URI parses the host setting
+    module_settings["var.elasticsearch.hosts"] = "#{cloud_id.elasticsearch_scheme}://#{cloud_id.elasticsearch_host}"
     unless cloud_auth.nil?
       module_settings["var.elasticsearch.username"] = cloud_auth.username
       module_settings["var.elasticsearch.password"] = cloud_auth.password
@@ -54,6 +55,11 @@ module LogStash module Modules module SettingsMerger
     end
   end
 
+  def merge_kibana_auth!(module_settings)
+    module_settings["var.kibana.username"] = module_settings["var.elasticsearch.username"] if module_settings["var.kibana.username"].nil?
+    module_settings["var.kibana.password"] = module_settings["var.elasticsearch.password"] if module_settings["var.kibana.password"].nil?
+  end
+
  def format_module_settings(settings_before, settings_after)
     output = []
     output << "-------- Module Settings ---------"
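The new merge_kibana_auth! helper backfills Kibana credentials from the Elasticsearch ones when none were given explicitly; this is what the added call in modules_common.rb above relies on. An illustrative invocation (the setting values are made up):

    settings = {
      "var.elasticsearch.username" => "elastic",
      "var.elasticsearch.password" => "changeme"
    }
    LogStash::Modules::SettingsMerger.merge_kibana_auth!(settings)
    settings["var.kibana.username"]  # => "elastic"
    settings["var.kibana.password"]  # => "changeme"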
data/lib/logstash/pipeline.rb CHANGED
@@ -498,13 +498,9 @@ module LogStash; class Pipeline < BasePipeline
   end
 
   def filter_batch(batch)
-    batch.each do |event|
-      return if @force_shutdown.true?
-
-      filter_func(event).each do |e|
-        #these are both original and generated events
-        batch.merge(e) unless e.cancelled?
-      end
+    filter_func(batch.to_a).each do |e|
+      #these are both original and generated events
+      batch.merge(e) unless e.cancelled?
     end
     @filter_queue_client.add_filtered_metrics(batch)
     @events_filtered.increment(batch.size)
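The substantive change (paired with the config_ast.rb change above) is that the generated filter_func now takes and returns an array of events, so filter_batch pushes a whole batch through the filter chain in one call instead of one call per event. A sketch of the new calling convention, with an illustrative stand-in body (the real body is compiled from the pipeline config):

    # Illustrative stub only: filter_func(events) accepts an array and
    # returns originals plus any events the filters generate (clone, split, ...).
    def filter_func(events)
      events  # the compiled version runs each configured filter over the array
    end

    filter_func(batch.to_a).each { |e| batch.merge(e) unless e.cancelled? }  # batched path
    filter_func([event]).each { |e| block.call(e) }                          # single-event path (Pipeline#filter)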
@@ -584,17 +580,12 @@ module LogStash; class Pipeline < BasePipeline
   end
 
   # otherwise, report error and restart
-  if @logger.debug?
-    @logger.error(I18n.t("logstash.pipeline.worker-error-debug",
-                         default_logging_keys(
-                           :plugin => plugin.inspect,
-                           :error => e.message,
-                           :exception => e.class,
-                           :stacktrace => e.backtrace.join("\n"))))
-  else
-    @logger.error(I18n.t("logstash.pipeline.worker-error",
-                         default_logging_keys(:plugin => plugin.inspect, :error => e.message)))
-  end
+  @logger.error(I18n.t("logstash.pipeline.worker-error-debug",
+                       default_logging_keys(
+                         :plugin => plugin.inspect,
+                         :error => e.message,
+                         :exception => e.class,
+                         :stacktrace => e.backtrace.join("\n"))))
 
   # Assuming the failure that caused this exception is transient,
   # let's sleep for a bit and execute #run again
@@ -667,7 +658,7 @@ module LogStash; class Pipeline < BasePipeline
   def filter(event, &block)
     maybe_setup_out_plugins
     # filter_func returns all filtered events, including cancelled ones
-    filter_func(event).each {|e| block.call(e)}
+    filter_func([event]).each {|e| block.call(e)}
   end
 
   # perform filters flush and yield flushed event to the passed block
data/lib/logstash/pipeline_action/stop.rb CHANGED
@@ -14,6 +14,7 @@ module LogStash module PipelineAction
   def execute(agent, pipelines)
     pipeline = pipelines[pipeline_id]
     pipeline.shutdown { LogStash::ShutdownWatcher.start(pipeline) }
+    pipeline.thread.join
     pipelines.delete(pipeline_id)
     # If we reach this part of the code we have succeeded because
     # the shutdown call will block.
data/lib/logstash/runner.rb CHANGED
@@ -249,7 +249,10 @@ class LogStash::Runner < Clamp::StrictCommand
     java.lang.System.setProperty("ls.log.level", setting("log.level"))
     unless java.lang.System.getProperty("log4j.configurationFile")
       log4j_config_location = ::File.join(setting("path.settings"), "log4j2.properties")
-      LogStash::Logging::Logger::initialize("file:///" + log4j_config_location)
+
+      # Windows safe way to produce a file: URI.
+      file_schema = "file://" + (LogStash::Environment.windows? ? "/" : "")
+      LogStash::Logging::Logger::reconfigure(URI.join(file_schema + File.absolute_path(log4j_config_location)).to_s)
     end
     # override log level that may have been introduced from a custom log4j config file
     LogStash::Logging::Logger::configure_logging(setting("log.level"))
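The comment's point: a file: URI needs three slashes before a Windows path, which begins with a drive letter, while a POSIX absolute path begins with "/" and so supplies the third slash itself. Worked examples of the concatenation (the paths are illustrative):

    require "uri"

    # POSIX: file_schema == "file://" and the absolute path starts with "/"
    URI.join("file://" + "/etc/logstash/log4j2.properties").to_s
    # => "file:///etc/logstash/log4j2.properties"

    # Windows: file_schema == "file:///" and the absolute path starts with a drive letter
    URI.join("file:///" + "C:/logstash/config/log4j2.properties").to_s
    # => "file:///C:/logstash/config/log4j2.properties"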
data/lib/logstash/util/cloud_setting_id.rb CHANGED
@@ -3,8 +3,26 @@ require "logstash/namespace"
 require "base64"
 
 module LogStash module Util class CloudSettingId
-  attr_reader :original, :decoded, :label, :elasticsearch_host, :kibana_host
 
+  def self.cloud_id_encode(*args)
+    Base64.urlsafe_encode64(args.join("$"))
+  end
+  DOT_SEPARATOR = "."
+  CLOUD_PORT = ":443"
+
+  attr_reader :original, :decoded, :label, :elasticsearch_host, :elasticsearch_scheme, :kibana_host, :kibana_scheme
+
+  # The constructor is expecting a 'cloud.id', a string in 2 variants.
+  # 1 part example: 'dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy'
+  # 2 part example: 'foobar:dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy'
+  # The two part variant has a 'label' prepended with a colon separator. The label is not encoded.
+  # The 1 part (or second section of the 2 part variant) is base64 encoded.
+  # The original string before encoding has three segments separated by a dollar sign.
+  # e.g. 'us-east-1.aws.found.io$notareal$identifier'
+  # The first segment is the cloud base url, e.g. 'us-east-1.aws.found.io'
+  # The second segment is the elasticsearch host identifier, e.g. 'notareal'
+  # The third segment is the kibana host identifier, e.g. 'identifier'
+  # The 'cloud.id' value decoded into the #attr_reader ivars.
   def initialize(value)
     return if value.nil?
 
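Working the comment's 1-part example through by hand shows how the three segments become hosts (both strings below come straight from the comment; the resulting host values follow the '<segment>.<cloud base url>:443' construction in the next hunk):

    require "base64"

    Base64.urlsafe_decode64("dXMtZWFzdC0xLmF3cy5mb3VuZC5pbyRub3RhcmVhbCRpZGVudGlmaWVy")
    # => "us-east-1.aws.found.io$notareal$identifier"
    #     cloud base url         es segment kibana segment

    # Per the constructor below:
    #   elasticsearch_host => "notareal.us-east-1.aws.found.io:443"
    #   kibana_host        => "identifier.us-east-1.aws.found.io:443"
    #   both schemes       => "https"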
@@ -12,27 +30,43 @@ module LogStash module Util class CloudSettingId
       raise ArgumentError.new("Cloud Id must be String. Received: #{value.class}")
     end
     @original = value
-    @label, sep, last = value.partition(":")
-    if last.empty?
+    @label, colon, encoded = @original.partition(":")
+    if encoded.empty?
       @decoded = Base64.urlsafe_decode64(@label) rescue ""
       @label = ""
     else
-      @decoded = Base64.urlsafe_decode64(last) rescue ""
+      @decoded = Base64.urlsafe_decode64(encoded) rescue ""
     end
+
+    @decoded = @decoded.encode(Encoding::UTF_8, :invalid => :replace, :undef => :replace)
+
     unless @decoded.count("$") == 2
-      raise ArgumentError.new("Cloud Id does not decode. Received: \"#{@original}\".")
+      raise ArgumentError.new("Cloud Id does not decode. You may need to enable Kibana in the Cloud UI. Received: \"#{@decoded}\".")
     end
-    parts = @decoded.split("$")
-    if parts.any?(&:empty?)
-      raise ArgumentError.new("Cloud Id, after decoding, is invalid. Format: '<part1>$<part2>$<part3>'. Received: \"#{@decoded}\".")
+
+    segments = @decoded.split("$")
+    if segments.any?(&:empty?)
+      raise ArgumentError.new("Cloud Id, after decoding, is invalid. Format: '<segment1>$<segment2>$<segment3>'. Received: \"#{@decoded}\".")
+    end
+    cloud_base = segments.shift
+    cloud_host = "#{DOT_SEPARATOR}#{cloud_base}#{CLOUD_PORT}"
+
+    @elasticsearch_host, @kibana_host = segments
+    if @elasticsearch_host == "undefined"
+      raise ArgumentError.new("Cloud Id, after decoding, elasticsearch segment is 'undefined', literally.")
+    end
+    @elasticsearch_scheme = "https"
+    @elasticsearch_host.concat(cloud_host)
+
+    if @kibana_host == "undefined"
+      raise ArgumentError.new("Cloud Id, after decoding, the kibana segment is 'undefined', literally. You may need to enable Kibana in the Cloud UI.")
     end
-    cloud_host, es_server, kb_server = parts
-    @elasticsearch_host = sprintf("%s.%s:443", es_server, cloud_host)
-    @kibana_host = sprintf("%s.%s:443", kb_server, cloud_host)
+    @kibana_scheme = "https"
+    @kibana_host.concat(cloud_host)
   end
 
   def to_s
-    @original.to_s
+    @decoded.to_s
   end
 
   def inspect