logstash-core 6.0.0.alpha2-java → 6.0.0.beta1-java
This diff represents the content of publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- checksums.yaml +5 -5
- data/gemspec_jars.rb +6 -4
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/logstash-core.rb +2 -2
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash-core_jars.rb +14 -10
- data/lib/logstash/agent.rb +4 -2
- data/lib/logstash/api/commands/default_metadata.rb +1 -1
- data/lib/logstash/api/commands/hot_threads_reporter.rb +8 -2
- data/lib/logstash/api/commands/node.rb +2 -2
- data/lib/logstash/api/commands/stats.rb +2 -2
- data/lib/logstash/bootstrap_check/bad_ruby.rb +2 -2
- data/lib/logstash/bootstrap_check/default_config.rb +2 -3
- data/lib/logstash/compiler.rb +12 -12
- data/lib/logstash/compiler/lscl.rb +17 -7
- data/lib/logstash/compiler/treetop_monkeypatches.rb +1 -0
- data/lib/logstash/config/config_ast.rb +11 -1
- data/lib/logstash/config/mixin.rb +5 -0
- data/lib/logstash/config/modules_common.rb +101 -0
- data/lib/logstash/config/source/base.rb +75 -0
- data/lib/logstash/config/source/local.rb +52 -50
- data/lib/logstash/config/source/modules.rb +55 -0
- data/lib/logstash/config/source/multi_local.rb +54 -10
- data/lib/logstash/config/source_loader.rb +1 -0
- data/lib/logstash/config/string_escape.rb +27 -0
- data/lib/logstash/elasticsearch_client.rb +142 -0
- data/lib/logstash/environment.rb +5 -1
- data/lib/logstash/event.rb +0 -1
- data/lib/logstash/instrument/global_metrics.rb +13 -0
- data/lib/logstash/instrument/metric_store.rb +16 -13
- data/lib/logstash/instrument/metric_type/counter.rb +6 -18
- data/lib/logstash/instrument/metric_type/gauge.rb +6 -12
- data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
- data/lib/logstash/instrument/periodic_pollers.rb +3 -1
- data/lib/logstash/logging/logger.rb +43 -14
- data/lib/logstash/modules/cli_parser.rb +74 -0
- data/lib/logstash/modules/elasticsearch_config.rb +22 -0
- data/lib/logstash/modules/elasticsearch_importer.rb +37 -0
- data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
- data/lib/logstash/modules/file_reader.rb +36 -0
- data/lib/logstash/modules/kibana_base.rb +24 -0
- data/lib/logstash/modules/kibana_client.rb +122 -0
- data/lib/logstash/modules/kibana_config.rb +125 -0
- data/lib/logstash/modules/kibana_dashboards.rb +36 -0
- data/lib/logstash/modules/kibana_importer.rb +17 -0
- data/lib/logstash/modules/kibana_resource.rb +10 -0
- data/lib/logstash/modules/kibana_settings.rb +40 -0
- data/lib/logstash/modules/logstash_config.rb +120 -0
- data/lib/logstash/modules/resource_base.rb +38 -0
- data/lib/logstash/modules/scaffold.rb +50 -0
- data/lib/logstash/modules/settings_merger.rb +23 -0
- data/lib/logstash/modules/util.rb +17 -0
- data/lib/logstash/namespace.rb +1 -0
- data/lib/logstash/pipeline.rb +66 -27
- data/lib/logstash/pipeline_settings.rb +1 -0
- data/lib/logstash/plugins/registry.rb +1 -0
- data/lib/logstash/runner.rb +47 -3
- data/lib/logstash/settings.rb +20 -1
- data/lib/logstash/util/dead_letter_queue_manager.rb +1 -1
- data/lib/logstash/util/safe_uri.rb +146 -11
- data/lib/logstash/util/thread_dump.rb +4 -3
- data/lib/logstash/util/wrapped_acked_queue.rb +28 -24
- data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -20
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +56 -1
- data/logstash-core.gemspec +6 -4
- data/spec/logstash/agent/converge_spec.rb +2 -2
- data/spec/logstash/agent_spec.rb +11 -3
- data/spec/logstash/api/modules/logging_spec.rb +13 -7
- data/spec/logstash/api/modules/node_plugins_spec.rb +23 -5
- data/spec/logstash/api/modules/node_spec.rb +17 -15
- data/spec/logstash/api/modules/node_stats_spec.rb +0 -1
- data/spec/logstash/api/modules/plugins_spec.rb +40 -9
- data/spec/logstash/api/modules/root_spec.rb +0 -1
- data/spec/logstash/api/rack_app_spec.rb +2 -1
- data/spec/logstash/compiler/compiler_spec.rb +54 -7
- data/spec/logstash/config/config_ast_spec.rb +47 -8
- data/spec/logstash/config/mixin_spec.rb +14 -2
- data/spec/logstash/config/pipeline_config_spec.rb +7 -7
- data/spec/logstash/config/source/local_spec.rb +5 -2
- data/spec/logstash/config/source/multi_local_spec.rb +56 -10
- data/spec/logstash/config/source_loader_spec.rb +1 -1
- data/spec/logstash/config/string_escape_spec.rb +24 -0
- data/spec/logstash/event_spec.rb +9 -0
- data/spec/logstash/filters/base_spec.rb +1 -1
- data/spec/logstash/instrument/metric_store_spec.rb +2 -3
- data/spec/logstash/instrument/metric_type/counter_spec.rb +0 -12
- data/spec/logstash/instrument/metric_type/gauge_spec.rb +1 -8
- data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
- data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +0 -9
- data/spec/logstash/legacy_ruby_timestamp_spec.rb +19 -14
- data/spec/logstash/modules/cli_parser_spec.rb +129 -0
- data/spec/logstash/modules/logstash_config_spec.rb +56 -0
- data/spec/logstash/modules/scaffold_spec.rb +239 -0
- data/spec/logstash/pipeline_dlq_commit_spec.rb +1 -1
- data/spec/logstash/pipeline_spec.rb +87 -20
- data/spec/logstash/runner_spec.rb +122 -5
- data/spec/logstash/setting_spec.rb +2 -2
- data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
- data/spec/logstash/timestamp_spec.rb +8 -2
- data/spec/logstash/util/safe_uri_spec.rb +16 -0
- data/spec/logstash/util/wrapped_acked_queue_spec.rb +63 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
- data/spec/support/helpers.rb +1 -1
- data/spec/support/matchers.rb +21 -4
- metadata +102 -19
- data/lib/logstash/instrument/metric_type/base.rb +0 -31
- data/lib/logstash/program.rb +0 -14
- data/lib/logstash/string_interpolation.rb +0 -18
--- /dev/null
+++ data/lib/logstash/modules/file_reader.rb
@@ -0,0 +1,36 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require "logstash/json"
+
+module LogStash module Modules class FileReader
+  # stub these methods for testing
+  include LogStash::Util::Loggable
+
+  def self.read(path)
+    begin
+      ::File.read(path)
+    rescue => e
+      logger.error("Error when reading file from path", :path => path)
+      ""
+    end
+  end
+
+  def self.read_json(path)
+    json = read(path)
+    begin
+      LogStash::Json.load(json)
+    rescue => e
+      logger.error("Error when parsing json from path", :path => path)
+      return {}
+    end
+  end
+
+  def self.glob(path)
+    files = Dir.glob(path)
+    if files.empty?
+      logger.warn("No files found for glob", :pattern => path)
+    end
+    files
+  end
+end end end
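For orientation, a minimal usage sketch (not part of the diff; the paths are invented). Failures are swallowed: read falls back to "", read_json to {}, and glob merely warns when nothing matches.

    # Hypothetical paths; in the files below they are produced by KibanaConfig#dynamic.
    filenames = LogStash::Modules::FileReader.read_json("/path/to/mymodule/kibana/6.x/dashboard/mymodule.json")
    matches   = LogStash::Modules::FileReader.glob("/path/to/mymodule/kibana/6.x/**/*.json")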
--- /dev/null
+++ data/lib/logstash/modules/kibana_base.rb
@@ -0,0 +1,24 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/json"
+
+module LogStash module Modules class KibanaBase
+  attr_reader :import_path, :content
+
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    raise NotImplementedError, "#{self.class.name} needs to implement `#import`"
+  end
+
+  def to_s
+    import_path
+  end
+
+  def content_as_object
+    return content unless content.is_a?(String)
+    LogStash::Json.load(content) rescue nil
+  end
+end end end
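KibanaBase is the abstract importable consumed by KibanaImporter further down in this diff; a subclass only has to supply #import. An illustrative subclass (not shipped in the package):

    class MyImportable < LogStash::Modules::KibanaBase
      # client is a KibanaClient; import_path and content come from KibanaBase#initialize
      def import(client)
        client.post(import_path, content)
      end
    end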
--- /dev/null
+++ data/lib/logstash/modules/kibana_client.rb
@@ -0,0 +1,122 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require "logstash/json"
+require "manticore/client"
+
+module LogStash module Modules class KibanaClient
+  include LogStash::Util::Loggable
+
+  class Response
+    # to create a custom response with body as an Object (Hash or Array)
+    attr_reader :status, :body, :headers
+    def initialize(status, body, headers={})
+      @status, @body, @headers = status, body, headers
+      @body = body.is_a?(String) ? LogStash::Json.load(body) : body
+    end
+
+    def succeeded?
+      @status >= 200 && @status < 300
+    end
+
+    def failed?
+      !succeeded?
+    end
+  end
+
+  attr_reader :version
+
+  def initialize(settings)
+    @settings = settings
+
+    client_options = {
+      request_timeout: 5,
+      connect_timeout: 5,
+      socket_timeout: 5,
+      pool_max: 10,
+      pool_max_per_route: 2
+    }
+
+    ssl_options = {}
+
+    if @settings["var.kibana.ssl.enabled"] == "true"
+      ssl_options[:verify] = @settings.fetch("var.kibana.ssl.verification_mode", "strict").to_sym
+      ssl_options[:ca_file] = @settings.fetch("var.kibana.ssl.certificate_authority", nil)
+      ssl_options[:client_cert] = @settings.fetch("var.kibana.ssl.certificate", nil)
+      ssl_options[:client_key] = @settings.fetch("var.kibana.ssl.key", nil)
+    end
+
+    client_options[:ssl] = ssl_options
+
+    @client = Manticore::Client.new(client_options)
+    @host = @settings.fetch("var.kibana.host", "localhost:5601")
+    username = @settings["var.kibana.username"]
+    password = @settings["var.kibana.password"]
+
+    @scheme = @settings.fetch("var.kibana.scheme", "http")
+    @http_options = {:headers => {'Content-Type' => 'application/json'}}
+    if username
+      @http_options[:headers]['Authorization'] = 'Basic ' + Base64.encode64( "#{username}:#{password}" ).chomp
+    end
+
+    # e.g. {"name":"Elastics-MacBook-Pro.local","version":{"number":"6.0.0-beta1","build_hash":"41e69","build_number":15613,"build_snapshot":true}..}
+    @version = "0.0.0"
+    response = get("api/status")
+    if response.succeeded?
+      status = response.body
+      if status["version"].is_a?(Hash)
+        @version = status["version"]["number"]
+        if status["version"]["build_snapshot"]
+          @version.concat("-SNAPSHOT")
+        end
+      end
+    end
+    @http_options[:headers]['kbn-version'] = @version
+  end
+
+  def version_parts
+    @version.split(/\.|\-/)
+  end
+
+  def host_settings
+    "[\"#{@host}\"]"
+  end
+
+  def get(relative_path)
+    # e.g. api/kibana/settings
+    safely(:get, relative_path, @http_options)
+  end
+
+  # content will be converted to a json string
+  def post(relative_path, content, headers = nil)
+
+    body = content.is_a?(String) ? content : LogStash::Json.dump(content)
+    options = {:body => body}.merge(headers || @http_options)
+    safely(:post, relative_path, options)
+  end
+
+  def head(relative_path)
+    safely(:head, relative_path, @http_options)
+  end
+
+  def can_connect?
+    head("api/status").succeeded?
+  end
+
+  private
+
+  def safely(method_sym, relative_path, options = {})
+    begin
+      resp = @client.http(method_sym, full_url(relative_path), options).call
+      Response.new(resp.code, resp.body, resp.headers)
+    rescue Manticore::ManticoreException => e
+      logger.error("Error when executing Kibana client request", :error => e)
+      body = {"statusCode" => 0, "error" => e.message}
+      Response.new(0, body, {})
+    end
+  end
+
+  def full_url(relative)
+    "#{@scheme}://#{@host}/#{relative}"
+  end
+end end end
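A hedged usage sketch, assuming only the var.kibana.* settings keys read in initialize above (none of this appears in the diff itself):

    settings = {
      "var.kibana.host"   => "localhost:5601",
      "var.kibana.scheme" => "http"
      # "var.kibana.username" / "var.kibana.password" would add a Basic Authorization header
    }

    client = LogStash::Modules::KibanaClient.new(settings)  # probes GET api/status for the Kibana version
    if client.can_connect?                                  # HEAD api/status
      response = client.get("api/kibana/settings")
      puts response.body if response.succeeded?
    end

Connection failures are not raised: safely rescues Manticore exceptions and returns a Response with status 0, so callers only ever deal with succeeded?/failed?.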
--- /dev/null
+++ data/lib/logstash/modules/kibana_config.rb
@@ -0,0 +1,125 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+
+require_relative "file_reader"
+require_relative "kibana_settings"
+require_relative "kibana_dashboards"
+require_relative "kibana_resource"
+
+module LogStash module Modules class KibanaConfig
+  include LogStash::Util::Loggable
+
+  ALLOWED_DIRECTORIES = ["search", "visualization"]
+  METRICS_MAX_BUCKETS = (24 * 60 * 60).freeze # 24 hours of events/sec buckets.
+  attr_reader :index_name # not used when importing via kibana but for BWC with ElastsearchConfig
+
+  # We name it `modul` here because `module` has meaning in Ruby.
+  def initialize(modul, settings)
+    build_versioned_directory(modul)
+    @name = modul.module_name
+    @settings = settings
+    @index_name = "kibana"
+    @pattern_name = "#{@name}-*"
+    @metrics_max_buckets = @settings.fetch("dashboards.metrics_max_buckets", METRICS_MAX_BUCKETS).to_i
+    @kibana_settings = [
+      KibanaSettings::Setting.new("defaultIndex", @pattern_name),
+      KibanaSettings::Setting.new("metrics:max_buckets", @metrics_max_buckets)
+    ]
+  end
+
+  def dashboards
+    # there can be more than one dashboard to load
+    filenames = FileReader.read_json(dynamic("dashboard"))
+    filenames.map do |filename|
+      KibanaResource.new(@index_name, "dashboard", dynamic("dashboard", filename))
+    end
+  end
+
+  def index_pattern
+    [KibanaResource.new(@index_name, "index-pattern", dynamic("index-pattern"),nil, @pattern_name)]
+  end
+
+  def resources
+    list = index_pattern
+    dashboards.each do |board|
+      list << board
+      extract_panels_into(board, list)
+    end
+    list.concat(extract_saved_searches_into(list))
+    [
+      KibanaSettings.new("api/kibana/settings", @kibana_settings),
+      KibanaDashboards.new("api/kibana/dashboards/import", list)
+    ]
+  end
+
+  private
+
+  def build_versioned_directory(modul)
+    # try to detect which directory holds the config for the kibana version
+    base_dir = ::File.join(modul.directory, "kibana")
+    maj, min, patch = modul.kibana_version_parts
+    version_dir = "#{maj}.#{min}.#{patch}"
+    @directory = ::File.join(base_dir, version_dir)
+    return if ::File.directory?(@directory)
+    version_dir = "#{maj}.#{min}.x"
+    @directory = ::File.join(base_dir, version_dir)
+    return if ::File.directory?(@directory)
+    version_dir = "#{maj}.x"
+    @directory = ::File.join(base_dir, version_dir)
+    unless ::File.directory?(@directory)
+      logger.error("Cannot find kibana version sub-directory", :module => @name, :base_directory => base_dir)
+    end
+  end
+
+  def dynamic(dynamic_folder, filename = @name)
+    ::File.join(@directory, dynamic_folder, "#{filename}.json")
+  end
+
+  def extract_panels_into(dashboard, list)
+    dash = dashboard.content_as_object
+
+    if !dash.is_a?(Hash)
+      logger.warn("Kibana dashboard JSON is not an Object", :module => @name)
+      return
+    end
+
+    panelsjson = dash["panelsJSON"]
+
+    if panelsjson.nil?
+      logger.info("No panelJSON key found in kibana dashboard", :module => @name)
+      return
+    end
+
+    begin
+      panels = LogStash::Json.load(panelsjson)
+    rescue => e
+      logger.error("JSON parse error when reading kibana panelsJSON", :module => @name)
+      return
+    end
+
+    panels.each do |panel|
+      panel_type = panel["type"]
+      if ALLOWED_DIRECTORIES.member?(panel_type)
+        list << KibanaResource.new(@index_name, panel_type, dynamic(panel_type, panel["id"]))
+      else
+        logger.warn("panelJSON contained unknown type", :type => panel_type)
+      end
+    end
+  end
+
+  def extract_saved_searches_into(list)
+    result = [] # must not add to list while iterating
+    list.each do |resource|
+      content = resource.content_as_object
+      next if content.nil?
+      next unless content.keys.include?("savedSearchId")
+      saved_search = content["savedSearchId"]
+      next if saved_search.nil?
+      ss_resource = KibanaResource.new(@index_name, "search", dynamic("search", saved_search))
+      next if list.member?(ss_resource) || result.member?(ss_resource)
+      result << ss_resource
+    end
+    result
+  end
+end end end
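Putting the class above in context, a small assumed sketch; the module object and paths are placeholders, and with a real module directory on disk resources boils the Kibana assets down to exactly two importables:

    # Hypothetical stand-in for the module object normally passed in.
    KibanaModuleStub = Struct.new(:directory, :module_name, :kibana_version_parts)
    modul = KibanaModuleStub.new("/path/to/mymodule/configuration", "mymodule", ["6", "0", "0"])

    kibana_config = LogStash::Modules::KibanaConfig.new(modul, "dashboards.metrics_max_buckets" => 1000)
    kibana_config.resources
    # => [KibanaSettings for "api/kibana/settings" (defaultIndex, metrics:max_buckets),
    #     KibanaDashboards for "api/kibana/dashboards/import" (index-pattern, dashboards, panels, saved searches)]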
--- /dev/null
+++ data/lib/logstash/modules/kibana_dashboards.rb
@@ -0,0 +1,36 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require_relative "kibana_base"
+
+module LogStash module Modules class KibanaDashboards < KibanaBase
+  include LogStash::Util::Loggable
+
+  attr_reader :import_path, :content
+
+  # content is a list of kibana file resources
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    # e.g. curl "http://localhost:5601/api/kibana/dashboards/import"
+    # extract and prepare all objects
+    objects = []
+    content.each do |resource|
+      hash = {
+        "id" => resource.content_id,
+        "type" => resource.content_type,
+        "version" => 1,
+        "attributes" => resource.content_as_object
+      }
+      objects << hash
+    end
+    body = {"version": client.version, "objects": objects}
+    response = client.post(import_path, body)
+    if response.failed?
+      logger.error("Attempted POST failed", :url_path => import_path, :response => response.body)
+    end
+    response
+  end
+end end end
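For reference, the request body assembled above looks roughly like this for a hypothetical module; the IDs and attributes are placeholders, the real values come from the module's KibanaResource files:

    body = {
      "version" => "6.0.0",          # client.version, also sent as the kbn-version header
      "objects" => [
        { "id" => "mymodule", "type" => "dashboard",     "version" => 1, "attributes" => {} },
        { "id" => "panel-1",  "type" => "visualization", "version" => 1, "attributes" => {} }
      ]
    }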
--- /dev/null
+++ data/lib/logstash/modules/kibana_importer.rb
@@ -0,0 +1,17 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+
+module LogStash module Modules class KibanaImporter
+  include LogStash::Util::Loggable
+
+  def initialize(client)
+    @client = client
+  end
+
+  def put(via_kibana)
+    path = via_kibana.import_path
+    logger.debug("Attempting POST", :url_path => path, :content => via_kibana.content)
+    via_kibana.import(@client)
+  end
+end end end
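A sketch of how the importer is meant to be driven, reusing the settings hash and kibana_config from the sketches above (assumed wiring, not taken from the package):

    client   = LogStash::Modules::KibanaClient.new(settings)
    importer = LogStash::Modules::KibanaImporter.new(client)

    # kibana_config.resources is [KibanaSettings, KibanaDashboards];
    # put simply delegates to resource.import(client).
    kibana_config.resources.each { |resource| importer.put(resource) }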
--- /dev/null
+++ data/lib/logstash/modules/kibana_settings.rb
@@ -0,0 +1,40 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require_relative "kibana_base"
+
+module LogStash module Modules class KibanaSettings < KibanaBase
+  include LogStash::Util::Loggable
+
+  class Setting
+    attr_reader :name, :value
+    def initialize(name, value)
+      @name, @value = name, value
+    end
+  end
+
+  attr_reader :import_path, :content
+
+  # content is an array of Setting required for this module
+  def initialize(import_path, content)
+    @import_path, @content = import_path, content
+  end
+
+  def import(client)
+    # e.g. curl "http://localhost:5601/api/kibana/settings"
+    # 6.0.0-beta1 -> {"settings":{"buildNum":{"userValue":15613},"defaultIndex":{"userValue":"arcsight-*"}}}
+    # 5.4 -> {"settings":{"defaultIndex":{"userValue":"cef-*"},"metrics:max_buckets":{"userValue":"600000"}}}
+    # array of Setting objects
+    # The POST api body { "changes": { "defaultIndex": "arcsight-*", "metrics:max_buckets": "400" } }
+    settings = {}
+    content.each do |setting|
+      settings[setting.name] = "#{setting.value}"
+    end
+    body = {"changes" => settings}
+    response = client.post(import_path, body)
+    if response.failed?
+      logger.error("Attempted POST failed", :url_path => import_path, :response => response.body)
+    end
+    response
+  end
+end end end
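As a concrete illustration of the comments above (values assumed), two Setting objects serialize into the changes body like so; note that every value is string-interpolated:

    settings = [
      LogStash::Modules::KibanaSettings::Setting.new("defaultIndex", "mymodule-*"),
      LogStash::Modules::KibanaSettings::Setting.new("metrics:max_buckets", 86400)
    ]
    importable = LogStash::Modules::KibanaSettings.new("api/kibana/settings", settings)
    # importable.import(client) would POST:
    #   {"changes" => {"defaultIndex" => "mymodule-*", "metrics:max_buckets" => "86400"}}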
--- /dev/null
+++ data/lib/logstash/modules/logstash_config.rb
@@ -0,0 +1,120 @@
+# encoding: utf-8
+require "logstash/namespace"
+require_relative "file_reader"
+require "logstash/settings"
+
+module LogStash module Modules class LogStashConfig
+  # We name it `modul` here because `module` has meaning in Ruby.
+  def initialize(modul, settings)
+    @directory = ::File.join(modul.directory, "logstash")
+    @name = modul.module_name
+    @settings = settings
+  end
+
+  def template
+    ::File.join(@directory, "#{@name}.conf.erb")
+  end
+
+  def configured_inputs(default = [], aliases = {})
+    name = "var.inputs"
+    values = get_setting(LogStash::Setting::SplittableStringArray.new(name, String, default))
+
+    aliases.each { |k,v| values << v if values.include?(k) }
+    aliases.invert.each { |k,v| values << v if values.include?(k) }
+    values.flatten.uniq
+  end
+
+  def alias_settings_keys!(aliases)
+    aliased_settings = alias_matching_keys(aliases, @settings)
+    @settings = alias_matching_keys(aliases.invert, aliased_settings)
+  end
+
+  def array_to_string(array)
+    "[#{array.collect { |i| "'#{i}'" }.join(", ")}]"
+  end
+
+  def csv_string(array)
+    "'#{array.join(',')}'"
+  end
+
+  def get_setting(setting_class)
+    raw_value = @settings[setting_class.name]
+    # If we dont check for NIL, the Settings class will try to coerce the value
+    # and most of the it will fails when a NIL value is explicitely set.
+    # This will be fixed once we wrap the plugins settings into a Settings class
+    setting_class.set(raw_value) unless raw_value.nil?
+    setting_class.value
+  end
+
+  def setting(name, default)
+    # by default we use the more permissive setting which is a `NullableString`
+    # This is fine because the end format of the logstash configuration is a string representation
+    # of the pipeline. There is a good reason why I think we should use the settings classes, we
+    # can `preprocess` a template and generate a configuration from the defined settings
+    # validate the values and replace them in the template.
+    case default
+    when String
+      get_setting(LogStash::Setting::NullableString.new(name, default.to_s))
+    when Numeric
+      get_setting(LogStash::Setting::Numeric.new(name, default))
+    else
+      get_setting(LogStash::Setting::NullableString.new(name, default.to_s))
+    end
+  end
+
+  def elasticsearch_output_config(type_string = nil)
+    hosts = array_to_string(get_setting(LogStash::Setting::SplittableStringArray.new("var.elasticsearch.hosts", String, ["localhost:9200"])))
+    index = "#{@name}-#{setting("var.elasticsearch.index_suffix", "%{+YYYY.MM.dd}")}"
+    user = @settings["var.elasticsearch.username"]
+    password = @settings["var.elasticsearch.password"]
+    lines = ["hosts => #{hosts}", "index => \"#{index}\""]
+    lines.push(user ? "user => \"#{user}\"" : nil)
+    lines.push(password ? "password => \"#{password}\"" : nil)
+    lines.push(type_string ? "document_type => #{type_string}" : nil)
+    lines.push("ssl => #{@settings.fetch('var.elasticsearch.ssl.enabled', false)}")
+    if cacert = @settings["var.elasticsearch.ssl.certificate_authority"]
+      lines.push("cacert => \"#{cacert}\"") if cacert
+    end
+    # NOTE: the first line should be indented in the conf.erb
+    <<-CONF
+    elasticsearch {
+    #{lines.compact.join("\n    ")}
+    manage_template => false
+    }
+    CONF
+  end
+
+  def config_string
+    # process the template and settings
+    # send back as a string
+    renderer = ERB.new(FileReader.read(template))
+    renderer.result(binding)
+  end
+
+  private
+  # For a first version we are copying the values of the original hash,
+  # this might become problematic if we users changes the values of the
+  # settings in the template, which could result in an inconsistent view of the original data
+  #
+  # For v1 of the feature I think its an OK compromise, v2 we have a more advanced hash that
+  # support alias.
+  def alias_matching_keys(aliases, target)
+    aliased_target = target.dup
+
+    aliases.each do |matching_key_prefix, new_key_prefix|
+      target.each do |k, v|
+        re = /^#{matching_key_prefix}\./
+
+        if k =~ re
+          alias_key = k.gsub(re, "#{new_key_prefix}.")
+
+          # If the user setup the same values twices with different values lets just halt.
+          raise "Cannot create an alias, the destination key has already a value set: original key: #{k}, alias key: #{alias_key}" if (!aliased_target[alias_key].nil? && aliased_target[alias_key] != v)
+          aliased_target[alias_key] = v unless v.nil?
+        end
+      end
+    end
+
+    aliased_target
+  end
+end end end
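To show the intended call pattern, an entirely illustrative sketch; LogStashConfig only needs an object responding to directory and module_name, so a stub stands in for the real module object here:

    # Hypothetical stand-in for the module object normally passed in.
    LogstashModuleStub = Struct.new(:directory, :module_name)
    modul = LogstashModuleStub.new("/path/to/mymodule/configuration", "mymodule")

    settings = {
      "var.elasticsearch.hosts"    => "es1:9200,es2:9200",
      "var.elasticsearch.username" => "elastic",
      "var.elasticsearch.password" => "changeme"
    }

    config = LogStash::Modules::LogStashConfig.new(modul, settings)
    puts config.elasticsearch_output_config
    # config_string would instead render /path/to/mymodule/configuration/logstash/mymodule.conf.erb
    # with `binding`, so the ERB template can call setting(...), configured_inputs(...) and
    # elasticsearch_output_config directly.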