logstash-core 6.0.0.alpha2-java → 6.0.0.beta1-java
- checksums.yaml +5 -5
- data/gemspec_jars.rb +6 -4
- data/lib/logstash-core/logstash-core.jar +0 -0
- data/lib/logstash-core/logstash-core.rb +2 -2
- data/lib/logstash-core/version.rb +1 -1
- data/lib/logstash-core_jars.rb +14 -10
- data/lib/logstash/agent.rb +4 -2
- data/lib/logstash/api/commands/default_metadata.rb +1 -1
- data/lib/logstash/api/commands/hot_threads_reporter.rb +8 -2
- data/lib/logstash/api/commands/node.rb +2 -2
- data/lib/logstash/api/commands/stats.rb +2 -2
- data/lib/logstash/bootstrap_check/bad_ruby.rb +2 -2
- data/lib/logstash/bootstrap_check/default_config.rb +2 -3
- data/lib/logstash/compiler.rb +12 -12
- data/lib/logstash/compiler/lscl.rb +17 -7
- data/lib/logstash/compiler/treetop_monkeypatches.rb +1 -0
- data/lib/logstash/config/config_ast.rb +11 -1
- data/lib/logstash/config/mixin.rb +5 -0
- data/lib/logstash/config/modules_common.rb +101 -0
- data/lib/logstash/config/source/base.rb +75 -0
- data/lib/logstash/config/source/local.rb +52 -50
- data/lib/logstash/config/source/modules.rb +55 -0
- data/lib/logstash/config/source/multi_local.rb +54 -10
- data/lib/logstash/config/source_loader.rb +1 -0
- data/lib/logstash/config/string_escape.rb +27 -0
- data/lib/logstash/elasticsearch_client.rb +142 -0
- data/lib/logstash/environment.rb +5 -1
- data/lib/logstash/event.rb +0 -1
- data/lib/logstash/instrument/global_metrics.rb +13 -0
- data/lib/logstash/instrument/metric_store.rb +16 -13
- data/lib/logstash/instrument/metric_type/counter.rb +6 -18
- data/lib/logstash/instrument/metric_type/gauge.rb +6 -12
- data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
- data/lib/logstash/instrument/periodic_pollers.rb +3 -1
- data/lib/logstash/logging/logger.rb +43 -14
- data/lib/logstash/modules/cli_parser.rb +74 -0
- data/lib/logstash/modules/elasticsearch_config.rb +22 -0
- data/lib/logstash/modules/elasticsearch_importer.rb +37 -0
- data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
- data/lib/logstash/modules/file_reader.rb +36 -0
- data/lib/logstash/modules/kibana_base.rb +24 -0
- data/lib/logstash/modules/kibana_client.rb +122 -0
- data/lib/logstash/modules/kibana_config.rb +125 -0
- data/lib/logstash/modules/kibana_dashboards.rb +36 -0
- data/lib/logstash/modules/kibana_importer.rb +17 -0
- data/lib/logstash/modules/kibana_resource.rb +10 -0
- data/lib/logstash/modules/kibana_settings.rb +40 -0
- data/lib/logstash/modules/logstash_config.rb +120 -0
- data/lib/logstash/modules/resource_base.rb +38 -0
- data/lib/logstash/modules/scaffold.rb +50 -0
- data/lib/logstash/modules/settings_merger.rb +23 -0
- data/lib/logstash/modules/util.rb +17 -0
- data/lib/logstash/namespace.rb +1 -0
- data/lib/logstash/pipeline.rb +66 -27
- data/lib/logstash/pipeline_settings.rb +1 -0
- data/lib/logstash/plugins/registry.rb +1 -0
- data/lib/logstash/runner.rb +47 -3
- data/lib/logstash/settings.rb +20 -1
- data/lib/logstash/util/dead_letter_queue_manager.rb +1 -1
- data/lib/logstash/util/safe_uri.rb +146 -11
- data/lib/logstash/util/thread_dump.rb +4 -3
- data/lib/logstash/util/wrapped_acked_queue.rb +28 -24
- data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -20
- data/lib/logstash/version.rb +1 -1
- data/locales/en.yml +56 -1
- data/logstash-core.gemspec +6 -4
- data/spec/logstash/agent/converge_spec.rb +2 -2
- data/spec/logstash/agent_spec.rb +11 -3
- data/spec/logstash/api/modules/logging_spec.rb +13 -7
- data/spec/logstash/api/modules/node_plugins_spec.rb +23 -5
- data/spec/logstash/api/modules/node_spec.rb +17 -15
- data/spec/logstash/api/modules/node_stats_spec.rb +0 -1
- data/spec/logstash/api/modules/plugins_spec.rb +40 -9
- data/spec/logstash/api/modules/root_spec.rb +0 -1
- data/spec/logstash/api/rack_app_spec.rb +2 -1
- data/spec/logstash/compiler/compiler_spec.rb +54 -7
- data/spec/logstash/config/config_ast_spec.rb +47 -8
- data/spec/logstash/config/mixin_spec.rb +14 -2
- data/spec/logstash/config/pipeline_config_spec.rb +7 -7
- data/spec/logstash/config/source/local_spec.rb +5 -2
- data/spec/logstash/config/source/multi_local_spec.rb +56 -10
- data/spec/logstash/config/source_loader_spec.rb +1 -1
- data/spec/logstash/config/string_escape_spec.rb +24 -0
- data/spec/logstash/event_spec.rb +9 -0
- data/spec/logstash/filters/base_spec.rb +1 -1
- data/spec/logstash/instrument/metric_store_spec.rb +2 -3
- data/spec/logstash/instrument/metric_type/counter_spec.rb +0 -12
- data/spec/logstash/instrument/metric_type/gauge_spec.rb +1 -8
- data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
- data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
- data/spec/logstash/legacy_ruby_event_spec.rb +0 -9
- data/spec/logstash/legacy_ruby_timestamp_spec.rb +19 -14
- data/spec/logstash/modules/cli_parser_spec.rb +129 -0
- data/spec/logstash/modules/logstash_config_spec.rb +56 -0
- data/spec/logstash/modules/scaffold_spec.rb +239 -0
- data/spec/logstash/pipeline_dlq_commit_spec.rb +1 -1
- data/spec/logstash/pipeline_spec.rb +87 -20
- data/spec/logstash/runner_spec.rb +122 -5
- data/spec/logstash/setting_spec.rb +2 -2
- data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
- data/spec/logstash/timestamp_spec.rb +8 -2
- data/spec/logstash/util/safe_uri_spec.rb +16 -0
- data/spec/logstash/util/wrapped_acked_queue_spec.rb +63 -0
- data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
- data/spec/support/helpers.rb +1 -1
- data/spec/support/matchers.rb +21 -4
- metadata +102 -19
- data/lib/logstash/instrument/metric_type/base.rb +0 -31
- data/lib/logstash/program.rb +0 -14
- data/lib/logstash/string_interpolation.rb +0 -18
data/lib/logstash/environment.rb
CHANGED
@@ -20,9 +20,12 @@ module LogStash
     Setting::NullableString.new("path.config", nil, false),
     Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
     Setting::NullableString.new("config.string", nil, false),
+    Setting.new("modules.cli", Array, []),
+    Setting.new("modules", Array, []),
     Setting::Boolean.new("config.test_and_exit", false),
     Setting::Boolean.new("config.reload.automatic", false),
-    Setting::
+    Setting::TimeValue.new("config.reload.interval", "3s"), # in seconds
+    Setting::Boolean.new("config.support_escapes", false),
     Setting::Boolean.new("metric.collect", true),
     Setting::String.new("pipeline.id", "main"),
     Setting::Boolean.new("pipeline.system", false),
@@ -51,6 +54,7 @@ module LogStash
     Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
     Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
     Setting::Boolean.new("dead_letter_queue.enable", false),
+    Setting::Bytes.new("dead_letter_queue.max_bytes", "1024mb"),
     Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
     Setting::TimeValue.new("slowlog.threshold.info", "-1"),
     Setting::TimeValue.new("slowlog.threshold.debug", "-1"),
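The new entries add module support ("modules.cli" holds modules passed on the command line, "modules" holds modules declared in logstash.yml), turn "config.reload.interval" into a time-value string, and introduce "config.support_escapes" plus a size cap for the dead letter queue. As a rough illustration of the typed-setting pattern these lines rely on, here is a minimal plain-Ruby stand-in (SettingSketch is hypothetical, not the LogStash::Setting API):

    # Hypothetical stand-in for a typed setting: a name, an expected type,
    # and a default; wrong-typed values are rejected when set.
    class SettingSketch
      attr_reader :name, :value

      def initialize(name, klass, default)
        @name, @klass, @value = name, klass, default
      end

      def set(new_value)
        unless new_value.is_a?(@klass)
          raise ArgumentError, "setting #{@name} expects a #{@klass}, got #{new_value.class}"
        end
        @value = new_value
      end
    end

    modules_cli = SettingSketch.new("modules.cli", Array, [])  # filled from the --modules flag
    modules_yml = SettingSketch.new("modules", Array, [])      # filled from logstash.yml
    modules_cli.set([{ "name" => "netflow", "var.input.udp.port" => "2055" }])
    puts modules_cli.value.inspect
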
data/lib/logstash/event.rb
CHANGED
@@ -5,7 +5,6 @@ require "logstash/json"
 require "jruby_event_ext"
 require "jruby_timestamp_ext"
 require "logstash/timestamp"
-require "logstash/string_interpolation"
 
 # transient pipeline events for normal in-flow signaling as opposed to
 # flow altering exceptions. for now having base classes is adequate and
data/lib/logstash/instrument/metric_store.rb
CHANGED
@@ -41,26 +41,28 @@ module LogStash module Instrument
     # @param [Symbol] The metric key
     # @return [Object] Return the new_value of the retrieve object in the tree
     def fetch_or_store(namespaces, key, default_value = nil)
-      provided_value = block_given? ? yield(key) : default_value
 
       # We first check in the `@fast_lookup` store to see if we have already see that metrics before,
       # This give us a `o(1)` access, which is faster than searching through the structured
       # data store (Which is a `o(n)` operation where `n` is the number of element in the namespace and
-      # the value of the key). If the metric is already present in the `@fast_lookup`,
-      #
+      # the value of the key). If the metric is already present in the `@fast_lookup`, then that value is sent
+      # back directly to the caller.
       #
-      # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and
-      # `#puf_if_absent` will return nil. With this returned value of nil we assume that we don't
+      # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
       # have it in the `@metric_store` for structured search so we add it there too.
-
-
-
+
+      # array.hash as the key since it is faster then using the array itself, see #7772
+      fast_lookup_key = (namespaces.dup << key).hash
+      value = @fast_lookup.get(fast_lookup_key)
+      if value.nil?
+        value = block_given? ? yield(key) : default_value
+        @fast_lookup.put(fast_lookup_key, value)
         @structured_lookup_mutex.synchronize do
           # If we cannot find the value this mean we need to save it in the store.
-          fetch_or_store_namespaces(namespaces).fetch_or_store(key,
+          fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
         end
-      return provided_value
       end
+      return value;
     end
 
     # This method allow to retrieve values for a specific path,
@@ -163,7 +165,7 @@ module LogStash module Instrument
     end
 
     def has_metric?(*path)
-      @fast_lookup[path]
+      @fast_lookup[path.hash]
     end
 
     # Return all the individuals Metric,
@@ -185,8 +187,9 @@ module LogStash module Instrument
     def prune(path)
       key_paths = key_paths(path).map(&:to_sym)
       @structured_lookup_mutex.synchronize do
-
-
+        fetch_or_store_namespaces(key_paths).each do |key, v|
+          @fast_lookup.delete((key_paths.dup << key).hash)
+        end
         delete_from_map(@store, key_paths)
       end
     end
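The rewrite above keys the fast lookup on the hash of the combined namespaces-plus-key array instead of on the array itself (see #7772). A plain-Ruby sketch of the technique, using an ordinary Hash where MetricStore uses a concurrent map:

    # Stand-in for the fast-lookup change: cache under (namespaces + key).hash.
    def fetch_or_store(fast_lookup, namespaces, key, default_value = nil)
      fast_lookup_key = (namespaces.dup << key).hash
      value = fast_lookup[fast_lookup_key]
      if value.nil?
        value = block_given? ? yield(key) : default_value
        fast_lookup[fast_lookup_key] = value
        # the real method also records the metric in the structured store here,
        # under @structured_lookup_mutex
      end
      value
    end

    cache = {}
    fetch_or_store(cache, [:stats, :events], :in, 0)      # => 0 (stored)
    fetch_or_store(cache, [:stats, :events], :in) { 99 }  # => 0 (already cached)

The same composite hash is what has_metric? now probes and prune now deletes, which is why both were updated in the hunks above.
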
data/lib/logstash/instrument/metric_type/counter.rb
CHANGED
@@ -1,29 +1,17 @@
-#
-
-require "concurrent"
+#encoding: utf-8
+java_import org.logstash.instrument.metrics.counter.LongCounter
 
 module LogStash module Instrument module MetricType
-  class Counter <
-    def initialize(namespaces, key, value = 0)
-      super(namespaces, key)
+  class Counter < LongCounter
 
-
-
-
-    def increment(value = 1)
-      @counter.increment(value)
-    end
+    def initialize(namespaces, key)
+      super(namespaces, key.to_s)
 
-    def decrement(value = 1)
-      @counter.decrement(value)
     end
 
     def execute(action, value = 1)
-
+      send(action, value)
     end
 
-    def value
-      @counter.value
-    end
   end
 end; end; end
data/lib/logstash/instrument/metric_type/gauge.rb
CHANGED
@@ -1,22 +1,16 @@
 # encoding: utf-8
-
-require "concurrent/atomic_reference/mutex_atomic"
-require "logstash/json"
-
+java_import org.logstash.instrument.metrics.gauge.LazyDelegatingGauge
 module LogStash module Instrument module MetricType
-  class Gauge <
-    def initialize(namespaces, key)
-      super(namespaces, key)
+  class Gauge < LazyDelegatingGauge
 
-
+    def initialize(namespaces, key)
+      super(namespaces, key.to_s)
     end
 
     def execute(action, value = nil)
-
+      send(action, value)
     end
 
-    def value
-      @gauge.get
-    end
   end
 end; end; end
+
data/lib/logstash/instrument/periodic_poller/dlq.rb
ADDED
@@ -0,0 +1,19 @@
+# encoding: utf-8
+require 'logstash/instrument/periodic_poller/base'
+
+module LogStash module Instrument module PeriodicPoller
+  class DeadLetterQueue < Base
+    def initialize(metric, agent, options = {})
+      super(metric, options)
+      @metric = metric
+      @agent = agent
+    end
+
+    def collect
+      _, pipeline = @agent.with_running_pipelines { |pipelines| pipelines.first }
+      unless pipeline.nil?
+        pipeline.collect_dlq_stats
+      end
+    end
+  end
+end end end
data/lib/logstash/instrument/periodic_pollers.rb
CHANGED
@@ -1,4 +1,5 @@
 # encoding: utf-8
+require "logstash/instrument/periodic_poller/dlq"
 require "logstash/instrument/periodic_poller/os"
 require "logstash/instrument/periodic_poller/jvm"
 require "logstash/instrument/periodic_poller/pq"
@@ -14,7 +15,8 @@ module LogStash module Instrument
       @metric = metric
       @periodic_pollers = [PeriodicPoller::Os.new(metric),
                            PeriodicPoller::JVM.new(metric),
-                           PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines)
+                           PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines),
+                           PeriodicPoller::DeadLetterQueue.new(metric, pipelines)]
     end
 
     def start
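The new DeadLetterQueue poller is registered next to the OS, JVM and persistent-queue pollers; each poller only needs to expose a collect method that is invoked on a timer. A rough plain-Ruby sketch of that driving loop (PollerRunnerSketch is hypothetical, and a plain thread stands in for the timer the real PeriodicPoller::Base uses):

    class PollerRunnerSketch
      def initialize(interval_seconds, pollers)
        @interval = interval_seconds
        @pollers  = pollers
      end

      def start
        @thread = Thread.new do
          loop do
            @pollers.each(&:collect)   # e.g. the DLQ poller's collect_dlq_stats path
            sleep @interval
          end
        end
      end

      def stop
        @thread && @thread.kill
      end
    end
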
data/lib/logstash/logging/logger.rb
CHANGED
@@ -7,10 +7,13 @@ module LogStash
   java_import org.apache.logging.log4j.LogManager
   java_import org.apache.logging.log4j.core.config.Configurator
   java_import org.apache.logging.log4j.core.config.DefaultConfiguration
+  java_import org.apache.logging.log4j.core.config.LoggerConfig
+  java_import org.logstash.log.LogstashLoggerContextFactory
+  java_import org.apache.logging.log4j.core.LoggerContext
+  java_import java.net.URI
 
   class Logger
     @@config_mutex = Mutex.new
-    @@logging_context = nil
 
     def initialize(name)
       @logger = LogManager.getLogger(name)
@@ -65,31 +68,57 @@ module LogStash
     end
 
     def self.configure_logging(level, path = LogManager::ROOT_LOGGER_NAME)
-      @@config_mutex.synchronize {
+      @@config_mutex.synchronize { set_level(level, path) }
     rescue Exception => e
       raise ArgumentError, "invalid level[#{level}] for logger[#{path}]"
     end
 
     def self.initialize(config_location)
       @@config_mutex.synchronize do
-
-
-
-
-
-
-
-
-
-
-
+        config_location_uri = URI.create(config_location)
+        file_path = config_location_uri.path
+        if ::File.exists?(file_path)
+          logs_location = java.lang.System.getProperty("ls.logs")
+          puts "Sending Logstash's logs to #{logs_location} which is now configured via log4j2.properties"
+          #reconfigure the default context to use our log4j2.properties file
+          get_logging_context.setConfigLocation(URI.create(config_location))
+          #ensure everyone agrees which context to use for the LogManager
+          context_factory = LogstashLoggerContextFactory.new(get_logging_context)
+          LogManager.setFactory(context_factory)
+        else
+          # fall back to default config
+          puts "Could not find log4j2 configuration at path #{file_path}. Using default config which logs errors to the console"
        end
      end
    end
 
     def self.get_logging_context
-      return
+      return LoggerContext.getContext(false)
+    end
+
+    # Clone of org.apache.logging.log4j.core.config.Configurator.setLevel(), but ensure the proper context is used
+    def self.set_level(_level, path)
+      configuration = get_logging_context.getConfiguration()
+      level = Level.valueOf(_level)
+      if path.nil? || path.strip.empty?
+        root_logger = configuration.getRootLogger()
+        if root_logger.getLevel() != level
+          root_logger.setLevel(level)
+          get_logging_context.updateLoggers()
+        end
+      else
+        package_logger = configuration.getLoggerConfig(path)
+        if package_logger.name != path #no package logger found
+          configuration.addLogger(path, LoggerConfig.new(path, level, true))
+          get_logging_context.updateLoggers()
+        elsif package_logger.getLevel() != level
+          package_logger.setLevel(level)
+          get_logging_context.updateLoggers()
+        end
+      end
     end
+
+    private_class_method :set_level
   end
 
   class SlowLogger
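A hedged usage sketch of the new level-setting path: configure_logging stays the public entry point and now delegates to the private set_level, which adds a LoggerConfig for the given path when log4j2 has none yet and otherwise just updates the existing level. The logger name below is illustrative:

    require "logstash/logging/logger"

    # root logger: the path argument defaults to LogManager::ROOT_LOGGER_NAME
    LogStash::Logging::Logger::configure_logging("warn")

    # per-package logger, created on first use if no config exists for it yet
    LogStash::Logging::Logger::configure_logging("debug", "logstash.modules.scaffold")
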
data/lib/logstash/modules/cli_parser.rb
ADDED
@@ -0,0 +1,74 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+require "logstash/errors"
+
+module LogStash module Modules class CLIParser
+  include LogStash::Util::Loggable
+
+  attr_reader :output
+  def initialize(module_names, module_variables)
+    @output = []
+    # The #compact here catches instances when module_variables may be nil or
+    # [nil] and sets it to []
+    parse_it(module_names, Array(module_variables).compact)
+  end
+
+  def parse_modules(module_list)
+    parsed_modules = []
+    module_list.each do |module_value|
+      # Calling --modules but not filling it results in [nil], so skip that.
+      next if module_value.nil?
+      # Catch if --modules was launched empty but an option/flag (-something)
+      # follows immediately after
+      if module_value.start_with?('-')
+        raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-invalid-name", :module_name => module_value)
+      end
+      parsed_modules.concat module_value.split(',')
+    end
+    parsed_modules
+  end
+
+  def get_kv(module_name, unparsed)
+    # Ensure that there is at least 1 equals sign in our variable string
+    # Using String#partition to split on the first '='
+    # This hackery is to catch the possibility of an equals (`=`) sign
+    # in a passphrase, which might result in an incomplete key. The
+    # portion before the first `=` should always be the key, leaving
+    # the rest to be the value
+    k, op, rest = unparsed.partition('=')
+    if rest.size.zero?
+      raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-variables-malformed", :rawvar => (module_name + '.' + unparsed))
+    end
+    return k.strip, rest.strip
+  end
+
+  def name_splitter(unparsed)
+    # It must have at least `modulename.something`
+    module_name, dot, rest = unparsed.partition('.')
+    if rest.count('.') >= 1
+      return module_name, rest
+    else
+      raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-variables-malformed", :rawvar => unparsed)
+    end
+  end
+
+  def parse_vars(module_name, vars_list)
+    module_hash = {"name" => module_name}
+    vars_list.each do |unparsed|
+      extracted_name, modvar = name_splitter(unparsed)
+      next if extracted_name != module_name
+      k, v = get_kv(extracted_name, modvar)
+      module_hash[k] = v
+    end
+    module_hash
+  end
+
+  def parse_it(module_list, module_variable_list)
+    if module_list.is_a?(Array)
+      parse_modules(module_list).each do |module_name|
+        @output << parse_vars(module_name, module_variable_list)
+      end
+    end
+  end
+end end end
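A hedged usage sketch for CLIParser, assuming the shapes the runner hands it: a list of module names from --modules and a flat list of module.var...=value strings from the -M variable flags (the module name and variables below are illustrative):

    require "logstash/modules/cli_parser"

    parser = LogStash::Modules::CLIParser.new(
      ["netflow"],
      ["netflow.var.input.udp.port=2055", "netflow.var.elasticsearch.hosts=localhost:9200"]
    )

    parser.output
    # => [{"name"=>"netflow",
    #      "var.input.udp.port"=>"2055",
    #      "var.elasticsearch.hosts"=>"localhost:9200"}]
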
data/lib/logstash/modules/elasticsearch_config.rb
ADDED
@@ -0,0 +1,22 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+
+require_relative "elasticsearch_resource"
+
+module LogStash module Modules class ElasticsearchConfig
+  attr_reader :index_name
+
+  # We name it `modul` here because `module` has meaning in Ruby.
+  def initialize(modul, settings)
+    @directory = ::File.join(modul.directory, "elasticsearch")
+    @name = modul.module_name
+    @settings = settings
+    @full_path = ::File.join(@directory, "#{@name}.json")
+    @index_name = @settings.fetch("elasticsearch.template_path", "_template")
+  end
+
+  def resources
+    [ElasticsearchResource.new(@index_name, "not-used", @full_path)]
+  end
+end end end
data/lib/logstash/modules/elasticsearch_importer.rb
ADDED
@@ -0,0 +1,37 @@
+# encoding: utf-8
+require "logstash/namespace"
+require "logstash/logging"
+
+module LogStash module Modules class ElasticsearchImporter
+  include LogStash::Util::Loggable
+
+  def initialize(client)
+    @client = client
+  end
+
+  def put(resource, overwrite = true)
+    path = resource.import_path
+    logger.debug("Attempting PUT", :url_path => path, :file_path => resource.content_path)
+    if !overwrite && content_exists?(path)
+      logger.debug("Found existing Elasticsearch resource.", :resource => path)
+      return
+    end
+    put_overwrite(path, resource.content)
+  end
+
+  private
+
+  def put_overwrite(path, content)
+    if content_exists?(path)
+      response = @client.delete(path)
+    end
+    # hmmm, versioning?
+    @client.put(path, content).status == 201
+  end
+
+  def content_exists?(path)
+    response = @client.head(path)
+    response.status >= 200 && response.status < 300
+  end
+
+end end end # class LogStash::Modules::Importer
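ElasticsearchImporter only depends on a small client contract: head, put and delete methods that return a response with a numeric status, plus resources exposing import_path, content_path and content. A stand-in sketch of that contract (both classes below are hypothetical; in Logstash the client comes from elasticsearch_client.rb and the resources from the module scaffolding):

    Response = Struct.new(:status)

    class InMemoryClientSketch
      def initialize; @store = {}; end
      def head(path);   Response.new(@store.key?(path) ? 200 : 404); end
      def delete(path); @store.delete(path); Response.new(200); end
      def put(path, content); @store[path] = content; Response.new(201); end
    end

    ResourceSketch = Struct.new(:import_path, :content_path, :content)

    client   = InMemoryClientSketch.new
    resource = ResourceSketch.new("_template/netflow", "/path/to/netflow.json", "{}")

    # mirrors put_overwrite: delete if present, then PUT and expect a 201
    client.delete(resource.import_path) if client.head(resource.import_path).status == 200
    client.put(resource.import_path, resource.content).status # => 201
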