logstash-core 6.0.0.alpha2-java → 6.0.0.beta1-java

Sign up to get free protection for your applications and to get access to all the features.
Files changed (110) hide show
  1. checksums.yaml +5 -5
  2. data/gemspec_jars.rb +6 -4
  3. data/lib/logstash-core/logstash-core.jar +0 -0
  4. data/lib/logstash-core/logstash-core.rb +2 -2
  5. data/lib/logstash-core/version.rb +1 -1
  6. data/lib/logstash-core_jars.rb +14 -10
  7. data/lib/logstash/agent.rb +4 -2
  8. data/lib/logstash/api/commands/default_metadata.rb +1 -1
  9. data/lib/logstash/api/commands/hot_threads_reporter.rb +8 -2
  10. data/lib/logstash/api/commands/node.rb +2 -2
  11. data/lib/logstash/api/commands/stats.rb +2 -2
  12. data/lib/logstash/bootstrap_check/bad_ruby.rb +2 -2
  13. data/lib/logstash/bootstrap_check/default_config.rb +2 -3
  14. data/lib/logstash/compiler.rb +12 -12
  15. data/lib/logstash/compiler/lscl.rb +17 -7
  16. data/lib/logstash/compiler/treetop_monkeypatches.rb +1 -0
  17. data/lib/logstash/config/config_ast.rb +11 -1
  18. data/lib/logstash/config/mixin.rb +5 -0
  19. data/lib/logstash/config/modules_common.rb +101 -0
  20. data/lib/logstash/config/source/base.rb +75 -0
  21. data/lib/logstash/config/source/local.rb +52 -50
  22. data/lib/logstash/config/source/modules.rb +55 -0
  23. data/lib/logstash/config/source/multi_local.rb +54 -10
  24. data/lib/logstash/config/source_loader.rb +1 -0
  25. data/lib/logstash/config/string_escape.rb +27 -0
  26. data/lib/logstash/elasticsearch_client.rb +142 -0
  27. data/lib/logstash/environment.rb +5 -1
  28. data/lib/logstash/event.rb +0 -1
  29. data/lib/logstash/instrument/global_metrics.rb +13 -0
  30. data/lib/logstash/instrument/metric_store.rb +16 -13
  31. data/lib/logstash/instrument/metric_type/counter.rb +6 -18
  32. data/lib/logstash/instrument/metric_type/gauge.rb +6 -12
  33. data/lib/logstash/instrument/periodic_poller/dlq.rb +19 -0
  34. data/lib/logstash/instrument/periodic_pollers.rb +3 -1
  35. data/lib/logstash/logging/logger.rb +43 -14
  36. data/lib/logstash/modules/cli_parser.rb +74 -0
  37. data/lib/logstash/modules/elasticsearch_config.rb +22 -0
  38. data/lib/logstash/modules/elasticsearch_importer.rb +37 -0
  39. data/lib/logstash/modules/elasticsearch_resource.rb +10 -0
  40. data/lib/logstash/modules/file_reader.rb +36 -0
  41. data/lib/logstash/modules/kibana_base.rb +24 -0
  42. data/lib/logstash/modules/kibana_client.rb +122 -0
  43. data/lib/logstash/modules/kibana_config.rb +125 -0
  44. data/lib/logstash/modules/kibana_dashboards.rb +36 -0
  45. data/lib/logstash/modules/kibana_importer.rb +17 -0
  46. data/lib/logstash/modules/kibana_resource.rb +10 -0
  47. data/lib/logstash/modules/kibana_settings.rb +40 -0
  48. data/lib/logstash/modules/logstash_config.rb +120 -0
  49. data/lib/logstash/modules/resource_base.rb +38 -0
  50. data/lib/logstash/modules/scaffold.rb +50 -0
  51. data/lib/logstash/modules/settings_merger.rb +23 -0
  52. data/lib/logstash/modules/util.rb +17 -0
  53. data/lib/logstash/namespace.rb +1 -0
  54. data/lib/logstash/pipeline.rb +66 -27
  55. data/lib/logstash/pipeline_settings.rb +1 -0
  56. data/lib/logstash/plugins/registry.rb +1 -0
  57. data/lib/logstash/runner.rb +47 -3
  58. data/lib/logstash/settings.rb +20 -1
  59. data/lib/logstash/util/dead_letter_queue_manager.rb +1 -1
  60. data/lib/logstash/util/safe_uri.rb +146 -11
  61. data/lib/logstash/util/thread_dump.rb +4 -3
  62. data/lib/logstash/util/wrapped_acked_queue.rb +28 -24
  63. data/lib/logstash/util/wrapped_synchronous_queue.rb +19 -20
  64. data/lib/logstash/version.rb +1 -1
  65. data/locales/en.yml +56 -1
  66. data/logstash-core.gemspec +6 -4
  67. data/spec/logstash/agent/converge_spec.rb +2 -2
  68. data/spec/logstash/agent_spec.rb +11 -3
  69. data/spec/logstash/api/modules/logging_spec.rb +13 -7
  70. data/spec/logstash/api/modules/node_plugins_spec.rb +23 -5
  71. data/spec/logstash/api/modules/node_spec.rb +17 -15
  72. data/spec/logstash/api/modules/node_stats_spec.rb +0 -1
  73. data/spec/logstash/api/modules/plugins_spec.rb +40 -9
  74. data/spec/logstash/api/modules/root_spec.rb +0 -1
  75. data/spec/logstash/api/rack_app_spec.rb +2 -1
  76. data/spec/logstash/compiler/compiler_spec.rb +54 -7
  77. data/spec/logstash/config/config_ast_spec.rb +47 -8
  78. data/spec/logstash/config/mixin_spec.rb +14 -2
  79. data/spec/logstash/config/pipeline_config_spec.rb +7 -7
  80. data/spec/logstash/config/source/local_spec.rb +5 -2
  81. data/spec/logstash/config/source/multi_local_spec.rb +56 -10
  82. data/spec/logstash/config/source_loader_spec.rb +1 -1
  83. data/spec/logstash/config/string_escape_spec.rb +24 -0
  84. data/spec/logstash/event_spec.rb +9 -0
  85. data/spec/logstash/filters/base_spec.rb +1 -1
  86. data/spec/logstash/instrument/metric_store_spec.rb +2 -3
  87. data/spec/logstash/instrument/metric_type/counter_spec.rb +0 -12
  88. data/spec/logstash/instrument/metric_type/gauge_spec.rb +1 -8
  89. data/spec/logstash/instrument/periodic_poller/dlq_spec.rb +17 -0
  90. data/spec/logstash/instrument/periodic_poller/jvm_spec.rb +1 -1
  91. data/spec/logstash/legacy_ruby_event_spec.rb +0 -9
  92. data/spec/logstash/legacy_ruby_timestamp_spec.rb +19 -14
  93. data/spec/logstash/modules/cli_parser_spec.rb +129 -0
  94. data/spec/logstash/modules/logstash_config_spec.rb +56 -0
  95. data/spec/logstash/modules/scaffold_spec.rb +239 -0
  96. data/spec/logstash/pipeline_dlq_commit_spec.rb +1 -1
  97. data/spec/logstash/pipeline_spec.rb +87 -20
  98. data/spec/logstash/runner_spec.rb +122 -5
  99. data/spec/logstash/setting_spec.rb +2 -2
  100. data/spec/logstash/settings/splittable_string_array_spec.rb +51 -0
  101. data/spec/logstash/timestamp_spec.rb +8 -2
  102. data/spec/logstash/util/safe_uri_spec.rb +16 -0
  103. data/spec/logstash/util/wrapped_acked_queue_spec.rb +63 -0
  104. data/spec/logstash/util/wrapped_synchronous_queue_spec.rb +0 -22
  105. data/spec/support/helpers.rb +1 -1
  106. data/spec/support/matchers.rb +21 -4
  107. metadata +102 -19
  108. data/lib/logstash/instrument/metric_type/base.rb +0 -31
  109. data/lib/logstash/program.rb +0 -14
  110. data/lib/logstash/string_interpolation.rb +0 -18
@@ -20,9 +20,12 @@ module LogStash
20
20
  Setting::NullableString.new("path.config", nil, false),
21
21
  Setting::WritableDirectory.new("path.data", ::File.join(LogStash::Environment::LOGSTASH_HOME, "data")),
22
22
  Setting::NullableString.new("config.string", nil, false),
23
+ Setting.new("modules.cli", Array, []),
24
+ Setting.new("modules", Array, []),
23
25
  Setting::Boolean.new("config.test_and_exit", false),
24
26
  Setting::Boolean.new("config.reload.automatic", false),
25
- Setting::Numeric.new("config.reload.interval", 3), # in seconds
27
+ Setting::TimeValue.new("config.reload.interval", "3s"), # in seconds
28
+ Setting::Boolean.new("config.support_escapes", false),
26
29
  Setting::Boolean.new("metric.collect", true),
27
30
  Setting::String.new("pipeline.id", "main"),
28
31
  Setting::Boolean.new("pipeline.system", false),
@@ -51,6 +54,7 @@ module LogStash
51
54
  Setting::Numeric.new("queue.checkpoint.writes", 1024), # 0 is unlimited
52
55
  Setting::Numeric.new("queue.checkpoint.interval", 1000), # 0 is no time-based checkpointing
53
56
  Setting::Boolean.new("dead_letter_queue.enable", false),
57
+ Setting::Bytes.new("dead_letter_queue.max_bytes", "1024mb"),
54
58
  Setting::TimeValue.new("slowlog.threshold.warn", "-1"),
55
59
  Setting::TimeValue.new("slowlog.threshold.info", "-1"),
56
60
  Setting::TimeValue.new("slowlog.threshold.debug", "-1"),
@@ -5,7 +5,6 @@ require "logstash/json"
5
5
  require "jruby_event_ext"
6
6
  require "jruby_timestamp_ext"
7
7
  require "logstash/timestamp"
8
- require "logstash/string_interpolation"
9
8
 
10
9
  # transient pipeline events for normal in-flow signaling as opposed to
11
10
  # flow altering exceptions. for now having base classes is adequate and
@@ -0,0 +1,13 @@
1
# Holds metric namespaces global to the running Logstash instance
# (as opposed to per-pipeline metrics).
# NOTE(review): the original text `class Stats(metric)` is not valid Ruby —
# class definitions take no parameter list. Reconstructed as a nested class
# with an initializer, preserving the visible member assignments.
class GlobalMetrics
  # Groups stats-related global metric state around a root metric object.
  class Stats
    # @param metric [Object] root metric object; must respond to #namespace
    def initialize(metric)
      @metric = metric
    end
  end

  # @param metric [Object] root metric object; must respond to #namespace
  def initialize(metric)
    @metric = metric

    # Pre-create the namespace used to record pipeline reload metrics.
    @pipeline_reloads = metric.namespace([:stats, :pipelines])
  end
end
@@ -41,26 +41,28 @@ module LogStash module Instrument
41
41
  # @param [Symbol] The metric key
42
42
  # @return [Object] Return the new_value of the retrieve object in the tree
43
43
  def fetch_or_store(namespaces, key, default_value = nil)
44
- provided_value = block_given? ? yield(key) : default_value
45
44
 
46
45
  # We first check in the `@fast_lookup` store to see if we have already see that metrics before,
47
46
  # This give us a `o(1)` access, which is faster than searching through the structured
48
47
  # data store (Which is a `o(n)` operation where `n` is the number of element in the namespace and
49
- # the value of the key). If the metric is already present in the `@fast_lookup`, the call to
50
- # `#put_if_absent` will return the value. This value is send back directly to the caller.
48
+ # the value of the key). If the metric is already present in the `@fast_lookup`, then that value is sent
49
+ # back directly to the caller.
51
50
  #
52
- # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and
53
- # `#puf_if_absent` will return nil. With this returned value of nil we assume that we don't
51
+ # BUT. If the value is not present in the `@fast_lookup` the value will be inserted and we assume that we don't
54
52
  # have it in the `@metric_store` for structured search so we add it there too.
55
- if found_value = @fast_lookup.put_if_absent(namespaces.dup << key, provided_value)
56
- return found_value
57
- else
53
+
54
+ # array.hash as the key since it is faster then using the array itself, see #7772
55
+ fast_lookup_key = (namespaces.dup << key).hash
56
+ value = @fast_lookup.get(fast_lookup_key)
57
+ if value.nil?
58
+ value = block_given? ? yield(key) : default_value
59
+ @fast_lookup.put(fast_lookup_key, value)
58
60
  @structured_lookup_mutex.synchronize do
59
61
  # If we cannot find the value this mean we need to save it in the store.
60
- fetch_or_store_namespaces(namespaces).fetch_or_store(key, provided_value)
62
+ fetch_or_store_namespaces(namespaces).fetch_or_store(key, value)
61
63
  end
62
- return provided_value
63
64
  end
65
+ return value;
64
66
  end
65
67
 
66
68
  # This method allow to retrieve values for a specific path,
@@ -163,7 +165,7 @@ module LogStash module Instrument
163
165
  end
164
166
 
165
167
  def has_metric?(*path)
166
- @fast_lookup[path]
168
+ @fast_lookup[path.hash]
167
169
  end
168
170
 
169
171
  # Return all the individuals Metric,
@@ -185,8 +187,9 @@ module LogStash module Instrument
185
187
  def prune(path)
186
188
  key_paths = key_paths(path).map(&:to_sym)
187
189
  @structured_lookup_mutex.synchronize do
188
- keys_to_delete = @fast_lookup.keys.select {|namespace| (key_paths - namespace[0..-2]).empty? }
189
- keys_to_delete.each {|k| @fast_lookup.delete(k) }
190
+ fetch_or_store_namespaces(key_paths).each do |key, v|
191
+ @fast_lookup.delete((key_paths.dup << key).hash)
192
+ end
190
193
  delete_from_map(@store, key_paths)
191
194
  end
192
195
  end
@@ -1,29 +1,17 @@
1
- # encoding: utf-8
2
- require "logstash/instrument/metric_type/base"
3
- require "concurrent"
1
+ #encoding: utf-8
2
+ java_import org.logstash.instrument.metrics.counter.LongCounter
4
3
 
5
4
  module LogStash module Instrument module MetricType
6
- class Counter < Base
7
- def initialize(namespaces, key, value = 0)
8
- super(namespaces, key)
5
+ class Counter < LongCounter
9
6
 
10
- @counter = Concurrent::AtomicFixnum.new(value)
11
- end
12
-
13
- def increment(value = 1)
14
- @counter.increment(value)
15
- end
7
+ def initialize(namespaces, key)
8
+ super(namespaces, key.to_s)
16
9
 
17
- def decrement(value = 1)
18
- @counter.decrement(value)
19
10
  end
20
11
 
21
12
  def execute(action, value = 1)
22
- @counter.send(action, value)
13
+ send(action, value)
23
14
  end
24
15
 
25
- def value
26
- @counter.value
27
- end
28
16
  end
29
17
  end; end; end
@@ -1,22 +1,16 @@
1
1
  # encoding: utf-8
2
- require "logstash/instrument/metric_type/base"
3
- require "concurrent/atomic_reference/mutex_atomic"
4
- require "logstash/json"
5
-
2
+ java_import org.logstash.instrument.metrics.gauge.LazyDelegatingGauge
6
3
  module LogStash module Instrument module MetricType
7
- class Gauge < Base
8
- def initialize(namespaces, key)
9
- super(namespaces, key)
4
+ class Gauge < LazyDelegatingGauge
10
5
 
11
- @gauge = Concurrent::MutexAtomicReference.new()
6
+ def initialize(namespaces, key)
7
+ super(namespaces, key.to_s)
12
8
  end
13
9
 
14
10
  def execute(action, value = nil)
15
- @gauge.set(value)
11
+ send(action, value)
16
12
  end
17
13
 
18
- def value
19
- @gauge.get
20
- end
21
14
  end
22
15
  end; end; end
16
+
@@ -0,0 +1,19 @@
1
+ # encoding: utf-8
2
+ require 'logstash/instrument/periodic_poller/base'
3
+
4
+ module LogStash module Instrument module PeriodicPoller
5
+ class DeadLetterQueue < Base
6
+ def initialize(metric, agent, options = {})
7
+ super(metric, options)
8
+ @metric = metric
9
+ @agent = agent
10
+ end
11
+
12
+ def collect
13
+ _, pipeline = @agent.with_running_pipelines { |pipelines| pipelines.first }
14
+ unless pipeline.nil?
15
+ pipeline.collect_dlq_stats
16
+ end
17
+ end
18
+ end
19
+ end end end
@@ -1,4 +1,5 @@
1
1
  # encoding: utf-8
2
+ require "logstash/instrument/periodic_poller/dlq"
2
3
  require "logstash/instrument/periodic_poller/os"
3
4
  require "logstash/instrument/periodic_poller/jvm"
4
5
  require "logstash/instrument/periodic_poller/pq"
@@ -14,7 +15,8 @@ module LogStash module Instrument
14
15
  @metric = metric
15
16
  @periodic_pollers = [PeriodicPoller::Os.new(metric),
16
17
  PeriodicPoller::JVM.new(metric),
17
- PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines)]
18
+ PeriodicPoller::PersistentQueue.new(metric, queue_type, pipelines),
19
+ PeriodicPoller::DeadLetterQueue.new(metric, pipelines)]
18
20
  end
19
21
 
20
22
  def start
@@ -7,10 +7,13 @@ module LogStash
7
7
  java_import org.apache.logging.log4j.LogManager
8
8
  java_import org.apache.logging.log4j.core.config.Configurator
9
9
  java_import org.apache.logging.log4j.core.config.DefaultConfiguration
10
+ java_import org.apache.logging.log4j.core.config.LoggerConfig
11
+ java_import org.logstash.log.LogstashLoggerContextFactory
12
+ java_import org.apache.logging.log4j.core.LoggerContext
13
+ java_import java.net.URI
10
14
 
11
15
  class Logger
12
16
  @@config_mutex = Mutex.new
13
- @@logging_context = nil
14
17
 
15
18
  def initialize(name)
16
19
  @logger = LogManager.getLogger(name)
@@ -65,31 +68,57 @@ module LogStash
65
68
  end
66
69
 
67
70
  def self.configure_logging(level, path = LogManager::ROOT_LOGGER_NAME)
68
- @@config_mutex.synchronize { Configurator.setLevel(path, Level.valueOf(level)) }
71
+ @@config_mutex.synchronize { set_level(level, path) }
69
72
  rescue Exception => e
70
73
  raise ArgumentError, "invalid level[#{level}] for logger[#{path}]"
71
74
  end
72
75
 
73
76
  def self.initialize(config_location)
74
77
  @@config_mutex.synchronize do
75
- if @@logging_context.nil?
76
- file_path = URI(config_location).path
77
- if ::File.exists?(file_path)
78
- logs_location = java.lang.System.getProperty("ls.logs")
79
- puts "Sending Logstash's logs to #{logs_location} which is now configured via log4j2.properties"
80
- @@logging_context = Configurator.initialize(nil, config_location)
81
- else
82
- # fall back to default config
83
- puts "Could not find log4j2 configuration at path #{file_path}. Using default config which logs to console"
84
- @@logging_context = Configurator.initialize(DefaultConfiguration.new)
85
- end
78
+ config_location_uri = URI.create(config_location)
79
+ file_path = config_location_uri.path
80
+ if ::File.exists?(file_path)
81
+ logs_location = java.lang.System.getProperty("ls.logs")
82
+ puts "Sending Logstash's logs to #{logs_location} which is now configured via log4j2.properties"
83
+ #reconfigure the default context to use our log4j2.properties file
84
+ get_logging_context.setConfigLocation(URI.create(config_location))
85
+ #ensure everyone agrees which context to use for the LogManager
86
+ context_factory = LogstashLoggerContextFactory.new(get_logging_context)
87
+ LogManager.setFactory(context_factory)
88
+ else
89
+ # fall back to default config
90
+ puts "Could not find log4j2 configuration at path #{file_path}. Using default config which logs errors to the console"
86
91
  end
87
92
  end
88
93
  end
89
94
 
90
95
  def self.get_logging_context
91
- return @@logging_context
96
+ return LoggerContext.getContext(false)
97
+ end
98
+
99
+ # Clone of org.apache.logging.log4j.core.config.Configurator.setLevel(), but ensure the proper context is used
100
+ def self.set_level(_level, path)
101
+ configuration = get_logging_context.getConfiguration()
102
+ level = Level.valueOf(_level)
103
+ if path.nil? || path.strip.empty?
104
+ root_logger = configuration.getRootLogger()
105
+ if root_logger.getLevel() != level
106
+ root_logger.setLevel(level)
107
+ get_logging_context.updateLoggers()
108
+ end
109
+ else
110
+ package_logger = configuration.getLoggerConfig(path)
111
+ if package_logger.name != path #no package logger found
112
+ configuration.addLogger(path, LoggerConfig.new(path, level, true))
113
+ get_logging_context.updateLoggers()
114
+ elsif package_logger.getLevel() != level
115
+ package_logger.setLevel(level)
116
+ get_logging_context.updateLoggers()
117
+ end
118
+ end
92
119
  end
120
+
121
+ private_class_method :set_level
93
122
  end
94
123
 
95
124
  class SlowLogger
@@ -0,0 +1,74 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require "logstash/logging"
4
+ require "logstash/errors"
5
+
6
module LogStash module Modules class CLIParser
  include LogStash::Util::Loggable

  # Parsed result: one hash per requested module, e.g.
  # [{"name" => "netflow", "var.input.udp.port" => "9995"}]
  attr_reader :output

  # @param module_names [Array<String>] raw values given to `--modules`
  # @param module_variables [Array<String>, nil] raw per-module variable strings
  def initialize(module_names, module_variables)
    @output = []
    # Array() + compact normalizes both nil and [nil] down to []
    parse_it(module_names, Array(module_variables).compact)
  end

  # Expand comma-separated module name strings into one flat list.
  # @raise [LogStash::ConfigLoadingError] when a value looks like a CLI flag,
  #   which happens when --modules was given empty and a -flag followed it
  def parse_modules(module_list)
    module_list.each_with_object([]) do |module_value, parsed|
      # Calling --modules but not filling it results in [nil]; skip those.
      next if module_value.nil?
      if module_value.start_with?('-')
        raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-invalid-name", :module_name => module_value)
      end
      parsed.concat(module_value.split(','))
    end
  end

  # Split "key=value" on the FIRST '=' only, so values containing '='
  # (e.g. passphrases) stay intact.
  # @return [Array(String, String)] stripped key and value
  def get_kv(module_name, unparsed)
    key, _sep, value = unparsed.partition('=')
    if value.size.zero?
      raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-variables-malformed", :rawvar => (module_name + '.' + unparsed))
    end
    return key.strip, value.strip
  end

  # Split "modulename.rest.of.var" into [modulename, "rest.of.var"].
  # The remainder must itself contain a dot (i.e. at least `module.var.x`).
  def name_splitter(unparsed)
    module_name, _dot, rest = unparsed.partition('.')
    if rest.count('.') < 1
      raise LogStash::ConfigLoadingError, I18n.t("logstash.modules.configuration.modules-variables-malformed", :rawvar => unparsed)
    end
    return module_name, rest
  end

  # Build the settings hash for one module from the full variable list,
  # keeping only variables addressed to that module.
  def parse_vars(module_name, vars_list)
    vars_list.each_with_object({"name" => module_name}) do |unparsed, module_hash|
      extracted_name, modvar = name_splitter(unparsed)
      next unless extracted_name == module_name
      key, value = get_kv(extracted_name, modvar)
      module_hash[key] = value
    end
  end

  # Drive the parse; a non-Array module list leaves @output empty.
  def parse_it(module_list, module_variable_list)
    return unless module_list.is_a?(Array)
    parse_modules(module_list).each do |module_name|
      @output << parse_vars(module_name, module_variable_list)
    end
  end
end end end
@@ -0,0 +1,22 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require "logstash/logging"
4
+
5
+ require_relative "elasticsearch_resource"
6
+
7
module LogStash module Modules class ElasticsearchConfig
  # Index/endpoint name the module's Elasticsearch template targets.
  attr_reader :index_name

  # `modul` (sic) because `module` is a reserved word in Ruby.
  # @param modul [Object] module descriptor; responds to #directory and #module_name
  # @param settings [Hash] module settings, may carry "elasticsearch.template_path"
  def initialize(modul, settings)
    @settings = settings
    @name = modul.module_name
    @directory = ::File.join(modul.directory, "elasticsearch")
    @full_path = ::File.join(@directory, "#{@name}.json")
    @index_name = @settings.fetch("elasticsearch.template_path", "_template")
  end

  # @return [Array<ElasticsearchResource>] the single template resource for this module
  def resources
    [ElasticsearchResource.new(@index_name, "not-used", @full_path)]
  end
end end end
@@ -0,0 +1,37 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require "logstash/logging"
4
+
5
module LogStash module Modules class ElasticsearchImporter
  include LogStash::Util::Loggable

  # @param client [Object] Elasticsearch client; responds to #head, #delete, #put
  def initialize(client)
    @client = client
  end

  # Upload a resource to Elasticsearch.
  # With overwrite disabled, an already-existing resource is left untouched.
  def put(resource, overwrite = true)
    path = resource.import_path
    logger.debug("Attempting PUT", :url_path => path, :file_path => resource.content_path)
    unless overwrite
      if content_exists?(path)
        logger.debug("Found existing Elasticsearch resource.", :resource => path)
        return
      end
    end
    put_overwrite(path, resource.content)
  end

  private

  # Delete any existing document first, then PUT the new content.
  # @return [true, false] whether the PUT created the resource (HTTP 201)
  def put_overwrite(path, content)
    @client.delete(path) if content_exists?(path)
    # hmmm, versioning?
    @client.put(path, content).status == 201
  end

  # Any 2xx response to HEAD counts as "exists".
  def content_exists?(path)
    status = @client.head(path).status
    status >= 200 && status < 300
  end
end end end # class LogStash::Modules::ElasticsearchImporter
@@ -0,0 +1,10 @@
1
+ # encoding: utf-8
2
+ require "logstash/namespace"
3
+ require_relative "resource_base"
4
+
5
module LogStash module Modules class ElasticsearchResource
  include ResourceBase

  # Path used when importing this resource into Elasticsearch,
  # built from the ResourceBase-provided base and content_id.
  def import_path
    [base, content_id].join("/")
  end
end end end