logstash-input-opensearch 1.0.0
Diff listing for the logstash-input-opensearch 1.0.0 gem release: files added, with per-file line counts.
- checksums.yaml +7 -0
- checksums.yaml.gz.sig +0 -0
- data/ADMINS.md +28 -0
- data/CODE_OF_CONDUCT.md +24 -0
- data/CONTRIBUTING.md +121 -0
- data/DEVELOPER_GUIDE.md +77 -0
- data/Gemfile +14 -0
- data/MAINTAINERS.md +82 -0
- data/README.md +62 -0
- data/RELEASING.md +111 -0
- data/SECURITY.md +3 -0
- data/lib/logstash/inputs/opensearch/patches/_opensearch_transport_connections_selector.rb +52 -0
- data/lib/logstash/inputs/opensearch/patches/_opensearch_transport_http_manticore.rb +44 -0
- data/lib/logstash/inputs/opensearch.rb +432 -0
- data/logstash-input-opensearch.gemspec +52 -0
- data/spec/fixtures/test_certs/ca.crt +20 -0
- data/spec/fixtures/test_certs/ca.key +27 -0
- data/spec/fixtures/test_certs/es.crt +20 -0
- data/spec/fixtures/test_certs/es.key +27 -0
- data/spec/inputs/integration/opensearch_spec.rb +83 -0
- data/spec/inputs/opensearch_spec.rb +877 -0
- data/spec/opensearch_helper.rb +47 -0
- data.tar.gz.sig +1 -0
- metadata +312 -0
- metadata.gz.sig +0 -0
@@ -0,0 +1,432 @@
|
|
1
|
+
# Copyright OpenSearch Contributors
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
3
|
+
|
4
|
+
# encoding: utf-8
|
5
|
+
require "logstash/inputs/base"
|
6
|
+
require "logstash/namespace"
|
7
|
+
require "logstash/json"
|
8
|
+
require "logstash/util/safe_uri"
|
9
|
+
require 'logstash/plugin_mixins/validator_support/field_reference_validation_adapter'
|
10
|
+
require 'logstash/plugin_mixins/event_support/event_factory_adapter'
|
11
|
+
require 'logstash/plugin_mixins/ecs_compatibility_support'
|
12
|
+
require 'logstash/plugin_mixins/ecs_compatibility_support/target_check'
|
13
|
+
require "base64"
|
14
|
+
|
15
|
+
require "opensearch"
|
16
|
+
require "opensearch/transport/transport/http/manticore"
|
17
|
+
require_relative "opensearch/patches/_opensearch_transport_http_manticore"
|
18
|
+
require_relative "opensearch/patches/_opensearch_transport_connections_selector"
|
19
|
+
|
20
|
+
# .Compatibility Note
|
21
|
+
# [NOTE]
|
22
|
+
# ================================================================================
|
23
|
+
# Starting with Elasticsearch 5.3 (from which OpenSearch was forked), there is an HTTP setting
# called `http.content_type.required`. If this option is set to `true`, and you
# are using Logstash 2.4 through 5.2, you need to update the OpenSearch input
# plugin to version 4.0.2 or higher.
|
27
|
+
#
|
28
|
+
# ================================================================================
|
29
|
+
#
|
30
|
+
# Read from an OpenSearch cluster, based on search query results.
|
31
|
+
# This is useful for replaying test logs, reindexing, etc.
|
32
|
+
# It also supports periodically scheduling lookup enrichments
|
33
|
+
# using a cron syntax (see `schedule` setting).
|
34
|
+
#
|
35
|
+
# Example:
|
36
|
+
# [source,ruby]
|
37
|
+
# input {
|
38
|
+
# # Read all documents from OpenSearch matching the given query
|
39
|
+
# opensearch {
|
40
|
+
# hosts => "localhost"
|
41
|
+
# query => '{ "query": { "match": { "statuscode": 200 } }, "sort": [ "_doc" ] }'
|
42
|
+
# }
|
43
|
+
# }
|
44
|
+
#
|
45
|
+
# This would create an OpenSearch query with the following format:
|
46
|
+
# [source,json]
|
47
|
+
# curl 'http://localhost:9200/logstash-*/_search?&scroll=1m&size=1000' -d '{
|
48
|
+
# "query": {
|
49
|
+
# "match": {
|
50
|
+
# "statuscode": 200
|
51
|
+
# }
|
52
|
+
# },
|
53
|
+
# "sort": [ "_doc" ]
|
54
|
+
# }'
|
55
|
+
#
|
56
|
+
# ==== Scheduling
|
57
|
+
#
|
58
|
+
# Input from this plugin can be scheduled to run periodically according to a specific
|
59
|
+
# schedule. This scheduling syntax is powered by https://github.com/jmettraux/rufus-scheduler[rufus-scheduler].
|
60
|
+
# The syntax is cron-like with some extensions specific to Rufus (e.g. timezone support ).
|
61
|
+
#
|
62
|
+
# Examples:
|
63
|
+
#
|
64
|
+
# |==========================================================
|
65
|
+
# | `* 5 * 1-3 *` | will execute every minute of 5am every day of January through March.
|
66
|
+
# | `0 * * * *` | will execute on the 0th minute of every hour every day.
|
67
|
+
# | `0 6 * * * America/Chicago` | will execute at 6:00am (UTC/GMT -5) every day.
|
68
|
+
# |==========================================================
|
69
|
+
#
|
70
|
+
#
|
71
|
+
# Further documentation describing this syntax can be found https://github.com/jmettraux/rufus-scheduler#parsing-cronlines-and-time-strings[here].
|
72
|
+
#
|
73
|
+
#
|
74
|
+
class LogStash::Inputs::OpenSearch < LogStash::Inputs::Base

  # ECS compatibility selection; `v8` is mapped onto the `v1` field layout.
  include LogStash::PluginMixins::ECSCompatibilitySupport(:disabled, :v1, :v8 => :v1)
  include LogStash::PluginMixins::ECSCompatibilitySupport::TargetCheck

  # Provides `targeted_event_factory` used when the `target` option is set.
  include LogStash::PluginMixins::EventSupport::EventFactoryAdapter

  # Adds the :field_reference validator used by `docinfo_target` / `target`.
  extend LogStash::PluginMixins::ValidatorSupport::FieldReferenceValidationAdapter

  config_name "opensearch"

  # List of opensearch hosts to use for querying.
  # Each host can be either IP, HOST, IP:port or HOST:port.
  # Port defaults to 9200
  config :hosts, :validate => :array

  # The index or alias to search.
  config :index, :validate => :string, :default => "logstash-*"

  # The query to be executed. Read the OpenSearch query DSL documentation
  # for more info
  # https://opensearch.org/docs/latest/opensearch/query-dsl/index/
  config :query, :validate => :string, :default => '{ "sort": [ "_doc" ] }'

  # This allows you to set the maximum number of hits returned per scroll.
  config :size, :validate => :number, :default => 1000

  # This parameter controls the keepalive time in seconds of the scrolling
  # request and initiates the scrolling process. The timeout applies per
  # round trip (i.e. between the previous scroll request, to the next).
  config :scroll, :validate => :string, :default => "1m"

  # This parameter controls the number of parallel slices to be consumed simultaneously
  # by this pipeline input.
  config :slices, :validate => :number

  # If set, include OpenSearch document information such as index, type, and
  # the id in the event.
  #
  # It might be important to note, with regards to metadata, that if you're
  # ingesting documents with the intent to re-index them (or just update them)
  # that the `action` option in the opensearch output wants to know how to
  # handle those things. It can be dynamically assigned with a field
  # added to the metadata.
  #
  # Example
  # [source, ruby]
  # input {
  #   opensearch {
  #     hosts => "es.production.mysite.org"
  #     index => "mydata-2018.09.*"
  #     query => "*"
  #     size => 500
  #     scroll => "5m"
  #     docinfo => true
  #   }
  # }
  # output {
  #   opensearch {
  #     index => "copy-of-production.%{[@metadata][_index]}"
  #     document_type => "%{[@metadata][_type]}"
  #     document_id => "%{[@metadata][_id]}"
  #   }
  # }
  #
  config :docinfo, :validate => :boolean, :default => false

  # Where to move the OpenSearch document information.
  # default: [@metadata][input][opensearch] in ECS mode, @metadata field otherwise
  config :docinfo_target, :validate=> :field_reference

  # List of document metadata to move to the `docinfo_target` field.
  config :docinfo_fields, :validate => :array, :default => ['_index', '_type', '_id']

  # Basic Auth - username
  config :user, :validate => :string

  # Basic Auth - password
  config :password, :validate => :password

  # Connection Timeout, in Seconds
  config :connect_timeout_seconds, :validate => :positive_whole_number, :default => 10

  # Request Timeout, in Seconds
  config :request_timeout_seconds, :validate => :positive_whole_number, :default => 60

  # Socket Timeout, in Seconds
  config :socket_timeout_seconds, :validate => :positive_whole_number, :default => 60

  # Set the address of a forward HTTP proxy.
  config :proxy, :validate => :uri_or_empty

  # SSL
  config :ssl, :validate => :boolean, :default => false

  # SSL Certificate Authority file in PEM encoded format, must also include any chain certificates as necessary
  config :ca_file, :validate => :path

  # Schedule of when to periodically run statement, in Cron format
  # for example: "* * * * *" (execute query every minute, on the minute)
  #
  # There is no schedule by default. If no schedule is given, then the statement is run
  # exactly once.
  config :schedule, :validate => :string

  # If set, the _source of each hit will be added nested under the target instead of at the top-level
  config :target, :validate => :field_reference
|
181
|
+
|
182
|
+
# Applies the ECS-aware default for `docinfo_target` when the user has not
# configured one explicitly.
def initialize(params={})
  super(params)

  return unless docinfo_target.nil?

  # ECS disabled => root @metadata; ECS v1+ => namespaced under input.opensearch
  @docinfo_target = ecs_select[disabled: '@metadata', v1: '[@metadata][input][opensearch]']
end
|
189
|
+
|
190
|
+
# Validates the configuration, builds the OpenSearch client, and verifies
# connectivity. Fails with LogStash::ConfigurationError on invalid `slices`
# settings or an incompatible server.
def register
  require "rufus/scheduler"

  @options = {
    :index => @index,
    :scroll => @scroll,
    :size => @size
  }
  @base_query = LogStash::Json.load(@query)
  if @slices
    # `slices` and a user-supplied `slice` clause are mutually exclusive
    @base_query.include?('slice') && fail(LogStash::ConfigurationError, "OpenSearch Input Plugin's `query` option cannot specify specific `slice` when configured to manage parallel slices with `slices` option")
    @slices < 1 && fail(LogStash::ConfigurationError, "OpenSearch Input Plugin's `slices` option must be greater than zero, got `#{@slices}`")
  end

  transport_options = {:headers => {}}
  transport_options[:headers].merge!(setup_basic_auth(user, password))
  transport_options[:headers].merge!({'user-agent' => prepare_user_agent()})
  transport_options[:request_timeout] = @request_timeout_seconds unless @request_timeout_seconds.nil?
  transport_options[:connect_timeout] = @connect_timeout_seconds unless @connect_timeout_seconds.nil?
  transport_options[:socket_timeout] = @socket_timeout_seconds unless @socket_timeout_seconds.nil?

  hosts = setup_hosts
  ssl_options = setup_ssl

  # an empty-string proxy is treated as "no proxy" (explicitly disabled)
  @logger.warn "Supplied proxy setting (proxy => '') has no effect" if @proxy.eql?('')

  transport_options[:proxy] = @proxy.to_s if @proxy && !@proxy.eql?('')

  @client = OpenSearch::Client.new(
    :hosts => hosts,
    :transport_options => transport_options,
    :transport_class => ::OpenSearch::Transport::Transport::HTTP::Manticore,
    :ssl => ssl_options
  )
  # fail fast at register time rather than on the first query
  test_connection!
  @client
end
|
227
|
+
|
228
|
+
|
229
|
+
# Entry point invoked by Logstash. With a `schedule` the query runs as a
# cron job until the pipeline stops; otherwise it runs exactly once.
def run(output_queue)
  return do_run(output_queue) unless @schedule

  @scheduler = Rufus::Scheduler.new(:max_work_threads => 1)
  @scheduler.cron(@schedule) { do_run(output_queue) }
  @scheduler.join
end
|
241
|
+
|
242
|
+
# Called on pipeline shutdown; halts the cron scheduler when one is active.
def stop
  return if @scheduler.nil?

  @scheduler.stop
end
|
245
|
+
|
246
|
+
private
|
247
|
+
|
248
|
+
# Runs the query once. A single slice executes inline on the calling thread;
# multiple slices each get their own worker thread so pages are consumed in
# parallel.
def do_run(output_queue)
  slice_count = @slices
  # if configured to run a single slice, don't bother spinning up threads
  return do_run_slice(output_queue) if slice_count.nil? || slice_count <= 1

  logger.warn("managed slices for query is very large (#{slice_count}); consider reducing") if slice_count > 8

  workers = (0...slice_count).map do |slice_id|
    Thread.new do
      LogStash::Util::set_thread_name("#{@id}_slice_#{slice_id}")
      do_run_slice(output_queue, slice_id)
    end
  end
  workers.each(&:join)
end
|
261
|
+
|
262
|
+
# Executes the search for one slice (or the whole query when `slice_id` is
# nil) and follows the scroll cursor until it is exhausted or the plugin is
# asked to stop. The server-side scroll context is always cleared on exit.
def do_run_slice(output_queue, slice_id=nil)
  slice_query = @base_query
  slice_query = slice_query.merge('slice' => { 'id' => slice_id, 'max' => @slices}) unless slice_id.nil?

  slice_options = @options.merge(:body => LogStash::Json.dump(slice_query) )

  logger.info("Slice starting", slice_id: slice_id, slices: @slices) unless slice_id.nil?

  begin
    r = search_request(slice_options)

    r['hits']['hits'].each { |hit| push_hit(hit, output_queue) }
    logger.debug("Slice progress", slice_id: slice_id, slices: @slices) unless slice_id.nil?

    has_hits = r['hits']['hits'].any?
    scroll_id = r['_scroll_id']

    # keep paging while the server returns hits and a scroll cursor,
    # bailing out early when the pipeline is shutting down
    while has_hits && scroll_id && !stop?
      has_hits, scroll_id = process_next_scroll(output_queue, scroll_id)
      logger.debug("Slice progress", slice_id: slice_id, slices: @slices) if logger.debug? && slice_id
    end
    logger.info("Slice complete", slice_id: slice_id, slices: @slices) unless slice_id.nil?
  ensure
    # release the scroll context even on error/shutdown (nil-safe)
    clear_scroll(scroll_id)
  end
end
|
288
|
+
|
289
|
+
##
# Requests one scroll page and pushes its hits downstream.
#
# @param output_queue [#<<]
# @param scroll_id [String]: a scroll id to resume
# @return [Array(Boolean,String)]: whether hits remain, and the id of the
#   next scroll context (on failure, the original id so it can be cleared)
#
def process_next_scroll(output_queue, scroll_id)
  response = scroll_request(scroll_id)
  hits = response['hits']['hits']
  hits.each { |hit| push_hit(hit, output_queue) }
  [hits.any?, response['_scroll_id']]
rescue => e
  # typically a scroll timeout; abort this scroll but keep the pipeline alive
  logger.error("Scroll request error, aborting scroll", message: e.message, exception: e.class)
  # return no hits and original scroll_id so we can try to clear it
  [false, scroll_id]
end
|
304
|
+
|
305
|
+
# Converts a single search hit into a Logstash event and enqueues it.
def push_hit(hit, output_queue)
  event = targeted_event_factory.new_event(hit['_source'])
  # optionally copy hit metadata (_index/_type/_id, ...) onto the event
  set_docinfo_fields(hit, event) if @docinfo
  decorate(event)
  output_queue << event
end
|
311
|
+
|
312
|
+
# Copies the configured `docinfo_fields` (e.g. _index, _type, _id) from the
# raw hit into the event under the `@docinfo_target` field. Raises when the
# target field already holds a non-hash value.
def set_docinfo_fields(hit, event)
  # do not assume event[@docinfo_target] to be in-place updatable. first get it, update it, then at the end set it in the event.
  docinfo_target = event.get(@docinfo_target) || {}

  unless docinfo_target.is_a?(Hash)
    @logger.error("Incompatible Event, incompatible type for the docinfo_target=#{@docinfo_target} field in the `_source` document, expected a hash got:", :docinfo_target_type => docinfo_target.class, :event => event.to_hash_with_metadata)

    # TODO: (colin) I am not sure raising is a good strategy here?
    # NOTE(review): this raises bare `Exception`, which bypasses standard
    # `rescue` handlers (they catch StandardError) — consider a more specific
    # error class; kept as-is since callers may depend on the current type.
    raise Exception.new("OpenSearch input: incompatible event")
  end

  @docinfo_fields.each do |field|
    docinfo_target[field] = hit[field]
  end

  event.set(@docinfo_target, docinfo_target)
end
|
329
|
+
|
330
|
+
# Releases a server-side scroll context. Best effort: failures are logged
# and swallowed, since a leaked context merely expires on its own.
def clear_scroll(scroll_id)
  return unless scroll_id

  @client.clear_scroll(:body => { :scroll_id => scroll_id })
rescue => e
  # ignore & log any clear_scroll errors
  logger.warn("Ignoring clear_scroll exception", message: e.message, exception: e.class)
end
|
336
|
+
|
337
|
+
# Fetches the next page of an ongoing scroll, renewing the keepalive.
def scroll_request(scroll_id)
  payload = { :scroll_id => scroll_id }
  @client.scroll(:body => payload, :scroll => @scroll)
end
|
340
|
+
|
341
|
+
# Executes the initial search request; returns the raw response hash
# (including `_scroll_id` when scrolling is active).
def search_request(options)
  @client.search(options)
end
|
344
|
+
|
345
|
+
# True when no hosts were configured (nil or an empty array), i.e. the
# client should fall back to its default endpoint.
def hosts_default?(hosts)
  return true if hosts.nil?

  hosts.is_a?(Array) && hosts.empty?
end
|
348
|
+
|
349
|
+
# Builds the client's :ssl options hash; returns nil when TLS is disabled.
def setup_ssl
  return unless @ssl

  if @ca_file
    { :ssl => true, :ca_file => @ca_file }
  else
    # no CA provided: enable TLS but skip certificate verification
    { :ssl => true, :verify => false }
  end
end
|
353
|
+
|
354
|
+
# Normalizes the configured hosts for the client. With TLS enabled each
# entry becomes an explicit host hash so the https scheme is forced.
def setup_hosts
  # entries may be SafeURI objects; coerce everything to plain strings
  @hosts = Array(@hosts).map(&:to_s)
  return @hosts unless @ssl

  @hosts.map do |entry|
    name, port = entry.split(":")
    { :host => name, :scheme => 'https', :port => port }
  end
end
|
365
|
+
|
366
|
+
# Builds the Authorization header for HTTP basic auth.
#
# @param user [String, nil] basic-auth username
# @param password [#value, nil] wrapped password object (responds to #value)
# @return [Hash] `{'Authorization' => 'Basic ...'}` when both credentials
#   are present, otherwise an empty hash
def setup_basic_auth(user, password)
  secret = password && password.value
  return {} unless user && secret

  credentials = "#{user}:#{secret}"
  { 'Authorization' => "Basic #{::Base64.strict_encode64(credentials)}" }
end
|
372
|
+
|
373
|
+
# Builds the User-Agent header sent with every request, e.g.
# "logstash/7.14.1 (OS=Linux-5.4.0-84-generic-amd64; JVM=AdoptOpenJDK-11.0.11) logstash-input-opensearch/1.0.0"
# Relies on JRuby's `java` bridge for system properties.
def prepare_user_agent
  os_name, os_version, os_arch, jvm_vendor, jvm_version =
    %w[os.name os.version os.arch java.vendor java.version].map do |key|
      java.lang.System.getProperty(key)
    end

  plugin_version = Gem.loaded_specs["logstash-input-opensearch"].version
  "logstash/#{LOGSTASH_VERSION} (OS=#{os_name}-#{os_version}-#{os_arch}; JVM=#{jvm_vendor}-#{jvm_version}) logstash-#{@plugin_type}-#{config_name}/#{plugin_version}"
end
|
384
|
+
|
385
|
+
# @private used by unit specs
|
386
|
+
attr_reader :client
|
387
|
+
|
388
|
+
# Pings the cluster once so misconfiguration fails fast at register time;
# translates an incompatible-server error into a ConfigurationError.
def test_connection!
  begin
    @client.ping
  rescue OpenSearch::UnsupportedProductError
    raise LogStash::ConfigurationError, "Could not connect to a compatible version of OpenSearch"
  end
end
|
393
|
+
|
394
|
+
# Adds a :uri_or_empty config validator: accepts either a valid URI or a
# single empty string (used by the `proxy` option to mean "disabled").
module URIOrEmptyValidator
  ##
  # @override to provide :uri_or_empty validator
  # @param value [Array<Object>]
  # @param validator [nil,Array,Symbol]
  # @return [Array(true,Object)]: if validation is a success, a tuple containing `true` and the coerced value
  # @return [Array(false,String)]: if validation is a failure, a tuple containing `false` and the failure reason.
  def validate_value(value, validator)
    return super unless validator == :uri_or_empty

    value = deep_replace(value)
    value = hash_or_array(value)

    # a single empty string is accepted as "no proxy configured"
    return true, value.first if value.size == 1 && value.first.empty?

    # anything else must satisfy the stock :uri validator
    return super(value, :uri)
  end
end
extend(URIOrEmptyValidator)
|
413
|
+
|
414
|
+
# Adds a :positive_whole_number config validator, used by the timeout
# options (connect/request/socket seconds).
module PositiveWholeNumberValidator
  ##
  # @override to provide :positive_whole_number validator
  # @param value [Array<Object>]
  # @param validator [nil,Array,Symbol]
  # @return [Array(true,Object)]: if validation is a success, a tuple containing `true` and the coerced value
  # @return [Array(false,String)]: if validation is a failure, a tuple containing `false` and the failure reason.
  def validate_value(value, validator)
    return super unless validator == :positive_whole_number

    # delegate to the stock :number validator first, then constrain the result
    is_number, coerced_number = super(value, :number)

    # denominator == 1 means the coerced number has no fractional part
    return [true, coerced_number.to_i] if is_number && coerced_number.denominator == 1 && coerced_number > 0

    return [false, "Expected positive whole number, got `#{value.inspect}`"]
  end
end
extend(PositiveWholeNumberValidator)
end
|
@@ -0,0 +1,52 @@
|
|
1
|
+
# Copyright OpenSearch Contributors
# SPDX-License-Identifier: Apache-2.0

# Relative path to the release-signing key; only consulted during `gem build`.
signing_key_path = "private_key.pem"

Gem::Specification.new do |s|

  s.name            = 'logstash-input-opensearch'
  s.version         = '1.0.0'
  s.licenses        = ['Apache License (2.0)']
  s.summary         = "Reads query results from an OpenSearch cluster"
  s.description     = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
  s.authors         = ["Elastic", "OpenSearch Contributors"]
  s.email           = 'opensearch@amazon.com'
  s.homepage        = "https://opensearch.org/"
  s.require_paths = ["lib"]

  # Files
  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Sign the gem only when it is actually being built via `gem build` and the
  # private key is present on disk. Assign the key exactly once, as an
  # absolute path (the former duplicate relative/expanded assignments were
  # redundant and the relative one could resolve against the wrong CWD).
  if $PROGRAM_NAME.end_with?("gem") && ARGV == ["build", __FILE__] && File.exist?(signing_key_path)
    s.signing_key = File.expand_path(signing_key_path)
    s.cert_chain  = ['certs/opensearch-rubygems.pem']
  end

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "input" }

  # Gem dependencies
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  s.add_runtime_dependency 'logstash-mixin-ecs_compatibility_support', '~> 1.3'
  s.add_runtime_dependency 'logstash-mixin-event_support', '~> 1.0'
  s.add_runtime_dependency "logstash-mixin-validator_support", '~> 1.0'

  s.add_runtime_dependency 'tzinfo'
  s.add_runtime_dependency 'tzinfo-data'
  s.add_runtime_dependency 'rufus-scheduler'
  s.add_runtime_dependency 'manticore', ">= 0.7.1"

  s.add_runtime_dependency 'opensearch-ruby'

  s.add_development_dependency 'logstash-codec-plain'
  s.add_development_dependency 'faraday', "~> 1"
  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'timecop'
  s.add_development_dependency 'cabin', ['~> 0.6']
  s.add_development_dependency 'webrick'
end
|
@@ -0,0 +1,20 @@
|
|
1
|
+
-----BEGIN CERTIFICATE-----
|
2
|
+
MIIDSTCCAjGgAwIBAgIUUcAg9c8B8jiliCkOEJyqoAHrmccwDQYJKoZIhvcNAQEL
|
3
|
+
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
|
4
|
+
cmF0ZWQgQ0EwHhcNMjEwODEyMDUxNDU1WhcNMjQwODExMDUxNDU1WjA0MTIwMAYD
|
5
|
+
VQQDEylFbGFzdGljIENlcnRpZmljYXRlIFRvb2wgQXV0b2dlbmVyYXRlZCBDQTCC
|
6
|
+
ASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK1HuusRuGNsztd4EQvqwcMr
|
7
|
+
8XvnNNaalerpMOorCGySEFrNf0HxDIVMGMCrOv1F8SvlcGq3XANs2MJ4F2xhhLZr
|
8
|
+
PpqVHx+QnSZ66lu5R89QVSuMh/dCMxhNBlOA/dDlvy+EJBl9H791UGy/ChhSgaBd
|
9
|
+
OKVyGkhjErRTeMIq7rR7UG6GL/fV+JGy41UiLrm1KQP7/XVD9UzZfGq/hylFkTPe
|
10
|
+
oox5BUxdxUdDZ2creOID+agtIYuJVIkelKPQ+ljBY3kWBRexqJQsvyNUs1gZpjpz
|
11
|
+
YUCzuVcXDRuJXYQXGqWXhsBPfJv+ZcSyMIBUfWT/G13cWU1iwufPy0NjajowPZsC
|
12
|
+
AwEAAaNTMFEwHQYDVR0OBBYEFMgkye5+2l+TE0I6RsXRHjGBwpBGMB8GA1UdIwQY
|
13
|
+
MBaAFMgkye5+2l+TE0I6RsXRHjGBwpBGMA8GA1UdEwEB/wQFMAMBAf8wDQYJKoZI
|
14
|
+
hvcNAQELBQADggEBAIgtJW8sy5lBpzPRHkmWSS/SCZIPsABW+cHqQ3e0udrI3CLB
|
15
|
+
G9n7yqAPWOBTbdqC2GM8dvAS/Twx4Bub/lWr84dFCu+t0mQq4l5kpJMVRS0KKXPL
|
16
|
+
DwJbUN3oPNYy4uPn5Xi+XY3BYFce5vwJUsqIxeAbIOxVTNx++k5DFnB0ESAM23QL
|
17
|
+
sgUZl7xl3/DkdO4oHj30gmTRW9bjCJ6umnHIiO3JoJatrprurUIt80vHC4Ndft36
|
18
|
+
NBQ9mZpequ4RYjpSZNLcVsxyFAYwEY4g8MvH0MoMo2RRLfehmMCzXnI/Wh2qEyYz
|
19
|
+
emHprBii/5y1HieKXlX9CZRb5qEPHckDVXW3znw=
|
20
|
+
-----END CERTIFICATE-----
|
@@ -0,0 +1,27 @@
|
|
1
|
+
-----BEGIN RSA PRIVATE KEY-----
|
2
|
+
MIIEowIBAAKCAQEArUe66xG4Y2zO13gRC+rBwyvxe+c01pqV6ukw6isIbJIQWs1/
|
3
|
+
QfEMhUwYwKs6/UXxK+VwardcA2zYwngXbGGEtms+mpUfH5CdJnrqW7lHz1BVK4yH
|
4
|
+
90IzGE0GU4D90OW/L4QkGX0fv3VQbL8KGFKBoF04pXIaSGMStFN4wirutHtQboYv
|
5
|
+
99X4kbLjVSIuubUpA/v9dUP1TNl8ar+HKUWRM96ijHkFTF3FR0NnZyt44gP5qC0h
|
6
|
+
i4lUiR6Uo9D6WMFjeRYFF7GolCy/I1SzWBmmOnNhQLO5VxcNG4ldhBcapZeGwE98
|
7
|
+
m/5lxLIwgFR9ZP8bXdxZTWLC58/LQ2NqOjA9mwIDAQABAoIBABmBC0P6Ebegljkk
|
8
|
+
lO26GdbOKvbfqulDS3mN5QMyXkUMopea03YzMnKUJriE+2O33a1mUcuDPWnLpYPK
|
9
|
+
BTiQieYHlulNtY0Bzf+R69igRq9+1WpZftGnzrlu7NVxkOokRqWJv3546ilV7QZ0
|
10
|
+
f9ngmu+tiN7hEnlBC8m613VMuGGb3czwbCizEVZxlZX0Dk2GExbH7Yf3NNs/aOP/
|
11
|
+
8x6CqgL+rhrtOQ80xwRrOlEF8oSSjXCzypa3nFv21YO3J2lVo4BoIwnHgOzyz46A
|
12
|
+
b37gekqXXajIYQ0HAB+NDgVoCRFFJ7Xe16mgB3DpyUpUJzwiMedJkeQ0TprIownQ
|
13
|
+
+1mPe9ECgYEA/K4jc0trr3sk8KtcZjOYdpvwrhEqSSGEPeGfFujZaKOb8PZ8PX6j
|
14
|
+
MbCTV12nEgm8FEhZQ3azxLnO17gbJ2A+Ksm/IIwnTWlqvvMZD5qTQ7L3qZuCtbWQ
|
15
|
+
+EGC/H1SDjhiwvjHcXP61/tYL/peApBSoj0L4kC+U/VaNyvicudKk08CgYEAr46J
|
16
|
+
4VJBJfZ4ZaUBRy53+fy+mknOfaj2wo8MnD3u+/x4YWTapqvDOPN2nJVtKlIsxbS4
|
17
|
+
qCO+fzUV17YHlsQmGULNbtFuXWJkP/RcLVbe8VYg/6tmk0dJwNAe90flagX2KJov
|
18
|
+
8eDX129nNpuUqrNNWsfeLmPmH6vUzpKlga+1zfUCgYBrbUHHJ96dmbZn2AMNtIvy
|
19
|
+
iXP3HXcj5msJwB3aKJ8eHMkU1kaWAnwxiQfrkfaQ9bCP0v6YbyQY1IJ7NlvdDs7/
|
20
|
+
dAydMtkW0WW/zyztdGN92d3vrx0QUiRTV87vt/wl7ZUXnZt1wcB5CPRCWaiUYHWx
|
21
|
+
YlDmHW6N1XdIk5DQF0OegwKBgEt7S8k3Zo9+A5IgegYy8p7njsQjy8a3qTFJ9DAR
|
22
|
+
aPmrOc8WX/SdkVihRXRZwxAZOOrgoyyYAcYL+xI+T9EBESh3UoC9R2ibb2MYG7Ha
|
23
|
+
0gyN7a4/8eCNHCbs1QOZRAhr+8TFVqv28pbMbWJLToZ+hVns6Zikl0MyzFLtNoAm
|
24
|
+
HlMpAoGBAIOkqnwwuRKhWprL59sdcJfWY26os9nvuDV4LoKFNEFLJhj2AA2/3UlV
|
25
|
+
v85gqNSxnMNlHLZC9l2HZ3mKv/mfx1aikmFvyhJAnk5u0f9KkexmCPLjQzS5q3ba
|
26
|
+
yFuxK2DXwN4x46RgQPFlLjOTCX0BG6rkEu4JdonF8ETSjoCtGEU8
|
27
|
+
-----END RSA PRIVATE KEY-----
|
@@ -0,0 +1,20 @@
|
|
1
|
+
-----BEGIN CERTIFICATE-----
|
2
|
+
MIIDNjCCAh6gAwIBAgIUF9wE+oqGSbm4UVn1y9gEjzyaJFswDQYJKoZIhvcNAQEL
|
3
|
+
BQAwNDEyMDAGA1UEAxMpRWxhc3RpYyBDZXJ0aWZpY2F0ZSBUb29sIEF1dG9nZW5l
|
4
|
+
cmF0ZWQgQ0EwHhcNMjEwODEyMDUxNTI3WhcNMjQwODExMDUxNTI3WjANMQswCQYD
|
5
|
+
VQQDEwJlczCCASIwDQYJKoZIhvcNAQEBBQADggEPADCCAQoCggEBAK2S2by0lgyu
|
6
|
+
1JfgGgZ41PNXbH2qMPMzowguVVdtZ16WM0CaEG7lnLxmMcC+2Q7NnGuFnPAVQo9T
|
7
|
+
Q3bh7j+1PkCJVHUKZfJIeWtGc9+qXBcO1MhedfwM1osSa4bfwM85G+XKWbRNtmSt
|
8
|
+
CoUuKArIyZkzdBAAQLBoQyPf3DIza1Au4j9Hb3zrswD6e7n2PN4ffIyil1GFduLJ
|
9
|
+
2275qqFiOhkEDUhv7BKNftVBh/89O/5lSqAQGuQ1aDRr8TdHwhO71u4ZIU/Pn6yX
|
10
|
+
LGBWrQG53+qpdCsxGvJTfbtIEYUDTN83CirIxDKJgc1QXOEldylztHf4xnQ7ZarJ
|
11
|
+
tqF6pUzHbRsCAwEAAaNnMGUwHQYDVR0OBBYEFFQUK+6Cg2kExRj1xSDzEi4kkgKX
|
12
|
+
MB8GA1UdIwQYMBaAFMgkye5+2l+TE0I6RsXRHjGBwpBGMBgGA1UdEQQRMA+CDWVs
|
13
|
+
YXN0aWNzZWFyY2gwCQYDVR0TBAIwADANBgkqhkiG9w0BAQsFAAOCAQEAinaknZIc
|
14
|
+
7xtQNwUwa+kdET+I4lMz+TJw9vTjGKPJqe082n81ycKU5b+a/OndG90z+dTwhShW
|
15
|
+
f0oZdIe/1rDCdiRU4ceCZA4ybKrFDIbW8gOKZOx9rsgEx9XNELj4ocZTBqxjQmNE
|
16
|
+
Ho91fli5aEm0EL2vJgejh4hcfDeElQ6go9gtvAHQ57XEADQSenvt69jOICOupnS+
|
17
|
+
LSjDVhv/VLi3CAip0B+lD5fX/DVQdrJ62eRGuQYxoouE3saCO58qUUrKB39yD9KA
|
18
|
+
qRA/sVxyLogxaU+5dLfc0NJdOqSzStxQ2vdMvAWo9tZZ2UBGFrk5SdwCQe7Yv5mX
|
19
|
+
qi02i4q6meHGcw==
|
20
|
+
-----END CERTIFICATE-----
|
@@ -0,0 +1,27 @@
|
|
1
|
+
-----BEGIN RSA PRIVATE KEY-----
|
2
|
+
MIIEowIBAAKCAQEArZLZvLSWDK7Ul+AaBnjU81dsfaow8zOjCC5VV21nXpYzQJoQ
|
3
|
+
buWcvGYxwL7ZDs2ca4Wc8BVCj1NDduHuP7U+QIlUdQpl8kh5a0Zz36pcFw7UyF51
|
4
|
+
/AzWixJrht/Azzkb5cpZtE22ZK0KhS4oCsjJmTN0EABAsGhDI9/cMjNrUC7iP0dv
|
5
|
+
fOuzAPp7ufY83h98jKKXUYV24snbbvmqoWI6GQQNSG/sEo1+1UGH/z07/mVKoBAa
|
6
|
+
5DVoNGvxN0fCE7vW7hkhT8+frJcsYFatAbnf6ql0KzEa8lN9u0gRhQNM3zcKKsjE
|
7
|
+
MomBzVBc4SV3KXO0d/jGdDtlqsm2oXqlTMdtGwIDAQABAoIBAQCm/VBDz41ImG7p
|
8
|
+
yu3e6iMeFi7HW5SKdlRUS5dJbHT1uBWJAm/q8TbwvnUBVdsn9cKWY06QYDPQBjAy
|
9
|
+
0LxRSIKivjyl+aIJDZbbEUXrmk/M0zT9rHtgSc2isM8ITH6IHw5q7lmNMPLYOu6T
|
10
|
+
IMvfTDtADBOOTV/vF+/4NKf5GCUXVt1XTzLBFMK0p/ZoI7Fsw7fhH6FR12vk0xA4
|
11
|
+
BEC4pwRbGfHo7P31ii0by8epkve93tF4IZuFmN92A84bN1z7Kc4TYaSbua2rgguz
|
12
|
+
FzMyWpsTxr363HzCK1xOJb6JyJOiXbq4+j2oqtne3GIvyozJeiyKRgjLIMoe/LV7
|
13
|
+
fPPc5wlhAoGBAOD3z0JH2eyR/1RHILFsWInH2nDbKHHuCjhFIL2XloeXsJkiJZ95
|
14
|
+
BpdjExMZCqD44tPNRW/GgWKwoVwltm6zB0aq0aW/OfOzw6fhKt1W+go47L7Tpwap
|
15
|
+
VQgy6BFXSueUKfQDlZEWV4E2gakf8vOl0/VRQExae/CeKf1suEedQaErAoGBAMWE
|
16
|
+
LOmNDEU2NFqghfNBAFYyFJst3YnBmSmlL7W22+OsfSK/PhxnJbuNHxMgxpg9rieW
|
17
|
+
tVyjuZRo/i7WLVm3uG+dK1RJ9t8Y6kpYkCRKpi9G8DBOj3PSulOybBr+fdRfW9mf
|
18
|
+
8UmqOjOkrhxXPkchc9TY4EM7/1XeKvEidlIp0gvRAoGAAurz4zYvW2QhXaR2hhaT
|
19
|
+
p2XSLXiKM8AUndo3rH3U0/lhrvrEZicZsMj2LF88xg20U27sIaD/eJo13Y4XqaPk
|
20
|
+
ykPY6D9srv574SeIeMpx/8PxPiBcoDd+BNc0L1VkgVBoouORAwq5I9HjKKBjdEmI
|
21
|
+
UDw3i0X5KYvDm6fXVAZ0HXUCgYBWc4To8KiXPqNpq2sVzrSkBaWJSmj2G7u7Q6b/
|
22
|
+
RTs3is72v3gjHG6iiaE5URY7mnu4rjlRhAP9Vnsy6uHMrCJZEBTf/sPEYHZj9iGZ
|
23
|
+
EOduOAF3U1tsmaaebbDtm8hdhSOBvITy9kQlSIZAt1r17Ulytz5pj0AySFzJUIkz
|
24
|
+
a0SZkQKBgCWixtUxiK8PAdWhyS++90WJeJn8eqjuSAz+VMtFQFRRWDUbkiHvGMRu
|
25
|
+
o/Hhk6zS46gSF2Evb1d26uUEenXnJlIp6YWzb0DLPrfy5P53kPA6YEvYq5MSAg3l
|
26
|
+
DZOJUF+ko7cWXSZkeTIBH/jrGOdP4tTALZt6DNt+Gz7xwPO5tGgV
|
27
|
+
-----END RSA PRIVATE KEY-----
|
@@ -0,0 +1,83 @@
|
|
1
|
+
# Copyright OpenSearch Contributors
|
2
|
+
# SPDX-License-Identifier: Apache-2.0
|
3
|
+
|
4
|
+
# encoding: utf-8
|
5
|
+
require "logstash/devutils/rspec/spec_helper"
|
6
|
+
require "logstash/plugin"
|
7
|
+
require "logstash/inputs/opensearch"
|
8
|
+
require_relative "../../../spec/opensearch_helper"
|
9
|
+
|
10
|
+
# Integration specs: exercise the plugin against a live OpenSearch instance.
# Unsecured vs. secured variants are selected via the :integration /
# :secure_integration rspec tags.
describe LogStash::Inputs::OpenSearch do

  let(:config) { { 'hosts' => [OpenSearchHelper.get_host_port],
                   'index' => 'logs',
                   'query' => '{ "query": { "match": { "message": "Not found"} }}' } }
  let(:plugin) { described_class.new(config) }
  let(:event) { LogStash::Event.new({}) }
  let(:client_options) { Hash.new }

  before(:each) do
    @es = OpenSearchHelper.get_client(client_options)
    # Delete all templates first.
    # Clean OpenSearch of data before we start.
    @es.indices.delete_template(:name => "*")
    # This can fail if there are no indexes, ignore failure.
    @es.indices.delete(:index => "*") rescue nil
    # seed ten documents that the default query will match
    10.times do
      OpenSearchHelper.index_doc(@es, :index => 'logs', :body => { :response => 404, :message=> 'Not Found'})
    end
    @es.indices.refresh
  end

  after(:each) do
    # leave the cluster clean for the next example
    @es.indices.delete_template(:name => "*")
    @es.indices.delete(:index => "*") rescue nil
  end

  shared_examples 'an opensearch index plugin' do
    before(:each) do
      plugin.register
    end

    it 'should retrieve json event from opensearch' do
      queue = []
      plugin.run(queue)
      event = queue.pop
      expect(event).to be_a(LogStash::Event)
      expect(event.get("response")).to eql(404)
    end
  end

  describe 'against an unsecured opensearch', :integration => true do
    before(:each) do
      plugin.register
    end

    it_behaves_like 'an opensearch index plugin'
  end

  describe 'against a secured opensearch', :secure_integration => true do
    # credentials default to the demo admin user; override via ENV
    let(:user) { ENV['USER'] || 'admin' }
    let(:password) { ENV['PASSWORD'] || 'admin' }

    let(:client_options) { { :user => user, :password => password } }

    let(:config) { super().merge('user' => user, 'password' => password, 'ssl' => true) }

    it_behaves_like 'an opensearch index plugin'

    context "incorrect auth credentials" do

      let(:config) do
        super().merge('user' => 'archer', 'password' => 'b0gus!')
      end

      let(:queue) { [] }

      it "fails to run the plugin" do
        expect { plugin.register }.to raise_error OpenSearch::Transport::Transport::Errors::Unauthorized
      end
    end

  end
end
|