logstash-input-elasticsearch 4.20.5 → 4.21.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: eaa000218cac721012284803638cb681c0eccf02107c34b03e597998a2e9eee3
4
- data.tar.gz: eef84d9f24f1346b1d4793f6b10f200e790739f63d588888bae1bcdae33ec51f
3
+ metadata.gz: bf89e42e7146cb59e938fed4a2a2d352bc72bf5c73c59d2a674be724c686a543
4
+ data.tar.gz: 9b0b0b80d4a3754dac6e5d4f2c179dbd1be2ee6d6ce6c2b1fc619c2369a808aa
5
5
  SHA512:
6
- metadata.gz: a9ed0836e937e4ceb9726d05be77e39a10cc11189bbf0cffb829e4445f5e6948e339187c2f088d1d9408657c26318375756d974299667fdf86d62b77ba6f00d5
7
- data.tar.gz: 3e919b494b24d4d61a98222e27310a6d0e2f9d1d2b6209b2879c80199dd4c69e39b789a2d6e7503e07fe3097d50261f78d0c4ab7300bd48d6cf79a5d28c66c01
6
+ metadata.gz: 87223b2b656aee8d27c9f0ea92151370f3eb46b1851040054f0fa74e99ab4882278b28c85d4f8290d211ef793ab3eab9141b9c3ab343bd80cc5b9b9e7f0ceb11
7
+ data.tar.gz: 10db333c4a67f69fb695bede43feb8f89692609afd74c40de3babf9b56580c18ea1d839865c66e9ec5ade759825309aecd84a2d269805583e8053b48bb563af9
data/CHANGELOG.md CHANGED
@@ -1,3 +1,6 @@
1
+ ## 4.21.0
2
+ - Add support for custom headers [#217](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/217)
3
+
1
4
  ## 4.20.5
2
5
  - Add `x-elastic-product-origin` header to Elasticsearch requests [#211](https://github.com/logstash-plugins/logstash-input-elasticsearch/pull/211)
3
6
 
data/docs/index.asciidoc CHANGED
@@ -23,7 +23,7 @@ include::{include_path}/plugin_header.asciidoc[]
23
23
 
24
24
  Read from an Elasticsearch cluster, based on search query results.
25
25
  This is useful for replaying test logs, reindexing, etc.
26
- You can periodically schedule ingestion using a cron syntax
26
+ You can periodically schedule ingestion using a cron syntax
27
27
  (see `schedule` setting) or run the query one time to load
28
28
  data into Logstash.
29
29
 
@@ -106,6 +106,7 @@ This plugin supports the following configuration options plus the <<plugins-{typ
106
106
  | <<plugins-{type}s-{plugin}-cloud_auth>> |<<password,password>>|No
107
107
  | <<plugins-{type}s-{plugin}-cloud_id>> |<<string,string>>|No
108
108
  | <<plugins-{type}s-{plugin}-connect_timeout_seconds>> | <<number,number>>|No
109
+ | <<plugins-{type}s-{plugin}-custom_headers>> |<<hash,hash>>|No
109
110
  | <<plugins-{type}s-{plugin}-docinfo>> |<<boolean,boolean>>|No
110
111
  | <<plugins-{type}s-{plugin}-docinfo_fields>> |<<array,array>>|No
111
112
  | <<plugins-{type}s-{plugin}-docinfo_target>> |<<string,string>>|No
@@ -199,8 +200,18 @@ For more info, check out the
199
200
  The maximum amount of time, in seconds, to wait while establishing a connection to Elasticsearch.
200
201
  Connect timeouts tend to occur when Elasticsearch or an intermediate proxy is overloaded with requests and has exhausted its connection pool.
201
202
 
203
+ [id="plugins-{type}s-{plugin}-custom_headers"]
204
+ ===== `custom_headers`
205
+
206
+ * Value type is <<hash,hash>>
207
+ * Default value is empty
208
+
209
+ Pass a set of key-value pairs as the headers sent in each request to an Elasticsearch node.
210
+ The headers will be used for any kind of request.
211
+ These custom headers will override any headers previously set by the plugin such as the User Agent or Authorization headers.
212
+
202
213
  [id="plugins-{type}s-{plugin}-docinfo"]
203
- ===== `docinfo`
214
+ ===== `docinfo`
204
215
 
205
216
  * Value type is <<boolean,boolean>>
206
217
  * Default value is `false`
@@ -251,7 +262,7 @@ Example
251
262
 
252
263
 
253
264
  [id="plugins-{type}s-{plugin}-docinfo_fields"]
254
- ===== `docinfo_fields`
265
+ ===== `docinfo_fields`
255
266
 
256
267
  * Value type is <<array,array>>
257
268
  * Default value is `["_index", "_type", "_id"]`
@@ -262,7 +273,7 @@ option lists the metadata fields to save in the current event. See
262
273
  more information.
263
274
 
264
275
  [id="plugins-{type}s-{plugin}-docinfo_target"]
265
- ===== `docinfo_target`
276
+ ===== `docinfo_target`
266
277
 
267
278
  * Value type is <<string,string>>
268
279
  * Default value depends on whether <<plugins-{type}s-{plugin}-ecs_compatibility>> is enabled:
@@ -286,7 +297,7 @@ this option names the field under which to store the metadata fields as subfield
286
297
  Controls this plugin's compatibility with the {ecs-ref}[Elastic Common Schema (ECS)].
287
298
 
288
299
  [id="plugins-{type}s-{plugin}-hosts"]
289
- ===== `hosts`
300
+ ===== `hosts`
290
301
 
291
302
  * Value type is <<array,array>>
292
303
  * There is no default value for this setting.
@@ -296,18 +307,18 @@ can be either IP, HOST, IP:port, or HOST:port. The port defaults to
296
307
  9200.
297
308
 
298
309
  [id="plugins-{type}s-{plugin}-index"]
299
- ===== `index`
310
+ ===== `index`
300
311
 
301
312
  * Value type is <<string,string>>
302
313
  * Default value is `"logstash-*"`
303
314
 
304
- The index or alias to search.
315
+ The index or alias to search.
305
316
  Check out {ref}/api-conventions.html#api-multi-index[Multi Indices
306
317
  documentation] in the Elasticsearch documentation for info on
307
318
  referencing multiple indices.
308
319
 
309
320
  [id="plugins-{type}s-{plugin}-password"]
310
- ===== `password`
321
+ ===== `password`
311
322
 
312
323
  * Value type is <<password,password>>
313
324
  * There is no default value for this setting.
@@ -327,7 +338,7 @@ An empty string is treated as if proxy was not set, this is useful when using
327
338
  environment variables e.g. `proxy => '${LS_PROXY:}'`.
328
339
 
329
340
  [id="plugins-{type}s-{plugin}-query"]
330
- ===== `query`
341
+ ===== `query`
331
342
 
332
343
  * Value type is <<string,string>>
333
344
  * Default value is `'{ "sort": [ "_doc" ] }'`
@@ -375,7 +386,7 @@ The default is 0 (no retry). This value should be equal to or greater than zero.
375
386
  NOTE: Partial failures - such as errors in a subset of all slices - can result in the entire query being retried, which can lead to duplication of data. Avoiding this would require Logstash to store the entire result set of a query in memory which is often not possible.
376
387
 
377
388
  [id="plugins-{type}s-{plugin}-schedule"]
378
- ===== `schedule`
389
+ ===== `schedule`
379
390
 
380
391
  * Value type is <<string,string>>
381
392
  * There is no default value for this setting.
@@ -387,7 +398,7 @@ There is no schedule by default. If no schedule is given, then the statement is
387
398
  exactly once.
388
399
 
389
400
  [id="plugins-{type}s-{plugin}-scroll"]
390
- ===== `scroll`
401
+ ===== `scroll`
391
402
 
392
403
  * Value type is <<string,string>>
393
404
  * Default value is `"1m"`
@@ -410,7 +421,7 @@ The query requires at least one `sort` field, as described in the <<plugins-{typ
410
421
  `scroll` uses {ref}/paginate-search-results.html#scroll-search-results[scroll] API to search, which is no longer recommended.
411
422
 
412
423
  [id="plugins-{type}s-{plugin}-size"]
413
- ===== `size`
424
+ ===== `size`
414
425
 
415
426
  * Value type is <<number,number>>
416
427
  * Default value is `1000`
@@ -598,7 +609,7 @@ It is also possible to target an entry in the event's metadata, which will be av
598
609
 
599
610
 
600
611
  [id="plugins-{type}s-{plugin}-user"]
601
- ===== `user`
612
+ ===== `user`
602
613
 
603
614
  * Value type is <<string,string>>
604
615
  * There is no default value for this setting.
@@ -24,9 +24,9 @@ require_relative "elasticsearch/patches/_elasticsearch_transport_connections_sel
24
24
  # called `http.content_type.required`. If this option is set to `true`, and you
25
25
  # are using Logstash 2.4 through 5.2, you need to update the Elasticsearch input
26
26
  # plugin to version 4.0.2 or higher.
27
- #
27
+ #
28
28
  # ================================================================================
29
- #
29
+ #
30
30
  # Read from an Elasticsearch cluster, based on search query results.
31
31
  # This is useful for replaying test logs, reindexing, etc.
32
32
  # It also supports periodically scheduling lookup enrichments
@@ -166,6 +166,9 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
166
166
  # http://www.elasticsearch.org/guide/en/elasticsearch/guide/current/_document_metadata.html
167
167
  config :docinfo_fields, :validate => :array, :default => ['_index', '_type', '_id']
168
168
 
169
+ # Custom headers for Elasticsearch requests
170
+ config :custom_headers, :validate => :hash, :default => {}
171
+
169
172
  # Basic Auth - username
170
173
  config :user, :validate => :string
171
174
 
@@ -305,6 +308,7 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
305
308
  transport_options[:headers].merge!(setup_basic_auth(user, password))
306
309
  transport_options[:headers].merge!(setup_api_key(api_key))
307
310
  transport_options[:headers].merge!({'user-agent' => prepare_user_agent()})
311
+ transport_options[:headers].merge!(@custom_headers) unless @custom_headers.empty?
308
312
  transport_options[:request_timeout] = @request_timeout_seconds unless @request_timeout_seconds.nil?
309
313
  transport_options[:connect_timeout] = @connect_timeout_seconds unless @connect_timeout_seconds.nil?
310
314
  transport_options[:socket_timeout] = @socket_timeout_seconds unless @socket_timeout_seconds.nil?
@@ -1,7 +1,7 @@
1
1
  Gem::Specification.new do |s|
2
2
 
3
3
  s.name = 'logstash-input-elasticsearch'
4
- s.version = '4.20.5'
4
+ s.version = '4.21.0'
5
5
  s.licenses = ['Apache License (2.0)']
6
6
  s.summary = "Reads query results from an Elasticsearch cluster"
7
7
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -103,6 +103,22 @@ describe LogStash::Inputs::Elasticsearch, :ecs_compatibility_support do
103
103
  expect( extract_transport(client).options[:transport_options][:headers] ).to match hash_including("x-elastic-product-origin"=>"logstash-input-elasticsearch")
104
104
  end
105
105
  end
106
+
107
+ context "with custom headers" do
108
+ let(:config) do
109
+ {
110
+ "schedule" => "* * * * * UTC",
111
+ "custom_headers" => { "Custom-Header-1" => "Custom Value 1", "Custom-Header-2" => "Custom Value 2" }
112
+ }
113
+ end
114
+
115
+
116
+ it "sets custom headers" do
117
+ plugin.register
118
+ client = plugin.send(:client)
119
+ expect( extract_transport(client).options[:transport_options][:headers] ).to match hash_including(config["custom_headers"])
120
+ end
121
+ end
106
122
  end
107
123
 
108
124
  context "retry" do
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-input-elasticsearch
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.20.5
4
+ version: 4.21.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-12-05 00:00:00.000000000 Z
11
+ date: 2024-12-18 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement