fluent-plugin-elasticsearch 3.2.4 → 3.3.0
- checksums.yaml +4 -4
- data/History.md +4 -0
- data/README.md +11 -0
- data/fluent-plugin-elasticsearch.gemspec +1 -1
- data/lib/fluent/plugin/out_elasticsearch.rb +19 -11
- data/test/plugin/test_out_elasticsearch.rb +82 -0
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: '029a448c4a3a928294dd5bf42e6cfd307b0ec035f6450255f239357413321247'
+  data.tar.gz: c88e603bce04eb4d1e34e347bd1b97d8a799c16c4149900ecd9e2b7cbc2dbcce
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 964c3453011c608aaaaf78c7d7af899d5a981036f70fb8d3c1dd592b450010a447a29638486732c19af4802aeb9239dc1402652f094cda7c3ac834fb495dc860
+  data.tar.gz: 0615267ec3802f4e7f8693c50628cea67c648da38970dfbc54a01b54099f3446a00f2f924efd7354a954bfc521d5894fcd65fc3ad8aac2f3f4050ce23dbd6dd6
data/History.md
CHANGED
data/README.md
CHANGED
@@ -15,6 +15,7 @@ Current maintainers: @cosmo0920
 * [Usage](#usage)
   + [Index templates](#index-templates)
 * [Configuration](#configuration)
+  + [host](#host)
   + [emit_error_for_missing_id](#emit_error_for_missing_id)
   + [hosts](#hosts)
   + [user, password, path, scheme, ssl_verify](#user-password-path-scheme-ssl_verify)
@@ -128,6 +129,16 @@ This plugin creates Elasticsearch indices by merely writing to them. Consider us
 
 ## Configuration
 
+### host
+
+```
+host user-custom-host.domain # default localhost
+```
+
+You can specify the Elasticsearch host with this parameter.
+
+**Note:** Since v3.3.0, the `host` parameter supports builtin placeholders. If you want to send events dynamically to different hosts at runtime with the `elasticsearch_dynamic` output plugin, please consider switching to the plain `elasticsearch` output plugin. For more detail on builtin placeholders, please refer to the [Placeholders](#placeholders) section.
+
 ### emit_error_for_missing_id
 
 ```
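In Fluentd configuration terms, the `host` placeholder support added above can be used like this. This is a minimal sketch, not taken from the gem: the `app.**` match pattern, the `es-${tag}.example.local` hostname and the memory buffer are illustrative assumptions; the one real requirement (exercised by the new tests further down) is that `tag` be a buffer chunk key so `${tag}` can be resolved per chunk.

```
<match app.**>
  @type elasticsearch
  # hypothetical per-tag hostname; ${tag} is resolved when each buffer chunk is flushed
  host es-${tag}.example.local
  port 9200
  <buffer tag>
    @type memory
  </buffer>
</match>
```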
data/fluent-plugin-elasticsearch.gemspec
CHANGED
@@ -3,7 +3,7 @@ $:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
   s.name = 'fluent-plugin-elasticsearch'
-  s.version = '3.2.4'
+  s.version = '3.3.0'
   s.authors = ['diogo', 'pitr']
   s.email = ['pitr.vern@gmail.com', 'me@diogoterror.com']
   s.description = %q{Elasticsearch output plugin for Fluent event collector}
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED
@@ -348,7 +348,10 @@ EOC
       return Time.at(event_time).to_datetime
     end
 
-    def client
+    def client(host = nil)
+      # check here to see if we already have a client connection for the given host
+      connection_options = get_connection_options(host)
+
       @_es ||= begin
         adapter_conf = lambda {|f| f.adapter @http_backend, @backend_options }
         local_reload_connections = @reload_connections
@@ -356,7 +359,7 @@ EOC
           local_reload_connections = @reload_after
         end
         headers = { 'Content-Type' => @content_type.to_s }.merge(@custom_headers)
-        transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(get_connection_options.merge(
+        transport = Elasticsearch::Transport::Transport::HTTP::Faraday.new(connection_options.merge(
           options: {
             reload_connections: local_reload_connections,
             reload_on_failure: @reload_on_failure,
@@ -390,11 +393,11 @@ EOC
       end
     end
 
-    def get_connection_options
+    def get_connection_options(con_host=nil)
       raise "`password` must be present if `user` is present" if @user && !@password
 
-      hosts = if @hosts
-        @hosts.split(',').map do |host_str|
+      hosts = if con_host || @hosts
+        (con_host || @hosts).split(',').map do |host_str|
           # Support legacy hosts format host:port,host:port,host:port...
           if host_str.match(%r{^[^:]+(\:\d+)?$})
             {
@@ -519,22 +522,27 @@ EOC
 
       tag = chunk.metadata.tag
       extracted_values = expand_placeholders(chunk)
+      host = if @hosts
+               extract_placeholders(@hosts, chunk)
+             else
+               extract_placeholders(@host, chunk)
+             end
 
       chunk.msgpack_each do |time, record|
         next unless record.is_a? Hash
         begin
           meta, header, record = process_message(tag, meta, header, time, record, extracted_values)
           info = if @include_index_in_url
-                   RequestInfo.new(meta.delete("_index".freeze))
+                   RequestInfo.new(host, meta.delete("_index".freeze))
                  else
-                   RequestInfo.new(nil)
+                   RequestInfo.new(host, nil)
                  end
 
           if append_record_to_messages(@write_operation, meta, header, record, bulk_message[info])
             bulk_message_count[info] += 1;
             if bulk_message[info].size > TARGET_BULK_BYTES
               bulk_message.each do |info, msgs|
-                send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info.index) unless msgs.empty?
+                send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
                 msgs.clear
                 # Clear bulk_message_count for this info.
                 bulk_message_count[info] = 0;
@@ -555,7 +563,7 @@ EOC
 
 
       bulk_message.each do |info, msgs|
-        send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info.index) unless msgs.empty?
+        send_bulk(msgs, tag, chunk, bulk_message_count[info], extracted_values, info) unless msgs.empty?
         msgs.clear
       end
     end
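The `write` changes above resolve placeholders for `hosts` as well as for `host`, which the new `test_writes_to_multi_hosts_with_placeholders` test further down exercises. A sketch of the corresponding configuration, reusing the node list from that test (the `app.**` pattern and memory buffer are assumptions; only `${tag}` is resolved at flush time, the IP entries are fixed nodes):

```
<match app.**>
  @type elasticsearch
  # node list borrowed from the new multi-host test; ${tag} is expanded per chunk
  hosts ${tag}:9201,192.168.33.51:9201,192.168.33.52:9201
  <buffer tag>
    @type memory
  </buffer>
</match>
```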
@@ -648,11 +656,11 @@ EOC
 
     # send_bulk given a specific bulk request, the original tag,
     # chunk, and bulk_message_count
-    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, index)
+    def send_bulk(data, tag, chunk, bulk_message_count, extracted_values, info)
       begin
 
         log.on_trace { log.trace "bulk request: #{data}" }
-        response = client.bulk body: data, index: index
+        response = client(info.host).bulk body: data, index: info.index
         log.on_trace { log.trace "bulk response: #{response}" }
 
         if response['errors']
data/test/plugin/test_out_elasticsearch.rb
CHANGED
@@ -1462,6 +1462,88 @@ class ElasticsearchOutput < Test::Unit::TestCase
     end
   end
 
+  class HostnamePlaceholders < self
+    def test_writes_to_extracted_host
+      driver.configure("host ${tag}\n")
+      time = Time.parse Date.today.iso8601
+      elastic_request = stub_elastic("http://extracted-host:9200/_bulk")
+      driver.run(default_tag: 'extracted-host') do
+        driver.feed(time.to_i, sample_record)
+      end
+      assert_requested(elastic_request)
+    end
+
+    def test_writes_to_multi_hosts_with_placeholders
+      hosts = [['${tag}', 9201], ['192.168.33.51', 9201], ['192.168.33.52', 9201]]
+      hosts_string = hosts.map {|x| "#{x[0]}:#{x[1]}"}.compact.join(',')
+
+      driver.configure("hosts #{hosts_string}")
+
+      hosts.each do |host_info|
+        host, port = host_info
+        host = "extracted-host" if host == '${tag}'
+        stub_elastic_with_store_index_command_counts("http://#{host}:#{port}/_bulk")
+      end
+
+      driver.run(default_tag: 'extracted-host') do
+        1000.times do
+          driver.feed(sample_record.merge('age'=>rand(100)))
+        end
+      end
+
+      # @note: we cannot make multi chunks with options (flush_interval, buffer_chunk_limit)
+      # it's Fluentd test driver's constraint
+      # so @index_command_counts.size is always 1
+
+      assert(@index_command_counts.size > 0, "not working with hosts options")
+
+      total = 0
+      @index_command_counts.each do |url, count|
+        total += count
+      end
+      assert_equal(2000, total)
+    end
+
+    def test_writes_to_extracted_host_with_time_placeholder
+      driver.configure(Fluent::Config::Element.new(
+        'ROOT', '', {
+          '@type' => 'elasticsearch',
+          'host' => 'host-%Y%m%d',
+        }, [
+          Fluent::Config::Element.new('buffer', 'tag,time', {
+            'chunk_keys' => ['tag', 'time'],
+            'timekey' => 3600,
+          }, [])
+        ]
+      ))
+      stub_elastic
+      time = Time.parse Date.today.iso8601
+      elastic_request = stub_elastic("http://host-#{time.utc.strftime('%Y%m%d')}:9200/_bulk")
+      driver.run(default_tag: 'test') do
+        driver.feed(time.to_i, sample_record)
+      end
+      assert_requested(elastic_request)
+    end
+
+    def test_writes_to_extracted_host_with_custom_key_placeholder
+      driver.configure(Fluent::Config::Element.new(
+        'ROOT', '', {
+          '@type' => 'elasticsearch',
+          'host' => 'myhost-${pipeline_id}',
+        }, [
+          Fluent::Config::Element.new('buffer', 'tag,pipeline_id', {}, [])
+        ]
+      ))
+      time = Time.parse Date.today.iso8601
+      pipeline_id = "5"
+      elastic_request = stub_elastic("http://myhost-5:9200/_bulk")
+      driver.run(default_tag: 'test') do
+        driver.feed(time.to_i, sample_record.merge({"pipeline_id" => pipeline_id}))
+      end
+      assert_requested(elastic_request)
+    end
+  end
+
   def test_writes_to_logstash_index_with_specified_prefix_uppercase
     driver.configure("logstash_format true
                       logstash_prefix MyPrefix")
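For reference, the custom-key placeholder test above corresponds to a configuration along these lines (a sketch; the `app.**` pattern and memory buffer are assumptions, and `${pipeline_id}` only resolves because `pipeline_id` is listed as a buffer chunk key). The time-placeholder test is the same idea with `host host-%Y%m%d` and a `time` chunk key plus `timekey`.

```
<match app.**>
  @type elasticsearch
  # record-key placeholder, as in test_writes_to_extracted_host_with_custom_key_placeholder
  host myhost-${pipeline_id}
  <buffer tag,pipeline_id>
    @type memory
  </buffer>
</match>
```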
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 3.2.4
+  version: 3.3.0
 platform: ruby
 authors:
 - diogo
@@ -9,7 +9,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2019-
+date: 2019-03-01 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd