fluent-plugin-elasticsearch 0.2.0 → 0.3.0
- checksums.yaml +7 -0
- data/.travis.yml +8 -0
- data/History.md +13 -14
- data/README.md +21 -2
- data/fluent-plugin-elasticsearch.gemspec +12 -10
- data/lib/fluent/plugin/out_elasticsearch.rb +37 -9
- data/test/plugin/test_out_elasticsearch.rb +108 -11
- metadata +43 -28
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: c80790f854f11ab486602bfec4e4f96ab56f2f56
+  data.tar.gz: 06163c0e34e87393e84f9a8a04d906dff2fc985b
+SHA512:
+  metadata.gz: 9344c56bfe39072a37402e7e2343a7ec28a9d3417cb14f899bc57744bf91e224d98d5fbf3df048b10d37b36c7bf3edb77812ecd10407c76dad514d0174c98828
+  data.tar.gz: 102828a3a5df25a3c5dcf8462c9a43c5afd802bbe974f2497b5d2c1e9c53f6cf8462165b1a3bad895ed34915e4fdce286cc6976f980085cfe41411fdf25dd2f1
data/.travis.yml
ADDED
data/History.md
CHANGED
@@ -1,34 +1,33 @@
-Changelog
-=========
+## Changelog
 
-0.2.0
-=====
+### 0.3.0
+
+- add `parent_key` option (#28)
+- have travis-ci build on multiple rubies (#30)
+- add `utc_index` and `hosts` options, switch to using `elasticsearch` gem (#26, #29)
+
+### 0.2.0
 
 - fix encoding issues with JSON conversion and again when sending to elasticsearch (#19, #21)
 - add logstash_dateformat option (#20)
 
-0.1.4
-=====
+### 0.1.4
 
 - add logstash_prefix option
 
-0.1.3
-=====
+### 0.1.3
 
 - raising an exception on non-success response from elasticsearch
 
-0.1.2
-=====
+### 0.1.2
 
 - add id_key option
 
-0.1.1
-=====
+### 0.1.1
 
 - fix timezone in logstash key
 
 
-0.1.0
-=====
+### 0.1.0
 
 - Initial gem release.
data/README.md
CHANGED
@@ -1,4 +1,4 @@
-# Fluent::Plugin::Elasticsearch
+# Fluent::Plugin::Elasticsearch, a plugin for [Fluentd](http://fluentd.org)
 
 [![Gem Version](https://badge.fury.io/rb/fluent-plugin-elasticsearch.png)](http://badge.fury.io/rb/fluent-plugin-elasticsearch)
 [![Dependency Status](https://gemnasium.com/uken/guard-sidekiq.png)](https://gemnasium.com/uken/fluent-plugin-elasticsearch)
@@ -12,6 +12,8 @@ I wrote this so you can search logs routed through Fluentd.
 
     $ gem install fluent-plugin-elasticsearch
 
+* prerequisite : You need to install [libcurl](http://curl.haxx.se/libcurl/) to work with.
+
 ## Usage
 
 In your fluentd configration, use `type elasticsearch`. Additional configuration is optional, default values would look like this:
@@ -23,9 +25,18 @@ index_name fluentd
 type_name fluentd
 ```
 
-
 **More options:**
 
+```
+hosts host1:port1,host2:port2,host3:port3
+```
+
+You can specify multiple elasticsearch hosts with separator ",".
+
+If you specify multiple hosts, plugin writes to elasticsearch with load balanced. (it's elasticsearch-ruby's feature, default is round-robin.)
+
+If you specify this option, host and port options are ignored.
+
 ```
 logstash_format true # defaults to false
 ```
@@ -44,6 +55,12 @@ logstash_dateformat %Y.%m. # defaults to "%Y.%m.%d"
 
 By default, the records inserted into index `logstash-YYMMDD`. This option allows to insert into specified index like `logstash-YYYYMM` for a monthly index.
 
+```
+utc_index true
+```
+
+By default, the records inserted into index `logstash-YYMMDD` with utc (Coordinated Universal Time). This option allows to use local time if you describe utc_index to false.
+
 ---
 
 ```
@@ -117,3 +134,5 @@ Please consider using [fluent-plugin-forest](https://github.com/tagomoris/fluent
 3. Commit your changes (`git commit -am 'Add some feature'`)
 4. Push to the branch (`git push origin my-new-feature`)
 5. Create new Pull Request
+
+If you have a question, [open an Issue](https://github.com/uken/fluent-plugin-elasticsearch/issues).
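The `hosts` and `utc_index` options documented above are implemented in `out_elasticsearch.rb` later in this diff. As a rough sketch of the behaviour the README describes (host names, ports, and the date are illustrative, not taken from the gem):

```
# Sketch only: mirrors the get_hosts and target_index logic added to
# out_elasticsearch.rb in this release. Hosts and times are illustrative.
hosts = "host1:9200, host2:9200, host3:9200"

# The hosts string is split on ",", whitespace is stripped, and the list is
# handed to the elasticsearch-ruby client, which load-balances across the
# entries (round-robin by default).
host_list = hosts.split(',').map { |x| x.strip }.compact
# => ["host1:9200", "host2:9200", "host3:9200"]

# With logstash_format enabled, the index is "<logstash_prefix>-<date>";
# utc_index (default true) formats the date in UTC, otherwise in local time.
time      = Time.now
utc_index = true
target_index = if utc_index
                 "logstash-#{time.getutc.strftime('%Y.%m.%d')}"
               else
                 "logstash-#{time.strftime('%Y.%m.%d')}"
               end
# => e.g. "logstash-2014.03.17"
```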
data/fluent-plugin-elasticsearch.gemspec
CHANGED
@@ -1,23 +1,25 @@
 # -*- encoding: utf-8 -*-
-$:.push File.expand_path(
+$:.push File.expand_path('../lib', __FILE__)
 
 Gem::Specification.new do |s|
-  s.name =
-  s.version = '0.2.0'
-  s.authors = [
-  s.email = [
+  s.name = 'fluent-plugin-elasticsearch'
+  s.version = '0.3.0'
+  s.authors = ['diogo', 'pitr']
+  s.email = ['team@uken.com']
   s.description = %q{ElasticSearch output plugin for Fluent event collector}
   s.summary = s.description
-  s.homepage =
+  s.homepage = 'https://github.com/uken/fluent-plugin-elasticsearch'
   s.license = 'MIT'
 
   s.files = `git ls-files`.split($/)
   s.executables = s.files.grep(%r{^bin/}).map{ |f| File.basename(f) }
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
-  s.require_paths = [
+  s.require_paths = ['lib']
 
-  s.add_runtime_dependency
+  s.add_runtime_dependency 'fluentd', '~> 0'
+  s.add_runtime_dependency 'patron', '~> 0'
+  s.add_runtime_dependency 'elasticsearch', '~> 0'
 
-  s.add_development_dependency
-  s.add_development_dependency
+  s.add_development_dependency 'rake', '~> 0'
+  s.add_development_dependency 'webmock', '~> 0'
 end
data/lib/fluent/plugin/out_elasticsearch.rb
CHANGED
@@ -1,6 +1,7 @@
 # encoding: UTF-8
-require 'net/http'
 require 'date'
+require 'patron'
+require 'elasticsearch'
 
 class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
   Fluent::Plugin.register_output('elasticsearch', self)
@@ -10,9 +11,12 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
   config_param :logstash_format, :bool, :default => false
   config_param :logstash_prefix, :string, :default => "logstash"
   config_param :logstash_dateformat, :string, :default => "%Y.%m.%d"
+  config_param :utc_index, :bool, :default => true
   config_param :type_name, :string, :default => "fluentd"
   config_param :index_name, :string, :default => "fluentd"
   config_param :id_key, :string, :default => nil
+  config_param :parent_key, :string, :default => nil
+  config_param :hosts, :string, :default => nil
 
   include Fluent::SetTagKeyMixin
   config_set_default :include_tag_key, false
@@ -29,6 +33,20 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
     super
   end
 
+  def client
+    @_es ||= Elasticsearch::Client.new :hosts => get_hosts, :reload_connections => true, :adapter => :patron, :retry_on_failure => 5
+    raise "Can not reach Elasticsearch cluster (#{@host}:#{@port})!" unless @_es.ping
+    @_es
+  end
+
+  def get_hosts
+    if @hosts
+      @hosts.split(',').map {|x| x.strip}.compact
+    else
+      ["#{@host}:#{@port}"]
+    end
+  end
+
   def format(tag, time, record)
     [tag, time, record].to_msgpack
   end
@@ -43,7 +61,11 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
     chunk.msgpack_each do |tag, time, record|
       if @logstash_format
         record.merge!({"@timestamp" => Time.at(time).to_datetime.to_s})
-
+        if @utc_index
+          target_index = "#{@logstash_prefix}-#{Time.at(time).getutc.strftime("#{@logstash_dateformat}")}"
+        else
+          target_index = "#{@logstash_prefix}-#{Time.at(time).strftime("#{@logstash_dateformat}")}"
+        end
       else
         target_index = @index_name
       end
@@ -56,14 +78,20 @@ class Fluent::ElasticsearchOutput < Fluent::BufferedOutput
       if @id_key && record[@id_key]
         meta['index']['_id'] = record[@id_key]
       end
-
-
+
+      if @parent_key && record[@parent_key]
+        meta['index']['_parent'] = record[@parent_key]
+      end
+
+      bulk_message << meta
+      bulk_message << record
     end
-    bulk_message << ""
 
-
-
-
-
+    send(bulk_message) unless bulk_message.empty?
+    bulk_message.clear
+  end
+
+  def send(data)
+    client.bulk body: data
   end
 end
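For reference, the rewritten `write` path above accumulates `bulk_message` as alternating metadata and document hashes and hands the whole array to the `elasticsearch` gem's bulk API via `send(data)`. A minimal sketch of that shape for one record; the field values are illustrative, and the `_index`/`_type` values are assumed to come from the unchanged `index_name`/`type_name` handling:

```
# Sketch only: the shape of the array passed to client.bulk. Values are
# illustrative, not taken from a real event stream.
record = { 'age' => 26, 'request_id' => '42', 'parent_id' => 'parent' }

meta = { 'index' => { '_index' => 'fluentd', '_type' => 'fluentd' } }
meta['index']['_id']     = record['request_id']  # only when id_key matches a field
meta['index']['_parent'] = record['parent_id']   # only when parent_key matches a field

bulk_message = []
bulk_message << meta    # action/metadata entry
bulk_message << record  # source document entry

# client.bulk body: bulk_message
# elasticsearch-ruby serializes each element as one line of the _bulk request.
```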
data/test/plugin/test_out_elasticsearch.rb
CHANGED
@@ -14,7 +14,7 @@ $:.push File.dirname(__FILE__)
 WebMock.disable_net_connect!
 
 class ElasticsearchOutput < Test::Unit::TestCase
-  attr_accessor :index_cmds, :
+  attr_accessor :index_cmds, :index_command_counts
 
   def setup
     Fluent::Test.setup
@@ -26,12 +26,15 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def sample_record
-    {'age' => 26, 'request_id' => '42'}
+    {'age' => 26, 'request_id' => '42', 'parent_id' => 'parent'}
+  end
+
+  def stub_elastic_ping(url="http://localhost:9200")
+    stub_request(:head, url).with.to_return(:status => 200, :body => "", :headers => {})
   end
 
   def stub_elastic(url="http://localhost:9200/_bulk")
     stub_request(:post, url).with do |req|
-      @content_type = req.headers["Content-Type"]
       @index_cmds = req.body.split("\n").map {|r| JSON.parse(r) }
     end
   end
@@ -40,21 +43,28 @@ class ElasticsearchOutput < Test::Unit::TestCase
     stub_request(:post, url).to_return(:status => [503, "Service Unavailable"])
   end
 
-  def
-
-
-
-
+  def stub_elastic_with_store_index_command_counts(url="http://localhost:9200/_bulk")
+    if @index_command_counts == nil
+      @index_command_counts = {}
+      @index_command_counts.default = 0
+    end
+
+    stub_request(:post, url).with do |req|
+      index_cmds = req.body.split("\n").map {|r| JSON.parse(r) }
+      @index_command_counts[url] += index_cmds.size
+    end
   end
 
-  def
+  def test_writes_to_default_index
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
-    assert_equal(
+    assert_equal('fluentd', index_cmds.first['index']['_index'])
   end
 
   def test_writes_to_default_type
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -63,6 +73,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_writes_to_speficied_index
     driver.configure("index_name myindex\n")
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -71,6 +82,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_writes_to_speficied_type
     driver.configure("type_name mytype\n")
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -79,6 +91,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_writes_to_speficied_host
     driver.configure("host 192.168.33.50\n")
+    stub_elastic_ping("http://192.168.33.50:9200")
     elastic_request = stub_elastic("http://192.168.33.50:9200/_bulk")
     driver.emit(sample_record)
     driver.run
@@ -87,13 +100,46 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_writes_to_speficied_port
     driver.configure("port 9201\n")
+    stub_elastic_ping("http://localhost:9201")
     elastic_request = stub_elastic("http://localhost:9201/_bulk")
     driver.emit(sample_record)
     driver.run
     assert_requested(elastic_request)
   end
 
+  def test_writes_to_multi_hosts
+    hosts = [['192.168.33.50', 9201], ['192.168.33.51', 9201], ['192.168.33.52', 9201]]
+    hosts_string = hosts.map {|x| "#{x[0]}:#{x[1]}"}.compact.join(',')
+
+    driver.configure("hosts #{hosts_string}")
+
+    hosts.each do |host_info|
+      host, port = host_info
+      stub_elastic_ping("http://#{host}:#{port}")
+      stub_elastic_with_store_index_command_counts("http://#{host}:#{port}/_bulk")
+    end
+
+    1000.times do
+      driver.emit(sample_record.merge('age'=>rand(100)))
+    end
+
+    driver.run
+
+    # @note: we cannot make multi chunks with options (flush_interval, buffer_chunk_limit)
+    # it's Fluentd test driver's constraint
+    # so @index_command_counts.size is always 1
+
+    assert(@index_command_counts.size > 0, "not working with hosts options")
+
+    total = 0
+    @index_command_counts.each do |url, count|
+      total += count
+    end
+    assert_equal(2000, total)
+  end
+
   def test_makes_bulk_request
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.emit(sample_record.merge('age' => 27))
@@ -102,6 +148,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def test_all_records_are_preserved_in_bulk
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.emit(sample_record.merge('age' => 27))
@@ -114,17 +161,31 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.configure("logstash_format true\n")
     time = Time.parse Date.today.to_s
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m.%d")}"
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record, time)
    driver.run
     assert_equal(logstash_index, index_cmds.first['index']['_index'])
   end
 
+  def test_writes_to_logstash_utc_index
+    driver.configure("logstash_format true\n")
+    driver.configure("utc_index false\n")
+    time = Time.parse Date.today.to_s
+    utc_index = "logstash-#{time.strftime("%Y.%m.%d")}"
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record, time)
+    driver.run
+    assert_equal(utc_index, index_cmds.first['index']['_index'])
+  end
+
   def test_writes_to_logstash_index_with_specified_prefix
     driver.configure("logstash_format true\n")
     driver.configure("logstash_prefix myprefix\n")
     time = Time.parse Date.today.to_s
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m.%d")}"
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record, time)
     driver.run
@@ -136,6 +197,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.configure("logstash_dateformat %Y.%m\n")
     time = Time.parse Date.today.to_s
     logstash_index = "logstash-#{time.getutc.strftime("%Y.%m")}"
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record, time)
     driver.run
@@ -148,6 +210,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
     driver.configure("logstash_dateformat %Y.%m\n")
     time = Time.parse Date.today.to_s
     logstash_index = "myprefix-#{time.getutc.strftime("%Y.%m")}"
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record, time)
     driver.run
@@ -155,6 +218,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def test_doesnt_add_logstash_timestamp_by_default
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -163,6 +227,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_adds_logstash_timestamp_when_configured
     driver.configure("logstash_format true\n")
+    stub_elastic_ping
     stub_elastic
     ts = DateTime.now.to_s
     driver.emit(sample_record)
@@ -172,6 +237,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def test_doesnt_add_tag_key_by_default
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -180,6 +246,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_adds_tag_key_when_configured
     driver('mytag').configure("include_tag_key true\n")
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -189,6 +256,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_adds_id_key_when_configured
     driver.configure("id_key request_id\n")
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -197,6 +265,7 @@ class ElasticsearchOutput < Test::Unit::TestCase
 
   def test_doesnt_add_id_key_if_missing_when_configured
     driver.configure("id_key another_request_id\n")
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
@@ -204,16 +273,44 @@ class ElasticsearchOutput < Test::Unit::TestCase
   end
 
   def test_adds_id_key_when_not_configured
+    stub_elastic_ping
     stub_elastic
     driver.emit(sample_record)
     driver.run
     assert(!index_cmds[0]['index'].has_key?('_id'))
   end
 
+  def test_adds_parent_key_when_configured
+    driver.configure("parent_key parent_id\n")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert_equal(index_cmds[0]['index']['_parent'], 'parent')
+  end
+
+  def test_doesnt_add_parent_key_if_missing_when_configured
+    driver.configure("parent_key another_parent_id\n")
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert(!index_cmds[0]['index'].has_key?('_parent'))
+  end
+
+  def test_adds_parent_key_when_not_configured
+    stub_elastic_ping
+    stub_elastic
+    driver.emit(sample_record)
+    driver.run
+    assert(!index_cmds[0]['index'].has_key?('_parent'))
+  end
+
   def test_request_error
+    stub_elastic_ping
     stub_elastic_unavailable
     driver.emit(sample_record)
-    assert_raise(
+    assert_raise(Elasticsearch::Transport::Transport::Errors::ServiceUnavailable) {
       driver.run
     }
   end
metadata
CHANGED
@@ -1,8 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: fluent-plugin-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 0.2.0
-  prerelease:
+  version: 0.3.0
 platform: ruby
 authors:
 - diogo
@@ -10,54 +9,76 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2014-03-17 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: fluentd
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: patron
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: elasticsearch
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: rake
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
 - !ruby/object:Gem::Dependency
   name: webmock
   requirement: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
-    none: false
     requirements:
-    - -
+    - - "~>"
       - !ruby/object:Gem::Version
         version: '0'
 description: ElasticSearch output plugin for Fluent event collector
@@ -67,7 +88,8 @@ executables: []
 extensions: []
 extra_rdoc_files: []
 files:
-- .gitignore
+- ".gitignore"
+- ".travis.yml"
 - Gemfile
 - History.md
 - LICENSE.txt
@@ -80,33 +102,26 @@ files:
 homepage: https://github.com/uken/fluent-plugin-elasticsearch
 licenses:
 - MIT
+metadata: {}
 post_install_message:
 rdoc_options: []
 require_paths:
 - lib
 required_ruby_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - ">="
     - !ruby/object:Gem::Version
       version: '0'
-  segments:
-  - 0
-  hash: -1091394761449257025
 required_rubygems_version: !ruby/object:Gem::Requirement
-  none: false
   requirements:
-  - -
+  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
-  segments:
-  - 0
-  hash: -1091394761449257025
 requirements: []
 rubyforge_project:
-rubygems_version:
+rubygems_version: 2.2.0
 signing_key:
-specification_version:
+specification_version: 4
 summary: ElasticSearch output plugin for Fluent event collector
 test_files:
 - test/helper.rb