logstash-input-elasticsearch 1.0.1 → 1.0.2
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/lib/logstash/inputs/elasticsearch.rb +36 -29
- data/logstash-input-elasticsearch.gemspec +1 -1
- data/spec/inputs/elasticsearch_spec.rb +22 -22
- metadata +21 -21
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 648519ea6b29fdcb5b95a9939080095fe7bf129f
+  data.tar.gz: 8e5c3a0d33d74007942e96806032fc08f7569ebf
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 40e81ca75e061380601bae58d415fa136ee59cce03de7c6dafc59402b117058659ac53d6de4e83d6f0d28acb030cc86cc3be7c27f115831bc59e8218f7393551
+  data.tar.gz: 56fa3228f843bdc67d2a4227e19989d8507f259c8ee8f9f589096923496860f22be0c6a15e8453ec1ee0aa27a08dbd45ef460d6cb56c25033fcd5af13316a7d7
data/lib/logstash/inputs/elasticsearch.rb
CHANGED
@@ -141,32 +141,6 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
     @client = Elasticsearch::Client.new(:hosts => hosts, :transport_options => transport_options)
   end
 
-  private
-  def run_next(output_queue, scroll_id)
-    r = scroll_request(scroll_id)
-    r['hits']['hits'].each do |hit|
-      event = LogStash::Event.new(hit['_source'])
-      decorate(event)
-
-      if @docinfo
-        event[@docinfo_target] ||= {}
-
-        unless event[@docinfo_target].is_a?(Hash)
-          @logger.error("Elasticsearch Input: Incompatible Event, incompatible type for the `@metadata` field in the `_source` document, expected a hash got:", :metadata_type => event[@docinfo_target].class)
-
-          raise Exception.new("Elasticsearch input: incompatible event")
-        end
-
-        @docinfo_fields.each do |field|
-          event[@docinfo_target][field] = hit[field]
-        end
-      end
-      output_queue << event
-    end
-
-    {:has_hits => r['hits']['hits'].any?, :scroll_id => r['_scroll_id']}
-  end
-
   public
   def run(output_queue)
 
@@ -175,14 +149,47 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
 
     # since 'scan' doesn't return data on the search call, do an extra scroll
     if @scan
-
+      r = process_next_scroll(output_queue, r['_scroll_id'])
+      has_hits = r['has_hits']
+    else # not a scan, process the response
+      r['hits']['hits'].each { |hit| push_hit(hit, output_queue) }
+      has_hits = r['hits']['hits'].any?
     end
 
-    while
-
+    while has_hits do
+      r = process_next_scroll(output_queue, r['_scroll_id'])
+      has_hits = r['has_hits']
     end
   end # def run
 
+  private
+  def process_next_scroll(output_queue, scroll_id)
+    r = scroll_request(scroll_id)
+    r['hits']['hits'].each { |hit| push_hit(hit, output_queue) }
+    {'has_hits' => r['hits']['hits'].any?, '_scroll_id' => r['_scroll_id']}
+  end
+
+  private
+  def push_hit(hit, output_queue)
+    event = LogStash::Event.new(hit['_source'])
+    decorate(event)
+
+    if @docinfo
+      event[@docinfo_target] ||= {}
+
+      unless event[@docinfo_target].is_a?(Hash)
+        @logger.error("Elasticsearch Input: Incompatible Event, incompatible type for the `@metadata` field in the `_source` document, expected a hash got:", :metadata_type => event[@docinfo_target].class)
+
+        raise Exception.new("Elasticsearch input: incompatible event")
+      end
+
+      @docinfo_fields.each do |field|
+        event[@docinfo_target][field] = hit[field]
+      end
+    end
+    output_queue << event
+  end
+
   private
   def scroll_request scroll_id
     @client.scroll(:body => scroll_id, :scroll => @scroll)
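Read together, the two hunks above split the removed `run_next` method into two smaller private helpers: `process_next_scroll` issues a single scroll request and pushes its hits, while `push_hit` wraps one hit in a `LogStash::Event`, copies the `@docinfo_fields` into `@docinfo_target` when `docinfo` is enabled, and pushes the event onto the output queue. `run` now handles the non-scan case explicitly and keeps scrolling while `has_hits` stays true. The snippet below is not the plugin's code; it is a minimal sketch of the same scan-then-scroll loop written directly against the `elasticsearch` Ruby client, with an assumed host, index, page size, and scroll timeout.

# Minimal sketch of the scan + scroll flow the refactored run method implements.
# Host, index, size, and scroll timeout are assumptions, not values from the diff.
require "elasticsearch"

client = Elasticsearch::Client.new(:hosts => ["localhost:9200"])

# With :search_type => "scan" the initial response carries only a _scroll_id and
# no hits, which is why the plugin performs one extra scroll before looping.
r = client.search(:index => "logstash-*",
                  :scroll => "1m",
                  :search_type => "scan",
                  :size => 100,
                  :body => { :query => { :match_all => {} } })

loop do
  r = client.scroll(:body => r["_scroll_id"], :scroll => "1m")
  hits = r["hits"]["hits"]
  break if hits.empty?
  hits.each { |hit| puts hit["_source"] } # the plugin builds a LogStash::Event per hit here
end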
data/logstash-input-elasticsearch.gemspec
CHANGED
@@ -1,7 +1,7 @@
 Gem::Specification.new do |s|
 
   s.name = 'logstash-input-elasticsearch'
-  s.version = '1.0.1'
+  s.version = '1.0.2'
   s.licenses = ['Apache License (2.0)']
   s.summary = "Read from an Elasticsearch cluster, based on search query results"
   s.description = "This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program"
data/spec/inputs/elasticsearch_spec.rb
CHANGED
@@ -3,7 +3,7 @@ require "logstash/devutils/rspec/spec_helper"
 require "logstash/inputs/elasticsearch"
 require "elasticsearch"
 
-describe
+describe LogStash::Inputs::Elasticsearch do
   it "should retrieve json event from elasticseach" do
     config = %q[
       input {
@@ -47,7 +47,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
     expect(client).to receive(:search).with(any_args).and_return(response)
     expect(client).to receive(:scroll).with({ :body => "cXVlcnlUaGVuRmV0Y2g", :scroll=> "1m" }).and_return(scroll_reponse)
 
-    event = fetch_event(config)
+    event = input(config) do |pipeline, queue|
+      queue.pop
+    end
 
     insist { event }.is_a?(LogStash::Event)
     insist { event["message"] } == [ "ohayo" ]
@@ -102,7 +104,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
     expect(client).to receive(:scroll).with({ :body => "DcrY3G1xff6SB", :scroll => "1m" }).and_return(scroll_responses.first)
     expect(client).to receive(:scroll).with({ :body=> "cXVlcnlUaGVuRmV0Y2g", :scroll => "1m" }).and_return(scroll_responses.last)
 
-    event = fetch_event(config)
+    event = input(config) do |pipeline, queue|
+      queue.pop
+    end
 
     insist { event }.is_a?(LogStash::Event)
     insist { event["message"] } == [ "ohayo" ]
@@ -181,7 +185,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
       }
     ]
 
-    event = fetch_event(config_metadata_with_hash)
+    event = input(config_metadata_with_hash) do |pipeline, queue|
+      queue.pop
+    end
 
     expect(event[metadata_field]["_index"]).to eq('logstash-2014.10.12')
     expect(event[metadata_field]["_type"]).to eq('logs')
@@ -214,7 +220,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
   end
 
   it "should move the document info to the @metadata field" do
-    event = fetch_event(config_metadata)
+    event = input(config_metadata) do |pipeline, queue|
+      queue.pop
+    end
 
     expect(event["[@metadata][_index]"]).to eq('logstash-2014.10.12')
     expect(event["[@metadata][_type]"]).to eq('logs')
@@ -233,7 +241,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
        }
      }
    ]
-    event = fetch_event(config)
+    event = input(config) do |pipeline, queue|
+      queue.pop
+    end
 
     expect(event["[meta][_index]"]).to eq('logstash-2014.10.12')
     expect(event["[meta][_type]"]).to eq('logs')
@@ -253,7 +263,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
       }
     }]
 
-    event = fetch_event(config)
+    event = input(config) do |pipeline, queue|
+      queue.pop
+    end
 
     expect(event["@metadata"].keys).to eq(fields)
     expect(event["[@metadata][_type]"]).to eq(nil)
@@ -273,7 +285,9 @@ describe "inputs/elasticsearch", :elasticsearch => true do
        }
      }
    ]
-    event = fetch_event(config)
+    event = input(config) do |pipeline, queue|
+      queue.pop
+    end
 
     expect(event["[@metadata][_index]"]).to eq(nil)
     expect(event["[@metadata][_type]"]).to eq(nil)
@@ -282,17 +296,3 @@ describe "inputs/elasticsearch", :elasticsearch => true do
     end
   end
 end
-
-def fetch_event(config)
-  pipeline = LogStash::Pipeline.new(config)
-  queue = Queue.new
-  pipeline.instance_eval do
-    @output_func = lambda { |event| queue << event }
-  end
-  pipeline_thread = Thread.new { pipeline.run }
-  event = queue.pop
-
-  pipeline_thread.join
-
-  return event
-end
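The spec hunks above drop the hand-rolled `fetch_event` helper, which built a `LogStash::Pipeline` manually, swapped in a queue-backed `@output_func`, and joined the pipeline thread. Each example now uses the `input` block helper that comes with `logstash/devutils/rspec/spec_helper` (already required at the top of the file): it runs the plugin inside a pipeline, yields the pipeline and the queue the input writes to, and returns the block's value. A minimal sketch of the pattern follows; the config string is hypothetical, and in a real example the Elasticsearch client calls would be stubbed as in the hunks above.

# Sketch of the spec pattern introduced above; the config below is a made-up example.
require "logstash/devutils/rspec/spec_helper"

config = %q[
  input {
    elasticsearch {
      hosts => ["localhost"]
      query => '{ "query": { "match_all": {} } }'
    }
  }
]

# `input` (from logstash-devutils) runs the config in a pipeline and yields the
# pipeline plus its output queue; whatever the block returns is handed back --
# here, the first event the plugin produced.
event = input(config) do |pipeline, queue|
  queue.pop
end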
metadata
CHANGED
@@ -1,18 +1,17 @@
 --- !ruby/object:Gem::Specification
 name: logstash-input-elasticsearch
 version: !ruby/object:Gem::Version
-  version: 1.0.1
+  version: 1.0.2
 platform: ruby
 authors:
 - Elastic
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2015-
+date: 2015-09-03 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
-  name: logstash-core
-  version_requirements: !ruby/object:Gem::Requirement
+  requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
@@ -20,7 +19,10 @@ dependencies:
     - - <
       - !ruby/object:Gem::Version
         version: 2.0.0
-  requirement: !ruby/object:Gem::Requirement
+  name: logstash-core
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
@@ -28,11 +30,8 @@ dependencies:
     - - <
       - !ruby/object:Gem::Version
         version: 2.0.0
-  prerelease: false
-  type: :runtime
 - !ruby/object:Gem::Dependency
-  name: elasticsearch
-  version_requirements: !ruby/object:Gem::Requirement
+  requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
@@ -40,7 +39,10 @@ dependencies:
     - - ~>
       - !ruby/object:Gem::Version
        version: '1.0'
-  requirement: !ruby/object:Gem::Requirement
+  name: elasticsearch
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
@@ -48,36 +50,34 @@ dependencies:
     - - ~>
       - !ruby/object:Gem::Version
        version: '1.0'
-  prerelease: false
-  type: :runtime
 - !ruby/object:Gem::Dependency
-  name: logstash-codec-json
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - '>='
-      - !ruby/object:Gem::Version
-        version: '0'
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
       - !ruby/object:Gem::Version
         version: '0'
+  name: logstash-codec-json
   prerelease: false
   type: :runtime
-- !ruby/object:Gem::Dependency
-  name: logstash-devutils
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
      - !ruby/object:Gem::Version
        version: '0'
+- !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - '>='
      - !ruby/object:Gem::Version
        version: '0'
+  name: logstash-devutils
   prerelease: false
   type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '>='
+      - !ruby/object:Gem::Version
+        version: '0'
 description: This gem is a logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/plugin install gemname. This gem is not a stand-alone program
 email: info@elastic.co
 executables: []
@@ -117,7 +117,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
     version: '0'
 requirements: []
 rubyforge_project:
-rubygems_version: 2.
+rubygems_version: 2.4.5
 signing_key:
 specification_version: 4
 summary: Read from an Elasticsearch cluster, based on search query results
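The dependency hunks above only reorder how RubyGems serializes each `Gem::Dependency` (`requirement`, `name`, `prerelease`, `type`, `version_requirements`); the constraints themselves, such as `< 2.0.0` on logstash-core and `~> 1.0` on the elasticsearch client, are unchanged between 1.0.1 and 1.0.2. As a small illustration of what those serialized constraints mean, using the standard `Gem::Requirement` API (the version numbers checked here are arbitrary examples):

# Illustration only: how the constraints recorded in the metadata behave.
require "rubygems"

logstash_core_req = Gem::Requirement.new("< 2.0.0")
elasticsearch_req = Gem::Requirement.new("~> 1.0")

puts logstash_core_req.satisfied_by?(Gem::Version.new("1.5.4"))  # => true
puts logstash_core_req.satisfied_by?(Gem::Version.new("2.0.0"))  # => false
puts elasticsearch_req.satisfied_by?(Gem::Version.new("1.0.15")) # => true
puts elasticsearch_req.satisfied_by?(Gem::Version.new("2.0.0"))  # => false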