logstash-input-elasticsearch 3.0.2 → 4.0.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: f5b8d06824d9ce5dbbee6d8d903776a936cd868a
- data.tar.gz: eb4cae9d990982b54d9892942c5c39fa14e543c1
+ metadata.gz: 99cc10fa5daee63fcb318c325f8edaa86c443f22
+ data.tar.gz: 0dd1acac2ff9314ab1decf1b57782c1ad37c390e
  SHA512:
- metadata.gz: 3d49cd543e144854ee7cede1aa77fd0010c243b7a06f70911beebfb4f61d210b20486446adbd1c7e165a5caf08ac221810ce92dbb4de53c02e573c07a814af1a
- data.tar.gz: df04ca58458f17d663913804b2aa4de4656b055943471ba8cf6398f297fba39bd36bff30164f2a5d10e1439af66a5bc8da597cbebb91892c2206ee903944904e
+ metadata.gz: c2b22763cfc5521cbbff86c11d7c01030f4e9d88df57d7d6c02813eaf97524e34e641e2ee109c6d2aa14f270e04a98c2c4d1818d8880fd9d779be6fd982a6146
+ data.tar.gz: c89c0da4e80ef31a41d1ac15c13fe34ba5d4eb3ed0ec6cff356b23fd29aeec7b61d65f6d7562bcc663eb05f67a3ca2ccbb4888772715c1fa31467d0eb83bf820
CHANGELOG.md CHANGED
@@ -1,3 +1,11 @@
+ ## 4.0.0
+ - Remove `scan` from list of options as this is no longer allowed in
+   Elasticsearch 5.0.
+ - Change default query to sort by \_doc, as this replicates the `scan`
+   behavior
+ - Improve documentation to show sort by \_doc, and how to add it to custom
+   queries.
+
  ## 3.0.2
  - Relax constraint on logstash-core-plugin-api to >= 1.60 <= 2.99
 
lib/logstash/inputs/elasticsearch.rb CHANGED
@@ -12,7 +12,7 @@ require "base64"
  # # Read all documents from Elasticsearch matching the given query
  # elasticsearch {
  # hosts => "localhost"
- # query => '{ "query": { "match": { "statuscode": 200 } } }'
+ # query => '{ "query": { "match": { "statuscode": 200 } }, "sort": [ "_doc" ] }'
  # }
  # }
  #
@@ -23,7 +23,8 @@ require "base64"
  # "match": {
  # "statuscode": 200
  # }
- # }
+ # },
+ # "sort": [ "_doc" ]
  # }'
  #
  class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
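
For anyone upgrading, the practical effect of these changes on a pipeline is small: a 3.0.x configuration that relied on `scan => true` simply drops that option in 4.0.0, and a custom query carries its own sort clause, as the updated comments above describe. A minimal before/after sketch (the hosts value and the statuscode query are illustrative, not part of this diff):

    # 3.0.x
    input {
      elasticsearch {
        hosts => ["localhost"]
        scan  => true
        query => '{ "query": { "match": { "statuscode": 200 } } }'
      }
    }

    # 4.0.0: the scan option is gone; sorting by _doc inside the query
    # replicates the old scan behavior
    input {
      elasticsearch {
        hosts => ["localhost"]
        query => '{ "query": { "match": { "statuscode": 200 } }, "sort": [ "_doc" ] }'
      }
    }

If no `query` is set at all, the new default of '{ "sort": [ "_doc" ] }' already does this for a match-all search.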
@@ -40,20 +41,16 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  config :index, :validate => :string, :default => "logstash-*"
 
  # The query to be executed. Read the Elasticsearch query DSL documentation
- # for more info
+ # for more info
  # https://www.elastic.co/guide/en/elasticsearch/reference/current/query-dsl.html
- config :query, :validate => :string, :default => '{"query": { "match_all": {} } }'
-
- # Enable the Elasticsearch "scan" search type. This will disable
- # sorting but increase speed and performance.
- config :scan, :validate => :boolean, :default => true
+ config :query, :validate => :string, :default => '{ "sort": [ "_doc" ] }'
 
  # This allows you to set the maximum number of hits returned per scroll.
  config :size, :validate => :number, :default => 1000
 
  # This parameter controls the keepalive time in seconds of the scrolling
  # request and initiates the scrolling process. The timeout applies per
- # round trip (i.e. between the previous scan scroll request, to the next).
+ # round trip (i.e. between the previous scroll request, to the next).
  config :scroll, :validate => :string, :default => "1m"
 
  # If set, include Elasticsearch document information such as index, type, and
@@ -61,7 +58,7 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  #
  # It might be important to note, with regards to metadata, that if you're
  # ingesting documents with the intent to re-index them (or just update them)
- # that the `action` option in the elasticsearch output want's to know how to
+ # that the `action` option in the elasticsearch output wants to know how to
  # handle those things. It can be dynamically assigned with a field
  # added to the metadata.
  #
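
As a sketch of what that comment is getting at (hostnames, index names, and the size/scroll values below are placeholders, and it assumes the plugin's default `docinfo_target` of `[@metadata]`): with `docinfo => true` the source document's index, type, and id land in the event metadata, where the elasticsearch output can reuse them, for example to re-index documents under their original ids; the output's `action` option could be driven from a metadata field in the same way.

    input {
      elasticsearch {
        hosts   => ["localhost"]
        index   => "logstash-*"
        query   => '{ "sort": [ "_doc" ] }'
        size    => 500        # hits per scroll page
        scroll  => "5m"       # keepalive per scroll round trip
        docinfo => true
      }
    }
    output {
      elasticsearch {
        index       => "copy-of-%{[@metadata][_index]}"
        document_id => "%{[@metadata][_id]}"
      }
    }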
@@ -117,8 +114,6 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  :size => @size
  }
 
- @options[:search_type] = 'scan' if @scan
-
  transport_options = {}
 
  if @user && @password
@@ -146,14 +141,8 @@ class LogStash::Inputs::Elasticsearch < LogStash::Inputs::Base
  # get first wave of data
  r = @client.search(@options)
 
- # since 'scan' doesn't return data on the search call, do an extra scroll
- if @scan
- r = process_next_scroll(output_queue, r['_scroll_id'])
- has_hits = r['has_hits']
- else # not a scan, process the response
- r['hits']['hits'].each { |hit| push_hit(hit, output_queue) }
- has_hits = r['hits']['hits'].any?
- end
+ r['hits']['hits'].each { |hit| push_hit(hit, output_queue) }
+ has_hits = r['hits']['hits'].any?
 
  while has_hits && !stop?
  r = process_next_scroll(output_queue, r['_scroll_id'])
logstash-input-elasticsearch.gemspec CHANGED
@@ -1,7 +1,7 @@
  Gem::Specification.new do |s|
 
  s.name = 'logstash-input-elasticsearch'
- s.version = '3.0.2'
+ s.version = '4.0.0'
  s.licenses = ['Apache License (2.0)']
  s.summary = "Read from an Elasticsearch cluster, based on search query results"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -28,4 +28,3 @@ Gem::Specification.new do |s|
 
  s.add_development_dependency 'logstash-devutils'
  end
-
spec/inputs/elasticsearch_spec.rb CHANGED
@@ -28,7 +28,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  }
  }
@@ -74,63 +73,6 @@ describe LogStash::Inputs::Elasticsearch do
  insist { event.get("message") } == [ "ohayo" ]
  end
 
- it "should retrieve json event from elasticseach with scan" do
- config = %q[
- input {
- elasticsearch {
- hosts => ["localhost"]
- scan => true
- query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
- }
- }
- ]
-
- scan_response = {
- "_scroll_id" => "DcrY3G1xff6SB",
- }
-
- scroll_responses = [
- {
- "_scroll_id" => "cXVlcnlUaGVuRmV0Y2g",
- "took" => 27,
- "timed_out" => false,
- "_shards" => {
- "total" => 169,
- "successful" => 169,
- "failed" => 0
- },
- "hits" => {
- "total" => 1,
- "max_score" => 1.0,
- "hits" => [ {
- "_index" => "logstash-2014.10.12",
- "_type" => "logs",
- "_id" => "C5b2xLQwTZa76jBmHIbwHQ",
- "_score" => 1.0,
- "_source" => { "message" => ["ohayo"] }
- } ]
- }
- },
- {
- "_scroll_id" => "r453Wc1jh0caLJhSDg",
- "hits" => { "hits" => [] }
- }
- ]
-
- client = Elasticsearch::Client.new
- expect(Elasticsearch::Client).to receive(:new).with(any_args).and_return(client)
- expect(client).to receive(:search).with(any_args).and_return(scan_response)
- expect(client).to receive(:scroll).with({ :body => "DcrY3G1xff6SB", :scroll => "1m" }).and_return(scroll_responses.first)
- expect(client).to receive(:scroll).with({ :body=> "cXVlcnlUaGVuRmV0Y2g", :scroll => "1m" }).and_return(scroll_responses.last)
-
- event = input(config) do |pipeline, queue|
- queue.pop
- end
-
- insist { event }.is_a?(LogStash::Event)
- insist { event.get("message") } == [ "ohayo" ]
- end
-
  context "with Elasticsearch document information" do
  let!(:response) do
  {
@@ -150,7 +92,7 @@ describe LogStash::Inputs::Elasticsearch do
  "_type" => "logs",
  "_id" => "C5b2xLQwTZa76jBmHIbwHQ",
  "_score" => 1.0,
- "_source" => {
+ "_source" => {
  "message" => ["ohayo"],
  "metadata_with_hash" => { "awesome" => "logstash" },
  "metadata_with_string" => "a string"
@@ -181,7 +123,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
  }
@@ -196,7 +137,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
  docinfo_target => '#{metadata_field}'
@@ -213,7 +153,7 @@ describe LogStash::Inputs::Elasticsearch do
  expect(event.get("[#{metadata_field}][_id]")).to eq('C5b2xLQwTZa76jBmHIbwHQ')
  expect(event.get("[#{metadata_field}][awesome]")).to eq("logstash")
  end
-
+
  it 'thows an exception if the `docinfo_target` exist but is not of type hash' do
  metadata_field = 'metadata_with_string'
 
@@ -221,7 +161,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
  docinfo_target => '#{metadata_field}'
@@ -253,7 +192,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
  docinfo_target => 'meta'
@@ -275,7 +213,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  docinfo => true
  docinfo_fields => #{fields}
@@ -299,7 +236,6 @@ describe LogStash::Inputs::Elasticsearch do
  input {
  elasticsearch {
  hosts => ["localhost"]
- scan => false
  query => '{ "query": { "match": { "city_name": "Okinawa" } }, "fields": ["message"] }'
  }
  }
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: logstash-input-elasticsearch
  version: !ruby/object:Gem::Version
- version: 3.0.2
+ version: 4.0.0
  platform: ruby
  authors:
  - Elastic
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2016-07-14 00:00:00.000000000 Z
+ date: 2016-10-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  requirement: !ruby/object:Gem::Requirement
@@ -115,7 +115,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.3
+ rubygems_version: 2.4.8
  signing_key:
  specification_version: 4
  summary: Read from an Elasticsearch cluster, based on search query results