logstash-output-influxdb 4.0.0 → 5.0.0

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: 1d30a313b6e704668859f42ae9b5023624fbf35e
4
- data.tar.gz: c5b7c44f82b8098a727713596cc1762dc92a0893
3
+ metadata.gz: 319717ea8b3dc90eec8c3bd3a894afc74481301d
4
+ data.tar.gz: f6e7ec77f79060860a7bd4ff65b922cd6c678418
5
5
  SHA512:
6
- metadata.gz: a22503dc7cecf37e1f71ca21aa2aa8404f5547756b326b25b529eb45f335056b3eb4ce3a426e00a13be4fc11c4ab1be28ee1474fb547e48b8babae15e2b926eb
7
- data.tar.gz: 73f9fa98aa9e32f7d9f9facd88dd221e87b5e7518607b66ebc890e587387f24ee0efc05e0b907b702807085e59ff79ccd721ecb6770719be767b3f4bf71f1562
6
+ metadata.gz: 0c57304545fe95980fdfa2b1cce14802875ef2ddbb969c12399cddcea7dea82df0bdaffada54b3e2f15b6c09914236d834a873360b2d7c7eb7277a4b104c45f4
7
+ data.tar.gz: a0a8308b93ac08067a67910251be2e8abbc6cb54b2c256bc25c9237de4e4afe4f0285e5fa59960e7b05d93c93df50df92ae5bfa7c703c3cb5a7eaeaffd12aadf
data/CHANGELOG.md CHANGED
@@ -1,14 +1,18 @@
1
+ ## 5.0.0
2
+ - Use the official influxdb client. This doesn't change the config options, but has some
3
+ breakage potential, so this is a major version bump.
4
+
1
5
  ## 4.0.0
2
- - Breaking: Updated plugin to use new Java Event APIs
3
- - Fix the suite related to the new java event and the ordering of the keys in the hash.
4
- - Relax logstash-core-plugin-api constraints
5
- - update .travis.yml
6
+ - Breaking: Updated plugin to use new Java Event APIs
7
+ - Fix the suite related to the new java event and the ordering of the keys in the hash.
8
+ - Relax logstash-core-plugin-api constraints
9
+ - update .travis.yml
6
10
 
7
11
  ## 3.1.2
8
- - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash
12
+ - Depend on logstash-core-plugin-api instead of logstash-core, removing the need to mass update plugins on major releases of logstash
9
13
 
10
14
  ## 3.1.1
11
- - New dependency requirements for logstash-core for the 5.0 release
15
+ - New dependency requirements for logstash-core for the 5.0 release
12
16
 
13
17
  ## 3.1.0
14
18
  - New option to enable SSL/TLS encrypted communication to InfluxDB
@@ -3,6 +3,7 @@ require "logstash/namespace"
3
3
  require "logstash/outputs/base"
4
4
  require "logstash/json"
5
5
  require "stud/buffer"
6
+ require "influxdb"
6
7
 
7
8
  # This output lets you output Metrics to InfluxDB (>= 0.9.0-rc31)
8
9
  #
@@ -23,7 +24,7 @@ class LogStash::Outputs::InfluxDB < LogStash::Outputs::Base
23
24
  config :db, :validate => :string, :default => "statistics"
24
25
 
25
26
  # The retention policy to use
26
- config :retention_policy, :validate => :string, :default => "default"
27
+ config :retention_policy, :validate => :string, :default => "autogen"
27
28
 
28
29
  # The hostname or IP address to reach your InfluxDB instance
29
30
  config :host, :validate => :string, :required => true
@@ -107,22 +108,35 @@ class LogStash::Outputs::InfluxDB < LogStash::Outputs::Base
107
108
  # This helps keep both fast and slow log streams moving along in
108
109
  # near-real-time.
109
110
  config :idle_flush_time, :validate => :number, :default => 1
110
-
111
+
112
+ # The amount of time in seconds to delay the initial retry on connection failure.
113
+ #
114
+ # The delay will increase exponentially for each retry attempt (up to max_retries).
115
+
116
+ config :initial_delay, :validate => :number, :default => 1
117
+
118
+ # The number of times to retry recoverable errors before dropping the events.
119
+ #
120
+ # A value of -1 will cause the plugin to retry indefinitely.
121
+ # A value of 0 will cause the plugin to never retry.
122
+ # Otherwise it will retry up to the specified number of times.
123
+ #
124
+ config :max_retries, :validate => :number, :default => 3
111
125
 
112
126
  public
113
127
  def register
114
- require 'manticore'
115
128
  require 'cgi'
116
129
 
117
- @client = Manticore::Client.new
118
130
  @queue = []
119
- @protocol = @ssl ? "https" : "http"
120
131
 
121
132
  buffer_initialize(
122
133
  :max_items => @flush_size,
123
134
  :max_interval => @idle_flush_time,
124
135
  :logger => @logger
125
136
  )
137
+ @auth_method = @user.nil? ? 'none'.freeze : "params".freeze
138
+
139
+ @influxdbClient = InfluxDB::Client.new host: @host, port: @port, time_precision: @time_precision, use_ssl: @ssl, verify_ssl: false, retry: @max_retries, initial_delay: @initial_delay, auth_method: @auth_method, username: @user, password: @password.value
126
140
  end # def register
127
141
 
128
142
 
@@ -154,11 +168,11 @@ class LogStash::Outputs::InfluxDB < LogStash::Outputs::Base
154
168
  tags, point = extract_tags(point)
155
169
 
156
170
  event_hash = {
157
- "measurement" => event.sprintf(@measurement),
158
- "time" => time,
159
- "fields" => point
171
+ :series => event.sprintf(@measurement),
172
+ :timestamp => time,
173
+ :values => point
160
174
  }
161
- event_hash["tags"] = tags unless tags.empty?
175
+ event_hash[:tags] = tags unless tags.empty?
162
176
 
163
177
  buffer_receive(event_hash, event.sprintf(@db))
164
178
  end # def receive
@@ -166,67 +180,26 @@ class LogStash::Outputs::InfluxDB < LogStash::Outputs::Base
166
180
 
167
181
  def flush(events, database, teardown = false)
168
182
  @logger.debug? and @logger.debug("Flushing #{events.size} events to #{database} - Teardown? #{teardown}")
169
- post(events_to_request_body(events), database)
183
+ dowrite(events, database)
170
184
  end # def flush
171
-
172
-
173
- def post(body, database, proto = @protocol)
185
+
186
+ def dowrite(events, database)
174
187
  begin
175
- @query_params = "db=#{database}&rp=#{@retention_policy}&precision=#{@time_precision}&u=#{@user}&p=#{@password.value}"
176
- @base_url = "#{proto}://#{@host}:#{@port}/write"
177
- @url = "#{@base_url}?#{@query_params}"
178
-
179
- @logger.debug? and @logger.debug("POSTing to #{@url}")
180
- @logger.debug? and @logger.debug("Post body: #{body}")
181
- response = @client.post!(@url, :body => body)
182
-
183
- rescue EOFError
184
- @logger.warn("EOF while writing request or reading response header from InfluxDB",
185
- :host => @host, :port => @port)
186
- return # abort this flush
188
+ @influxdbClient.write_points(events, @time_precision, @retention_policy, database)
189
+ rescue InfluxDB::AuthenticationError => ae
190
+ @logger.warn("Authentication Error while writing to InfluxDB", :exception => ae)
191
+ rescue InfluxDB::ConnectionError => ce
192
+ @logger.warn("Connection Error while writing to InfluxDB", :exception => ce)
193
+ rescue Exception => e
194
+ @logger.warn("Non recoverable exception while writing to InfluxDB", :exception => e)
187
195
  end
188
-
189
- if read_body?(response)
190
- # Consume the body for error checking
191
- # This will also free up the connection for reuse.
192
- body = ""
193
- begin
194
- response.read_body { |chunk| body += chunk }
195
- rescue EOFError
196
- @logger.warn("EOF while reading response body from InfluxDB",
197
- :host => @host, :port => @port)
198
- return # abort this flush
199
- end
200
-
201
- @logger.debug? and @logger.debug("Body: #{body}")
202
- end
203
-
204
- unless response && (200..299).include?(response.code)
205
- @logger.error("Error writing to InfluxDB",
206
- :response => response, :response_body => body,
207
- :request_body => @queue.join("\n"))
208
- return
209
- else
210
- @logger.debug? and @logger.debug("Post response: #{response}")
211
- end
212
- end # def post
196
+ end
213
197
 
214
198
  def close
215
199
  buffer_flush(:final => true)
216
200
  end # def teardown
217
201
 
218
-
219
- # A batch POST for InfluxDB 0.9 looks like this:
220
- # cpu_load_short,host=server01,region=us-west value=0.64 cpu_load_short,host=server02,region=us-west value=0.55 1422568543702900257 cpu_load_short,direction=in,host=server01,region=us-west value=23422.0 1422568543702900257
221
- def events_to_request_body(events)
222
- events.map do |event|
223
- result = escaped_measurement(event["measurement"].dup)
224
- result << "," << event["tags"].map { |tag,value| "#{escaped(tag)}=#{escaped(value)}" }.join(',') if event.has_key?("tags")
225
- result << " " << event["fields"].map { |field,value| "#{escaped(field)}=#{quoted(value)}" }.join(',')
226
- result << " #{event["time"]}"
227
- end.join("\n") #each measurement should be on a separate line
228
- end
229
-
202
+
230
203
  # Create a data point from an event. If @use_event_fields_for_data_points is
231
204
  # true, convert the event to a hash. Otherwise, use @data_points. Each key and
232
205
  # value will be run through event#sprintf with the exception of a non-String
@@ -327,30 +300,4 @@ class LogStash::Outputs::InfluxDB < LogStash::Outputs::Base
327
300
  (timestamp.to_f * multiplier).to_i
328
301
  end
329
302
 
330
-
331
- # Only read the response body if its status is not 1xx, 204, or 304. TODO: Should
332
- # also not try reading the body if the request was a HEAD
333
- def read_body?( response )
334
- ! (response.nil? || [204,304].include?(response.code) || (100..199).include?(response.code))
335
- end
336
-
337
-
338
- # Return a quoted string of the given value if it's not a number
339
- def quoted(value)
340
- Numeric === value ? value : %Q|"#{value.gsub('"','\"')}"|
341
- end
342
-
343
-
344
- # Escape tag key, tag value, or field key
345
- def escaped(value)
346
- value.gsub(/[ ,=]/, ' ' => '\ ', ',' => '\,', '=' => '\=')
347
- end
348
-
349
-
350
- # Escape measurements note they don't need to worry about the '=' case
351
- def escaped_measurement(value)
352
- value.gsub(/[ ,]/, ' ' => '\ ', ',' => '\,')
353
- end
354
-
355
-
356
303
  end # class LogStash::Outputs::InfluxDB
@@ -1,6 +1,6 @@
1
1
  Gem::Specification.new do |s|
2
2
  s.name = 'logstash-output-influxdb'
3
- s.version = '4.0.0'
3
+ s.version = '5.0.0'
4
4
  s.licenses = ['Apache License (2.0)']
5
5
  s.summary = "This output lets you output Metrics to InfluxDB"
6
6
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
@@ -22,7 +22,7 @@ Gem::Specification.new do |s|
22
22
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
23
23
 
24
24
  s.add_runtime_dependency 'stud'
25
- s.add_runtime_dependency 'manticore'
25
+ s.add_runtime_dependency 'influxdb' , ">= 0.3", "<= 0.3.99"
26
26
 
27
27
  s.add_development_dependency 'logstash-devutils'
28
28
  s.add_development_dependency 'logstash-input-generator'
@@ -1,34 +1,98 @@
1
1
  # encoding: utf-8
2
2
  require "logstash/devutils/rspec/spec_helper"
3
3
  require "logstash/outputs/influxdb"
4
- require "manticore"
5
4
 
6
5
  describe LogStash::Outputs::InfluxDB do
7
6
 
8
- let(:pipeline) { LogStash::Pipeline.new(config) }
7
+ subject { LogStash::Outputs::InfluxDB.new(config) }
8
+
9
+ context "validate minimal default config" do
9
10
 
11
+ let(:config) do
12
+ {
13
+ "host" => "testhost",
14
+ "use_event_fields_for_data_points" => true
15
+ }
16
+ end
17
+
18
+ before do
19
+ subject.register
20
+ subject.close
21
+ end
22
+
23
+ it "sets correct influx client settings" do
24
+
25
+ config = subject.instance_variable_get(:@influxdbClient).config
26
+
27
+ expect(config.next_host).to eq "testhost"
28
+ expect(config.instance_variable_get(:@port)).to eq 8086
29
+ expect(config.instance_variable_get(:@time_precision)).to eq "ms"
30
+ expect(config.instance_variable_get(:@auth_method)).to eq "none".freeze
31
+ expect(config.instance_variable_get(:@initial_delay)).to eq 1
32
+ expect(config.instance_variable_get(:@retry)).to eq 3
33
+ expect(config.instance_variable_get(:@use_ssl)).to eq false
34
+ expect(config.instance_variable_get(:@username)).to eq nil
35
+ expect(config.instance_variable_get(:@password)).to eq nil
36
+
37
+ end
38
+
39
+ end
40
+
41
+ context "validate non default config" do
42
+
43
+ let(:config) do
44
+ {
45
+ "host" => "localhost",
46
+ "use_event_fields_for_data_points" => true,
47
+ "port" => 9999,
48
+ "ssl" => true,
49
+ "user" => "my_user",
50
+ "password" => "my_pass",
51
+ "initial_delay" => 5,
52
+ "max_retries" => 8,
53
+ "time_precision" => "s"
54
+ }
55
+ end
56
+
57
+ before do
58
+ subject.register
59
+ subject.close
60
+ end
61
+
62
+ it "sets correct influx client settings" do
63
+ config = subject.instance_variable_get(:@influxdbClient).config
64
+ expect(config.instance_variable_get(:@port)).to eq 9999
65
+ expect(config.instance_variable_get(:@time_precision)).to eq "s"
66
+ expect(config.instance_variable_get(:@initial_delay)).to eq 5
67
+ expect(config.instance_variable_get(:@retry)).to eq 8
68
+ expect(config.instance_variable_get(:@use_ssl)).to eq true
69
+ expect(config.instance_variable_get(:@username)).to eq "my_user"
70
+ expect(config.instance_variable_get(:@password)).to eq "my_pass"
71
+ expect(config.instance_variable_get(:@auth_method)).to eq "params".freeze
72
+ end
73
+
74
+ end
75
+
10
76
  context "complete pipeline run with 2 events" do
11
77
 
12
78
  let(:config) do
13
- {
14
- "host" => "localhost",
15
- "user" => "someuser",
16
- "password" => "somepwd",
17
- "allow_time_override" => true,
18
- "data_points" => {
19
- "foo" => "%{foo}",
20
- "bar" => "%{bar}",
21
- "time" => "%{time}"
22
- }
79
+ {
80
+ "host" => "localhost",
81
+ "user" => "someuser",
82
+ "password" => "somepwd",
83
+ "allow_time_override" => true,
84
+ "data_points" => {
85
+ "foo" => "%{foo}",
86
+ "bar" => "%{bar}",
87
+ "time" => "%{time}"
23
88
  }
89
+ }
24
90
  end
25
91
 
26
- subject { LogStash::Outputs::InfluxDB.new(config) }
27
-
28
92
  before do
29
93
  subject.register
30
94
  # Added db name parameter to post - M.Laws
31
- allow(subject).to receive(:post).with(result, "statistics")
95
+ allow(subject).to receive(:dowrite).with(result, "statistics")
32
96
 
33
97
  2.times do
34
98
  subject.receive(LogStash::Event.new("foo" => "1", "bar" => "2", "time" => "3", "type" => "generator"))
@@ -38,365 +102,138 @@ describe LogStash::Outputs::InfluxDB do
38
102
  subject.close
39
103
  end
40
104
 
41
- let(:result) { "logstash foo=\"1\",bar=\"2\" 3\nlogstash foo=\"1\",bar=\"2\" 3" }
105
+ #let(:result) { "logstash foo=\"1\",bar=\"2\" 3\nlogstash foo=\"1\",bar=\"2\" 3" }
106
+
107
+ let(:result) {[{:series=>"logstash", :timestamp=>"3", :values=>{"foo"=>"1", "bar"=>"2"}}, {:series=>"logstash", :timestamp=>"3", :values=>{"foo"=>"1", "bar"=>"2"}}]}
42
108
 
43
109
  it "should receive 2 events, flush and call post with 2 items json array" do
44
- expect(subject).to have_received(:post).with(result, "statistics")
110
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
45
111
  end
46
112
 
47
113
  end
48
114
 
49
115
  context "using event fields as data points" do
50
- let(:config) do <<-CONFIG
51
- input {
52
- generator {
53
- message => "foo=1 bar=2 time=3"
54
- count => 1
55
- type => "generator"
56
- }
57
- }
58
-
59
- filter {
60
- kv { }
61
- }
62
-
63
- output {
64
- influxdb {
65
- host => "localhost"
66
- measurement => "my_series"
67
- allow_time_override => true
68
- use_event_fields_for_data_points => true
69
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
70
- }
71
- }
72
- CONFIG
116
+
117
+ let(:config) do
118
+ {
119
+ "host" => "localhost",
120
+ "measurement" => "my_series",
121
+ "allow_time_override" => true,
122
+ "use_event_fields_for_data_points" => true
123
+ }
73
124
  end
74
125
 
75
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
76
- let(:expected_body) { 'my_series bar="2",foo="1" 3' }
126
+ before do
127
+ subject.register
128
+ # Added db name parameter to post - M.Laws
129
+ allow(subject).to receive(:dowrite).with(result, "statistics")
77
130
 
78
- it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
79
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
80
- pipeline.run
81
- end
82
- end
131
+ subject.receive(LogStash::Event.new("foo" => "1", "bar" => "2", "time" => "3", "type" => "generator"))
83
132
 
84
- context "sending some fields as Influxdb tags" do
85
- let(:config) do <<-CONFIG
86
- input {
87
- generator {
88
- message => "foo=1 bar=2 baz=3 time=4"
89
- count => 1
90
- type => "generator"
91
- }
92
- }
93
-
94
- filter {
95
- kv { }
96
- }
97
-
98
- output {
99
- influxdb {
100
- host => "localhost"
101
- measurement => "my_series"
102
- allow_time_override => true
103
- use_event_fields_for_data_points => true
104
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
105
- send_as_tags => ["bar", "baz", "qux"]
106
- }
107
- }
108
- CONFIG
133
+ # Close / flush the buffer
134
+ subject.close
109
135
  end
110
136
 
111
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
112
- let(:expected_body) { 'my_series,bar=2,baz=3 foo="1" 4' }
137
+ let(:result) {[{:series=>"my_series", :timestamp=>"3", :values=>{"foo"=>"1", "bar"=>"2"}}]}
113
138
 
114
- it "should send the specified fields as tags" do
115
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
116
- pipeline.run
139
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
140
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
117
141
  end
118
- end
119
142
 
120
- context "Escapeing space characters" do
121
- let(:config) do <<-CONFIG
122
- input {
123
- generator {
124
- message => "foo=1 bar=2 baz=3 time=4"
125
- count => 1
126
- type => "generator"
127
- }
128
- }
129
-
130
- filter {
131
- kv {
132
- add_field => {
133
- "test1" => "yellow cat"
134
- "test space" => "making life hard"
135
- "feild space" => "pink dog"
136
- }
137
- }
138
- }
139
-
140
- output {
141
- influxdb {
142
- host => "localhost"
143
- measurement => "my series"
144
- allow_time_override => true
145
- use_event_fields_for_data_points => true
146
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
147
- send_as_tags => ["bar", "baz", "test1", "test space"]
148
- }
149
- }
150
- CONFIG
151
- end
143
+ end
152
144
 
153
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
154
- let(:expected_body) { 'my\ series,bar=2,baz=3,test1=yellow\ cat,test\ space=making\ life\ hard foo="1",feild\ space="pink dog" 4' }
155
145
 
156
- it "should send the specified fields as tags" do
157
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
158
- pipeline.run
159
- end
160
- end
146
+ context "sending some fields as Influxdb tags" do
161
147
 
162
- context "Escapeing comma characters" do
163
- let(:config) do <<-CONFIG
164
- input {
165
- generator {
166
- message => "foo=1 bar=2 baz=3 time=4"
167
- count => 1
168
- type => "generator"
169
- }
170
- }
171
-
172
- filter {
173
- kv {
174
- add_field => {
175
- "test1" => "yellow, cat"
176
- "test, space" => "making, life, hard"
177
- "feild, space" => "pink, dog"
178
- }
179
- }
180
- }
181
-
182
- output {
183
- influxdb {
184
- host => "localhost"
185
- measurement => "my, series"
186
- allow_time_override => true
187
- use_event_fields_for_data_points => true
188
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
189
- send_as_tags => ["bar", "baz", "test1", "test, space"]
190
- }
191
- }
192
- CONFIG
148
+ let(:config) do
149
+ {
150
+ "host" => "localhost",
151
+ "measurement" => "my_series",
152
+ "allow_time_override" => true,
153
+ "use_event_fields_for_data_points" => true,
154
+ "send_as_tags" => ["bar", "baz", "qux"]
155
+ }
193
156
  end
194
157
 
195
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
196
- let(:expected_body) { 'my\,\ series,bar=2,baz=3,test1=yellow\,\ cat,test\,\ space=making\,\ life\,\ hard foo="1",feild\,\ space="pink, dog" 4' }
158
+ before do
159
+ subject.register
160
+ # Added db name parameter to post - M.Laws
161
+ allow(subject).to receive(:dowrite).with(result, "statistics")
197
162
 
198
- it "should send the specified fields as tags" do
199
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
200
- pipeline.run
201
- end
202
- end
163
+ subject.receive(LogStash::Event.new("foo" => "1", "bar" => "2", "baz" => "3", "time" => "4", "type" => "generator"))
203
164
 
204
- context "Escapeing equal characters" do
205
- let(:config) do <<-CONFIG
206
- input {
207
- generator {
208
- message => "foo=1 bar=2 baz=3 time=4"
209
- count => 1
210
- type => "generator"
211
- }
212
- }
213
-
214
- filter {
215
- kv {
216
- add_field => {
217
- "test1" => "yellow=cat"
218
- "test=space" => "making= life=hard"
219
- "feild= space" => "pink= dog"
220
- }
221
- }
222
- }
223
-
224
- output {
225
- influxdb {
226
- host => "localhost"
227
- measurement => "my=series"
228
- allow_time_override => true
229
- use_event_fields_for_data_points => true
230
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
231
- send_as_tags => ["bar", "baz", "test1", "test=space"]
232
- }
233
- }
234
- CONFIG
165
+ # Close / flush the buffer
166
+ subject.close
235
167
  end
236
168
 
237
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
238
- let(:expected_body) { 'my=series,bar=2,baz=3,test1=yellow\=cat,test\=space=making\=\ life\=hard foo="1",feild\=\ space="pink= dog" 4' }
169
+ let(:result) {[{:series=>"my_series", :timestamp=>"4", :tags=>{"bar"=>"2", "baz"=>"3"}, :values=>{"foo"=>"1"}}]}
239
170
 
240
- it "should send the specified fields as tags" do
241
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
242
- pipeline.run
171
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
172
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
243
173
  end
244
- end
245
174
 
246
- context "testing backslash characters" do
247
- let(:config) do <<-CONFIG
248
- input {
249
- generator {
250
- message => 'foo\\=1 bar=2 baz=3 time=4'
251
- count => 1
252
- type => "generator"
253
- }
254
- }
255
-
256
- filter {
257
- kv {
258
- add_field => {
259
- "test1" => "yellow=cat"
260
- "test=space" => "making=, life=hard"
261
- "feildspace" => 'C:\\Griffo'
262
- }
263
- }
264
- }
265
-
266
- output {
267
- influxdb {
268
- host => "localhost"
269
- measurement => 'my\\series'
270
- allow_time_override => true
271
- use_event_fields_for_data_points => true
272
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
273
- send_as_tags => ['bar', "baz", "test1", "test=space"]
274
- }
275
- }
276
- CONFIG
277
- end
175
+ end
278
176
 
279
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
280
- let(:expected_body) { 'my\series,bar=2,baz=3,test1=yellow\=cat,test\=space=making\=\,\ life\=hard foo\="1",feildspace="C:\Griffo" 4' }
177
+ context "when fields data contains a list of tags" do
281
178
 
282
- it "should send the specified fields as tags" do
283
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
284
- pipeline.run
179
+ let(:config) do
180
+ {
181
+ "host" => "localhost",
182
+ "measurement" => "my_series",
183
+ "allow_time_override" => true,
184
+ "use_event_fields_for_data_points" => true,
185
+ }
285
186
  end
286
- end
287
187
 
188
+ before do
189
+ subject.register
190
+ # Added db name parameter to post - M.Laws
191
+ allow(subject).to receive(:dowrite).with(result, "statistics")
192
+
193
+ subject.receive(event)
288
194
 
289
- context "when fields data contains a list of tags" do
290
- let(:config) do <<-CONFIG
291
- input {
292
- generator {
293
- message => "foo=1 time=2"
294
- count => 1
295
- type => "generator"
296
- }
297
- }
298
-
299
- filter {
300
- kv { add_tag => [ "tagged" ] }
301
- }
302
-
303
- output {
304
- influxdb {
305
- host => "localhost"
306
- measurement => "my_series"
307
- allow_time_override => true
308
- use_event_fields_for_data_points => true
309
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
310
- }
311
- }
312
- CONFIG
195
+ # Close / flush the buffer
196
+ subject.close
313
197
  end
198
+
199
+ let(:event) {LogStash::Event.new("foo" => "1", "time" => "2", "tags" => ["tagged"], "type" => "generator")}
200
+ let(:result) {[{:series=>"my_series", :timestamp=>"2", :tags=>{"tagged"=>"true"}, :values=>{"foo"=>"1"}}]}
314
201
 
315
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
316
- let(:expected_body) { 'my_series,tagged=true foo="1" 2' }
317
-
318
- it "should move them to the tags data" do
319
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
320
- pipeline.run
202
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
203
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
321
204
  end
205
+
322
206
  end
323
207
 
324
208
  context "when fields are coerced to numerics" do
325
- let(:config) do <<-CONFIG
326
- input {
327
- generator {
328
- message => "foo=1 bar=2 baz=\\\"quotes\\\" time=3"
329
- count => 1
330
- type => "generator"
331
- }
332
- }
333
-
334
- filter {
335
- kv { }
336
- }
337
-
338
- output {
339
- influxdb {
340
- host => "localhost"
341
- measurement => "my_series"
342
- allow_time_override => true
343
- use_event_fields_for_data_points => true
344
- exclude_fields => ["@version", "@timestamp", "sequence", "message", "type", "host"]
345
- coerce_values => { "foo" => "integer" "bar" => "float" }
346
- }
347
- }
348
- CONFIG
209
+
210
+ let(:config) do
211
+ {
212
+ "host" => "localhost",
213
+ "measurement" => "my_series",
214
+ "allow_time_override" => true,
215
+ "use_event_fields_for_data_points" => true,
216
+ "coerce_values" => { "foo" => "integer", "bar" => "float" }
217
+ }
349
218
  end
350
219
 
351
- let(:expected_url) { 'http://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p='}
352
- let(:expected_body) { 'my_series bar=2.0,foo=1,baz="\\\"quotes\\\"" 3' } # We want the backslash and the escaped-quote in the request body
220
+ before do
221
+ subject.register
222
+ # Added db name parameter to post - M.Laws
223
+ allow(subject).to receive(:dowrite).with(result, "statistics")
353
224
 
354
- it "should quote all other values (and escaping double quotes)" do
355
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
356
- pipeline.run
357
- end
358
- end
225
+ subject.receive(LogStash::Event.new("foo" => "1", "bar" => "2.0", "baz"=>"\\\"quotes\\\"", "time"=>3, "type" => "generator"))
359
226
 
360
- # Test issue #32 - Add support for HTTPS via configuration
361
- # --------------------------------------------------------
362
- # A simple test to verify that setting the ssl configuration option works
363
- # similar to other Logstash output plugins (specifically the Elasticsearch
364
- # output plugin).
365
- context "setting the ssl configuration option to true" do
366
- let(:config) do <<-CONFIG
367
- input {
368
- generator {
369
- message => "foo=1 bar=2 baz=3 time=4"
370
- count => 1
371
- type => "generator"
372
- }
373
- }
374
-
375
- filter {
376
- kv { }
377
- }
378
-
379
- output {
380
- influxdb {
381
- host => "localhost"
382
- ssl => true
383
- measurement => "barfoo"
384
- allow_time_override => true
385
- use_event_fields_for_data_points => true
386
- exclude_fields => [ "@version", "@timestamp", "sequence",
387
- "message", "type", "host" ]
388
- }
389
- }
390
- CONFIG
227
+ # Close / flush the buffer
228
+ subject.close
391
229
  end
392
230
 
393
- let(:expected_url) { 'https://localhost:8086/write?db=statistics&rp=default&precision=ms&u=&p=' }
394
- let(:expected_body) { 'barfoo bar="2",foo="1",baz="3" 4' }
231
+ let(:result) {[{:series=>"my_series", :timestamp=>3, :values=>{"foo"=>1, "bar"=>2.0, "baz"=>"\\\"quotes\\\"" }}]}
395
232
 
396
- it "should POST to an https URL" do
397
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
398
- pipeline.run
233
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
234
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
399
235
  end
236
+
400
237
  end
401
238
 
402
239
  # Test issue #31 - Run "db" parameter through event.sprintf() to support...
@@ -405,49 +242,35 @@ describe LogStash::Outputs::InfluxDB do
405
242
  # DATABASE continue to work *after* implementing #31. Also verifies that
406
243
  # sprintf formatting is supported in the measurement name.
407
244
  context "receiving 3 points between 2 measurements in 1 database" do
408
- let(:config) do <<-CONFIG
409
- input {
410
- generator {
411
- lines => [
412
- "foo=1 bar=2 baz=m1 time=1",
413
- "foo=3 bar=4 baz=m2 time=2",
414
- "foo=5 bar=6 baz=m2 time=3"
415
- ]
416
- count => 1
417
- type => "generator"
418
- }
419
- }
420
-
421
- filter {
422
- kv { }
423
- }
424
-
425
- output {
426
- influxdb {
427
- host => "localhost"
428
- db => "barfoo"
429
- measurement => "%{baz}"
430
- allow_time_override => true
431
- use_event_fields_for_data_points => true
432
- exclude_fields => [ "@version", "@timestamp", "sequence",
433
- "message", "type", "host" ]
434
- }
435
- }
436
- CONFIG
245
+
246
+ let(:config) do
247
+ {
248
+ "host" => "localhost",
249
+ "measurement" => "%{baz}",
250
+ "allow_time_override" => true,
251
+ "use_event_fields_for_data_points" => true
252
+ }
437
253
  end
438
254
 
439
- let(:expected_url) { 'http://localhost:8086/write?db=barfoo&rp=default&precision=ms&u=&p=' }
440
- let(:expected_body) { "m1 bar=\"2\",foo=\"1\",baz=\"m1\" 1\nm2 bar=\"4\",foo=\"3\",baz=\"m2\" 2\nm2 bar=\"6\",foo=\"5\",baz=\"m2\" 3" }
255
+ before do
256
+ subject.register
257
+ # Added db name parameter to post - M.Laws
258
+ allow(subject).to receive(:dowrite)
259
+
260
+ subject.receive(LogStash::Event.new("foo"=>"1", "bar"=>"2", "baz" => "m1", "time" => "1", "type" => "generator"))
261
+ subject.receive(LogStash::Event.new("foo"=>"3", "bar"=>"4", "baz" => "m2", "time" => "2", "type" => "generator"))
262
+ subject.receive(LogStash::Event.new("foo"=>"5", "bar"=>"6", "baz" => "m2", "time" => "3", "type" => "generator"))
441
263
 
442
- it "should result in a single POST (one per database)" do
443
- expect_any_instance_of(Manticore::Client).to receive(:post!).once
444
- pipeline.run
264
+ # Close / flush the buffer
265
+ subject.close
445
266
  end
446
267
 
447
- it "should POST in bulk format" do
448
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url, body: expected_body)
449
- pipeline.run
268
+ let(:result) {[{:series=>"m1", :timestamp=>"1", :values=>{"foo"=>"1", "bar"=>"2", "baz" => "m1" }},{:series=>"m2", :timestamp=>"2", :values=>{"foo"=>"3", "bar"=>"4", "baz" => "m2"}},{:series=>"m2", :timestamp=>"3", :values=>{"foo"=>"5", "bar"=>"6", "baz" => "m2" }}]}
269
+
270
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
271
+ expect(subject).to have_received(:dowrite).with(result, "statistics")
450
272
  end
273
+
451
274
  end
452
275
 
453
276
  # Test issue #31 - Run "db" parameter through event.sprintf() to support...
@@ -458,52 +281,38 @@ describe LogStash::Outputs::InfluxDB do
458
281
  # Also verifies that sprintf formatting is correctly supported in the
459
282
  # database name.
460
283
  context "receiving 4 points between 2 measurements in 2 databases" do
461
- let(:config) do <<-CONFIG
462
- input {
463
- generator {
464
- lines => [
465
- "foo=1 bar=db1 baz=m1 time=1",
466
- "foo=2 bar=db1 baz=m2 time=2",
467
- "foo=3 bar=db2 baz=m1 time=3",
468
- "foo=4 bar=db2 baz=m2 time=4"
469
- ]
470
- count => 1
471
- type => "generator"
472
- }
473
- }
474
-
475
- filter {
476
- kv { }
477
- }
478
-
479
- output {
480
- influxdb {
481
- host => "localhost"
482
- db => "%{bar}"
483
- measurement => "%{baz}"
484
- allow_time_override => true
485
- use_event_fields_for_data_points => true
486
- exclude_fields => [ "@version", "@timestamp", "sequence",
487
- "message", "type", "host" ]
488
- }
489
- }
490
- CONFIG
284
+
285
+ let(:config) do
286
+ {
287
+ "host" => "localhost",
288
+ "db" => "%{bar}",
289
+ "measurement" => "%{baz}",
290
+ "allow_time_override" => true,
291
+ "use_event_fields_for_data_points" => true,
292
+ }
491
293
  end
492
294
 
493
- let(:expected_url_db1) { 'http://localhost:8086/write?db=db1&rp=default&precision=ms&u=&p=' }
494
- let(:expected_url_db2) { 'http://localhost:8086/write?db=db2&rp=default&precision=ms&u=&p=' }
495
- let(:expected_body_db1) { "m1 bar=\"db1\",foo=\"1\",baz=\"m1\" 1\nm2 bar=\"db1\",foo=\"2\",baz=\"m2\" 2" }
496
- let(:expected_body_db2) { "m1 bar=\"db2\",foo=\"3\",baz=\"m1\" 3\nm2 bar=\"db2\",foo=\"4\",baz=\"m2\" 4" }
295
+ before do
296
+ subject.register
297
+ # Added db name parameter to post - M.Laws
298
+ allow(subject).to receive(:dowrite)
497
299
 
498
- it "should result in two POSTs (one per database)" do
499
- expect_any_instance_of(Manticore::Client).to receive(:post!).twice
500
- pipeline.run
300
+ subject.receive(LogStash::Event.new("foo"=>"1", "bar"=>"db1", "baz" => "m1", "time" => "1", "type" => "generator"))
301
+ subject.receive(LogStash::Event.new("foo"=>"2", "bar"=>"db1", "baz" => "m1", "time" => "2", "type" => "generator"))
302
+ subject.receive(LogStash::Event.new("foo"=>"3", "bar"=>"db2", "baz" => "m2", "time" => "3", "type" => "generator"))
303
+ subject.receive(LogStash::Event.new("foo"=>"4", "bar"=>"db2", "baz" => "m2", "time" => "4", "type" => "generator"))
304
+ # Close / flush the buffer
305
+ subject.close
501
306
  end
502
307
 
503
- it "should post in bulk format" do
504
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url_db1, body: expected_body_db1)
505
- expect_any_instance_of(Manticore::Client).to receive(:post!).with(expected_url_db2, body: expected_body_db2)
506
- pipeline.run
308
+ let(:resultdb1) {[{:series=>"m1", :timestamp=>"1", :values=>{"foo"=>"1", "bar"=>"db1", "baz" => "m1" }},{:series=>"m1", :timestamp=>"2", :values=>{"foo"=>"2", "bar"=>"db1", "baz" => "m1" }}]}
309
+ let(:resultdb2) {[{:series=>"m2", :timestamp=>"3", :values=>{"foo"=>"3", "bar"=>"db2", "baz" => "m2" }},{:series=>"m2", :timestamp=>"4", :values=>{"foo"=>"4", "bar"=>"db2", "baz" => "m2" }}]}
310
+
311
+ it "should use the event fields as the data points, excluding @version and @timestamp by default as well as any fields configured by exclude_fields" do
312
+ expect(subject).to have_received(:dowrite).with(resultdb1, "db1").once
313
+ expect(subject).to have_received(:dowrite).with(resultdb2, "db2").once
507
314
  end
315
+
316
+
508
317
  end
509
318
  end
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: logstash-output-influxdb
3
3
  version: !ruby/object:Gem::Version
4
- version: 4.0.0
4
+ version: 5.0.0
5
5
  platform: ruby
6
6
  authors:
7
7
  - Elastic
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2016-09-09 00:00:00.000000000 Z
11
+ date: 2017-04-28 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  requirement: !ruby/object:Gem::Requirement
@@ -49,15 +49,21 @@ dependencies:
49
49
  requirements:
50
50
  - - ">="
51
51
  - !ruby/object:Gem::Version
52
- version: '0'
53
- name: manticore
52
+ version: '0.3'
53
+ - - "<="
54
+ - !ruby/object:Gem::Version
55
+ version: 0.3.99
56
+ name: influxdb
54
57
  prerelease: false
55
58
  type: :runtime
56
59
  version_requirements: !ruby/object:Gem::Requirement
57
60
  requirements:
58
61
  - - ">="
59
62
  - !ruby/object:Gem::Version
60
- version: '0'
63
+ version: '0.3'
64
+ - - "<="
65
+ - !ruby/object:Gem::Version
66
+ version: 0.3.99
61
67
  - !ruby/object:Gem::Dependency
62
68
  requirement: !ruby/object:Gem::Requirement
63
69
  requirements: