logstash-output-honeycomb_json_batch 0.1.2 → 0.2.0
- checksums.yaml +4 -4
- data/README.md +7 -10
- data/lib/logstash/outputs/honeycomb_json_batch.rb +37 -70
- data/logstash-output-honeycomb_json_batch.gemspec +1 -1
- data/spec/outputs/honeycomb_json_batch_spec.rb +20 -27
- metadata +16 -14
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: d0021f21a46eb6dfb4685f88f6a3350e8c32e7e9
+  data.tar.gz: b1838276379b83acb24f4b6c13836922ebb8028c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f01e1628b5af907e299762afbf2fb2911beabbda20dd1a37db185af9b80e641a5f211d521a8a211a80394c3b682fc7748d3bf5e725cd08690030b89cb4256637
+  data.tar.gz: 786eeb856cafd6e6e5e30ab31faeb00748650d58081835f4fb7337ed2bfa4b14f0914bf8522d44a392a5e637bd72e20e77bd10a1823bb50d35339e8646a3ba20
data/README.md
CHANGED
@@ -1,4 +1,4 @@
-# logstash-output-honeycomb_json_batch
+# logstash-output-honeycomb_json_batch [![Gem Version](https://badge.fury.io/rb/logstash-output-honeycomb_json_batch.svg)](https://badge.fury.io/rb/logstash-output-honeycomb_json_batch)
 
 A logstash plugin for interacting with [Honeycomb](https://honeycomb.io) at high volumes. (See here for more information about [using Honeycomb](https://honeycomb.io/intro/) and [its libraries](https://honeycomb.io/docs/send-data/sdks).)
 
@@ -14,11 +14,13 @@ The easiest way to use this plugin is by installing it through rubygems like any
 bin/logstash-plugin install logstash-output-honeycomb_json_batch
 ```
 
-##
+## Compatibility
+
+This plugin requires Logstash 2.4 or greater. Please open an issue if you require support for older versions.
 
-
+## Usage
 
-A simple config
+A simple config is:
 
 ```
 input {
@@ -35,12 +37,7 @@ output {
 ```
 
 Additional arguments to `honeycomb_json_batch`:
-
-Consider these when tuning performance:
-
-- `flush_size`: Default batch size, defaults to 50
-- `idle_flush_time`: Default flush interval in seconds, defaults to 5
-- `pool_max`: Maximum number of requests to be run in parallel, defaults to 10
+- `flush_size`: Maximum batch size, defaults to 75
 - `retry_individual`: On failed requests, whether to retry event sends individually, defaults to true
 - `api_host`: Allows you to override the Honeycomb host, defaults to https://api.honeycomb.io
 
data/lib/logstash/outputs/honeycomb_json_batch.rb
CHANGED
@@ -1,14 +1,15 @@
 # encoding: utf-8
+require "enumerator"
 require "logstash/outputs/base"
 require "logstash/namespace"
 require "logstash/json"
 require "uri"
-require "stud/buffer"
 require "logstash/plugin_mixins/http_client"
 
 class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
   include LogStash::PluginMixins::HttpClient
-
+
+  concurrency :shared
 
   config_name "honeycomb_json_batch"
 
@@ -18,24 +19,18 @@ class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
 
   config :dataset, :validate => :string, :required => true
 
-  config :
+  config :retry_individual, :validate => :boolean, :default => true
 
-  config :
+  config :flush_size, :validate => :number, :default => 75
 
-
+  # The following configuration options are deprecated and do nothing.
+  config :idle_flush_time, :validate => :number, :default => 5
 
   config :pool_max, :validate => :number, :default => 10
 
   def register
-    # We count outstanding requests with this queue
-    # This queue tracks the requests to create backpressure
-    # When this queue is empty no new requests may be sent,
-    # tokens must be added back by the client on success
-    @request_tokens = SizedQueue.new(@pool_max)
-    @pool_max.times {|t| @request_tokens << true }
     @total = 0
     @total_failed = 0
-    @requests = Array.new
     if @api_host.nil?
       @api_host = "https://api.honeycomb.io"
     elsif !@api_host.start_with? "http"
@@ -43,94 +38,65 @@ class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
     end
     @api_host = @api_host.chomp
 
-    buffer_initialize(
-      :max_items => @flush_size,
-      :max_interval => @idle_flush_time,
-      :logger => @logger
-    )
     logger.info("Initialized honeycomb_json_batch with settings",
-      :flush_size => @flush_size,
-      :idle_flush_time => @idle_flush_time,
-      :request_tokens => @pool_max,
       :api_host => @api_host,
       :headers => request_headers,
       :retry_individual => @retry_individual)
-
-  end
-
-  # This module currently does not support parallel requests as that would circumvent the batching
-  def receive(event, async_type=:background)
-    buffer_receive(event)
   end
 
   def close
-    buffer_flush(:final => true)
     client.close
   end
 
-
-
-
-
-
-
-
-
-
-
+  def multi_receive(events)
+    events.each_slice(@flush_size) do |chunk|
+      documents = []
+      chunk.each do |event|
+        data = event.to_hash()
+        timestamp = data.delete("@timestamp")
+        doc = { "time" => timestamp, "data" => data }
+        if samplerate = data.delete("@samplerate")
+          doc["samplerate"] = samplerate.to_i
+        end
+        documents.push(doc)
       end
-      documents
+      make_request(documents)
     end
-
-    make_request(documents)
-  end
-
-  def multi_receive(events)
-    events.each {|event| buffer_receive(event)}
   end
 
   private
 
   def make_request(documents)
     body = LogStash::Json.dump({ @dataset => documents })
-    # Block waiting for a token
-    token = @request_tokens.pop
-    @logger.debug("Got token", :tokens => @request_tokens.length)
 
-
-
-
-
-
-      :headers => request_headers,
-      :async => true
-    })
-    rescue Exception => e
-      @logger.warn("An error occurred while indexing: #{e.message}")
-    end
-
-    # attach handlers before performing request
-    request.on_complete do
-      # Make sure we return the token to the pool
-      @request_tokens << token
-    end
+    url = "#{@api_host}/1/batch"
+    request = client.post(url, {
+      :body => body,
+      :headers => request_headers
+    })
 
     request.on_success do |response|
       if response.code >= 200 && response.code < 300
         @total = @total + documents.length
-        @logger.debug("Successfully submitted",
+        @logger.debug("Successfully submitted",
           :docs => documents.length,
          :response_code => response.code,
          :total => @total)
      else
        if documents.length > 1 && @retry_individual
          if statuses = JSON.parse(response.body).values.first
-
-
+            statuses.each_with_index do |status, i|
+              code = status["status"]
+              if code == nil
+                @logger.warn("Status code missing in response: #{status}")
+                next
+              elsif code >= 200 && code < 300
+                next
+              end
              make_request([documents[i]])
            end
          end
-        else
+        else
          @total_failed += documents.length
          log_failure(
            "Encountered non-200 HTTP code #{response.code}",
@@ -150,7 +116,7 @@ class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
        :url => url,
        :method => @http_method,
        :body => body,
-        :headers =>
+        :headers => request_headers,
        :message => exception.message,
        :class => exception.class.name,
        :backtrace => exception.backtrace,
@@ -158,7 +124,8 @@ class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
      )
    end
 
-
+    request.call
+
  rescue Exception => e
    log_failure("Got totally unexpected exception #{e.message}", :docs => documents.length)
  end
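The net effect of the plugin rewrite above: the Stud::Buffer bookkeeping (`buffer_receive`/`buffer_flush`) and the token-gated async request pool are gone, and `multi_receive` now splits each incoming batch into chunks of at most `flush_size` (default 75) events, sending one synchronous POST to `/1/batch` per chunk. A minimal plain-Ruby sketch of that chunking, with integers standing in for LogStash::Event objects (illustration only, not the plugin code):

require "enumerator"

flush_size = 75
events = (1..200).to_a                     # stand-ins for LogStash::Event objects
events.each_slice(flush_size) do |chunk|
  # the plugin builds one documents array and one POST body per chunk
  puts "batch of #{chunk.length} events"
end
# prints: batch of 75 events, batch of 75 events, batch of 50 events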
data/logstash-output-honeycomb_json_batch.gemspec
CHANGED
@@ -1,6 +1,6 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-honeycomb_json_batch'
-  s.version = '0.1.2'
+  s.version = '0.2.0'
   s.licenses = ['Apache-2.0']
   s.summary = "This output lets you `POST` batches of events to the Honeycomb.io API endpoint"
   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
data/spec/outputs/honeycomb_json_batch_spec.rb
CHANGED
@@ -33,22 +33,11 @@ describe LogStash::Outputs::HoneycombJSONBatch do
 
   it "should receive a single post request" do
     expect(client).to receive(:post).
-      with("#{ api_host }/1/batch", hash_including(:body, :headers
+      with("#{ api_host }/1/batch", hash_including(:body, :headers)).
       once.
       and_call_original
 
-
-    @honeycomb.buffer_flush(:force => true)
-  end
-
-  it "should send batches based on the specified flush_size" do
-    expect(client).to receive(:post).
-      with("#{ api_host }/1/batch", hash_including(:body, :headers, :async)).
-      twice.
-      and_call_original
-
-    (flush_size + 1).times {|t| @honeycomb.receive(event)}
-    @honeycomb.buffer_flush(:force => true)
+    @honeycomb.multi_receive([event])
   end
 
   it "should attach the right headers for Honeycomb ingestion" do
@@ -59,11 +48,10 @@ describe LogStash::Outputs::HoneycombJSONBatch do
       })).once.
       and_call_original
 
-    @honeycomb.
-    @honeycomb.buffer_flush(:force => true)
+    @honeycomb.multi_receive([event])
   end
 
-  it "should wrap events in the right structure Honeycomb ingestion" do
+  it "should wrap events in the right structure for Honeycomb ingestion" do
     data = event.to_hash()
     data.delete("@timestamp")
     expect(client).to receive(:post).
@@ -71,25 +59,23 @@ describe LogStash::Outputs::HoneycombJSONBatch do
         DATASET => [ { "time" => event.timestamp.to_s, "data" => data } ]
       }))).once.
       and_call_original
-
-    @honeycomb.receive(event)
-    @honeycomb.buffer_flush(:force => true)
+    @honeycomb.multi_receive([event])
   end
 
   it "should extract timestamp and samplerate from the data" do
-    with_samplerate = LogStash::Event.new("alpha" => 1.0, "@samplerate" => "17.5"
+    with_samplerate = LogStash::Event.new("alpha" => 1.0, "@samplerate" => "17.5",
+                                          "@timestamp" => "2014-11-17T20:37:17.223Z")
     data = with_samplerate.to_hash()
     data.delete("@timestamp")
     data.delete("@samplerate")
 
     expect(client).to receive(:post).
       with("#{ api_host }/1/batch", hash_including(:body => LogStash::Json.dump({
-        DATASET => [ { "time" =>
+        DATASET => [ { "time" => with_samplerate.timestamp.to_s, "data" => data, "samplerate" => 17 } ]
       }))).once.
       and_call_original
 
-    @honeycomb.
-    @honeycomb.buffer_flush(:force => true)
+    @honeycomb.multi_receive([with_samplerate])
   end
 
   it "should wrap multiple events up in the right structure" do
@@ -107,9 +93,16 @@ describe LogStash::Outputs::HoneycombJSONBatch do
       }))).once.
       and_call_original
 
-    @honeycomb.
-
-
-
+    @honeycomb.multi_receive([event1, event2, event3])
+  end
+
+  it "should chunk large batches" do
+    events = []
+    (1..3*@honeycomb.flush_size).each do |i|
+      events.push(LogStash::Event.new("index" => i))
+    end
+    expect(client).to receive(:post).exactly(3).times.
+      and_call_original
+    @honeycomb.multi_receive(events)
   end
 end
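The specs above pin down the request body shape: a single JSON object keyed by the dataset name, whose value is an array of documents with `time`, `data`, and (optionally) `samplerate` fields. A rough standalone illustration of that payload using the stdlib `json` module instead of `LogStash::Json`, with a hypothetical dataset name:

require "json"

dataset = "my-dataset"                        # hypothetical; the plugin reads `config :dataset`
documents = [
  { "time" => "2014-11-17T20:37:17.223Z",     # taken from the event's @timestamp
    "data" => { "alpha" => 1.0 },             # the remaining event fields
    "samplerate" => 17 }                      # integer part of "@samplerate" => "17.5"
]
puts JSON.dump({ dataset => documents })
# {"my-dataset":[{"time":"2014-11-17T20:37:17.223Z","data":{"alpha":1.0},"samplerate":17}]}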
metadata
CHANGED
@@ -1,16 +1,17 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-honeycomb_json_batch
 version: !ruby/object:Gem::Version
-  version: 0.1.2
+  version: 0.2.0
 platform: ruby
 authors:
 - Honeycomb
-autorequire:
+autorequire:
 bindir: bin
 cert_chain: []
-date: 2017-02-
+date: 2017-02-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
+  name: logstash-core-plugin-api
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -19,9 +20,8 @@ dependencies:
     - - "<="
       - !ruby/object:Gem::Version
         version: '2.99'
-  name: logstash-core-plugin-api
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -31,6 +31,7 @@ dependencies:
     - !ruby/object:Gem::Version
       version: '2.99'
 - !ruby/object:Gem::Dependency
+  name: logstash-mixin-http_client
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -39,9 +40,8 @@ dependencies:
     - - "<"
       - !ruby/object:Gem::Version
         version: 5.0.0
-  name: logstash-mixin-http_client
-  prerelease: false
   type: :runtime
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -51,20 +51,22 @@ dependencies:
     - !ruby/object:Gem::Version
       version: 5.0.0
 - !ruby/object:Gem::Dependency
+  name: logstash-devutils
   requirement: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
     - !ruby/object:Gem::Version
       version: '0'
-  name: logstash-devutils
-  prerelease: false
   type: :development
+  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
     - !ruby/object:Gem::Version
       version: '0'
-description: This gem is a Logstash plugin required to be installed on top of the
+description: This gem is a Logstash plugin required to be installed on top of the
+  Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+  gem is not a stand-alone program
 email: support@honeycomb.io
 executables: []
 extensions: []
@@ -82,7 +84,7 @@ licenses:
 metadata:
   logstash_plugin: 'true'
   logstash_group: output
-post_install_message:
+post_install_message:
 rdoc_options: []
 require_paths:
 - lib
@@ -97,9 +99,9 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubyforge_project:
-rubygems_version: 2.
-signing_key:
+rubyforge_project:
+rubygems_version: 2.4.8
+signing_key:
 specification_version: 4
 summary: This output lets you `POST` batches of events to the Honeycomb.io API endpoint
 test_files: