logstash-output-honeycomb_json_batch 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +2 -0
- data/LICENSE +14 -0
- data/README.md +51 -0
- data/lib/logstash/outputs/honeycomb_json_batch.rb +164 -0
- data/logstash-output-honeycomb_json_batch.gemspec +28 -0
- data/spec/outputs/honeycomb_json_batch_spec.rb +99 -0
- metadata +134 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA1:
+  metadata.gz: bf59c27f44d60efa7c924e9b3371133a718e938a
+  data.tar.gz: '09aead3e7779e92feff278e6bfb91c4a5ec09be6'
+SHA512:
+  metadata.gz: c578d3531ba2d72197eab44b471939fc8dccb5ef1e9f99155915fbc937c937262061bab610d19f15a832903ab80623400741004b4c44068aa46e6ffb20fc3e6b
+  data.tar.gz: cc4fba63f1ca522bc4d744b37bad638848a86fe2f1d7721bd652f1b33e18cad757979ed44d603aaac2d3bb2066593005e339057761e61d5805bb2b95657b87aa
data/Gemfile
ADDED
data/LICENSE
ADDED
@@ -0,0 +1,14 @@
+Copyright (c) 2017 Hound Technology, Inc <https://honeycomb.io>
+
+Licensed under the Apache License, Version 2.0 (the "License");
+you may not use this file except in compliance with the License.
+You may obtain a copy of the License at
+
+    http://www.apache.org/licenses/LICENSE-2.0
+
+Unless required by applicable law or agreed to in writing, software
+distributed under the License is distributed on an "AS IS" BASIS,
+WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+See the License for the specific language governing permissions and
+limitations under the License.
+
data/README.md
ADDED
@@ -0,0 +1,51 @@
+# Honeycomb Batch JSON Logstash Plugin
+
+This plugin is a heavily modified version of the standard logstash [http output](https://github.com/logstash-plugins/logstash-output-http) plugin and the [Lucidworks JSON batch plugin](https://github.com/lucidworks/logstash-output-json_batch).
+
+# Usage
+
+Please note that the name of the plugin when used is `json_batch`, since it only supports JSON in its current form.
+
+The default batch size is 50, ...
+
+    output {
+      honeycomb_json_batch {
+        write_key => "YOUR_TEAM_KEY"
+        dataset => "Logstash Batch Test"
+      }
+    }
+
+# Installation
+
+The easiest way to use this plugin is by installing it through rubygems like any other logstash plugin. To get the latest version installed, you should run the following command: `bin/logstash-plugin install logstash-output-honeycomb_json_batch`
+
+TODO: publish to rubygems
+
+
+# Running locally
+
+Install dependencies
+
+```
+bundle install
+```
+
+Run tests
+
+```
+bundle exec rspec
+```
+
+## Run in an installed Logstash
+
+You can build the gem and install it using:
+
+```
+gem build logstash-output-honeycomb_json_batch.gemspec
+```
+
+And install it into your local logstash instance:
+
+```
+logstash-plugin install ./path/to/logstash-output-honeycomb_json_batch-VERSION.gem
+```
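Beyond `write_key` and `dataset`, the plugin source below declares several optional settings (`api_host`, `flush_size`, `idle_flush_time`, `retry_individual`, `pool_max`). For reference, a fuller pipeline sketch that spells them out; the explicit values are illustrative and are not part of the released README:

```
output {
  honeycomb_json_batch {
    write_key => "YOUR_TEAM_KEY"
    dataset => "Logstash Batch Test"
    api_host => "https://api.honeycomb.io"  # the source comment names api.honeycomb.io as the intended default host
    flush_size => 50                        # events per batch (default 50)
    idle_flush_time => 5                    # seconds before a partial batch is flushed (default 5)
    retry_individual => true                # on a failed batch, resend events one at a time (default true)
    pool_max => 50                          # max concurrent in-flight requests (default 50)
  }
}
```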
data/lib/logstash/outputs/honeycomb_json_batch.rb
ADDED
@@ -0,0 +1,164 @@
+# encoding: utf-8
+require "logstash/outputs/base"
+require "logstash/namespace"
+require "logstash/json"
+require "uri"
+require "stud/buffer"
+require "logstash/plugin_mixins/http_client"
+
+class LogStash::Outputs::HoneycombJSONBatch < LogStash::Outputs::Base
+  include LogStash::PluginMixins::HttpClient
+  include Stud::Buffer
+
+  config_name "honeycomb_json_batch"
+
+  # URL host to use, defaults to api.honeycomb.io
+  config :api_host, :validate => :string
+
+  config :write_key, :validate => :string, :required => true
+
+  config :dataset, :validate => :string, :required => true
+
+  config :flush_size, :validate => :number, :default => 50
+
+  config :idle_flush_time, :validate => :number, :default => 5
+
+  config :retry_individual, :validate => :boolean, :default => true
+
+  config :pool_max, :validate => :number, :default => 50
+
+  def register
+    # We count outstanding requests with this queue
+    # This queue tracks the requests to create backpressure
+    # When this queue is empty no new requests may be sent,
+    # tokens must be added back by the client on success
+    @request_tokens = SizedQueue.new(@pool_max)
+    @pool_max.times {|t| @request_tokens << true }
+    @total = 0
+    @total_failed = 0
+    @requests = Array.new
+    if !@api_host.start_with? "http"
+      @api_host = "http://#{ @api_host }"
+    end
+    @api_host = @api_host.chomp
+
+    buffer_initialize(
+      :max_items => @flush_size,
+      :max_interval => @idle_flush_time,
+      :logger => @logger
+    )
+    logger.info("Initialized honeycomb_json_batch with settings",
+      :flush_size => @flush_size,
+      :idle_flush_time => @idle_flush_time,
+      :request_tokens => @pool_max,
+      :api_host => @api_host,
+      :headers => request_headers,
+      :retry_individual => @retry_individual)
+
+  end
+
+  # This module currently does not support parallel requests as that would circumvent the batching
+  def receive(event, async_type=:background)
+    buffer_receive(event)
+  end
+
+  public
+  def flush(events, close=false)
+    documents = [] #this is the array of hashes that we push to Fusion as documents
+
+    events.each do |event|
+      data = event.to_hash()
+      timestamp = data.delete("@timestamp")
+      documents.push({ "time" => timestamp, "data" => data })
+    end
+
+    make_request(documents)
+  end
+
+  def multi_receive(events)
+    events.each {|event| buffer_receive(event)}
+  end
+
+  private
+
+  def make_request(documents)
+    body = LogStash::Json.dump({ @dataset => documents })
+    # Block waiting for a token
+    token = @request_tokens.pop
+    @logger.debug("Got token", :tokens => @request_tokens.length)
+
+
+    # Create an async request
+    begin
+      request = client.post("#{@api_host}/1/batch", {
+        :body => body,
+        :headers => request_headers,
+        :async => true
+      })
+    rescue Exception => e
+      @logger.warn("An error occurred while indexing: #{e.message}")
+    end
+
+    # attach handlers before performing request
+    request.on_complete do
+      # Make sure we return the token to the pool
+      @request_tokens << token
+    end
+
+    request.on_success do |response|
+      if response.code >= 200 && response.code < 300
+        @total = @total + documents.length
+        @logger.debug("Successfully submitted",
+          :docs => documents.length,
+          :response_code => response.code,
+          :total => @total)
+      else
+        if documents.length > 1 && @retry_individual
+          documents.each do |doc|
+            make_request([doc])
+          end
+        else
+          @total_failed += documents.length
+          log_failure(
+            "Encountered non-200 HTTP code #{response.code}",
+            :response_code => response.code,
+            :url => url,
+            :response_body => response.body,
+            :num_docs => documents.length,
+            :retry_individual => @retry_individual,
+            :total_failed => @total_failed)
+        end
+      end
+    end
+
+    request.on_failure do |exception|
+      @total_failed += documents.length
+      log_failure("Could not access URL",
+        :url => url,
+        :method => @http_method,
+        :body => body,
+        :headers => headers,
+        :message => exception.message,
+        :class => exception.class.name,
+        :backtrace => exception.backtrace,
+        :total_failed => @total_failed
+      )
+    end
+
+    Thread.new do
+      client.execute!
+    end
+  end
+
+  # This is split into a separate method mostly to help testing
+  def log_failure(message, opts)
+    @logger.error("[HTTP Output Failure] #{message}", opts)
+  end
+
+  def request_headers()
+    {
+      "Content-Type" => "application/json",
+      "X-Honeycomb-Team" => @write_key
+    }
+  end
+end
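In `flush` and `make_request` above, each buffered event is reshaped into `{ "time" => <@timestamp>, "data" => <remaining fields> }`, the whole batch is keyed by the dataset name, serialized to JSON, and POSTed to `<api_host>/1/batch` with the `X-Honeycomb-Team` header. A minimal standalone sketch of that payload shape, using the Ruby stdlib `json` module and a hypothetical `events` array in place of `LogStash::Json` and real pipeline events:

```ruby
require "json"
require "time"

# Hypothetical stand-ins for the plugin's config and buffered events.
dataset   = "Logstash Batch Test"
write_key = "YOUR_TEAM_KEY"
events    = [
  { "@timestamp" => Time.now.utc.iso8601(3), "message" => "hi",  "@version" => "1" },
  { "@timestamp" => Time.now.utc.iso8601(3), "message" => "bye", "@version" => "1" },
]

# Mirrors flush(): pull @timestamp out of each event hash and wrap the rest.
documents = events.map do |event|
  data = event.dup
  timestamp = data.delete("@timestamp")
  { "time" => timestamp, "data" => data }
end

# Mirrors make_request(): one JSON object keyed by the dataset name.
body = JSON.dump({ dataset => documents })

# Same headers as request_headers().
headers = {
  "Content-Type"     => "application/json",
  "X-Honeycomb-Team" => write_key,
}

puts body             # {"Logstash Batch Test":[{"time":"...","data":{"message":"hi","@version":"1"}}, ...]}
puts headers.inspect
```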
data/logstash-output-honeycomb_json_batch.gemspec
ADDED
@@ -0,0 +1,28 @@
+Gem::Specification.new do |s|
+  s.name = 'logstash-output-honeycomb_json_batch'
+  s.version = '0.1.0'
+  s.licenses = ['Apache-2.0']
+  s.summary = "This output lets you `POST` batches of events to the Honeycomb.io API endpoint"
+  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program"
+  s.authors = ["Honeycomb"]
+  s.email = 'support@honeycomb.io'
+  s.homepage = "https://honeycomb.io"
+  s.require_paths = ["lib"]
+
+  # Files
+  s.files = Dir['lib/**/*','spec/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile','LICENSE','NOTICE.TXT']
+
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+  # Gem dependencies
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_runtime_dependency "logstash-mixin-http_client", ">= 2.2.1", "< 5.0.0"
+
+  s.add_development_dependency 'logstash-devutils'
+  s.add_development_dependency 'sinatra'
+  s.add_development_dependency 'webrick'
+end
data/spec/outputs/honeycomb_json_batch_spec.rb
ADDED
@@ -0,0 +1,99 @@
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/honeycomb_json_batch"
+
+PORT = rand(65535-1024) + 1025
+WRITE_KEY = "YOUR_WRITE_KEY"
+DATASET = "YOUR_DATASET"
+
+describe LogStash::Outputs::HoneycombJSONBatch do
+  let(:port) { PORT }
+  let(:event) { LogStash::Event.new("message" => "hi") }
+  let(:api_host) { "http://localhost:#{port}"}
+  let(:flush_size) { 15 }
+  let(:client) { @honeycomb.client }
+
+  before do
+    @honeycomb = LogStash::Outputs::HoneycombJSONBatch.new(
+      "write_key" => WRITE_KEY,
+      "dataset" => DATASET,
+      "api_host" => api_host,
+      "flush_size" => flush_size
+    )
+  end
+
+  before do
+    allow(@honeycomb).to receive(:client).and_return(client)
+    @honeycomb.register
+    allow(client).to receive(:post).and_call_original
+  end
+
+  after do
+    @honeycomb.close
+  end
+
+  it "should receive a single post request" do
+    expect(client).to receive(:post).
+      with("#{ api_host }/1/batch", hash_including(:body, :headers, :async)).
+      once.
+      and_call_original
+
+    5.times {|t| @honeycomb.receive(event)}
+    @honeycomb.buffer_flush(:force => true)
+  end
+
+  it "should send batches based on the specified flush_size" do
+    expect(client).to receive(:post).
+      with("#{ api_host }/1/batch", hash_including(:body, :headers, :async)).
+      twice.
+      and_call_original
+
+    (flush_size + 1).times {|t| @honeycomb.receive(event)}
+    @honeycomb.buffer_flush(:force => true)
+  end
+
+  it "should attach the right headers for Honeycomb ingestion" do
+    expect(client).to receive(:post).
+      with("#{ api_host }/1/batch", hash_including(:headers => {
+        "Content-Type" => "application/json",
+        "X-Honeycomb-Team" => WRITE_KEY
+      })).once.
+      and_call_original
+
+    @honeycomb.receive(event)
+    @honeycomb.buffer_flush(:force => true)
+  end
+
+  it "should wrap events in the right structure Honeycomb ingestion" do
+    data = event.to_hash()
+    data.delete("@timestamp")
+    expect(client).to receive(:post).
+      with("#{ api_host }/1/batch", hash_including(:body => LogStash::Json.dump({
+        DATASET => [ { "time" => event.timestamp.to_s, "data" => data } ]
+      }))).once.
+      and_call_original
+
+    @honeycomb.receive(event)
+    @honeycomb.buffer_flush(:force => true)
+  end
+
+  it "should wrap multiple events up in the right structure" do
+    event1 = LogStash::Event.new("alpha" => 1.0)
+    event2 = LogStash::Event.new("beta" => 2.0)
+    event3 = LogStash::Event.new("gamma" => 3.0)
+
+    expect(client).to receive(:post).
+      with("#{ api_host }/1/batch", hash_including(:body => LogStash::Json.dump({
+        DATASET => [
+          { "time" => event1.timestamp.to_s, "data" => { "alpha" => 1.0, "@version" => "1" } },
+          { "time" => event2.timestamp.to_s, "data" => { "@version" => "1", "beta" => 2.0 } },
+          { "time" => event3.timestamp.to_s, "data" => { "@version" => "1", "gamma" => 3.0 } }
+        ]
+      }))).once.
+      and_call_original
+
+    @honeycomb.receive(event1)
+    @honeycomb.receive(event2)
+    @honeycomb.receive(event3)
+    @honeycomb.buffer_flush(:force => true)
+  end
+end
metadata
ADDED
@@ -0,0 +1,134 @@
+--- !ruby/object:Gem::Specification
+name: logstash-output-honeycomb_json_batch
+version: !ruby/object:Gem::Version
+  version: 0.1.0
+platform: ruby
+authors:
+- Honeycomb
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2017-01-31 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+  name: logstash-core-plugin-api
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 2.2.1
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 5.0.0
+  name: logstash-mixin-http_client
+  prerelease: false
+  type: :runtime
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: 2.2.1
+    - - "<"
+      - !ruby/object:Gem::Version
+        version: 5.0.0
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: logstash-devutils
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: sinatra
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  name: webrick
+  prerelease: false
+  type: :development
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+description: This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program
+email: support@honeycomb.io
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- LICENSE
+- README.md
+- lib/logstash/outputs/honeycomb_json_batch.rb
+- logstash-output-honeycomb_json_batch.gemspec
+- spec/outputs/honeycomb_json_batch_spec.rb
+homepage: https://honeycomb.io
+licenses:
+- Apache-2.0
+metadata:
+  logstash_plugin: 'true'
+  logstash_group: output
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubyforge_project:
+rubygems_version: 2.6.8
+signing_key:
+specification_version: 4
+summary: This output lets you `POST` batches of events to the Honeycomb.io API endpoint
+test_files:
+- spec/outputs/honeycomb_json_batch_spec.rb