logstash-output-loki-tenants 1.0.3
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/Gemfile +14 -0
- data/README.md +76 -0
- data/lib/logstash/outputs/loki.rb +301 -0
- data/lib/logstash/outputs/loki/batch.rb +63 -0
- data/lib/logstash/outputs/loki/entry.rb +25 -0
- data/logstash-output-loki.gemspec +26 -0
- data/spec/outputs/loki/entry_spec.rb +59 -0
- data/spec/outputs/loki_spec.rb +281 -0
- metadata +103 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
+---
+SHA256:
+  metadata.gz: 468a3f120f85bb99690179384555a9cc4d0b090f027fa6abdbc2e40200973964
+  data.tar.gz: 58532c550dff9474933c6df25b635586d0fd745014753babf8dfaf300e653939
+SHA512:
+  metadata.gz: 45db85447deeda78d894384604a1dd470b1b873d5ea854c6ab26390bdbfdc33640aaef7833af7aa66188ed96ad174414a85e0b90ad5ed186c0bbd7ac21ed4932
+  data.tar.gz: '005299b0c0541b42806f35ae6d0aa7f55967066f8851642c9ec791f58b2d5387065c4600e51bc91fb7830c082abddbf14de51c5c9a82e3ca9d806c6a896de5e1'
data/Gemfile
ADDED
@@ -0,0 +1,14 @@
+source 'https://rubygems.org'
+
+gemspec
+
+logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"
+
+if Dir.exist?(logstash_path)
+  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+  gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+else
+  raise 'missing logstash vendoring'
+end
+
+gem "webmock", "~> 3.8"
data/README.md
ADDED
@@ -0,0 +1,76 @@
+# Contributing to Loki Logstash Output Plugin
+
+For information about how to use this plugin see this [documentation](../../docs/sources/clients/logstash/_index.md).
+
+## Install dependencies
+
+First, make sure you have JDK version `8` or `11` installed and that you have set the `JAVA_HOME` environment variable.
+
+You need to set up a JRuby environment to build this plugin. Refer to https://github.com/rbenv/rbenv for setting up your rbenv environment.
+
+After setting up `rbenv`, install JRuby:
+
+```bash
+rbenv install jruby-9.2.10.0
+rbenv local jruby-9.2.10.0
+```
+
+Check that the environment is configured:
+
+```bash
+ruby --version
+jruby 9.2.10
+```
+
+Make sure you are running `jruby` and not `ruby`. If `ruby --version` still shows `ruby` and not `jruby`, check that PATH contains `$HOME/.rbenv/shims` and `$HOME/.rbenv/bin`. Also verify that you have this in your bash profile:
+
+```bash
+export PATH="$HOME/.rbenv/bin:$PATH"
+eval "$(rbenv init -)"
+```
+
+Then install bundler:
+`gem install bundler:2.1.4`
+
+Follow these instructions to [install logstash](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html) before moving to the next section.
+
+## Build and test the plugin
+
+### Install required packages
+
+```bash
+git clone git@github.com:elastic/logstash.git
+cd logstash
+git checkout tags/v7.6.2
+export LOGSTASH_PATH=`pwd`
+export GEM_PATH=$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0
+export GEM_HOME=$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0
+./gradlew assemble
+cd ..
+ruby -S bundle install
+ruby -S bundle exec rake vendor
+```
+
+### Build the plugin
+
+`gem build logstash-output-loki.gemspec`
+
+### Test
+
+`ruby -S bundle exec rspec`
+
+Alternatively, if you don't want to install JRuby, enter the logstash-loki container:
+
+```bash
+docker build -t logstash-loki ./
+docker run -v `pwd`/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
+bundle exec rspec
+```
+
+## Install plugin to local logstash
+
+`bin/logstash-plugin install --no-verify --local logstash-output-loki-1.0.0.gem`
+
+## Send sample event and check plugin is working
+
+`bin/logstash -f loki.conf`
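The `loki.conf` referenced above is not shipped with the package. As an illustration only, a minimal pipeline for this plugin might look like the sketch below; the `url` value and the `tenant` field added in the filter are hypothetical (this fork reads a `tenant` label to pick the per-tenant batch and the `X-Scope-OrgID` header, as can be seen in `lib/logstash/outputs/loki.rb` below).

```
input {
  stdin {}
}

filter {
  mutate {
    # Hypothetical: a "tenant" event field becomes a label, and this fork uses
    # the "tenant" label to route the event to a per-tenant batch and to set
    # the X-Scope-OrgID header on the push request.
    add_field => { "tenant" => "tenant-a" }
  }
}

output {
  loki {
    url        => "http://localhost:3100/loki/api/v1/push"
    batch_wait => 1        # seconds
    batch_size => 102400   # bytes
  }
}
```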
data/lib/logstash/outputs/loki.rb
ADDED
@@ -0,0 +1,301 @@
+# encoding: utf-8
+require "logstash/outputs/base"
+require "logstash/outputs/loki/entry"
+require "logstash/outputs/loki/batch"
+require "logstash/namespace"
+require 'net/http'
+require 'time'
+require 'uri'
+require 'json'
+
+class LogStash::Outputs::Loki < LogStash::Outputs::Base
+  include Loki
+  config_name "loki"
+
+  ## 'A single instance of the Output will be shared among the pipeline worker threads'
+  concurrency :single
+
+  ## 'Loki URL'
+  config :url, :validate => :string, :required => true
+
+  ## 'BasicAuth credentials'
+  config :username, :validate => :string, :required => false
+  config :password, :validate => :string, secret: true, :required => false
+
+  ## 'Client certificate'
+  config :cert, :validate => :path, :required => false
+  config :key, :validate => :path, :required => false
+
+  ## 'TLS'
+  config :ca_cert, :validate => :path, :required => false
+
+  ## 'Disable server certificate verification'
+  config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false
+
+  ## 'Loki Tenant ID'
+  config :tenant_id, :validate => :string, :required => false
+
+  ## 'Maximum batch size to accrue before pushing to loki. Defaults to 102400 bytes'
+  config :batch_size, :validate => :number, :default => 102400, :required => false
+
+  ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
+  config :batch_wait, :validate => :number, :default => 1, :required => false
+
+  ## 'Log line field to pick from logstash. Defaults to "message"'
+  config :message_field, :validate => :string, :default => "message", :required => false
+
+  ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
+  config :min_delay, :validate => :number, :default => 1, :required => false
+
+  ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
+  config :max_delay, :validate => :number, :default => 300, :required => false
+
+  ## 'Backoff configuration. Maximum number of retries to do'
+  config :retries, :validate => :number, :default => 10, :required => false
+
+  attr_reader :batches
+  public
+  def register
+
+    @uri = URI.parse(@url)
+    unless @uri.is_a?(URI::HTTP) || @uri.is_a?(URI::HTTPS)
+      raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
+    end
+
+    if @min_delay > @max_delay
+      raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
+    end
+
+    @logger.info("Loki output plugin", :class => self.class.name)
+
+    # initialize Queue and Mutex
+    @entries = Queue.new
+    @mutex = Mutex.new
+    @stop = false
+
+    # create nil batch object.
+    @batches = Hash.new
+
+    # validate certs
+    if ssl_cert?
+      load_ssl
+      validate_ssl_key
+    end
+
+    # start batch_max_wait and batch_max_size threads
+    @batch_wait_thread = Thread.new{max_batch_wait()}
+    @batch_size_thread = Thread.new{max_batch_size()}
+
+  end
+
+  def batch(tenant = 'fake')
+    return nil if @batches.nil?
+    return @batches[tenant] if !tenant.nil? && !tenant.empty? && @batches.key?(tenant)
+    return @batches['fake'] if @batches.key?('fake')
+    return nil
+  end
+
+  def max_batch_size
+    loop do
+      @mutex.synchronize do
+        return if @stop
+      end
+
+      e = @entries.deq
+      return if e.nil?
+
+      tenant = nil
+      tenant = e.labels['tenant'] if !e.labels.nil? && e.labels.key?('tenant')
+      tenant = 'fake' if tenant.nil? or tenant.empty?
+
+      @mutex.synchronize do
+        if !add_entry_to_batch(e, tenant)
+          @logger.debug("Max batch_size is reached. Sending batch to loki. Tenant #{tenant}")
+          send_batch_for_tenant(tenant)
+          @batches[tenant] = Batch.new(e)
+        end
+      end
+    end
+  end
+
+  def max_batch_wait
+    # minimum wait frequency is 10 milliseconds
+    min_wait_checkfrequency = 1/100
+    max_wait_checkfrequency = @batch_wait
+    if max_wait_checkfrequency < min_wait_checkfrequency
+      max_wait_checkfrequency = min_wait_checkfrequency
+    end
+
+    loop do
+      @mutex.synchronize do
+        return if @stop
+      end
+
+      sleep(max_wait_checkfrequency)
+
+      @mutex.synchronize do
+        @batches.keys.clone.each { |tenant|
+          if is_batch_expired(tenant)
+            @logger.debug("Max batch_wait time is reached. Sending batch to loki. Tenant #{tenant}")
+            send_batch_for_tenant(tenant)
+            @batches.delete(tenant)
+          end
+        }
+      end
+    end
+  end
+
+  def ssl_cert?
+    !@key.nil? && !@cert.nil?
+  end
+
+  def load_ssl
+    @cert = OpenSSL::X509::Certificate.new(File.read(@cert)) if @cert
+    @key = OpenSSL::PKey.read(File.read(@key)) if @key
+  end
+
+  def validate_ssl_key
+    if !@key.is_a?(OpenSSL::PKey::RSA) && !@key.is_a?(OpenSSL::PKey::DSA)
+      raise LogStash::ConfigurationError, "Unsupported private key type '#{@key.class}''"
+    end
+  end
+
+  def ssl_opts(uri)
+    opts = {
+      use_ssl: uri.scheme == 'https'
+    }
+
+    # disable server certificate verification
+    if @insecure_skip_verify
+      opts = opts.merge(
+        verify_mode: OpenSSL::SSL::VERIFY_NONE
+      )
+    end
+
+    if !@cert.nil? && !@key.nil?
+      opts = opts.merge(
+        verify_mode: OpenSSL::SSL::VERIFY_PEER,
+        cert: @cert,
+        key: @key
+      )
+    end
+
+    unless @ca_cert.nil?
+      opts = opts.merge(
+        ca_file: @ca_cert
+      )
+    end
+    opts
+  end
+
+  # Add an entry to the current batch returns false if the batch is full
+  # and the entry can't be added.
+  def add_entry_to_batch(e, tenant = 'fake')
+    line = e.entry['line']
+    # we don't want to send empty lines.
+    return true if line.to_s.strip.empty?
+
+    tenant = 'fake' if tenant.nil? or tenant.empty?
+
+    if @batches.nil?
+      @batches = Hash.new
+    end
+
+    if !@batches.key?(tenant)
+      @batches[tenant] = Batch.new(e)
+      return true
+    end
+
+    if @batches[tenant].size_bytes_after(line) > @batch_size
+      return false
+    end
+
+    @batches[tenant].add(e)
+    return true
+  end
+
+  def is_batch_expired(tenant = 'fake')
+    tenant = 'fake' if tenant.nil? or tenant.empty?
+    return !@batches.nil? && @batches.key?(tenant) && @batches[tenant].age() >= @batch_wait
+  end
+
+  def send_batch_for_tenant(tenant)
+    send(batch(tenant), tenant)
+  end
+
+  ## Receives logstash events
+  public
+  def receive(event)
+    @entries << Entry.new(event, @message_field)
+  end
+
+  def close
+    @entries.close
+    @mutex.synchronize do
+      @stop = true
+    end
+    @batch_wait_thread.join
+    @batch_size_thread.join
+
+    # if by any chance we still have a forming batch, we need to send it.
+    @batches.keys.each { |tenant|
+      send_batch_for_tenant(tenant)
+    }
+    @batches.clear()
+    @batches = nil
+  end
+
+  def send(batch, tenant = 'fake')
+    payload = batch.to_json
+    res = loki_http_request(payload, tenant)
+    if res.is_a?(Net::HTTPSuccess)
+      @logger.debug("Successfully pushed data to loki")
+    else
+      @logger.debug("failed payload", :payload => payload)
+    end
+  end
+
+  def loki_http_request(payload, tenant = 'fake')
+    req = Net::HTTP::Post.new(
+      @uri.request_uri
+    )
+    req.add_field('Content-Type', 'application/json')
+    if !tenant.nil? && !tenant.empty? && !tenant.eql?('fake')
+      req.add_field('X-Scope-OrgID', tenant)
+    elsif !@tenant_id.nil? && !@tenant_id.empty?
+      req.add_field('X-Scope-OrgID', @tenant_id)
+    end
+
+    req['User-Agent']= 'loki-logstash'
+    req.basic_auth(@username, @password) if @username
+    req.body = payload
+
+    opts = ssl_opts(@uri)
+
+    @logger.debug("sending #{req.body.length} bytes to loki. tenant #{tenant}")
+    retry_count = 0
+    delay = @min_delay
+    begin
+      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
+        http.request(req)
+      }
+      return res if !res.nil? && res.code.to_i != 429 && res.code.to_i.div(100) != 5
+      raise StandardError.new res
+    rescue StandardError => e
+      retry_count += 1
+      @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
+      if retry_count < @retries
+        sleep delay
+        if delay * 2 <= @max_delay
+          delay = delay * 2
+        else
+          delay = @max_delay
+        end
+        retry
+      else
+        @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
+        return res
+      end
+    end
+  end
+end
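Worth noting in `loki_http_request` above: only 429 and 5xx responses (and raised exceptions) are retried, and the sleep between attempts doubles from `min_delay` up to a cap of `max_delay`. The standalone sketch below, which is not part of the plugin, just reproduces that schedule for the default settings (`min_delay=1`, `max_delay=300`, `retries=10`).

```ruby
# Standalone sketch (not part of the plugin) of the retry schedule used by
# loki_http_request: the delay doubles after every failed attempt, capped at
# max_delay, with one sleep between consecutive attempts.
min_delay, max_delay, retries = 1, 300, 10   # plugin defaults

delay = min_delay
sleeps = []
(retries - 1).times do
  sleeps << delay
  delay = delay * 2 <= max_delay ? delay * 2 : max_delay
end

puts "sleeps between the #{retries} attempts: #{sleeps.inspect}"
# => sleeps between the 10 attempts: [1, 2, 4, 8, 16, 32, 64, 128, 256]
```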
data/lib/logstash/outputs/loki/batch.rb
ADDED
@@ -0,0 +1,63 @@
+require 'time'
+
+module Loki
+  class Batch
+    attr_reader :streams
+    def initialize(e)
+      @bytes = 0
+      @createdAt = Time.now
+      @streams = {}
+      add(e)
+    end
+
+    def size_bytes
+      return @bytes
+    end
+
+    def add(e)
+      @bytes = @bytes + e.entry['line'].length
+
+      # Append the entry to an already existing stream (if any)
+      labels = e.labels.sort.to_h
+      labelkey = labels.to_s
+      if @streams.has_key?(labelkey)
+        stream = @streams[labelkey]
+        stream['entries'].append(e.entry)
+        return
+      else
+        # Add the entry as a new stream
+        @streams[labelkey] = {
+          "labels" => labels,
+          "entries" => [e.entry],
+        }
+      end
+    end
+
+    def size_bytes_after(line)
+      return @bytes + line.length
+    end
+
+    def age()
+      return Time.now - @createdAt
+    end
+
+    def to_json
+      streams = []
+      @streams.each { |_ , stream|
+        streams.append(build_stream(stream))
+      }
+      return {"streams"=>streams}.to_json
+    end
+
+    def build_stream(stream)
+      values = []
+      stream['entries'].each { |entry|
+        values.append([entry['ts'].to_s, entry['line']])
+      }
+      return {
+        'stream'=>stream['labels'],
+        'values' => values
+      }
+    end
+  end
+end
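For reference, `Batch#to_json` above produces the JSON body of Loki's push API (`POST /loki/api/v1/push`): one object per distinct label set, with `values` holding `[timestamp-in-nanoseconds-as-string, line]` pairs. The plain-Ruby sketch below only illustrates that shape; the labels, lines, and timestamps are made up.

```ruby
require 'json'

# Approximation of the payload built by Batch#to_json; label names, lines and
# timestamps here are illustrative only.
payload = {
  "streams" => [
    {
      "stream" => { "agent" => "filebeat", "host" => "172.0.0.1" },
      "values" => [
        ["1000000000", "hello"],        # ts comes from Entry/to_ns, serialized as a string
        ["2000000000", "hello again"]
      ]
    }
  ]
}

puts JSON.pretty_generate(payload)
```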
data/lib/logstash/outputs/loki/entry.rb
ADDED
@@ -0,0 +1,25 @@
+module Loki
+  def to_ns(s)
+    (s.to_f * (10**9)).to_i
+  end
+  class Entry
+    include Loki
+    attr_reader :labels, :entry
+    def initialize(event,message_field)
+      @entry = {
+        "ts" => to_ns(event.get("@timestamp")),
+        "line" => event.get(message_field).to_s
+      }
+      event = event.clone()
+      event.remove(message_field)
+      event.remove("@timestamp")
+
+      @labels = {}
+      event.to_hash.each { |key,value|
+        next if key.start_with?('@')
+        next if value.is_a?(Hash)
+        @labels[key] = value.to_s
+      }
+    end
+  end
+end
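`Entry` above turns a Logstash event into a `{ts, line}` pair plus a label set: the configured message field becomes the line, `@timestamp` is converted to nanoseconds, and every remaining top-level field that is neither `@`-prefixed nor a nested hash is stringified into a label. A plain-Hash approximation of that behaviour (no `LogStash::Event` required, field names illustrative):

```ruby
require 'time'

# Plain-Hash stand-in for Entry#initialize; field names are illustrative.
event  = { "message" => "hello", "@timestamp" => Time.at(1),
           "foo" => 5, "log" => { "file" => "x.log" }, "@version" => "1" }

ts     = (event["@timestamp"].to_f * 10**9).to_i        # same conversion as Loki#to_ns
line   = event["message"].to_s
labels = event.reject { |k, _| ["message", "@timestamp"].include?(k) }
              .reject { |k, v| k.start_with?("@") || v.is_a?(Hash) }
              .transform_values(&:to_s)

p ts: ts, line: line, labels: labels
# labels ends up as {"foo"=>"5"}; "log" (a hash) and "@version" are skipped
```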
data/logstash-output-loki.gemspec
ADDED
@@ -0,0 +1,26 @@
+Gem::Specification.new do |s|
+  s.name = 'logstash-output-loki-tenants'
+  s.version = '1.0.3'
+  s.authors = ['Co.brick']
+  s.email = ['aiops.dev@cobrick.com']
+
+  s.summary = 'Output plugin to ship logs to a Grafana Loki server'
+  s.description = 'Output plugin to ship logs to a Grafana Loki server'
+  s.homepage = 'https://rubygems.org/gems/logstash-output-loki-tenants'
+  s.license = 'Apache-2.0'
+  s.require_paths = ["lib"]
+
+  # Files
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile']
+  # Tests
+  s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+  # Special flag to let us know this is actually a logstash plugin
+  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+  # Gem dependencies
+  #
+  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+  s.add_runtime_dependency "logstash-codec-plain", "3.0.6"
+  s.add_development_dependency 'logstash-devutils', "2.0.2"
+end
data/spec/outputs/loki/entry_spec.rb
ADDED
@@ -0,0 +1,59 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/loki"
+require "logstash/codecs/plain"
+require "logstash/event"
+require "net/http"
+include Loki
+
+describe Loki::Entry do
+  context 'test entry generation' do
+    let (:event) {
+      LogStash::Event.new(
+        {
+          'message' => 'hello',
+          '@metadata' => {'foo'=>'bar'},
+          '@version' => '1',
+          'foo' => 5,
+          'agent' => 'filebeat',
+          'log' => {
+            'file' =>
+              {'@path' => '/path/to/file.log'},
+          },
+          'host' => '172.0.0.1',
+          '@timestamp' => Time.now
+        }
+      )
+    }
+
+    it 'labels extracted should not contains object and metadata or timestamp' do
+      entry = Entry.new(event,"message")
+      expect(entry.labels).to eql({ 'agent' => 'filebeat', 'host' => '172.0.0.1', 'foo'=>'5'})
+      expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+      expect(entry.entry['line']).to eql 'hello'
+    end
+  end
+
+  context 'test batch generation with label order' do
+    let (:entries) {[
+      Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message"),
+      Entry.new(LogStash::Event.new({"log"=>"foobar","bar"=>"bar","@timestamp"=>Time.at(2)}),"log"),
+      Entry.new(LogStash::Event.new({"cluster"=>"us-central1","message"=>"foobuzz","buzz"=>"bar","@timestamp"=>Time.at(3)}),"message"),
+
+    ]}
+    let (:expected) {
+      {"streams" => [
+        {"stream"=> {"buzz"=>"bar","cluster"=>"us-central1"}, "values" => [[to_ns(Time.at(1)).to_s,"foobuzz"],[to_ns(Time.at(3)).to_s,"foobuzz"]]},
+        {"stream"=> {"bar"=>"bar"}, "values"=>[[to_ns(Time.at(2)).to_s,"foobar"]]},
+      ] }
+    }
+
+    it 'to_json' do
+      @batch = Loki::Batch.new(entries.first)
+      entries.drop(1).each { |e| @batch.add(e)}
+      expect(JSON.parse(@batch.to_json)).to eql expected
+    end
+  end
+
+
+end
data/spec/outputs/loki_spec.rb
ADDED
@@ -0,0 +1,281 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/loki"
+require "logstash/codecs/plain"
+require "logstash/event"
+require "net/http"
+require 'webmock/rspec'
+include Loki
+
+describe LogStash::Outputs::Loki do
+
+  let (:simple_loki_config) { {'url' => 'http://localhost:3100'} }
+
+  context 'when initializing' do
+    it "should register" do
+      loki = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      expect { loki.register }.to_not raise_error
+    end
+
+    it 'should populate loki config with default or initialized values' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config)
+      expect(loki.url).to eql 'http://localhost:3100'
+      expect(loki.tenant_id).to eql nil
+      expect(loki.batch_size).to eql 102400
+      expect(loki.batch_wait).to eql 1
+    end
+  end
+
+  context 'when adding en entry to the batch' do
+    let (:simple_loki_config) {{'url' => 'http://localhost:3100'}}
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
+    let (:lbs) { {"buzz"=>"bar","cluster"=>"us-central1"}.sort.to_h}
+
+    it 'should add tenant batch' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry, "a")).to eql true
+      expect(plugin.add_entry_to_batch(entry, "b")).to eql true
+      expect(plugin.add_entry_to_batch(entry, nil)).to eql true
+      expect(plugin.add_entry_to_batch(entry, "")).to eql true
+      expect(plugin.batches.keys.length).to eq 3
+    end
+
+    it 'should not add empty line' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      emptyEntry = Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"foo")
+      expect(plugin.add_entry_to_batch(emptyEntry)).to eql true
+      expect(plugin.batch).to eql nil
+    end
+
+    it 'should add entry' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 2
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 14
+    end
+
+    it 'should not add if full' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config.merge!({'batch_size'=>10}))
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true # first entry is fine.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+      expect(plugin.add_entry_to_batch(entry)).to eql false # second entry goes over the limit.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+    end
+  end
+
+  context 'batch expiration' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
+
+    it 'should not expire if empty' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      sleep(1)
+      expect(loki.is_batch_expired).to be false
+    end
+    it 'should not expire batch if not old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      expect(loki.is_batch_expired).to be false
+    end
+    it 'should expire if old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      sleep(1)
+      expect(loki.is_batch_expired).to be true
+    end
+  end
+
+  context 'channel' do
+    let (:event) {LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)})}
+
+    it 'should send entry if batch size reached with no tenant' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+      loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.receive(event)
+      sent.deq
+      sent.deq
+      loki.close
+    end
+    it 'should send entry while closing' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>10,'batch_size'=>10}))
+      loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.close
+      sent.deq
+    end
+    it 'should send entry when batch is expiring' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+      loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      sent.deq
+      sleep(0.01) # Adding a minimal sleep. In few cases @batch=nil might happen after evaluating for nil
+      expect(loki.batch).to be_nil
+      loki.close
+    end
+  end
+
+  context 'http requests' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
+
+    it 'should send message tenant' do
+      conf = {
+        'url'=>'http://localhost:3100/loki/api/v1/push',
+        'username' => 'foo',
+        'password' => 'bar',
+        'tenant_id' => 't'
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://localhost:3100/loki/api/v1/push").with(
+        basic_auth: ['foo', 'bar'],
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'X-Scope-OrgID'=>'custom',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b, "custom")
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should send credentials' do
+      conf = {
+        'url'=>'http://localhost:3100/loki/api/v1/push',
+        'username' => 'foo',
+        'password' => 'bar',
+        'tenant_id' => 't'
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://localhost:3100/loki/api/v1/push").with(
+        basic_auth: ['foo', 'bar'],
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'X-Scope-OrgID'=>'t',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should not send credentials' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry 500' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 3,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [500, "Internal Server Error"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(3)
+    end
+    it 'should retry 429' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 2,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [429, "stop spamming"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
+    end
+    it 'should not retry 400' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [400, "bad request"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry exception' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_raise("some error").then.to_return(status: [200, "fine !"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
+    end
+  end
+end
metadata
ADDED
@@ -0,0 +1,103 @@
+--- !ruby/object:Gem::Specification
+name: logstash-output-loki-tenants
+version: !ruby/object:Gem::Version
+  version: 1.0.3
+platform: ruby
+authors:
+- Co.brick
+autorequire:
+bindir: bin
+cert_chain: []
+date: 2021-05-25 00:00:00.000000000 Z
+dependencies:
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+  name: logstash-core-plugin-api
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '1.60'
+    - - "<="
+      - !ruby/object:Gem::Version
+        version: '2.99'
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 3.0.6
+  name: logstash-codec-plain
+  type: :runtime
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 3.0.6
+- !ruby/object:Gem::Dependency
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 2.0.2
+  name: logstash-devutils
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - '='
+      - !ruby/object:Gem::Version
+        version: 2.0.2
+description: Output plugin to ship logs to a Grafana Loki server
+email:
+- aiops.dev@cobrick.com
+executables: []
+extensions: []
+extra_rdoc_files: []
+files:
+- Gemfile
+- README.md
+- lib/logstash/outputs/loki.rb
+- lib/logstash/outputs/loki/batch.rb
+- lib/logstash/outputs/loki/entry.rb
+- logstash-output-loki.gemspec
+- spec/outputs/loki/entry_spec.rb
+- spec/outputs/loki_spec.rb
+homepage: https://rubygems.org/gems/logstash-output-loki-tenants
+licenses:
+- Apache-2.0
+metadata:
+  logstash_plugin: 'true'
+  logstash_group: output
+post_install_message:
+rdoc_options: []
+require_paths:
+- lib
+required_ruby_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+required_rubygems_version: !ruby/object:Gem::Requirement
+  requirements:
+  - - ">="
+    - !ruby/object:Gem::Version
+      version: '0'
+requirements: []
+rubygems_version: 3.0.6
+signing_key:
+specification_version: 4
+summary: Output plugin to ship logs to a Grafana Loki server
+test_files:
+- spec/outputs/loki/entry_spec.rb
+- spec/outputs/loki_spec.rb