logstash-output-loki 1.0.0 → 1.0.1
- checksums.yaml +4 -4
- data/Gemfile +6 -3
- data/README.md +15 -7
- data/lib/logstash/outputs/loki.rb +71 -139
- data/lib/logstash/outputs/loki/batch.rb +61 -45
- data/lib/logstash/outputs/loki/entry.rb +25 -13
- data/logstash-output-loki.gemspec +4 -4
- data/spec/outputs/loki/entry_spec.rb +59 -0
- data/spec/outputs/loki_spec.rb +205 -71
- metadata +6 -2
checksums.yaml CHANGED

@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5d3ae7289992fc6a6b98db3f5f1d929041656f2cd1e7e53915d48df9a8bf216b
+  data.tar.gz: 21e0b37676d7f6d0bca071bd2644a8ce96370e1c62efbf26becb3bce1de63f53
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 74e562693367ffcae0a7c639185ca75a984a156a7b7d8c7c61626b17ea01e20470901a2847230aff5f89d7b800f14cdc35ed82be93dd865af91d54548332dbf2
+  data.tar.gz: 9caf43738978ad056df2661460452c7059bd8a4c646f6e9949b3c7ee492b57ec1d0fa9320df02748f928fe4b002404ec2cfa75c0e9f4d78465606d38088dc8d7
data/Gemfile CHANGED

@@ -2,10 +2,13 @@ source 'https://rubygems.org'
 
 gemspec
 
-logstash_path = ENV["LOGSTASH_PATH"] || "logstash
-use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"
 
-if Dir.exist?(logstash_path)
+if Dir.exist?(logstash_path)
   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+else
+  raise 'missing logstash vendoring'
 end
+
+gem "webmock", "~> 3.8"
data/README.md CHANGED

@@ -1,6 +1,6 @@
-# Loki Logstash Output Plugin
+# Contributing to Loki Logstash Output Plugin
 
-
+For information about how to use this plugin see this [documentation](../../docs/clients/logstash/README.md).
 
 ## Install dependencies
 
@@ -41,11 +41,11 @@ git clone git@github.com:elastic/logstash.git
 cd logstash
 git checkout tags/v7.6.2
 export LOGSTASH_PATH=`pwd`
-export
-export
-
+export GEM_PATH=$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0
+export GEM_HOME=$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0
+./gradlew assemble
 cd ..
-ruby -S bundle install
+ruby -S bundle install
 ruby -S bundle exec rake vendor
 ```
 
@@ -55,7 +55,15 @@ ruby -S bundle exec rake vendor
 
 ### Test
 
-`bundle exec rspec`
+`ruby -S bundle exec rspec`
+
+Alternatively, if you don't want to install JRuby, enter the logstash-loki container.
+
+```bash
+docker build -t logstash-loki ./
+docker run -v `pwd`/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
+bundle exec rspec
+```
 
 ## Install plugin to local logstash
 
data/lib/logstash/outputs/loki.rb CHANGED

@@ -1,5 +1,7 @@
 # encoding: utf-8
 require "logstash/outputs/base"
+require "logstash/outputs/loki/entry"
+require "logstash/outputs/loki/batch"
 require "logstash/namespace"
 require 'net/http'
 require 'concurrent-edge'
@@ -8,9 +10,7 @@ require 'uri'
 require 'json'
 
 class LogStash::Outputs::Loki < LogStash::Outputs::Base
-
-  require 'logstash/outputs/loki/entry'
-
+  include Loki
   config_name "loki"
 
   ## 'A single instance of the Output will be shared among the pipeline worker threads'
@@ -39,12 +39,6 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
   ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
   config :batch_wait, :validate => :number, :default => 1, :required => false
 
-  ## 'Array of label names to include in all logstreams'
-  config :include_labels, :validate => :array, :default => [], :required => true
-
-  ## 'Extra labels to add to all log streams'
-  config :external_labels, :validate => :hash, :default => {}, :required => false
-
   ## 'Log line field to pick from logstash. Defaults to "message"'
   config :message_field, :validate => :string, :default => "message", :required => false
 
@@ -57,6 +51,7 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
   ## 'Backoff configuration. Maximum number of retries to do'
   config :retries, :validate => :number, :default => 10, :required => false
 
+  attr_reader :batch
  public
  def register
    @uri = URI.parse(@url)
@@ -64,22 +59,16 @@
      raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
    end
 
-    if @include_labels.empty?
-      raise LogStash::ConfigurationError, "include_labels should contain atleast one label, currently '#{@include_labels}'"
-    end
-
    if @min_delay > @max_delay
      raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
    end
 
    @logger.info("Loki output plugin", :class => self.class.name)
 
-    #
+    # initialize channels
    @Channel = Concurrent::Channel
    @entries = @Channel.new
-
-    # excluded message and timestamp from labels
-    @exclude_labels = ["message", "@timestamp"]
+    @stop = @Channel.new
 
    # create nil batch object.
    @batch = nil
@@ -130,8 +119,8 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
  end
 
  def run()
-    min_wait_checkfrequency = 1/
-    max_wait_checkfrequency = @batch_wait
+    min_wait_checkfrequency = 1/100 #1 millisecond
+    max_wait_checkfrequency = @batch_wait / 10
    if max_wait_checkfrequency < min_wait_checkfrequency
      max_wait_checkfrequency = min_wait_checkfrequency
    end
@@ -139,30 +128,21 @@
    @max_wait_check = Concurrent::Channel.tick(max_wait_checkfrequency)
    loop do
      Concurrent::Channel.select do |s|
+        s.take(@stop) {
+          return
+        }
        s.take(@entries) { |e|
-
-
-
-
-
-          line = e.entry['line']
-          if @batch.size_bytes_after(line) > @batch_size
-            @logger.debug("Max batch_size is reached. Sending batch to loki")
-            send(@tenant_id, @batch)
-            @batch = Batch.new(e)
-            next
-          end
-          @batch.add(e)
+          if !add_entry_to_batch(e)
+            @logger.debug("Max batch_size is reached. Sending batch to loki")
+            send(@batch)
+            @batch = Batch.new(e)
+          end
        }
        s.take(@max_wait_check) {
          # Send batch if max wait time has been reached
-          if
-          if @batch.age() < @batch_wait
-            next
-          end
-
+          if is_batch_expired
            @logger.debug("Max batch_wait time is reached. Sending batch to loki")
-            send(@
+            send(@batch)
            @batch = nil
          end
        }
@@ -170,85 +150,62 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
      end
    end
  end
 
-
-
-  def
-
-
-
+  # add an entry to the current batch return false if the batch is full
+  # and the entry can't be added.
+  def add_entry_to_batch(e)
+    line = e.entry['line']
+    # we don't want to send empty lines.
+    return true if line.to_s.strip.empty?
 
-
-
+    if @batch.nil?
+      @batch = Batch.new(e)
+      return true
+    end
 
+    if @batch.size_bytes_after(line) > @batch_size
+      return false
+    end
+    @batch.add(e)
+    return true
  end
 
-  def
-
-    send(@tenant_id, @batch) if !@batch.nil?
-    @entries.close
-    @max_wait_check.close if !@max_wait_check.nil?
+  def is_batch_expired
+    return !@batch.nil? && @batch.age() >= @batch_wait
  end
 
-
-
-
-
-      "line" => event.get(@message_field).to_s
-    }
-    return labels, entry_hash
+  ## Receives logstash events
+  public
+  def receive(event)
+    @entries << Entry.new(event, @message_field)
  end
 
-  def
-
-
-
-      if parent_key != ""
-        handle_labels(value, labels, parent_key + "_" + key)
-      else
-        handle_labels(value, labels, key)
-      end
-    else
-      if parent_key != ""
-        labels[parent_key + "_" + key] = value.to_s
-      else
-        labels[key] = value.to_s
-      end
-    end
-  end
-    }
-    return extract_labels(labels)
-  end
+  def close
+    @entries.close
+    @max_wait_check.close if !@max_wait_check.nil?
+    @stop << true # stop will block until it's accepted by the worker.
 
-
-
-
-      if @include_labels.include?(key)
-        key = key.gsub("@", '')
-        labels[key] = value
-      end
-    }
-    return labels
+    # if by any chance we still have a forming batch, we need to send it.
+    send(@batch) if !@batch.nil?
+    @batch = nil
  end
 
-  def send(
-    payload =
-    res = loki_http_request(
-
+  def send(batch)
+    payload = batch.to_json
+    res = loki_http_request(payload)
    if res.is_a?(Net::HTTPSuccess)
      @logger.debug("Successfully pushed data to loki")
-      return
    else
-      @logger.
-      @logger.debug("Payload object ", :payload => payload)
+      @logger.debug("failed payload", :payload => payload)
    end
  end
 
-  def loki_http_request(
+  def loki_http_request(payload)
    req = Net::HTTP::Post.new(
      @uri.request_uri
    )
    req.add_field('Content-Type', 'application/json')
-    req.add_field('X-Scope-OrgID', tenant_id) if tenant_id
+    req.add_field('X-Scope-OrgID', @tenant_id) if @tenant_id
+    req['User-Agent']= 'loki-logstash'
    req.basic_auth(@username, @password) if @username
    req.body = payload
 
@@ -256,53 +213,28 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
 
    @logger.debug("sending #{req.body.length} bytes to loki")
    retry_count = 0
-    delay = min_delay
+    delay = @min_delay
    begin
-      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
-
-
-
-
-
-
+      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
+        http.request(req)
+      }
+      return res if !res.nil? && res.code.to_i != 429 && res.code.to_i.div(100) != 5
+      raise StandardError.new res
+    rescue StandardError => e
      retry_count += 1
-      @logger.warn("
-
-
-
-
+      @logger.warn("Failed to send batch attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
+      if retry_count < @retries
+        sleep delay
+        if (delay * 2 - delay) > @max_delay
+          delay = delay
+        else
+          delay = delay * 2
+        end
+        retry
      else
-
+        @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
+        return res
      end
-
-      retry
-    rescue StandardError => e
-      @logger.error("Error while connecting to loki server ", :error_inspect => e.inspect, :error => e)
-      return res
    end
-    return res
-  end
-
-  def build_payload(batch)
-    payload = {}
-    payload['streams'] = []
-    batch.streams.each { |labels, stream|
-      stream_obj = get_stream_obj(stream)
-      payload['streams'].push(stream_obj)
-    }
-    return payload.to_json
-  end
-
-  def get_stream_obj(stream)
-    stream_obj = {}
-    stream_obj['stream'] = stream['labels']
-    stream_obj['values'] = []
-    values = []
-    stream['entries'].each { |entry|
-      values.push(entry['ts'].to_s)
-      values.push(entry['line'])
-    }
-    stream_obj['values'].push(values)
-    return stream_obj
  end
 end
data/lib/logstash/outputs/loki/batch.rb CHANGED

@@ -1,47 +1,63 @@
 require 'time'
 
-module
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+module Loki
+  class Batch
+    attr_reader :streams
+    def initialize(e)
+      @bytes = 0
+      @createdAt = Time.now
+      @streams = {}
+      add(e)
+    end
+
+    def size_bytes
+      return @bytes
+    end
+
+    def add(e)
+      @bytes = @bytes + e.entry['line'].length
+
+      # Append the entry to an already existing stream (if any)
+      labels = e.labels.sort.to_h
+      labelkey = labels.to_s
+      if @streams.has_key?(labelkey)
+        stream = @streams[labelkey]
+        stream['entries'].append(e.entry)
+        return
+      else
+        # Add the entry as a new stream
+        @streams[labelkey] = {
+          "labels" => labels,
+          "entries" => [e.entry],
+        }
+      end
+    end
+
+    def size_bytes_after(line)
+      return @bytes + line.length
+    end
+
+    def age()
+      return Time.now - @createdAt
+    end
+
+    def to_json
+      streams = []
+      @streams.each { |_ , stream|
+        streams.append(build_stream(stream))
+      }
+      return {"streams"=>streams}.to_json
+    end
+
+    def build_stream(stream)
+      values = []
+      stream['entries'].each { |entry|
+        values.append([entry['ts'].to_s, entry['line']])
+      }
+      return {
+        'stream'=>stream['labels'],
+        'values' => values
+      }
+    end
+  end
+end
data/lib/logstash/outputs/loki/entry.rb CHANGED

@@ -1,13 +1,25 @@
-module
-
-
-
-
-
-
-
-
-
-
-
-
+module Loki
+  def to_ns(s)
+    (s.to_f * (10**9)).to_i
+  end
+  class Entry
+    include Loki
+    attr_reader :labels, :entry
+    def initialize(event,message_field)
+      @entry = {
+        "ts" => to_ns(event.get("@timestamp")),
+        "line" => event.get(message_field).to_s
+      }
+      event = event.clone()
+      event.remove(message_field)
+      event.remove("@timestamp")
+
+      @labels = {}
+      event.to_hash.each { |key,value|
+        next if key.start_with?('@')
+        next if value.is_a?(Hash)
+        @labels[key] = value.to_s
+      }
+    end
+  end
+end
data/logstash-output-loki.gemspec CHANGED

@@ -1,8 +1,8 @@
 Gem::Specification.new do |s|
   s.name = 'logstash-output-loki'
-  s.version = '1.0.
-  s.authors = ['Aditya C S']
-  s.email = ['aditya.gnu@gmail.com']
+  s.version = '1.0.1'
+  s.authors = ['Aditya C S','Cyril Tovena']
+  s.email = ['aditya.gnu@gmail.com','cyril.tovena@grafana.com']
 
   s.summary = 'Output plugin to ship logs to a Grafana Loki server'
   s.description = 'Output plugin to ship logs to a Grafana Loki server'
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
   s.require_paths = ["lib"]
 
   # Files
-  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile'
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile']
   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
 
data/spec/outputs/loki/entry_spec.rb ADDED

@@ -0,0 +1,59 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/loki"
+require "logstash/codecs/plain"
+require "logstash/event"
+require "net/http"
+include Loki
+
+describe Loki::Entry do
+  context 'test entry generation' do
+    let (:event) {
+      LogStash::Event.new(
+        {
+          'message' => 'hello',
+          '@metadata' => {'foo'=>'bar'},
+          '@version' => '1',
+          'foo' => 5,
+          'agent' => 'filebeat',
+          'log' => {
+            'file' =>
+              {'@path' => '/path/to/file.log'},
+          },
+          'host' => '172.0.0.1',
+          '@timestamp' => Time.now
+        }
+      )
+    }
+
+    it 'labels extracted should not contains object and metadata or timestamp' do
+      entry = Entry.new(event,"message")
+      expect(entry.labels).to eql({ 'agent' => 'filebeat', 'host' => '172.0.0.1', 'foo'=>'5'})
+      expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+      expect(entry.entry['line']).to eql 'hello'
+    end
+  end
+
+  context 'test batch generation with label order' do
+    let (:entries) {[
+      Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message"),
+      Entry.new(LogStash::Event.new({"log"=>"foobar","bar"=>"bar","@timestamp"=>Time.at(2)}),"log"),
+      Entry.new(LogStash::Event.new({"cluster"=>"us-central1","message"=>"foobuzz","buzz"=>"bar","@timestamp"=>Time.at(3)}),"message"),
+
+    ]}
+    let (:expected) {
+      {"streams" => [
+        {"stream"=> {"buzz"=>"bar","cluster"=>"us-central1"}, "values" => [[to_ns(Time.at(1)).to_s,"foobuzz"],[to_ns(Time.at(3)).to_s,"foobuzz"]]},
+        {"stream"=> {"bar"=>"bar"}, "values"=>[[to_ns(Time.at(2)).to_s,"foobar"]]},
+      ] }
+    }
+
+    it 'to_json' do
+      @batch = Loki::Batch.new(entries.first)
+      entries.drop(1).each { |e| @batch.add(e)}
+      expect(JSON.parse(@batch.to_json)).to eql expected
+    end
+  end
+
+
+end
data/spec/outputs/loki_spec.rb CHANGED

@@ -4,9 +4,12 @@ require "logstash/outputs/loki"
 require "logstash/codecs/plain"
 require "logstash/event"
 require "net/http"
+require 'webmock/rspec'
+include Loki
 
 describe LogStash::Outputs::Loki do
-
+
+  let (:simple_loki_config) { {'url' => 'http://localhost:3100'} }
 
   context 'when initializing' do
     it "should register" do
@@ -14,100 +17,231 @@ describe LogStash::Outputs::Loki do
       expect { loki.register }.to_not raise_error
     end
 
-    it 'should populate loki config with default or
+    it 'should populate loki config with default or initialized values' do
       loki = LogStash::Outputs::Loki.new(simple_loki_config)
       expect(loki.url).to eql 'http://localhost:3100'
       expect(loki.tenant_id).to eql nil
       expect(loki.batch_size).to eql 102400
       expect(loki.batch_wait).to eql 1
-      expect(loki.include_labels).to eql ["test_key", "other_key"]
-      expect(loki.external_labels).to include("test" => "value")
    end
  end
 
-  context '
-
-
-
-    let(:loki) { LogStash::Plugin.lookup("output", "loki").new(simple_loki_config) }
+  context 'when adding en entry to the batch' do
+    let (:simple_loki_config) {{'url' => 'http://localhost:3100'}}
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
+    let (:lbs) { {"buzz"=>"bar","cluster"=>"us-central1"}.sort.to_h}
 
-
-
-
-
+    it 'should not add empty line' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      emptyEntry = Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"foo")
+      expect(plugin.add_entry_to_batch(emptyEntry)).to eql true
+      expect(plugin.batch).to eql nil
+    end
 
-
-
-
-
-
-
+    it 'should add entry' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 2
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 14
+    end
+
+    it 'should not add if full' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config.merge!({'batch_size'=>10}))
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true # first entry is fine.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+      expect(plugin.add_entry_to_batch(entry)).to eql false # second entry goes over the limit.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+    end
  end
 
-  context '
-    let(:
-    let (:simple_loki_config) {{'url' => 'http://localhost:3100', 'include_labels' => ["version", "host", "test"], 'external_labels' => {"test" => "value"}}}
-    let (:event) { LogStash::Event.new({'message' => 'hello', '@version' => '1', 'agent' => 'filebeat', 'host' => '172.0.0.1',
-      '@timestamp' => timestamp}) }
-    let(:loki) { LogStash::Plugin.lookup("output", "loki").new(simple_loki_config) }
+  context 'batch expiration' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
 
-
-      loki.
-
+    it 'should not expire if empty' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      sleep(1)
+      expect(loki.is_batch_expired).to be false
    end
-
-
-
-
-
-
-
-
-
-      expect(loki.
+    it 'should not expire batch if not old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      expect(loki.is_batch_expired).to be false
+    end
+    it 'should expire if old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      sleep(1)
+      expect(loki.is_batch_expired).to be true
    end
  end
 
-  context '
-    let (:
-    let (:event) { LogStash::Event.new({'message' => 'hello', '@version' => '1', 'host' => '172.0.0.1',
-      '@timestamp' => LogStash::Timestamp.now}) }
-    let(:loki) { LogStash::Plugin.lookup("output", "loki").new(simple_loki_config) }
+  context 'channel' do
+    let (:event) {LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)})}
 
-
+    it 'should send entry if batch size reached with no tenant' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
      loki.register
+      sent = Concurrent::Channel.new(capacity: 3)
+      allow(loki).to receive(:send) do |batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.receive(event)
      loki.close
+      ~sent
+      ~sent
    end
+    it 'should send entry while closing' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>10,'batch_size'=>10}))
+      loki.register
+      sent = Concurrent::Channel.new(capacity: 3)
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.close
+      ~sent
+    end
+    it 'should send entry when batch is expiring' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+      loki.register
+      sent = Concurrent::Channel.new(capacity: 3)
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      ~sent
+      expect(loki.batch).to be_nil
+      loki.close
+    end
+  end
 
-
-
-      event_hash = event.to_hash
-      lbls = loki.handle_labels(event_hash, labels, "")
-      entry_hash = {
-        "ts" => event.get("@timestamp").to_i * (10**9),
-        "line" => event.get("message").to_s
-      }
-      e = LogStash::Outputs::Loki::Entry.new(lbls, entry_hash)
-      batch = LogStash::Outputs::Loki::Batch.new(e)
-      payload = loki.build_payload(batch)
-
-      # response should be nil on connection error
-      expect(loki.loki_http_request("fake", payload, 1, 2, 3)).to eql nil
-
-      success = Net::HTTPSuccess.new(1.0, 200, 'OK')
-      allow(loki).to receive(:loki_http_request) { success }
-      allow(success).to receive(:payload).and_return('fake body')
-      expect(loki.loki_http_request("fake", batch, 1, 300, 10).class).to eql Net::HTTPSuccess
+  context 'http requests' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message")}
 
-
-
-
-
+    it 'should send credentials' do
+      conf = {
+        'url'=>'http://localhost:3100/loki/api/v1/push',
+        'username' => 'foo',
+        'password' => 'bar',
+        'tenant_id' => 't'
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://localhost:3100/loki/api/v1/push").with(
+        basic_auth: ['foo', 'bar'],
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'X-Scope-OrgID'=>'t',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
+    end
 
-
-
-
-
+    it 'should not send credentials' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry 500' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 3,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [500, "Internal Server Error"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(3)
+    end
+    it 'should retry 429' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 2,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [429, "stop spamming"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
+    end
+    it 'should not retry 400' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [400, "bad request"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry exception' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_raise("some error").then.to_return(status: [200, "fine !"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
    end
  end
 end
metadata CHANGED

@@ -1,14 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-loki
 version: !ruby/object:Gem::Version
-  version: 1.0.
+  version: 1.0.1
 platform: ruby
 authors:
 - Aditya C S
+- Cyril Tovena
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-07-
+date: 2020-07-16 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -75,6 +76,7 @@ dependencies:
 description: Output plugin to ship logs to a Grafana Loki server
 email:
 - aditya.gnu@gmail.com
+- cyril.tovena@grafana.com
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -85,6 +87,7 @@ files:
 - lib/logstash/outputs/loki/batch.rb
 - lib/logstash/outputs/loki/entry.rb
 - logstash-output-loki.gemspec
+- spec/outputs/loki/entry_spec.rb
 - spec/outputs/loki_spec.rb
 homepage: https://github.com/grafana/loki/
 licenses:
@@ -112,4 +115,5 @@ signing_key:
 specification_version: 4
 summary: Output plugin to ship logs to a Grafana Loki server
 test_files:
+- spec/outputs/loki/entry_spec.rb
 - spec/outputs/loki_spec.rb