logstash-output-loki 1.0.0 → 1.1.0
- checksums.yaml +4 -4
- data/Gemfile +6 -3
- data/README.md +39 -17
- data/lib/logstash/outputs/loki/batch.rb +61 -45
- data/lib/logstash/outputs/loki/entry.rb +26 -13
- data/lib/logstash/outputs/loki.rb +121 -161
- data/logstash-output-loki.gemspec +6 -7
- data/spec/outputs/loki/entry_spec.rb +66 -0
- data/spec/outputs/loki_spec.rb +221 -71
- metadata +11 -21
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5c6817d07ada63cde10011f1e490f803ddbb0a7a54d75b9472780f4f180c65fe
+  data.tar.gz: e457589a713ed3bc0f74120f3671bfb6b9b9760393cb86c18764e4d5a1fa2bd7
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 358e07d5c964179b7a003e6ac840bb83df74c03e4de0f1916da310ed07e1c669a672d9363afd841c6bdfd49bb1d525ffbc815801ba4f3a8536659dd449173012
+  data.tar.gz: 89360e4ffcb276b9406c017c5c019c43f839398a93a0cda46e0761e45a6a851ef2e9250c9fec5ba409062aef1f12576c9fcaa939a4cba0c0c9d0db15fa698f24
data/Gemfile
CHANGED
@@ -2,10 +2,13 @@ source 'https://rubygems.org'
 
 gemspec
 
-logstash_path = ENV["LOGSTASH_PATH"] || "logstash
-use_logstash_source = ENV["LOGSTASH_SOURCE"] && ENV["LOGSTASH_SOURCE"].to_s == "1"
+logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"
 
-if Dir.exist?(logstash_path)
+if Dir.exist?(logstash_path)
   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+else
+  raise 'missing logstash vendoring'
 end
+
+gem "webmock", "~> 3.8"
data/README.md
CHANGED
@@ -1,10 +1,12 @@
-# Loki Logstash Output Plugin
+# Contributing to Loki Logstash Output Plugin
 
-
+For information about how to use this plugin see this [documentation](../../docs/sources/clients/logstash/_index.md).
 
 ## Install dependencies
 
-First
+First, make sure you have JDK version `8` or `11` installed and you have set the `JAVA_HOME` environment variable.
+
+You need to setup JRuby environment to build this plugin. Refer https://github.com/rbenv/rbenv for setting up your rbenv environment.
 
 After setting up `rbenv`. Install JRuby
 
@@ -20,47 +22,67 @@ ruby --version
 jruby 9.2.10
 ```
 
-You should
+You should make sure you are running `jruby` and not `ruby`. If the command `ruby --version` still shows `ruby` and not `jruby`, check that PATH contains `$HOME/.rbenv/shims` and `$HOME/.rbenv/bin`. Also verify that you have this in your bash profile:
 
 ```bash
 export PATH="$HOME/.rbenv/bin:$PATH"
 eval "$(rbenv init -)"
 ```
 
-Then install bundler
-
+Then install bundler:
+
+```bash
+gem install bundler:2.1.4
+```
 
 Follow those instructions to [install logstash](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html) before moving to the next section.
 
-##
+## Build and test the plugin
 
 ### Install required packages
 
 ```bash
 git clone git@github.com:elastic/logstash.git
 cd logstash
-git checkout tags/v7.
-export LOGSTASH_PATH
-export
-export
-
+git checkout tags/v7.16.1
+export LOGSTASH_PATH="$(pwd)"
+export GEM_PATH="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+export GEM_HOME="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+./gradlew assemble
 cd ..
-ruby -S bundle
+ruby -S bundle config set --local path "$LOGSTASH_PATH/vendor/bundle"
+ruby -S bundle install
 ruby -S bundle exec rake vendor
 ```
 
 ### Build the plugin
 
-
+```bash
+gem build logstash-output-loki.gemspec
+```
 
 ### Test
 
-
+```bash
+ruby -S bundle exec rspec
+```
+
+Alternatively if you don't want to install JRuby. Enter inside logstash-loki container.
+
+```bash
+docker build -t logstash-loki ./
+docker run -v $(pwd)/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
+bundle exec rspec
+```
 
 ## Install plugin to local logstash
 
-
+```bash
+bin/logstash-plugin install --no-verify --local logstash-output-loki-1.0.0.gem
+```
 
 ## Send sample event and check plugin is working
 
-
+```bash
+bin/logstash -f loki.conf
+```
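The `loki.conf` used in that last step is not shipped with the gem. A minimal sketch of what it could contain, assuming events are typed on stdin and a Loki instance is listening locally on port 3100, is:

```
input {
  stdin {}
}

output {
  loki {
    url => "http://localhost:3100/loki/api/v1/push"
    # optional: restrict which event fields become Loki labels
    # include_fields => ["cluster"]
  }
}
```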
data/lib/logstash/outputs/loki/batch.rb
CHANGED
@@ -1,47 +1,63 @@
 require 'time'
 
-module
- … (44 further removed lines of the old implementation are not legible in this view)
+module Loki
+  class Batch
+    attr_reader :streams
+    def initialize(e)
+      @bytes = 0
+      @createdAt = Time.now
+      @streams = {}
+      add(e)
+    end
+
+    def size_bytes
+      return @bytes
+    end
+
+    def add(e)
+      @bytes = @bytes + e.entry['line'].length
+
+      # Append the entry to an already existing stream (if any)
+      labels = e.labels.sort.to_h
+      labelkey = labels.to_s
+      if @streams.has_key?(labelkey)
+        stream = @streams[labelkey]
+        stream['entries'].append(e.entry)
+        return
+      else
+        # Add the entry as a new stream
+        @streams[labelkey] = {
+          "labels" => labels,
+          "entries" => [e.entry],
+        }
+      end
+    end
+
+    def size_bytes_after(line)
+      return @bytes + line.length
+    end
+
+    def age()
+      return Time.now - @createdAt
+    end
+
+    def to_json
+      streams = []
+      @streams.each { |_ , stream|
+        streams.append(build_stream(stream))
+      }
+      return {"streams"=>streams}.to_json
+    end
+
+    def build_stream(stream)
+      values = []
+      stream['entries'].each { |entry|
+        values.append([entry['ts'].to_s, entry['line']])
+      }
+      return {
+        'stream'=>stream['labels'],
+        'values' => values
+      }
+    end
+  end
+end
data/lib/logstash/outputs/loki/entry.rb
CHANGED
@@ -1,13 +1,26 @@
-module
- … (12 further removed lines of the old implementation are not legible in this view)
+module Loki
+  def to_ns(s)
+    (s.to_f * (10**9)).to_i
+  end
+  class Entry
+    include Loki
+    attr_reader :labels, :entry
+    def initialize(event,message_field,include_fields)
+      @entry = {
+        "ts" => to_ns(event.get("@timestamp")),
+        "line" => event.get(message_field).to_s
+      }
+      event = event.clone()
+      event.remove(message_field)
+      event.remove("@timestamp")
+
+      @labels = {}
+      event.to_hash.each { |key,value|
+        next if key.start_with?('@')
+        next if value.is_a?(Hash)
+        next if include_fields.length() > 0 and not include_fields.include?(key)
+        @labels[key] = value.to_s
+      }
+    end
+  end
+end
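Taken together, `Entry` and `Batch` turn Logstash events into the payload shape Loki's push API expects. A small sketch of how they compose, mirroring the new `entry_spec.rb` further down; it assumes logstash-core (which provides `LogStash::Event`) and this plugin are on the load path:

```ruby
require "json"
require "logstash/event"
require "logstash/outputs/loki/entry"
require "logstash/outputs/loki/batch"

# One event becomes an Entry: message_field supplies the log line, and every
# remaining scalar field not starting with '@' becomes a label.
event = LogStash::Event.new(
  "message"    => "foobuzz",
  "cluster"    => "us-central1",
  "@timestamp" => Time.at(1)
)
entry = Loki::Entry.new(event, "message", [])

# Entries whose sorted label sets match share a single stream inside the batch.
batch = Loki::Batch.new(entry)
batch.add(Loki::Entry.new(event, "message", []))

puts batch.to_json
# => {"streams":[{"stream":{"cluster":"us-central1"},
#     "values":[["1000000000","foobuzz"],["1000000000","foobuzz"]]}]}
```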
data/lib/logstash/outputs/loki.rb
CHANGED
@@ -1,16 +1,15 @@
 # encoding: utf-8
 require "logstash/outputs/base"
+require "logstash/outputs/loki/entry"
+require "logstash/outputs/loki/batch"
 require "logstash/namespace"
 require 'net/http'
-require 'concurrent-edge'
 require 'time'
 require 'uri'
 require 'json'
 
 class LogStash::Outputs::Loki < LogStash::Outputs::Base
-
-  require 'logstash/outputs/loki/entry'
-
+  include Loki
   config_name "loki"
 
   ## 'A single instance of the Output will be shared among the pipeline worker threads'
@@ -30,6 +29,9 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
   ## 'TLS'
   config :ca_cert, :validate => :path, :required => false
 
+  ## 'Disable server certificate verification'
+  config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false
+
   ## 'Loki Tenant ID'
   config :tenant_id, :validate => :string, :required => false
 
@@ -39,24 +41,22 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
   ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
   config :batch_wait, :validate => :number, :default => 1, :required => false
 
-  ## 'Array of label names to include in all logstreams'
-  config :include_labels, :validate => :array, :default => [], :required => true
-
-  ## 'Extra labels to add to all log streams'
-  config :external_labels, :validate => :hash, :default => {}, :required => false
-
   ## 'Log line field to pick from logstash. Defaults to "message"'
   config :message_field, :validate => :string, :default => "message", :required => false
 
   ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
   config :min_delay, :validate => :number, :default => 1, :required => false
 
-
-
+  ## 'An array of fields to map to labels, if defined only fields in this list will be mapped.'
+  config :include_fields, :validate => :array, :default => [], :required => false
+
+  ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
+  config :max_delay, :validate => :number, :default => 300, :required => false
 
   ## 'Backoff configuration. Maximum number of retries to do'
   config :retries, :validate => :number, :default => 10, :required => false
 
+  attr_reader :batch
   public
   def register
     @uri = URI.parse(@url)
@@ -64,22 +64,16 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
       raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
     end
 
-    if @include_labels.empty?
-      raise LogStash::ConfigurationError, "include_labels should contain atleast one label, currently '#{@include_labels}'"
-    end
-
     if @min_delay > @max_delay
       raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
    end
 
     @logger.info("Loki output plugin", :class => self.class.name)
 
-    #
-    @
-    @
-
-    # excluded message and timestamp from labels
-    @exclude_labels = ["message", "@timestamp"]
+    # initialize Queue and Mutex
+    @entries = Queue.new
+    @mutex = Mutex.new
+    @stop = false
 
     # create nil batch object.
     @batch = nil
@@ -90,7 +84,52 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
       validate_ssl_key
     end
 
-
+    # start batch_max_wait and batch_max_size threads
+    @batch_wait_thread = Thread.new{max_batch_wait()}
+    @batch_size_thread = Thread.new{max_batch_size()}
+  end
+
+  def max_batch_size
+    loop do
+      @mutex.synchronize do
+        return if @stop
+      end
+
+      e = @entries.deq
+      return if e.nil?
+
+      @mutex.synchronize do
+        if !add_entry_to_batch(e)
+          @logger.debug("Max batch_size is reached. Sending batch to loki")
+          send(@batch)
+          @batch = Batch.new(e)
+        end
+      end
+    end
+  end
+
+  def max_batch_wait
+    # minimum wait frequency is 10 milliseconds
+    min_wait_checkfrequency = 1/100
+    max_wait_checkfrequency = @batch_wait
+    if max_wait_checkfrequency < min_wait_checkfrequency
+      max_wait_checkfrequency = min_wait_checkfrequency
+    end
+
+    loop do
+      @mutex.synchronize do
+        return if @stop
+      end
+
+      sleep(max_wait_checkfrequency)
+      if is_batch_expired
+        @mutex.synchronize do
+          @logger.debug("Max batch_wait time is reached. Sending batch to loki")
+          send(@batch)
+          @batch = nil
+        end
+      end
+    end
   end
 
   def ssl_cert?
@@ -113,6 +152,13 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
       use_ssl: uri.scheme == 'https'
     }
 
+    # disable server certificate verification
+    if @insecure_skip_verify
+      opts = opts.merge(
+        verify_mode: OpenSSL::SSL::VERIFY_NONE
+      )
+    end
+
     if !@cert.nil? && !@key.nil?
       opts = opts.merge(
         verify_mode: OpenSSL::SSL::VERIFY_PEER,
@@ -129,126 +175,65 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
     opts
   end
 
- … (5 removed lines not legible in this view)
+  # Add an entry to the current batch returns false if the batch is full
+  # and the entry can't be added.
+  def add_entry_to_batch(e)
+    line = e.entry['line']
+    # we don't want to send empty lines.
+    return true if line.to_s.strip.empty?
+
+    if @batch.nil?
+      @batch = Batch.new(e)
+      return true
     end
 
-    @
-
-    Concurrent::Channel.select do |s|
-      s.take(@entries) { |e|
-        if @batch.nil?
-          @batch = Batch.new(e)
-          next
-        end
-
-        line = e.entry['line']
-        if @batch.size_bytes_after(line) > @batch_size
-          @logger.debug("Max batch_size is reached. Sending batch to loki")
-          send(@tenant_id, @batch)
-          @batch = Batch.new(e)
-          next
-        end
-        @batch.add(e)
-      }
-      s.take(@max_wait_check) {
-        # Send batch if max wait time has been reached
-        if !@batch.nil?
-          if @batch.age() < @batch_wait
-            next
-          end
-
-          @logger.debug("Max batch_wait time is reached. Sending batch to loki")
-          send(@tenant_id, @batch)
-          @batch = nil
-        end
-      }
-    end
+    if @batch.size_bytes_after(line) > @batch_size
+      return false
     end
+    @batch.add(e)
+    return true
+  end
+
+  def is_batch_expired
+    return !@batch.nil? && @batch.age() >= @batch_wait
   end
 
   ## Receives logstash events
   public
   def receive(event)
-
-    event_hash = event.to_hash
-    lbls = handle_labels(event_hash, labels, "")
-
-    data_labels, entry_hash = build_entry(lbls, event)
-    @entries << Entry.new(data_labels, entry_hash)
-
+    @entries << Entry.new(event, @message_field, @include_fields)
   end
 
   def close
-    @logger.info("Closing loki output plugin. Flushing all pending batches")
-    send(@tenant_id, @batch) if !@batch.nil?
     @entries.close
-    @
- … (4 removed lines not legible in this view)
-    entry_hash = {
-      "ts" => event.get("@timestamp").to_i * (10**9),
-      "line" => event.get(@message_field).to_s
-    }
-    return labels, entry_hash
-  end
-
-  def handle_labels(event_hash, labels, parent_key)
-    event_hash.each{ |key,value|
-      if !@exclude_labels.include?(key)
-        if value.is_a?(Hash)
-          if parent_key != ""
-            handle_labels(value, labels, parent_key + "_" + key)
-          else
-            handle_labels(value, labels, key)
-          end
-        else
-          if parent_key != ""
-            labels[parent_key + "_" + key] = value.to_s
-          else
-            labels[key] = value.to_s
-          end
-        end
-      end
-    }
-    return extract_labels(labels)
-  end
+    @mutex.synchronize do
+      @stop = true
+    end
+    @batch_wait_thread.join
+    @batch_size_thread.join
 
- … (3 removed lines not legible in this view)
-    if @include_labels.include?(key)
-      key = key.gsub("@", '')
-      labels[key] = value
-    end
-    }
-    return labels
+    # if by any chance we still have a forming batch, we need to send it.
+    send(@batch) if !@batch.nil?
+    @batch = nil
   end
 
-  def send(
-    payload =
-    res = loki_http_request(
-
+  def send(batch)
+    payload = batch.to_json
+    res = loki_http_request(payload)
     if res.is_a?(Net::HTTPSuccess)
       @logger.debug("Successfully pushed data to loki")
-      return
     else
-      @logger.
-      @logger.debug("Payload object ", :payload => payload)
+      @logger.debug("failed payload", :payload => payload)
     end
   end
 
-  def loki_http_request(
+  def loki_http_request(payload)
     req = Net::HTTP::Post.new(
       @uri.request_uri
     )
     req.add_field('Content-Type', 'application/json')
-    req.add_field('X-Scope-OrgID', tenant_id) if tenant_id
+    req.add_field('X-Scope-OrgID', @tenant_id) if @tenant_id
+    req['User-Agent']= 'loki-logstash'
     req.basic_auth(@username, @password) if @username
     req.body = payload
 
@@ -256,53 +241,28 @@ class LogStash::Outputs::Loki < LogStash::Outputs::Base
 
     @logger.debug("sending #{req.body.length} bytes to loki")
     retry_count = 0
-    delay = min_delay
+    delay = @min_delay
     begin
-      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
- … (6 removed lines not legible in this view)
+      res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
+        http.request(req)
+      }
+      return res if !res.nil? && res.code.to_i != 429 && res.code.to_i.div(100) != 5
+      raise StandardError.new res
+    rescue StandardError => e
       retry_count += 1
-      @logger.warn("
- … (4 removed lines not legible in this view)
+      @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
+      if retry_count < @retries
+        sleep delay
+        if delay * 2 <= @max_delay
+          delay = delay * 2
+        else
+          delay = @max_delay
+        end
+        retry
       else
-
+        @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
+        return res
       end
-
-      retry
-    rescue StandardError => e
-      @logger.error("Error while connecting to loki server ", :error_inspect => e.inspect, :error => e)
-      return res
     end
-    return res
-  end
-
-  def build_payload(batch)
-    payload = {}
-    payload['streams'] = []
-    batch.streams.each { |labels, stream|
-      stream_obj = get_stream_obj(stream)
-      payload['streams'].push(stream_obj)
-    }
-    return payload.to_json
-  end
-
-  def get_stream_obj(stream)
-    stream_obj = {}
-    stream_obj['stream'] = stream['labels']
-    stream_obj['values'] = []
-    values = []
-    stream['entries'].each { |entry|
-      values.push(entry['ts'].to_s)
-      values.push(entry['line'])
-    }
-    stream_obj['values'].push(values)
-    return stream_obj
   end
 end
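For reference, the rewritten `loki_http_request` above retries 429 and 5xx responses as well as connection errors, sleeping between attempts with exponential backoff: the delay starts at `min_delay`, doubles after each failure, is capped at `max_delay`, and at most `retries` requests are made in total. A standalone illustration (not part of the plugin) of the sleep schedule produced by the defaults:

```ruby
# Reproduces the delay arithmetic of loki_http_request with the default
# settings: min_delay = 1, max_delay = 300, retries = 10.
min_delay, max_delay, retries = 1, 300, 10

delay  = min_delay
delays = []
(retries - 1).times do        # one sleep before each retry, so retries - 1 sleeps
  delays << delay
  delay = delay * 2 <= max_delay ? delay * 2 : max_delay
end

p delays  # => [1, 2, 4, 8, 16, 32, 64, 128, 256]
```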
data/logstash-output-loki.gemspec
CHANGED
@@ -1,8 +1,8 @@
 Gem::Specification.new do |s|
-  s.name
-  s.version
-  s.authors = ['Aditya C S']
-  s.email = ['aditya.gnu@gmail.com']
+  s.name = 'logstash-output-loki'
+  s.version = '1.1.0'
+  s.authors = ['Aditya C S','Cyril Tovena']
+  s.email = ['aditya.gnu@gmail.com','cyril.tovena@grafana.com']
 
   s.summary = 'Output plugin to ship logs to a Grafana Loki server'
   s.description = 'Output plugin to ship logs to a Grafana Loki server'
@@ -11,7 +11,7 @@ Gem::Specification.new do |s|
   s.require_paths = ["lib"]
 
   # Files
-  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile'
+  s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile']
   # Tests
   s.test_files = s.files.grep(%r{^(test|spec|features)/})
 
@@ -21,7 +21,6 @@ Gem::Specification.new do |s|
   # Gem dependencies
   #
   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
-  s.add_runtime_dependency "logstash-codec-plain", "3.0
-  s.add_runtime_dependency "concurrent-ruby-edge", "0.6.0"
+  s.add_runtime_dependency "logstash-codec-plain", "3.1.0"
   s.add_development_dependency 'logstash-devutils', "2.0.2"
 end
data/spec/outputs/loki/entry_spec.rb
ADDED
@@ -0,0 +1,66 @@
+# encoding: utf-8
+require "logstash/devutils/rspec/spec_helper"
+require "logstash/outputs/loki"
+require "logstash/codecs/plain"
+require "logstash/event"
+require "net/http"
+include Loki
+
+describe Loki::Entry do
+  context 'test entry generation' do
+    let (:event) {
+      LogStash::Event.new(
+        {
+          'message' => 'hello',
+          '@metadata' => {'foo'=>'bar'},
+          '@version' => '1',
+          'foo' => 5,
+          'agent' => 'filebeat',
+          'log' => {
+            'file' =>
+              {'@path' => '/path/to/file.log'},
+          },
+          'host' => '172.0.0.1',
+          '@timestamp' => Time.now
+        }
+      )
+    }
+
+    it 'labels extracted should not contains object and metadata or timestamp' do
+      entry = Entry.new(event,"message", [])
+      expect(entry.labels).to eql({ 'agent' => 'filebeat', 'host' => '172.0.0.1', 'foo'=>'5'})
+      expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+      expect(entry.entry['line']).to eql 'hello'
+    end
+
+    it 'labels extracted should only contain allowlisted labels' do
+      entry = Entry.new(event, "message", %w[agent foo])
+      expect(entry.labels).to eql({ 'agent' => 'filebeat', 'foo'=>'5'})
+      expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+      expect(entry.entry['line']).to eql 'hello'
+    end
+  end
+
+  context 'test batch generation with label order' do
+    let (:entries) {[
+      Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", []),
+      Entry.new(LogStash::Event.new({"log"=>"foobar","bar"=>"bar","@timestamp"=>Time.at(2)}),"log", []),
+      Entry.new(LogStash::Event.new({"cluster"=>"us-central1","message"=>"foobuzz","buzz"=>"bar","@timestamp"=>Time.at(3)}),"message", []),
+
+    ]}
+    let (:expected) {
+      {"streams" => [
+        {"stream"=> {"buzz"=>"bar","cluster"=>"us-central1"}, "values" => [[to_ns(Time.at(1)).to_s,"foobuzz"],[to_ns(Time.at(3)).to_s,"foobuzz"]]},
+        {"stream"=> {"bar"=>"bar"}, "values"=>[[to_ns(Time.at(2)).to_s,"foobar"]]},
+      ] }
+    }
+
+    it 'to_json' do
+      @batch = Loki::Batch.new(entries.first)
+      entries.drop(1).each { |e| @batch.add(e)}
+      expect(JSON.parse(@batch.to_json)).to eql expected
+    end
+  end
+
+
+end
data/spec/outputs/loki_spec.rb
CHANGED
@@ -4,9 +4,12 @@ require "logstash/outputs/loki"
 require "logstash/codecs/plain"
 require "logstash/event"
 require "net/http"
+require 'webmock/rspec'
+include Loki
 
 describe LogStash::Outputs::Loki do
-
+
+  let (:simple_loki_config) { {'url' => 'http://localhost:3100'} }
 
   context 'when initializing' do
     it "should register" do
@@ -14,100 +17,247 @@ describe LogStash::Outputs::Loki do
       expect { loki.register }.to_not raise_error
     end
 
-    it 'should populate loki config with default or
+    it 'should populate loki config with default or initialized values' do
       loki = LogStash::Outputs::Loki.new(simple_loki_config)
       expect(loki.url).to eql 'http://localhost:3100'
       expect(loki.tenant_id).to eql nil
      expect(loki.batch_size).to eql 102400
       expect(loki.batch_wait).to eql 1
-      expect(loki.include_labels).to eql ["test_key", "other_key"]
-      expect(loki.external_labels).to include("test" => "value")
     end
   end
 
-  context '
- … (4 removed lines not legible in this view)
+  context 'when adding en entry to the batch' do
+    let (:simple_loki_config) {{'url' => 'http://localhost:3100'}}
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
+    let (:lbs) {{"buzz"=>"bar","cluster"=>"us-central1"}.sort.to_h}
+    let (:include_loki_config) {{ 'url' => 'http://localhost:3100', 'include_fields' => ["cluster"] }}
+    let (:include_entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", ["cluster"])}
+    let (:include_lbs) {{"cluster"=>"us-central1"}.sort.to_h}
 
- … (4 removed lines not legible in this view)
+    it 'should not add empty line' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      emptyEntry = Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"foo", [])
+      expect(plugin.add_entry_to_batch(emptyEntry)).to eql true
+      expect(plugin.batch).to eql nil
+    end
 
- … (6 removed lines not legible in this view)
+    it 'should add entry' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.add_entry_to_batch(entry)).to eql true
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 2
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 14
+    end
+
+    it 'should only allowed labels defined in include_fields' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(include_loki_config)
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(include_entry)).to eql true
+      expect(plugin.add_entry_to_batch(include_entry)).to eql true
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[include_lbs.to_s]['entries'].length).to eq 2
+      expect(plugin.batch.streams[include_lbs.to_s]['labels']).to eq include_lbs
+      expect(plugin.batch.size_bytes).to eq 14
+    end
+
+    it 'should not add if full' do
+      plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config.merge!({'batch_size'=>10}))
+      expect(plugin.batch).to eql nil
+      expect(plugin.add_entry_to_batch(entry)).to eql true # first entry is fine.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+      expect(plugin.add_entry_to_batch(entry)).to eql false # second entry goes over the limit.
+      expect(plugin.batch).not_to be_nil
+      expect(plugin.batch.streams.length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+      expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+      expect(plugin.batch.size_bytes).to eq 7
+    end
+  end
+
+  context 'batch expiration' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
+
+    it 'should not expire if empty' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      sleep(1)
+      expect(loki.is_batch_expired).to be false
+    end
+    it 'should not expire batch if not old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      expect(loki.is_batch_expired).to be false
+    end
+    it 'should expire if old' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+      expect(loki.add_entry_to_batch(entry)).to eql true
+      sleep(1)
+      expect(loki.is_batch_expired).to be true
+    end
   end
 
-  context '
-    let(:
-    let (:simple_loki_config) {{'url' => 'http://localhost:3100', 'include_labels' => ["version", "host", "test"], 'external_labels' => {"test" => "value"}}}
-    let (:event) { LogStash::Event.new({'message' => 'hello', '@version' => '1', 'agent' => 'filebeat', 'host' => '172.0.0.1',
-                                        '@timestamp' => timestamp}) }
-    let(:loki) { LogStash::Plugin.lookup("output", "loki").new(simple_loki_config) }
+  context 'channel' do
+    let (:event) {LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)})}
 
-
+    it 'should send entry if batch size reached with no tenant' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
       loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do |batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.receive(event)
+      sent.deq
+      sent.deq
       loki.close
     end
- … (10 removed lines not legible in this view)
+    it 'should send entry while closing' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>10,'batch_size'=>10}))
+      loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      loki.close
+      sent.deq
+    end
+    it 'should send entry when batch is expiring' do
+      loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+      loki.register
+      sent = Queue.new
+      allow(loki).to receive(:send) do | batch|
+        Thread.new do
+          sent << batch
+        end
+      end
+      loki.receive(event)
+      sent.deq
+      sleep(0.01) # Adding a minimal sleep. In few cases @batch=nil might happen after evaluating for nil
+      expect(loki.batch).to be_nil
+      loki.close
     end
   end
 
-  context '
-    let (:
-    let (:event) { LogStash::Event.new({'message' => 'hello', '@version' => '1', 'host' => '172.0.0.1',
-                                        '@timestamp' => LogStash::Timestamp.now}) }
-    let(:loki) { LogStash::Plugin.lookup("output", "loki").new(simple_loki_config) }
+  context 'http requests' do
+    let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
 
-
+    it 'should send credentials' do
+      conf = {
+        'url'=>'http://localhost:3100/loki/api/v1/push',
+        'username' => 'foo',
+        'password' => 'bar',
+        'tenant_id' => 't'
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
       loki.register
-
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://localhost:3100/loki/api/v1/push").with(
+        basic_auth: ['foo', 'bar'],
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'X-Scope-OrgID'=>'t',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
     end
 
-    it '
- … (6 removed lines not legible in this view)
+    it 'should not send credentials' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+        headers:{
+          'Content-Type' => 'application/json' ,
+          'User-Agent' => 'loki-logstash',
+          'Accept'=>'*/*',
+          'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+        }
+      )
+      loki.send(b)
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry 500' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 3,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [500, "Internal Server Error"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(3)
+    end
+    it 'should retry 429' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 2,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [429, "stop spamming"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
+    end
+    it 'should not retry 400' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
       }
- … (21 removed lines not legible in this view)
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_return(status: [400, "bad request"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(1)
+    end
+    it 'should retry exception' do
+      conf = {
+        'url'=>'http://foo.com/loki/api/v1/push',
+        'retries' => 11,
+      }
+      loki = LogStash::Outputs::Loki.new(conf)
+      loki.register
+      b = Batch.new(entry)
+      post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+        body: b.to_json,
+      ).to_raise("some error").then.to_return(status: [200, "fine !"])
+      loki.send(b)
+      loki.close
+      expect(post).to have_been_requested.times(2)
     end
   end
 end
metadata
CHANGED
@@ -1,14 +1,15 @@
 --- !ruby/object:Gem::Specification
 name: logstash-output-loki
 version: !ruby/object:Gem::Version
-  version: 1.
+  version: 1.1.0
 platform: ruby
 authors:
 - Aditya C S
+- Cyril Tovena
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2022-01-27 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
@@ -20,8 +21,8 @@ dependencies:
       - !ruby/object:Gem::Version
         version: '2.99'
   name: logstash-core-plugin-api
-  type: :runtime
   prerelease: false
+  type: :runtime
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
@@ -35,29 +36,15 @@ dependencies:
     requirements:
     - - '='
      - !ruby/object:Gem::Version
-        version: 3.0
+        version: 3.1.0
   name: logstash-codec-plain
-  type: :runtime
   prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - '='
-      - !ruby/object:Gem::Version
-        version: 3.0.6
-- !ruby/object:Gem::Dependency
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - '='
-      - !ruby/object:Gem::Version
-        version: 0.6.0
-  name: concurrent-ruby-edge
   type: :runtime
-  prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '='
       - !ruby/object:Gem::Version
-        version:
+        version: 3.1.0
 - !ruby/object:Gem::Dependency
   requirement: !ruby/object:Gem::Requirement
     requirements:
@@ -65,8 +52,8 @@ dependencies:
       - !ruby/object:Gem::Version
         version: 2.0.2
   name: logstash-devutils
-  type: :development
   prerelease: false
+  type: :development
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - '='
@@ -75,6 +62,7 @@ dependencies:
 description: Output plugin to ship logs to a Grafana Loki server
 email:
 - aditya.gnu@gmail.com
+- cyril.tovena@grafana.com
 executables: []
 extensions: []
 extra_rdoc_files: []
@@ -85,6 +73,7 @@ files:
 - lib/logstash/outputs/loki/batch.rb
 - lib/logstash/outputs/loki/entry.rb
 - logstash-output-loki.gemspec
+- spec/outputs/loki/entry_spec.rb
 - spec/outputs/loki_spec.rb
 homepage: https://github.com/grafana/loki/
 licenses:
@@ -107,9 +96,10 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.
+rubygems_version: 3.1.6
 signing_key:
 specification_version: 4
 summary: Output plugin to ship logs to a Grafana Loki server
 test_files:
+- spec/outputs/loki/entry_spec.rb
 - spec/outputs/loki_spec.rb