logstash-output-lokis 1.1.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 9e70187853529deb761401635d457f937089dfdba127a8d4a5bfbe038afa59ea
+   data.tar.gz: 1ba1d2558609c2037815c9daa05454ec8a86aa48e304f8aa3609d48e39b6cd4c
+ SHA512:
+   metadata.gz: 292d56839c1ec174ebf8cc569046ea426a9379ced59238db6bb8d67e194d2a3e9e172e918799fec40f80030de00b8d0cc5733a7bc8508330b0d1cdbf8abbfedf
+   data.tar.gz: 034ee469829efdca033addab9c95f610b26c24ce4df7cc1c423e2934f9e442e9a4ede19b4155bdf6910365deb3a71a62d438e5f611154890637420fcc65febfa
data/Gemfile ADDED
@@ -0,0 +1,14 @@
+ source 'https://rubygems.org'
+
+ gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"
+
+ if Dir.exist?(logstash_path)
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-core-plugin-api', :path => "#{logstash_path}/logstash-core-plugin-api"
+ else
+   raise 'missing logstash vendoring'
+ end
+
+ gem "webmock", "~> 3.8"
data/README.md ADDED
@@ -0,0 +1,88 @@
+ # Contributing to Loki Logstash Output Plugin
+
+ For information about how to use this plugin, see this [documentation](../../docs/sources/clients/logstash/_index.md).
+
+ ## Install dependencies
+
+ First, make sure you have JDK version `8` or `11` installed and that you have set the `JAVA_HOME` environment variable.
+
+ You need a JRuby environment to build this plugin. Refer to https://github.com/rbenv/rbenv for setting up your rbenv environment.
+
+ After setting up `rbenv`, install JRuby:
+
+ ```bash
+ rbenv install jruby-9.2.10.0
+ rbenv local jruby-9.2.10.0
+ ```
+
+ Check that the environment is configured:
+
+ ```bash
+ ruby --version
+ jruby 9.2.10
+ ```
+
+ Make sure you are running `jruby` and not `ruby`. If the command `ruby --version` still shows `ruby` and not `jruby`, check that `PATH` contains `$HOME/.rbenv/shims` and `$HOME/.rbenv/bin`. Also verify that you have this in your bash profile:
+
+ ```bash
+ export PATH="$HOME/.rbenv/bin:$PATH"
+ eval "$(rbenv init -)"
+ ```
+
+ Then install bundler:
+
+ ```bash
+ gem install bundler:2.1.4
+ ```
+
+ Follow these instructions to [install Logstash](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html) before moving on to the next section.
+
+ ## Build and test the plugin
+
+ ### Install required packages
+
+ ```bash
+ git clone git@github.com:elastic/logstash.git
+ cd logstash
+ git checkout tags/v7.16.1
+ export LOGSTASH_PATH="$(pwd)"
+ export GEM_PATH="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+ export GEM_HOME="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+ ./gradlew assemble
+ cd ..
+ ruby -S bundle config set --local path "$LOGSTASH_PATH/vendor/bundle"
+ ruby -S bundle install
+ ruby -S bundle exec rake vendor
+ ```
+
+ ### Build the plugin
+
+ ```bash
+ gem build logstash-output-lokis.gemspec
+ ```
+
+ ### Test
+
+ ```bash
+ ruby -S bundle exec rspec
+ ```
+
+ Alternatively, if you don't want to install JRuby, run the tests inside the logstash-loki container:
+
+ ```bash
+ docker build -t logstash-loki ./
+ docker run -v $(pwd)/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
+ bundle exec rspec
+ ```
+
+ ## Install the plugin into a local Logstash
+
+ ```bash
+ bin/logstash-plugin install --no-verify --local logstash-output-lokis-1.1.0.gem
+ ```
+
+ ## Send a sample event and check the plugin is working
+
+ ```bash
+ bin/logstash -f loki.conf
+ ```
data/lib/logstash/outputs/loki/batch.rb ADDED
@@ -0,0 +1,63 @@
+ require 'time'
+
+ module Loki
+   class Batch
+     attr_reader :streams
+     def initialize(e)
+       @bytes = 0
+       @createdAt = Time.now
+       @streams = {}
+       add(e)
+     end
+
+     def size_bytes
+       return @bytes
+     end
+
+     def add(e)
+       @bytes = @bytes + e.entry['line'].length
+
+       # Append the entry to an already existing stream (if any)
+       labels = e.labels.sort.to_h
+       labelkey = labels.to_s
+       if @streams.has_key?(labelkey)
+         stream = @streams[labelkey]
+         stream['entries'].append(e.entry)
+         return
+       else
+         # Add the entry as a new stream
+         @streams[labelkey] = {
+           "labels" => labels,
+           "entries" => [e.entry],
+         }
+       end
+     end
+
+     def size_bytes_after(line)
+       return @bytes + line.length
+     end
+
+     def age()
+       return Time.now - @createdAt
+     end
+
+     def to_json
+       streams = []
+       @streams.each { |_, stream|
+         streams.append(build_stream(stream))
+       }
+       return {"streams" => streams}.to_json
+     end
+
+     def build_stream(stream)
+       values = []
+       stream['entries'].each { |entry|
+         values.append([entry['ts'].to_s, entry['line']])
+       }
+       return {
+         'stream' => stream['labels'],
+         'values' => values
+       }
+     end
+   end
+ end
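
`Batch` above groups entries by their sorted label set and serializes them into the Loki push payload shape, `{"streams" => [{"stream" => labels, "values" => [[ts, line], ...]}]}`. A minimal, self-contained sketch of that grouping logic using plain hashes (the variable and field names below are illustrative only, not the plugin's API):

```ruby
require 'json'

# Sketch: group log entries by their sorted label set, the same way Batch#add
# keys @streams on labels.sort.to_h.to_s, then emit a Loki-style push payload.
entries = [
  { labels: { 'cluster' => 'us-central1', 'buzz' => 'bar' }, ts: 1_000_000_000, line: 'foobuzz' },
  { labels: { 'bar' => 'bar' },                              ts: 2_000_000_000, line: 'foobar'  },
  { labels: { 'buzz' => 'bar', 'cluster' => 'us-central1' }, ts: 3_000_000_000, line: 'foobuzz' },
]

streams = {}
entries.each do |e|
  labels = e[:labels].sort.to_h          # sorting makes the key independent of label order
  key = labels.to_s
  streams[key] ||= { 'stream' => labels, 'values' => [] }
  streams[key]['values'] << [e[:ts].to_s, e[:line]]
end

puts JSON.pretty_generate({ 'streams' => streams.values })
# The two entries sharing {buzz, cluster} end up in one stream, which is the
# behavior exercised by the label-order test in entry_spec.rb further below.
```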
data/lib/logstash/outputs/loki/entry.rb ADDED
@@ -0,0 +1,26 @@
+ module Loki
+   def to_ns(s)
+     (s.to_f * (10**9)).to_i
+   end
+   class Entry
+     include Loki
+     attr_reader :labels, :entry
+     def initialize(event, message_field, include_fields)
+       @entry = {
+         "ts" => to_ns(event.get("@timestamp")),
+         "line" => event.get(message_field).to_s
+       }
+       event = event.clone()
+       event.remove(message_field)
+       event.remove("@timestamp")
+
+       @labels = {}
+       event.to_hash.each { |key, value|
+         next if key.start_with?('@')
+         next if value.is_a?(Hash)
+         next if include_fields.length() > 0 and not include_fields.include?(key)
+         @labels[key] = value.to_s
+       }
+     end
+   end
+ end
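
`Entry` converts the event `@timestamp` into integer nanoseconds and turns the remaining top-level fields into labels, skipping `@`-prefixed keys, nested hashes, and, when `include_fields` is non-empty, anything not on that allowlist. A standalone sketch of that filtering, using a plain hash in place of a Logstash event (field names here are illustrative assumptions):

```ruby
require 'time'

# Same conversion as Loki#to_ns: seconds (possibly fractional) to integer nanoseconds.
def to_ns(t)
  (t.to_f * (10**9)).to_i
end

event = {
  'message'  => 'hello',
  '@version' => '1',                       # skipped: starts with '@'
  'log'      => { 'path' => '/var/log' },  # skipped: nested hash
  'agent'    => 'filebeat',
  'host'     => '172.0.0.1',
}
include_fields = ['agent']                  # an empty array means "keep every eligible field"

labels = {}
event.each do |key, value|
  next if key == 'message'                  # the message field becomes the log line, not a label
  next if key.start_with?('@')
  next if value.is_a?(Hash)
  next if include_fields.any? && !include_fields.include?(key)
  labels[key] = value.to_s
end

puts to_ns(Time.at(1))   # => 1000000000
puts labels.inspect      # => {"agent"=>"filebeat"}
```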
data/lib/logstash/outputs/loki.rb ADDED
@@ -0,0 +1,279 @@
+ # encoding: utf-8
+ require "logstash/outputs/base"
+ require "logstash/outputs/loki/entry"
+ require "logstash/outputs/loki/batch"
+ require "logstash/namespace"
+ require 'net/http'
+ require 'time'
+ require 'uri'
+ require 'json'
+ require "zlib"
+
+ class LogStash::Outputs::Loki < LogStash::Outputs::Base
+   include Loki
+   config_name "loki"
+
+   ## 'A single instance of the Output will be shared among the pipeline worker threads'
+   concurrency :single
+
+   ## 'Loki URL'
+   config :url, :validate => :string, :required => true
+
+   ## 'BasicAuth credentials'
+   config :username, :validate => :string, :required => false
+   config :password, :validate => :string, secret: true, :required => false
+
+   ## 'Client certificate'
+   config :cert, :validate => :path, :required => false
+   config :key, :validate => :path, :required => false
+
+   ## 'TLS'
+   config :ca_cert, :validate => :path, :required => false
+
+   ## 'Disable server certificate verification'
+   config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false
+
+   ## 'Loki Tenant ID'
+   config :tenant_id, :validate => :string, :required => false
+
+   ## 'Maximum batch size to accrue before pushing to loki. Defaults to 102400 bytes'
+   config :batch_size, :validate => :number, :default => 102400, :required => false
+
+   ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
+   config :batch_wait, :validate => :number, :default => 1, :required => false
+
+   ## 'Log line field to pick from logstash. Defaults to "message"'
+   config :message_field, :validate => :string, :default => "message", :required => false
+
+   ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
+   config :min_delay, :validate => :number, :default => 1, :required => false
+
+   ## 'An array of fields to map to labels, if defined only fields in this list will be mapped.'
+   config :include_fields, :validate => :array, :default => [], :required => false
+
+   ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
+   config :max_delay, :validate => :number, :default => 300, :required => false
+
+   ## 'Backoff configuration. Maximum number of retries to do'
+   config :retries, :validate => :number, :default => 10, :required => false
+
+   attr_reader :batch
+   public
+   def register
+     @uri = URI.parse(@url)
+     unless @uri.is_a?(URI::HTTP) || @uri.is_a?(URI::HTTPS)
+       raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
+     end
+
+     if @min_delay > @max_delay
+       raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
+     end
+
+     @logger.info("Loki output plugin", :class => self.class.name)
+
+     # initialize Queue and Mutex
+     @entries = Queue.new
+     @mutex = Mutex.new
+     @stop = false
+
+     # create nil batch object.
+     @batch = nil
+
+     # validate certs
+     if ssl_cert?
+       load_ssl
+       validate_ssl_key
+     end
+
+     # start batch_max_wait and batch_max_size threads
+     @batch_wait_thread = Thread.new{max_batch_wait()}
+     @batch_size_thread = Thread.new{max_batch_size()}
+   end
+
+   def max_batch_size
+     loop do
+       @mutex.synchronize do
+         return if @stop
+       end
+
+       e = @entries.deq
+       return if e.nil?
+
+       @mutex.synchronize do
+         if !add_entry_to_batch(e)
+           @logger.debug("Max batch_size is reached. Sending batch to loki")
+           send(@batch)
+           @batch = Batch.new(e)
+         end
+       end
+     end
+   end
+
+   def max_batch_wait
+     # minimum wait frequency is 10 milliseconds
+     min_wait_checkfrequency = 1.0/100
+     max_wait_checkfrequency = @batch_wait
+     if max_wait_checkfrequency < min_wait_checkfrequency
+       max_wait_checkfrequency = min_wait_checkfrequency
+     end
+
+     loop do
+       @mutex.synchronize do
+         return if @stop
+       end
+
+       sleep(max_wait_checkfrequency)
+       if is_batch_expired
+         @mutex.synchronize do
+           @logger.debug("Max batch_wait time is reached. Sending batch to loki")
+           send(@batch)
+           @batch = nil
+         end
+       end
+     end
+   end
+
+   def ssl_cert?
+     !@key.nil? && !@cert.nil?
+   end
+
+   def load_ssl
+     @cert = OpenSSL::X509::Certificate.new(File.read(@cert)) if @cert
+     @key = OpenSSL::PKey.read(File.read(@key)) if @key
+   end
+
+   def validate_ssl_key
+     if !@key.is_a?(OpenSSL::PKey::RSA) && !@key.is_a?(OpenSSL::PKey::DSA)
+       raise LogStash::ConfigurationError, "Unsupported private key type '#{@key.class}'"
+     end
+   end
+
+   def ssl_opts(uri)
+     opts = {
+       use_ssl: uri.scheme == 'https'
+     }
+
+     # disable server certificate verification
+     if @insecure_skip_verify
+       opts = opts.merge(
+         verify_mode: OpenSSL::SSL::VERIFY_NONE
+       )
+     end
+
+     if !@cert.nil? && !@key.nil?
+       opts = opts.merge(
+         verify_mode: OpenSSL::SSL::VERIFY_PEER,
+         cert: @cert,
+         key: @key
+       )
+     end
+
+     unless @ca_cert.nil?
+       opts = opts.merge(
+         ca_file: @ca_cert
+       )
+     end
+     opts
+   end
+
+   # Add an entry to the current batch; returns false if the batch is full
+   # and the entry can't be added.
+   def add_entry_to_batch(e)
+     line = e.entry['line']
+     # we don't want to send empty lines.
+     return true if line.to_s.strip.empty?
+
+     if @batch.nil?
+       @batch = Batch.new(e)
+       return true
+     end
+
+     if @batch.size_bytes_after(line) > @batch_size
+       return false
+     end
+     @batch.add(e)
+     return true
+   end
+
+   def is_batch_expired
+     return !@batch.nil? && @batch.age() >= @batch_wait
+   end
+
+   ## Receives logstash events
+   public
+   def receive(event)
+     @entries << Entry.new(event, @message_field, @include_fields)
+   end
+
+   def close
+     @entries.close
+     @mutex.synchronize do
+       @stop = true
+     end
+     @batch_wait_thread.join
+     @batch_size_thread.join
+
+     # if by any chance we still have a forming batch, we need to send it.
+     send(@batch) if !@batch.nil?
+     @batch = nil
+   end
+   # gzip data
+   def gzip(data)
+     gz = StringIO.new
+     gz.set_encoding("BINARY")
+     z = Zlib::GzipWriter.new(gz)
+     z.write(data)
+     z.close
+     gz.string
+   end
+
+   def send(batch)
+     payload = batch.to_json
+     res = loki_http_request(payload)
+     if res.is_a?(Net::HTTPSuccess)
+       @logger.debug("Successfully pushed data to loki")
+     else
+       @logger.debug("failed payload", :payload => payload)
+     end
+   end
+
+   def loki_http_request(payload)
+     req = Net::HTTP::Post.new(
+       @uri.request_uri
+     )
+     #req.add_field('Content-Type', 'application/json')
+     req.add_field('Content-Type', 'gzip')
+     req.add_field('X-Scope-OrgID', @tenant_id) if @tenant_id
+     req['User-Agent'] = 'loki-logstash'
+     req.basic_auth(@username, @password) if @username
+     req.body = gzip(payload)
+
+     opts = ssl_opts(@uri)
+
+     @logger.debug("sending #{req.body.length} bytes to loki")
+     retry_count = 0
+     delay = @min_delay
+     begin
+       res = Net::HTTP.start(@uri.host, @uri.port, **opts) { |http|
+         http.request(req)
+       }
+       return res if !res.nil? && res.code.to_i != 429 && res.code.to_i.div(100) != 5
+       raise StandardError.new res
+     rescue StandardError => e
+       retry_count += 1
+       @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
+       if retry_count < @retries
+         sleep delay
+         if delay * 2 <= @max_delay
+           delay = delay * 2
+         else
+           delay = @max_delay
+         end
+         retry
+       else
+         @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
+         return res
+       end
+     end
+   end
+ end
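
`loki_http_request` above retries on HTTP 429, 5xx, and transport errors, sleeping `min_delay` seconds and doubling the delay after each failed attempt, capped at `max_delay`, for at most `retries` attempts. A small standalone sketch of that backoff schedule with the plugin's defaults (`min_delay=1`, `max_delay=300`, `retries=10`); the helper name is mine, not part of the plugin:

```ruby
# Sketch: reproduce the sequence of sleeps used between retry attempts.
# A delay is slept before each retry, so there are (retries - 1) sleeps at most.
def backoff_delays(min_delay: 1, max_delay: 300, retries: 10)
  delays = []
  delay = min_delay
  (retries - 1).times do
    delays << delay
    delay = delay * 2 <= max_delay ? delay * 2 : max_delay   # double, capped at max_delay
  end
  delays
end

p backoff_delays                                            # => [1, 2, 4, 8, 16, 32, 64, 128, 256]
p backoff_delays(min_delay: 5, max_delay: 30, retries: 6)   # => [5, 10, 20, 30, 30]
```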
data/logstash-output-lokis.gemspec ADDED
@@ -0,0 +1,26 @@
+ Gem::Specification.new do |s|
+   s.name = 'logstash-output-lokis'
+   s.version = '1.1.0'
+   s.authors = ['Aditya C S','Cyril Tovena']
+   s.email = ['aditya.gnu@gmail.com','cyril.tovena@grafana.com']
+
+   s.summary = 'Output plugin to ship logs to a Grafana Loki server'
+   s.description = 'Output plugin to ship logs to a Grafana Loki server'
+   s.homepage = 'https://github.com/grafana/loki/'
+   s.license = 'Apache-2.0'
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir['lib/**/*','spec/**/*','vendor/**/*','*.gemspec','*.md','CONTRIBUTORS','Gemfile']
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+   # Gem dependencies
+   #
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency "logstash-codec-plain", "3.1.0"
+   s.add_development_dependency 'logstash-devutils', "2.0.2"
+ end
data/spec/outputs/loki/entry_spec.rb ADDED
@@ -0,0 +1,66 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/outputs/loki"
+ require "logstash/codecs/plain"
+ require "logstash/event"
+ require "net/http"
+ include Loki
+
+ describe Loki::Entry do
+   context 'test entry generation' do
+     let (:event) {
+       LogStash::Event.new(
+         {
+           'message' => 'hello',
+           '@metadata' => {'foo'=>'bar'},
+           '@version' => '1',
+           'foo' => 5,
+           'agent' => 'filebeat',
+           'log' => {
+             'file' =>
+               {'@path' => '/path/to/file.log'},
+           },
+           'host' => '172.0.0.1',
+           '@timestamp' => Time.now
+         }
+       )
+     }
+
+     it 'labels extracted should not contain objects, metadata, or the timestamp' do
+       entry = Entry.new(event, "message", [])
+       expect(entry.labels).to eql({ 'agent' => 'filebeat', 'host' => '172.0.0.1', 'foo'=>'5'})
+       expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+       expect(entry.entry['line']).to eql 'hello'
+     end
+
+     it 'labels extracted should only contain allowlisted labels' do
+       entry = Entry.new(event, "message", %w[agent foo])
+       expect(entry.labels).to eql({ 'agent' => 'filebeat', 'foo'=>'5'})
+       expect(entry.entry['ts']).to eql to_ns(event.get("@timestamp"))
+       expect(entry.entry['line']).to eql 'hello'
+     end
+   end
+
+   context 'test batch generation with label order' do
+     let (:entries) {[
+       Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", []),
+       Entry.new(LogStash::Event.new({"log"=>"foobar","bar"=>"bar","@timestamp"=>Time.at(2)}),"log", []),
+       Entry.new(LogStash::Event.new({"cluster"=>"us-central1","message"=>"foobuzz","buzz"=>"bar","@timestamp"=>Time.at(3)}),"message", []),
+
+     ]}
+     let (:expected) {
+       {"streams" => [
+         {"stream"=> {"buzz"=>"bar","cluster"=>"us-central1"}, "values" => [[to_ns(Time.at(1)).to_s,"foobuzz"],[to_ns(Time.at(3)).to_s,"foobuzz"]]},
+         {"stream"=> {"bar"=>"bar"}, "values"=>[[to_ns(Time.at(2)).to_s,"foobar"]]},
+       ] }
+     }
+
+     it 'to_json' do
+       @batch = Loki::Batch.new(entries.first)
+       entries.drop(1).each { |e| @batch.add(e)}
+       expect(JSON.parse(@batch.to_json)).to eql expected
+     end
+   end
+
+
+ end
data/spec/outputs/loki_spec.rb ADDED
@@ -0,0 +1,263 @@
+ # encoding: utf-8
+ require "logstash/devutils/rspec/spec_helper"
+ require "logstash/outputs/loki"
+ require "logstash/codecs/plain"
+ require "logstash/event"
+ require "net/http"
+ require 'webmock/rspec'
+ include Loki
+
+ describe LogStash::Outputs::Loki do
+
+   let (:simple_loki_config) { {'url' => 'http://localhost:3100'} }
+
+   context 'when initializing' do
+     it "should register" do
+       loki = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+       expect { loki.register }.to_not raise_error
+     end
+
+     it 'should populate loki config with default or initialized values' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config)
+       expect(loki.url).to eql 'http://localhost:3100'
+       expect(loki.tenant_id).to eql nil
+       expect(loki.batch_size).to eql 102400
+       expect(loki.batch_wait).to eql 1
+     end
+   end
+
+   context 'when adding an entry to the batch' do
+     let (:simple_loki_config) {{'url' => 'http://localhost:3100'}}
+     let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
+     let (:lbs) {{"buzz"=>"bar","cluster"=>"us-central1"}.sort.to_h}
+     let (:include_loki_config) {{ 'url' => 'http://localhost:3100', 'include_fields' => ["cluster"] }}
+     let (:include_entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", ["cluster"])}
+     let (:include_lbs) {{"cluster"=>"us-central1"}.sort.to_h}
+
+     it 'should not add empty line' do
+       plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+       emptyEntry = Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"foo", [])
+       expect(plugin.add_entry_to_batch(emptyEntry)).to eql true
+       expect(plugin.batch).to eql nil
+     end
+
+     it 'should add entry' do
+       plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config)
+       expect(plugin.batch).to eql nil
+       expect(plugin.add_entry_to_batch(entry)).to eql true
+       expect(plugin.add_entry_to_batch(entry)).to eql true
+       expect(plugin.batch).not_to be_nil
+       expect(plugin.batch.streams.length).to eq 1
+       expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 2
+       expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+       expect(plugin.batch.size_bytes).to eq 14
+     end
+
+     it 'should only allow labels defined in include_fields' do
+       plugin = LogStash::Plugin.lookup("output", "loki").new(include_loki_config)
+       expect(plugin.batch).to eql nil
+       expect(plugin.add_entry_to_batch(include_entry)).to eql true
+       expect(plugin.add_entry_to_batch(include_entry)).to eql true
+       expect(plugin.batch).not_to be_nil
+       expect(plugin.batch.streams.length).to eq 1
+       expect(plugin.batch.streams[include_lbs.to_s]['entries'].length).to eq 2
+       expect(plugin.batch.streams[include_lbs.to_s]['labels']).to eq include_lbs
+       expect(plugin.batch.size_bytes).to eq 14
+     end
+
+     it 'should not add if full' do
+       plugin = LogStash::Plugin.lookup("output", "loki").new(simple_loki_config.merge!({'batch_size'=>10}))
+       expect(plugin.batch).to eql nil
+       expect(plugin.add_entry_to_batch(entry)).to eql true # first entry is fine.
+       expect(plugin.batch).not_to be_nil
+       expect(plugin.batch.streams.length).to eq 1
+       expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+       expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+       expect(plugin.batch.size_bytes).to eq 7
+       expect(plugin.add_entry_to_batch(entry)).to eql false # second entry goes over the limit.
+       expect(plugin.batch).not_to be_nil
+       expect(plugin.batch.streams.length).to eq 1
+       expect(plugin.batch.streams[lbs.to_s]['entries'].length).to eq 1
+       expect(plugin.batch.streams[lbs.to_s]['labels']).to eq lbs
+       expect(plugin.batch.size_bytes).to eq 7
+     end
+   end
+
+   context 'batch expiration' do
+     let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
+
+     it 'should not expire if empty' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+       sleep(1)
+       expect(loki.is_batch_expired).to be false
+     end
+     it 'should not expire batch if not old' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+       expect(loki.add_entry_to_batch(entry)).to eql true
+       expect(loki.is_batch_expired).to be false
+     end
+     it 'should expire if old' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5}))
+       expect(loki.add_entry_to_batch(entry)).to eql true
+       sleep(1)
+       expect(loki.is_batch_expired).to be true
+     end
+   end
+
+   context 'channel' do
+     let (:event) {LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)})}
+
+     it 'should send entry if batch size reached with no tenant' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+       loki.register
+       sent = Queue.new
+       allow(loki).to receive(:send) do |batch|
+         Thread.new do
+           sent << batch
+         end
+       end
+       loki.receive(event)
+       loki.receive(event)
+       sent.deq
+       sent.deq
+       loki.close
+     end
+     it 'should send entry while closing' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>10,'batch_size'=>10}))
+       loki.register
+       sent = Queue.new
+       allow(loki).to receive(:send) do |batch|
+         Thread.new do
+           sent << batch
+         end
+       end
+       loki.receive(event)
+       loki.close
+       sent.deq
+     end
+     it 'should send entry when batch is expiring' do
+       loki = LogStash::Outputs::Loki.new(simple_loki_config.merge!({'batch_wait'=>0.5,'batch_size'=>10}))
+       loki.register
+       sent = Queue.new
+       allow(loki).to receive(:send) do |batch|
+         Thread.new do
+           sent << batch
+         end
+       end
+       loki.receive(event)
+       sent.deq
+       sleep(0.01) # Adding a minimal sleep. In few cases @batch=nil might happen after evaluating for nil
+       expect(loki.batch).to be_nil
+       loki.close
+     end
+   end
+
+   context 'http requests' do
+     let (:entry) {Entry.new(LogStash::Event.new({"message"=>"foobuzz","buzz"=>"bar","cluster"=>"us-central1","@timestamp"=>Time.at(1)}),"message", [])}
+
+     it 'should send credentials' do
+       conf = {
+         'url'=>'http://localhost:3100/loki/api/v1/push',
+         'username' => 'foo',
+         'password' => 'bar',
+         'tenant_id' => 't'
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://localhost:3100/loki/api/v1/push").with(
+         basic_auth: ['foo', 'bar'],
+         body: b.to_json,
+         headers:{
+           'Content-Type' => 'application/json',
+           'User-Agent' => 'loki-logstash',
+           'X-Scope-OrgID'=>'t',
+           'Accept'=>'*/*',
+           'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+         }
+       )
+       loki.send(b)
+       expect(post).to have_been_requested.times(1)
+     end
+
+     it 'should not send credentials' do
+       conf = {
+         'url'=>'http://foo.com/loki/api/v1/push',
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+         body: b.to_json,
+         headers:{
+           'Content-Type' => 'application/json',
+           'User-Agent' => 'loki-logstash',
+           'Accept'=>'*/*',
+           'Accept-Encoding'=>'gzip;q=1.0,deflate;q=0.6,identity;q=0.3',
+         }
+       )
+       loki.send(b)
+       expect(post).to have_been_requested.times(1)
+     end
+     it 'should retry 500' do
+       conf = {
+         'url'=>'http://foo.com/loki/api/v1/push',
+         'retries' => 3,
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+         body: b.to_json,
+       ).to_return(status: [500, "Internal Server Error"])
+       loki.send(b)
+       loki.close
+       expect(post).to have_been_requested.times(3)
+     end
+     it 'should retry 429' do
+       conf = {
+         'url'=>'http://foo.com/loki/api/v1/push',
+         'retries' => 2,
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+         body: b.to_json,
+       ).to_return(status: [429, "stop spamming"])
+       loki.send(b)
+       loki.close
+       expect(post).to have_been_requested.times(2)
+     end
+     it 'should not retry 400' do
+       conf = {
+         'url'=>'http://foo.com/loki/api/v1/push',
+         'retries' => 11,
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+         body: b.to_json,
+       ).to_return(status: [400, "bad request"])
+       loki.send(b)
+       loki.close
+       expect(post).to have_been_requested.times(1)
+     end
+     it 'should retry exception' do
+       conf = {
+         'url'=>'http://foo.com/loki/api/v1/push',
+         'retries' => 11,
+       }
+       loki = LogStash::Outputs::Loki.new(conf)
+       loki.register
+       b = Batch.new(entry)
+       post = stub_request(:post, "http://foo.com/loki/api/v1/push").with(
+         body: b.to_json,
+       ).to_raise("some error").then.to_return(status: [200, "fine !"])
+       loki.send(b)
+       loki.close
+       expect(post).to have_been_requested.times(2)
+     end
+   end
+ end
metadata ADDED
@@ -0,0 +1,105 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-output-lokis
+ version: !ruby/object:Gem::Version
+   version: 1.1.0
+ platform: ruby
+ authors:
+ - Aditya C S
+ - Cyril Tovena
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2023-02-23 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   name: logstash-core-plugin-api
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   name: logstash-codec-plain
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 3.1.0
+   type: :runtime
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 3.1.0
+ - !ruby/object:Gem::Dependency
+   name: logstash-devutils
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.0.2
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.0.2
+ description: Output plugin to ship logs to a Grafana Loki server
+ email:
+ - aditya.gnu@gmail.com
+ - cyril.tovena@grafana.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Gemfile
+ - README.md
+ - lib/logstash/outputs/loki.rb
+ - lib/logstash/outputs/loki/batch.rb
+ - lib/logstash/outputs/loki/entry.rb
+ - logstash-output-lokis.gemspec
+ - spec/outputs/loki/entry_spec.rb
+ - spec/outputs/loki_spec.rb
+ homepage: https://github.com/grafana/loki/
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: output
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.1.6
+ signing_key:
+ specification_version: 4
+ summary: Output plugin to ship logs to a Grafana Loki server
+ test_files:
+ - spec/outputs/loki/entry_spec.rb
+ - spec/outputs/loki_spec.rb