logstash-outputs-percy 1.0.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/Gemfile +20 -0
- data/README.md +88 -0
- data/lib/logstash/outputs/Edge/TimeSeries.rb +15 -0
- data/lib/logstash/outputs/Edge/batch.rb +63 -0
- data/lib/logstash/outputs/Edge/entry.rb +26 -0
- data/lib/logstash/outputs/Edge/label.rb +16 -0
- data/lib/logstash/outputs/Edge/sample.rb +11 -0
- data/lib/logstash/outputs/percy.rb +369 -0
- data/lib/logstash/outputs/prom_test.rb +77 -0
- data/logstash-outputs-percy.gemspec +33 -0
- metadata +168 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
|
|
1
|
+
---
|
2
|
+
SHA256:
|
3
|
+
metadata.gz: 86e88a59aa8640845990ad4e530ee75d3e9144403a4dd412492419fa853ec1f7
|
4
|
+
data.tar.gz: 3a6a321473482b7a6e8d7d84c4e9a24f72c89ca16c100b148e0377919641be8b
|
5
|
+
SHA512:
|
6
|
+
metadata.gz: 10310ae5394101e61941316c7dc1afbf72b18f83e81e8e097b0793b430ed73d9a0cc8861c7ea28d573a5ebca59b1e30d2cf27b5006bfa3b9a682a68c03f9080b
|
7
|
+
data.tar.gz: 3f81aa02c32fa7fac5f0769c893296d1a6c32f0f031ca693a183961cc7daa673148a03025778f163181cbb9bd9d86ca7df63c8df7872c9e0dbf1a08a2209fb07
|
data/Gemfile
ADDED
@@ -0,0 +1,20 @@
|
|
1
|
+
# Gemfile for the logstash-outputs-percy plugin.
# Dependencies are resolved against a local logstash checkout (LOGSTASH_PATH)
# so the plugin builds against logstash's vendored JRuby gems.
source 'https://rubygems.org'

gemspec

# Location of the logstash source tree; defaults to a sibling checkout.
logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"

if Dir.exist?(logstash_path)
  gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
  # NOTE(review): path pins the mixin to logstash's jruby 2.6.0 vendor dir —
  # confirm this matches the logstash version actually checked out.
  gem 'logstash-mixin-http_client', :path => "#{logstash_path}/vendor/bundle/jruby/2.6.0/gems/logstash-mixin-http_client"
else
  # Fail fast when the vendored logstash tree is missing.
  raise 'missing logstash vendoring'
end

gem "oauth2", "~> 2.0.9"
gem "webmock", "~> 3.8"
gem 'logstash-devutils', "~> 2"
gem 'sinatra'
gem 'webrick'
gem "prometheus-client", "~> 1.0.0"
gem 'snappy'
|
data/README.md
ADDED
@@ -0,0 +1,88 @@
|
|
1
|
+
# Contributing to Loki Logstash Output Plugin
|
2
|
+
|
3
|
+
For information about how to use this plugin see this [documentation](../../docs/sources/clients/logstash/_index.md).
|
4
|
+
|
5
|
+
## Install dependencies
|
6
|
+
|
7
|
+
First, make sure you have JDK version `8` or `11` installed and you have set the `JAVA_HOME` environment variable.
|
8
|
+
|
9
|
+
You need to setup JRuby environment to build this plugin. Refer https://github.com/rbenv/rbenv for setting up your rbenv environment.
|
10
|
+
|
11
|
+
After setting up `rbenv`. Install JRuby
|
12
|
+
|
13
|
+
```bash
|
14
|
+
rbenv install jruby-9.2.10.0
|
15
|
+
rbenv local jruby-9.2.10.0
|
16
|
+
```
|
17
|
+
|
18
|
+
Check that the environment is configured
|
19
|
+
|
20
|
+
```bash
|
21
|
+
ruby --version
|
22
|
+
jruby 9.2.10
|
23
|
+
```
|
24
|
+
|
25
|
+
You should make sure you are running `jruby` and not `ruby`. If the command `ruby --version` still shows `ruby` and not `jruby`, check that PATH contains `$HOME/.rbenv/shims` and `$HOME/.rbenv/bin`. Also verify that you have this in your bash profile:
|
26
|
+
|
27
|
+
```bash
|
28
|
+
export PATH="$HOME/.rbenv/bin:$PATH"
|
29
|
+
eval "$(rbenv init -)"
|
30
|
+
```
|
31
|
+
|
32
|
+
Then install bundler:
|
33
|
+
|
34
|
+
```bash
|
35
|
+
gem install bundler:2.1.4
|
36
|
+
```
|
37
|
+
|
38
|
+
Follow those instructions to [install logstash](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html) before moving to the next section.
|
39
|
+
|
40
|
+
## Build and test the plugin
|
41
|
+
|
42
|
+
### Install required packages
|
43
|
+
|
44
|
+
```bash
|
45
|
+
git clone git@github.com:elastic/logstash.git
|
46
|
+
cd logstash
|
47
|
+
git checkout tags/v7.16.1
|
48
|
+
export LOGSTASH_PATH="$(pwd)"
|
49
|
+
export GEM_PATH="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
|
50
|
+
export GEM_HOME="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
|
51
|
+
./gradlew assemble
|
52
|
+
cd ..
|
53
|
+
ruby -S bundle config set --local path "$LOGSTASH_PATH/vendor/bundle"
|
54
|
+
ruby -S bundle install
|
55
|
+
ruby -S bundle exec rake vendor
|
56
|
+
```
|
57
|
+
|
58
|
+
### Build the plugin
|
59
|
+
|
60
|
+
```bash
|
61
|
+
gem build logstash-outputs-percy.gemspec
|
62
|
+
```
|
63
|
+
|
64
|
+
### Test
|
65
|
+
|
66
|
+
```bash
|
67
|
+
ruby -S bundle exec rspec
|
68
|
+
```
|
69
|
+
|
70
|
+
Alternatively, if you don't want to install JRuby, you can work inside the logstash-loki container.
|
71
|
+
|
72
|
+
```bash
|
73
|
+
docker build -t logstash-loki ./
|
74
|
+
docker run -v $(pwd)/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
|
75
|
+
bundle exec rspec
|
76
|
+
```
|
77
|
+
|
78
|
+
## Install plugin to local logstash
|
79
|
+
|
80
|
+
```bash
|
81
|
+
bin/logstash-plugin install --no-verify --local logstash-outputs-percy-1.0.0.gem
|
82
|
+
```
|
83
|
+
|
84
|
+
## Send sample event and check plugin is working
|
85
|
+
|
86
|
+
```bash
|
87
|
+
bin/logstash -f loki.conf
|
88
|
+
```
|
@@ -0,0 +1,15 @@
|
|
1
|
+
require "logstash/outputs/edge/sample"
require "logstash/outputs/edge/label"

module Edge
  # A single Prometheus remote-write time series: a label set plus its samples.
  class TimeSeries
    attr_reader :labels, :samples

    # labels_param :: labels for the series (see Edge::Label)
    # sample_param :: samples for the series (see Edge::Sample)
    def initialize(labels_param, sample_param)
      # FIX: the original assigned plain locals (`labels = ...`), leaving the
      # @labels/@samples instance variables exposed by attr_reader forever nil.
      @labels = labels_param
      @samples = sample_param
    end
  end
end
|
@@ -0,0 +1,63 @@
|
|
1
|
+
require 'time'
# FIX: to_json relies on the json gem's Hash#to_json, but this file never
# required it — it only worked when something else had loaded json first.
require 'json'

module Edge
  # Accumulates log entries, grouped by their (sorted) label set, until the
  # owning output decides the batch is full or old enough to flush.
  class Batch
    attr_reader :streams

    # e :: first entry; must respond to #entry (a hash with 'ts' and 'line')
    #      and #labels (a flat String=>String hash), e.g. Edge::Entry.
    def initialize(e)
      @bytes = 0
      @createdAt = Time.now
      @streams = {}
      add(e)
    end

    # Total bytes of log lines accumulated so far.
    def size_bytes
      @bytes
    end

    # Adds an entry, appending to the stream with the same label set if one
    # exists, otherwise starting a new stream.
    def add(e)
      @bytes += e.entry['line'].length

      # Sort labels so that equivalent label sets hash to the same stream key
      # regardless of insertion order.
      labels = e.labels.sort.to_h
      labelkey = labels.to_s
      if @streams.has_key?(labelkey)
        @streams[labelkey]['entries'].append(e.entry)
      else
        @streams[labelkey] = {
          "labels" => labels,
          "entries" => [e.entry],
        }
      end
    end

    # Size the batch would have after appending +line+ (used to decide
    # whether the entry still fits under the configured batch_size).
    def size_bytes_after(line)
      @bytes + line.length
    end

    # Seconds elapsed since this batch was created.
    def age
      Time.now - @createdAt
    end

    # Serialises the batch into the Loki push payload:
    #   {"streams" => [{"stream" => labels, "values" => [[ts, line], ...]}]}
    def to_json
      streams = @streams.map { |_, stream| build_stream(stream) }
      { "streams" => streams }.to_json
    end

    # Converts one internal stream record into the wire format; timestamps
    # are stringified as Loki expects.
    def build_stream(stream)
      values = stream['entries'].map { |entry| [entry['ts'].to_s, entry['line']] }
      {
        'stream' => stream['labels'],
        'values' => values
      }
    end
  end
end
|
@@ -0,0 +1,26 @@
|
|
1
|
+
module Edge
  # Converts a timestamp-like value (anything responding to #to_f, seconds)
  # into integer nanoseconds.
  def to_ns(s)
    (s.to_f * 1_000_000_000).to_i
  end

  # Wraps a logstash event as a single log entry ('ts' + 'line') together
  # with the label set derived from the event's remaining fields.
  class Entry
    include Edge

    attr_reader :labels, :entry

    # event          :: logstash event (duck-typed: get/clone/remove/to_hash)
    # message_field  :: name of the field holding the log line
    # include_fields :: when non-empty, only these fields become labels
    def initialize(event, message_field, include_fields)
      @entry = {
        "ts" => to_ns(event.get("@timestamp")),
        "line" => event.get(message_field).to_s
      }

      # Work on a copy so the caller's event is left untouched; the message
      # and timestamp are already captured in @entry.
      stripped = event.clone
      stripped.remove(message_field)
      stripped.remove("@timestamp")

      @labels = {}
      stripped.to_hash.each do |field, val|
        next if field.start_with?('@') || val.is_a?(Hash)
        next if !include_fields.empty? && !include_fields.include?(field)
        @labels[field] = val.to_s
      end
    end
  end
end
|
@@ -0,0 +1,369 @@
|
|
1
|
+
# encoding: utf-8
require "logstash/outputs/base"
require "logstash/outputs/edge/entry"
require "logstash/outputs/edge/batch"
require "logstash/outputs/edge/TimeSeries"
require "logstash/outputs/edge/sample"
require "logstash/outputs/edge/label"
require "logstash/namespace"
require 'net/http'
require 'time'
require 'uri'
require 'json'
require 'prometheus/client'
require 'prometheus/client/push'
require 'prometheus/client/formats/text'
require 'oauth2'
require 'snappy'

# Logstash output that accumulates events into size/time bounded batches and
# ships them to a Prometheus/Loki-style HTTP endpoint, authenticating with an
# OAuth2 client-credentials token.
class LogStash::Outputs::Percy < LogStash::Outputs::Base
  include Edge
  config_name "lopro"

  ## 'A single instance of the Output will be shared among the pipeline worker threads'
  concurrency :single

  ## 'Prom URL'
  config :url, :validate => :string, :required => true

  ## 'BasicAuth credentials'
  config :client_id, :validate => :string, :required => true
  config :client_secret, :validate => :string, secret: true, :required => true

  ## 'Prom Token URL Domain'
  config :tokenurl_domain, :validate => :string, :required => true

  ## 'Prom Token URL Endpoint'
  config :tokenurl_endpoint, :validate => :string, :default => "oauth2/v2.0/token", :required => false

  ## 'Scopes'
  config :scopes, :validate => :string, :required => true

  ## 'Proxy URL'
  config :proxy_url, :validate => :string, :required => false

  ## 'Disable server certificate verification'
  config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false

  ## 'Client certificate'
  config :cert, :validate => :path, :required => false
  config :key, :validate => :path, :required => false

  ## 'TLS'
  config :ca_cert, :validate => :path, :required => false

  ## 'Maximum batch size to accrue before pushing to loki. Defaults to 102400 bytes'
  config :batch_size, :validate => :number, :default => 102400, :required => false

  ## 'Interval in seconds to wait before pushing a batch of records to loki. Defaults to 1 second'
  config :batch_wait, :validate => :number, :default => 1, :required => false

  ## 'Log line field to pick from logstash. Defaults to "message"'
  config :message_field, :validate => :string, :default => "message", :required => false

  ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
  config :min_delay, :validate => :number, :default => 1, :required => false

  ## 'An array of fields to map to labels, if defined only fields in this list will be mapped.'
  config :include_fields, :validate => :array, :default => [], :required => false

  ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
  config :max_delay, :validate => :number, :default => 300, :required => false

  ## 'Backoff configuration. Maximum number of retries to do'
  config :retries, :validate => :number, :default => 10, :required => false

  attr_reader :batch
  public
  # Validates configuration, registers the Prometheus gauge and starts the
  # two background flusher threads (size-triggered and age-triggered).
  # Raises LogStash::ConfigurationError on an invalid URL or delay range.
  def register
    @uri = URI.parse(@url)

    @registry = Prometheus::Client.registry
    @http_requests = Prometheus::Client::Gauge.new(:commandsAborted_summation, docstring: 'prometheus', labels: [:app, :env, :product_id, :provider, :region, :site_code])
    @registry.register(@http_requests)

    unless @uri.is_a?(URI::HTTP) || @uri.is_a?(URI::HTTPS)
      raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
    end

    if @min_delay > @max_delay
      raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
    end

    @logger.info("Prom output plugin", :class => self.class.name)

    # initialize Queue and Mutex
    @entries = Queue.new
    @mutex = Mutex.new
    @stop = false

    # create nil batch object.
    @batch = nil

    # validate certs
    if ssl_cert?
      load_ssl
      validate_ssl_key
    end

    # start batch_max_wait and batch_max_size threads
    @batch_wait_thread = Thread.new{max_batch_wait()}
    @batch_size_thread = Thread.new{max_batch_size()}
  end

  # Background loop: drains the entry queue, flushing the current batch
  # whenever adding the next entry would exceed @batch_size.
  def max_batch_size
    @logger.info("max_batch_size: started method")
    loop do
      @mutex.synchronize do
        return if @stop
      end

      # Blocks until an entry is available; nil means the queue was closed.
      e = @entries.deq
      return if e.nil?

      @mutex.synchronize do
        if !add_entry_to_batch(e)
          @logger.debug("Max batch_size is reached. Sending batch to edge-loki")
          send(@batch)
          @batch = Batch.new(e)
        end
      end
    end
  end

  # Background loop: flushes the current batch once it is older than
  # @batch_wait seconds, checking at most every @batch_wait (min 10ms).
  def max_batch_wait
    # minimum wait frequency is 10 milliseconds
    # FIX: `1/100` is integer division and evaluates to 0, contradicting the
    # comment above and allowing sleep(0) busy-looping when batch_wait is 0.
    min_wait_checkfrequency = 0.01
    max_wait_checkfrequency = @batch_wait
    if max_wait_checkfrequency < min_wait_checkfrequency
      max_wait_checkfrequency = min_wait_checkfrequency
    end

    loop do
      @mutex.synchronize do
        return if @stop
      end

      sleep(max_wait_checkfrequency)
      if is_batch_expired
        @mutex.synchronize do
          @logger.debug("Max batch_wait time is reached. Sending batch to loki")
          send(@batch)
          @batch = nil
        end
      end
    end
  end

  # True when both a client key and certificate were configured.
  def ssl_cert?
    !@key.nil? && !@cert.nil?
  end

  # Replaces the configured cert/key file paths with parsed OpenSSL objects.
  def load_ssl
    @cert = OpenSSL::X509::Certificate.new(File.read(@cert)) if @cert
    @key = OpenSSL::PKey.read(File.read(@key)) if @key
  end

  # Raises unless the loaded private key is RSA or DSA.
  def validate_ssl_key
    if !@key.is_a?(OpenSSL::PKey::RSA) && !@key.is_a?(OpenSSL::PKey::DSA)
      raise LogStash::ConfigurationError, "Unsupported private key type '#{@key.class}''"
    end
  end

  # Builds the Net::HTTP SSL option hash for +uri+ from the plugin config.
  def ssl_opts(uri)
    opts = {
      use_ssl: uri.scheme == 'https'
    }

    # disable server certificate verification
    if @insecure_skip_verify
      opts = opts.merge(
        verify_mode: OpenSSL::SSL::VERIFY_NONE
      )
    end

    if !@cert.nil? && !@key.nil?
      opts = opts.merge(
        verify_mode: OpenSSL::SSL::VERIFY_PEER,
        cert: @cert,
        key: @key
      )
    end

    unless @ca_cert.nil?
      opts = opts.merge(
        ca_file: @ca_cert
      )
    end
    opts
  end

  # Add an entry to the current batch returns false if the batch is full and the entry can't be added.
  def add_entry_to_batch(e)
    line = e.entry['line']
    # we don't want to send empty lines.
    return true if line.to_s.strip.empty?

    if @batch.nil?
      @batch = Batch.new(e)
      return true
    end

    if @batch.size_bytes_after(line) > @batch_size
      return false
    end
    @batch.add(e)
    return true
  end

  # True when a batch exists and has outlived the configured batch_wait.
  def is_batch_expired
    return !@batch.nil? && @batch.age() >= @batch_wait
  end

  ## Receives logstash events
  public
  def receive(event)
    @entries << Entry.new(event, @message_field, @include_fields)
  end

  # Stops the flusher threads and sends any partially-filled batch.
  def close
    @entries.close
    @mutex.synchronize do
      @stop = true
    end
    @batch_wait_thread.join
    @batch_size_thread.join

    # if by any chance we still have a forming batch, we need to send it.
    send(@batch) if !@batch.nil?
    @batch = nil
  end

  # Ships a batch: fetches an OAuth2 token and pushes the registry via the
  # (project-patched) Prometheus push gateway client.
  # NOTE(review): this overrides Kernel#send for instances of this class;
  # `send(@batch)` still resolves here, but metaprogramming that relies on
  # Object#send against this output will break — consider renaming.
  def send(batch)
    @logger.info("send: started method")
    payload = batch.to_json
    client = createClient()
    access = getToken(client)
    # NOTE(review): gauge value and label values are hard-coded test data —
    # confirm before production use.
    @http_requests.set(21.534, labels: label_set_for({ app: 'apmt_metric_test', env: 'test', product_id: 'apmt_test', provider: 'onprem', region: 'westeurope', site_code: 'USTIS01'}))
    # NOTE(review): `edge_add` is not part of the upstream prometheus-client
    # gem; it presumably comes from a local patch — verify it is installed.
    push = Prometheus::Client::Push.new('prometheus').edge_add(access.token, @uri, @registry)
  end

  # Returns +labels+ with all values stringified, or nil for an empty hash.
  def label_set_for(labels)
    return stringify_values(labels) if !labels.empty?
  end

  # Copies +labels+ converting every value to a String.
  def stringify_values(labels)
    stringified = {}
    labels.each { |k,v| stringified[k] = v.to_s }

    stringified
  end

  # Converts a name=>value hash into an array of Edge::Label objects.
  def setLabels(labels_param)
    stringified = []
    labels_param.each { |k,v| stringified.append(Label.new(k, v)) }

    stringified
  end

  # Builds a single Edge::Sample from a timestamp and value.
  def setSamples(sample_ts_param, sample_val_param)
    return Sample.new(sample_ts_param, sample_val_param)
  end

  # Experimental remote-write request: serialises a hard-coded TimeSeries
  # payload, snappy-compresses it and POSTs it with the given access token.
  # NOTE(review): Marshal.dump of a JSON string is not the protobuf encoding
  # remote-write expects — this path looks like scaffolding, confirm intent.
  def WriteRequest(token)
    puts "TimeSeries"
    payload1 = {"TimeSeries": {Labels: label_set_for({ '__name__':'vsphere_vm', app: 'apmt_logs_test', env: 'test', product_id: 'apmt_test', provider: 'onprem', region: 'westeurope', site_code: 'USTIS01'}), Samples: [Timestamp: Time.now.to_f(), Value: 10.5 ]}}.to_json
    puts payload1
    payload = Marshal.dump(payload1)
    source = Snappy.deflate(payload)

    puts "Payload"
    puts payload
    puts "Snappy"
    puts source
    response = token.post(@uri, {:body => source, :headers => {'Content-Type' => 'application/x-protobuf', 'Content-Encoding' => 'snappy', 'User-Agent' => 'mop-edge-1.0.0', 'X-Prometheus-Remote-Write-Version' => '0.1.0'}})
    return response
  end

  # Builds an OAuth2 client-credentials client (no proxy).
  def createClient()
    @logger.info("createClient: started method")
    client = OAuth2::Client.new(@client_id, @client_secret, auth_scheme: "basic_auth", site: @tokenurl_domain,:token_url => @tokenurl_endpoint)
    return client
  end

  # Builds an OAuth2 client routed through @proxy_url with TLS verification
  # disabled on the token connection.
  def createClientwithProxy()
    conn_opts = {}
    conn_opts = conn_opts.merge('ssl' => {verify: false})
    conn_opts = conn_opts.merge('proxy' => @proxy_url)
    client = OAuth2::Client.new(@client_id, @client_secret, auth_scheme: "basic_auth", site: @tokenurl_domain,:token_url => @tokenurl_endpoint, :connection_opts => conn_opts)
    return client
  end

  # Fetches a client-credentials access token for the configured scopes.
  def getToken(client)
    params = {}
    opts = {}
    params = params.merge('scope' => @scopes)
    access = client.client_credentials.get_token(params, opts)
    return access
  end

  # POSTs a serialised batch with retry/backoff: retries on 429 and 5xx up to
  # @retries times, doubling the delay from @min_delay to @max_delay.
  # Returns the final status code string (or the last response on exhaustion).
  def edge_http_request(token, payload)
    @logger.info("edge_http_request: started method")
    cntLines = JSON.parse(payload)["streams"][0]["values"].size
    @logger.info("processing #{cntLines} lines to edge-prom")
    # FIX: the original posted an undefined local `payload1` with undefined
    # `params` headers, raising NameError on the first call.
    # NOTE(review): header set assumed from the JSON payload — confirm what
    # the edge endpoint expects.
    headers = {
      'Content-Type' => 'application/json',
      'User-Agent' => 'mop-edge-1.0.0'
    }
    retry_count = 0
    delay = @min_delay
    begin
      res = token.post(@uri, {:body => payload, :headers => headers})
      status_code = "#{res.status}"
      @logger.info("send: status_code", :status_code => status_code)
      begin
        @logger.info("send: res_body", :res_body => res)
      rescue StandardError => err
        @logger.warn("Failed to send batch, attempt: ", :error_inspect => err.inspect, :error => err)
      end
      # Done unless the server asked us to back off (429) or failed (5xx).
      return status_code if !status_code.nil? && status_code.to_i != 429 && status_code.to_i.div(100) != 5
      # FIX: the original had an unconditional `return res` before this raise,
      # making the retry/backoff path below unreachable.
      raise StandardError.new res
    rescue StandardError => e
      retry_count += 1
      @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
      if retry_count < @retries
        sleep delay
        # Exponential backoff capped at @max_delay.
        if delay * 2 <= @max_delay
          delay = delay * 2
        else
          delay = @max_delay
        end
        retry
      else
        @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
        return res
      end
    end
  end

end
|
@@ -0,0 +1,77 @@
|
|
1
|
+
# encoding: utf-8
require 'net/http'
require 'time'
require 'uri'
require 'json'
require 'oauth2'
require 'snappy'

# Standalone scratch harness for exercising the OAuth2 token flow and a single
# Prometheus remote-write POST outside of logstash.
class Promtest

  # Configures endpoint/credentials and fires one request.
  def register
    @url = "https://telemetry.pensieve.maersk-digital.net/api/v1/push"
    # SECURITY FIX: the client id/secret were hard-coded here and have been
    # committed to source control — they must be considered leaked and
    # rotated. Read them from the environment instead.
    @client_id = ENV.fetch("PERCY_CLIENT_ID")
    @client_secret = ENV.fetch("PERCY_CLIENT_SECRET")
    @tokenurl_domain = "https://login.microsoftonline.com/05d75c05-fa1a-42e7-9cf1-eb416c396f2d/"
    @tokenurl_endpoint = "oauth2/v2.0/token"
    @scopes = "api://ingestion.pensieve/.default"

    @uri = URI.parse(@url)

    send()
  end

  # Acquires a token and performs the write request.
  def send()
    puts("send: started method")
    # FIX: the original referenced undefined locals `batch` (for an unused
    # payload) and `token`, raising NameError before any request was made.
    client = createClient()
    access = getToken(client)
    puts "token"
    puts access.token

    WriteRequest(access)
  end

  # Returns +labels+ with all values stringified, or nil for an empty hash.
  def label_set_for(labels)
    return stringify_values(labels) if !labels.empty?
  end

  # Copies +labels+ converting every value to a String.
  def stringify_values(labels)
    stringified = {}
    labels.each { |k,v| stringified[k] = v.to_s }

    stringified
  end

  # Serialises a hard-coded TimeSeries payload, snappy-compresses it and
  # POSTs it with the given access token.
  # NOTE(review): Marshal.dump of a JSON string is not the protobuf encoding
  # remote-write expects — this looks like scaffolding, confirm intent.
  def WriteRequest(token)
    puts "TimeSeries"
    payload1 = {"TimeSeries": {Labels: label_set_for({ '__name__':'vsphere_vm', app: 'apmt_logs_test', env: 'test', product_id: 'apmt_test', provider: 'onprem', region: 'westeurope', site_code: 'USTIS01'}), Samples: [Timestamp: Time.now.to_f(), Value: 10.5 ]}}.to_json
    puts payload1
    payload = Marshal.dump(payload1)
    source = Snappy.deflate(payload)

    puts "Payload"
    puts payload
    puts "Snappy"
    puts source
    response = token.post(@uri, {:body => source, :headers => {'Content-Type' => 'application/x-protobuf', 'Content-Encoding' => 'snappy', 'User-Agent' => 'mop-edge-1.0.0', 'X-Prometheus-Remote-Write-Version' => '0.1.0'}})
    return response
  end

  # Builds an OAuth2 client-credentials client.
  def createClient()
    # FIX: this plain class has no @logger; the original call raised
    # NoMethodError on nil.
    puts("createClient: started method")
    client = OAuth2::Client.new(@client_id, @client_secret, auth_scheme: "basic_auth", site: @tokenurl_domain,:token_url => @tokenurl_endpoint)
    return client
  end

  # Fetches a client-credentials access token for the configured scopes.
  def getToken(client)
    params = {}
    opts = {}
    params = params.merge('scope' => @scopes)
    access = client.client_credentials.get_token(params, opts)
    return access
  end

end
|
@@ -0,0 +1,33 @@
|
|
1
|
+
# Gem specification for the logstash-outputs-percy output plugin.
Gem::Specification.new do |s|
  s.name = 'logstash-outputs-percy'
  s.version = '1.0.0'
  s.authors = ['Britto Prabhu']
  s.email = ['britto.prabhu@apmterminals.com']

  # FIX: summary and description were each assigned twice; the first pair was
  # dead code silently overwritten. Only the surviving values are kept.
  s.summary = "Sends events to a generic HTTP or HTTPS endpoint with additional oauth2 token support"
  s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program. This plugin supports oauth2 token."
  s.homepage = 'https://github.com/Maersk-Global/apmt-observability-deployment'
  s.license = 'Apache-2.0'
  s.require_paths = ["lib"]

  # Files
  s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]

  # Tests
  s.test_files = s.files.grep(%r{^(test|spec|features)/})

  # Special flag to let us know this is actually a logstash plugin
  s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }

  # Gem dependencies
  s.add_runtime_dependency "oauth2", "~> 2.0.9"
  s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
  s.add_runtime_dependency "logstash-mixin-http_client", ">= 7.2.0", "< 8.0.0"
  s.add_runtime_dependency "prometheus-client", "1.0.0"

  s.add_development_dependency 'logstash-devutils'
  s.add_development_dependency 'sinatra'
  s.add_development_dependency 'webrick'
end
|
metadata
ADDED
@@ -0,0 +1,168 @@
|
|
1
|
+
--- !ruby/object:Gem::Specification
|
2
|
+
name: logstash-outputs-percy
|
3
|
+
version: !ruby/object:Gem::Version
|
4
|
+
version: 1.0.0
|
5
|
+
platform: ruby
|
6
|
+
authors:
|
7
|
+
- Britto Prabhu
|
8
|
+
autorequire:
|
9
|
+
bindir: bin
|
10
|
+
cert_chain: []
|
11
|
+
date: 2024-08-07 00:00:00.000000000 Z
|
12
|
+
dependencies:
|
13
|
+
- !ruby/object:Gem::Dependency
|
14
|
+
name: oauth2
|
15
|
+
requirement: !ruby/object:Gem::Requirement
|
16
|
+
requirements:
|
17
|
+
- - "~>"
|
18
|
+
- !ruby/object:Gem::Version
|
19
|
+
version: 2.0.9
|
20
|
+
type: :runtime
|
21
|
+
prerelease: false
|
22
|
+
version_requirements: !ruby/object:Gem::Requirement
|
23
|
+
requirements:
|
24
|
+
- - "~>"
|
25
|
+
- !ruby/object:Gem::Version
|
26
|
+
version: 2.0.9
|
27
|
+
- !ruby/object:Gem::Dependency
|
28
|
+
name: logstash-core-plugin-api
|
29
|
+
requirement: !ruby/object:Gem::Requirement
|
30
|
+
requirements:
|
31
|
+
- - ">="
|
32
|
+
- !ruby/object:Gem::Version
|
33
|
+
version: '1.60'
|
34
|
+
- - "<="
|
35
|
+
- !ruby/object:Gem::Version
|
36
|
+
version: '2.99'
|
37
|
+
type: :runtime
|
38
|
+
prerelease: false
|
39
|
+
version_requirements: !ruby/object:Gem::Requirement
|
40
|
+
requirements:
|
41
|
+
- - ">="
|
42
|
+
- !ruby/object:Gem::Version
|
43
|
+
version: '1.60'
|
44
|
+
- - "<="
|
45
|
+
- !ruby/object:Gem::Version
|
46
|
+
version: '2.99'
|
47
|
+
- !ruby/object:Gem::Dependency
|
48
|
+
name: logstash-mixin-http_client
|
49
|
+
requirement: !ruby/object:Gem::Requirement
|
50
|
+
requirements:
|
51
|
+
- - ">="
|
52
|
+
- !ruby/object:Gem::Version
|
53
|
+
version: 7.2.0
|
54
|
+
- - "<"
|
55
|
+
- !ruby/object:Gem::Version
|
56
|
+
version: 8.0.0
|
57
|
+
type: :runtime
|
58
|
+
prerelease: false
|
59
|
+
version_requirements: !ruby/object:Gem::Requirement
|
60
|
+
requirements:
|
61
|
+
- - ">="
|
62
|
+
- !ruby/object:Gem::Version
|
63
|
+
version: 7.2.0
|
64
|
+
- - "<"
|
65
|
+
- !ruby/object:Gem::Version
|
66
|
+
version: 8.0.0
|
67
|
+
- !ruby/object:Gem::Dependency
|
68
|
+
name: prometheus-client
|
69
|
+
requirement: !ruby/object:Gem::Requirement
|
70
|
+
requirements:
|
71
|
+
- - '='
|
72
|
+
- !ruby/object:Gem::Version
|
73
|
+
version: 1.0.0
|
74
|
+
type: :runtime
|
75
|
+
prerelease: false
|
76
|
+
version_requirements: !ruby/object:Gem::Requirement
|
77
|
+
requirements:
|
78
|
+
- - '='
|
79
|
+
- !ruby/object:Gem::Version
|
80
|
+
version: 1.0.0
|
81
|
+
- !ruby/object:Gem::Dependency
|
82
|
+
name: logstash-devutils
|
83
|
+
requirement: !ruby/object:Gem::Requirement
|
84
|
+
requirements:
|
85
|
+
- - ">="
|
86
|
+
- !ruby/object:Gem::Version
|
87
|
+
version: '0'
|
88
|
+
type: :development
|
89
|
+
prerelease: false
|
90
|
+
version_requirements: !ruby/object:Gem::Requirement
|
91
|
+
requirements:
|
92
|
+
- - ">="
|
93
|
+
- !ruby/object:Gem::Version
|
94
|
+
version: '0'
|
95
|
+
- !ruby/object:Gem::Dependency
|
96
|
+
name: sinatra
|
97
|
+
requirement: !ruby/object:Gem::Requirement
|
98
|
+
requirements:
|
99
|
+
- - ">="
|
100
|
+
- !ruby/object:Gem::Version
|
101
|
+
version: '0'
|
102
|
+
type: :development
|
103
|
+
prerelease: false
|
104
|
+
version_requirements: !ruby/object:Gem::Requirement
|
105
|
+
requirements:
|
106
|
+
- - ">="
|
107
|
+
- !ruby/object:Gem::Version
|
108
|
+
version: '0'
|
109
|
+
- !ruby/object:Gem::Dependency
|
110
|
+
name: webrick
|
111
|
+
requirement: !ruby/object:Gem::Requirement
|
112
|
+
requirements:
|
113
|
+
- - ">="
|
114
|
+
- !ruby/object:Gem::Version
|
115
|
+
version: '0'
|
116
|
+
type: :development
|
117
|
+
prerelease: false
|
118
|
+
version_requirements: !ruby/object:Gem::Requirement
|
119
|
+
requirements:
|
120
|
+
- - ">="
|
121
|
+
- !ruby/object:Gem::Version
|
122
|
+
version: '0'
|
123
|
+
description: This gem is a Logstash plugin required to be installed on top of the
|
124
|
+
Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
|
125
|
+
gem is not a stand-alone program. This plugin supports oauth2 token.
|
126
|
+
email:
|
127
|
+
- britto.prabhu@apmterminals.com
|
128
|
+
executables: []
|
129
|
+
extensions: []
|
130
|
+
extra_rdoc_files: []
|
131
|
+
files:
|
132
|
+
- Gemfile
|
133
|
+
- README.md
|
134
|
+
- lib/logstash/outputs/Edge/TimeSeries.rb
|
135
|
+
- lib/logstash/outputs/Edge/batch.rb
|
136
|
+
- lib/logstash/outputs/Edge/entry.rb
|
137
|
+
- lib/logstash/outputs/Edge/label.rb
|
138
|
+
- lib/logstash/outputs/Edge/sample.rb
|
139
|
+
- lib/logstash/outputs/percy.rb
|
140
|
+
- lib/logstash/outputs/prom_test.rb
|
141
|
+
- logstash-outputs-percy.gemspec
|
142
|
+
homepage: https://github.com/Maersk-Global/apmt-observability-deployment
|
143
|
+
licenses:
|
144
|
+
- Apache-2.0
|
145
|
+
metadata:
|
146
|
+
logstash_plugin: 'true'
|
147
|
+
logstash_group: output
|
148
|
+
post_install_message:
|
149
|
+
rdoc_options: []
|
150
|
+
require_paths:
|
151
|
+
- lib
|
152
|
+
required_ruby_version: !ruby/object:Gem::Requirement
|
153
|
+
requirements:
|
154
|
+
- - ">="
|
155
|
+
- !ruby/object:Gem::Version
|
156
|
+
version: '0'
|
157
|
+
required_rubygems_version: !ruby/object:Gem::Requirement
|
158
|
+
requirements:
|
159
|
+
- - ">="
|
160
|
+
- !ruby/object:Gem::Version
|
161
|
+
version: '0'
|
162
|
+
requirements: []
|
163
|
+
rubygems_version: 3.0.3.1
|
164
|
+
signing_key:
|
165
|
+
specification_version: 4
|
166
|
+
summary: Sends events to a generic HTTP or HTTPS endpoint with additional oauth2 token
|
167
|
+
support
|
168
|
+
test_files: []
|