logstash-output-edge_prom 1.0.0

checksums.yaml ADDED
@@ -0,0 +1,7 @@
+ ---
+ SHA256:
+   metadata.gz: 4ae9aa1cf6be8b8357d308f3957d3d855a0d20cc5a52624e0fd6a03e4dafd58b
+   data.tar.gz: 64bab6b47d181438f7d669327540f9300f5aa0a34702ed7570f715e772c7d23c
+ SHA512:
+   metadata.gz: 883f3a85b391f146f8f932640e7c14005c90498a7fa845aa0fa006c1b935fa128f98cf4f4485ae10e9900aeaeb064f9638d889e4d482f27a14f7cbdcb22044b0
+   data.tar.gz: 473d4715f229775c1d68af285eccbd3c2c15e94807a5befea08ca6fdce4add9b27d33ec931bec68bcc1d0fe16832e5a05970cd1b3360bf80e5dea5611a1010f8
data/Gemfile ADDED
@@ -0,0 +1,19 @@
+ source 'https://rubygems.org'
+
+ gemspec
+
+ logstash_path = ENV["LOGSTASH_PATH"] || "./logstash"
+
+ if Dir.exist?(logstash_path)
+   gem 'logstash-core', :path => "#{logstash_path}/logstash-core"
+   gem 'logstash-mixin-http_client', :path => "#{logstash_path}/vendor/bundle/jruby/2.6.0/gems/logstash-mixin-http_client"
+ else
+   raise 'missing logstash vendoring'
+ end
+
+ gem "oauth2", "~> 2.0.9"
+ gem "webmock", "~> 3.8"
+ gem 'logstash-devutils', "~> 2"
+ gem 'sinatra'
+ gem 'webrick'
+ gem "prometheus-client", "~> 1.0.0"
data/README.md ADDED
@@ -0,0 +1,88 @@
+ # Contributing to Loki Logstash Output Plugin
+
+ For information about how to use this plugin see this [documentation](../../docs/sources/clients/logstash/_index.md).
+
+ ## Install dependencies
+
+ First, make sure you have JDK version `8` or `11` installed and you have set the `JAVA_HOME` environment variable.
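+
+ You can quickly verify both before continuing, for example (the exact output depends on your JDK vendor and version):
+
+ ```bash
+ java -version
+ echo "$JAVA_HOME"
+ ```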
+
+ You need to set up a JRuby environment to build this plugin. Refer to https://github.com/rbenv/rbenv for setting up your rbenv environment.
+
+ After setting up `rbenv`, install JRuby:
+
+ ```bash
+ rbenv install jruby-9.2.10.0
+ rbenv local jruby-9.2.10.0
+ ```
+
+ Check that the environment is configured:
+
+ ```bash
+ ruby --version
+ jruby 9.2.10
+ ```
+
+ You should make sure you are running `jruby` and not `ruby`. If `ruby --version` still shows `ruby` and not `jruby`, check that your PATH contains `$HOME/.rbenv/shims` and `$HOME/.rbenv/bin`. Also verify that you have this in your bash profile:
+
+ ```bash
+ export PATH="$HOME/.rbenv/bin:$PATH"
+ eval "$(rbenv init -)"
+ ```
+
+ Then install bundler:
+
+ ```bash
+ gem install bundler:2.1.4
+ ```
+
+ Follow these instructions to [install Logstash](https://www.elastic.co/guide/en/logstash/current/installing-logstash.html) before moving to the next section.
+
+ ## Build and test the plugin
+
+ ### Install required packages
+
+ ```bash
+ git clone git@github.com:elastic/logstash.git
+ cd logstash
+ git checkout tags/v7.16.1
+ export LOGSTASH_PATH="$(pwd)"
+ export GEM_PATH="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+ export GEM_HOME="$LOGSTASH_PATH/vendor/bundle/jruby/2.5.0"
+ ./gradlew assemble
+ cd ..
+ ruby -S bundle config set --local path "$LOGSTASH_PATH/vendor/bundle"
+ ruby -S bundle install
+ ruby -S bundle exec rake vendor
+ ```
+
+ ### Build the plugin
+
+ ```bash
+ gem build logstash-output-edge_prom.gemspec
+ ```
+
+ ### Test
+
+ ```bash
+ ruby -S bundle exec rspec
+ ```
+
+ Alternatively, if you don't want to install JRuby, you can work inside the logstash-loki container:
+
+ ```bash
+ docker build -t logstash-loki ./
+ docker run -v $(pwd)/spec:/home/logstash/spec -it --rm --entrypoint /bin/sh logstash-loki
+ bundle exec rspec
+ ```
+
+ ## Install plugin to local logstash
+
+ ```bash
+ bin/logstash-plugin install --no-verify --local logstash-output-edge_prom-1.0.0.gem
+ ```
+
+ ## Send a sample event and check the plugin is working
+
+ ```bash
+ bin/logstash -f loki.conf
+ ```
data/lib/logstash/outputs/Edge/TimeSeries.rb ADDED
@@ -0,0 +1,67 @@
+
+ module Edge
+   class TimeSeries
+     attr_reader :labels, :samples
+
+     def initialize()
+       @labels = []
+       @samples = []
+     end
+
+     def label_set_for(labels)
+       #@logger.info("label_set_for: started method")
+       return stringify_values(labels) if !labels.empty?
+     end
+
+     def stringify_values(labels)
+       #@logger.info("stringify_values: started method")
+       stringified = {}
+       labels.each { |k,v| stringified[k] = v.to_s }
+
+       stringified
+     end
+
+     def set_labels(labels_param)
+       labels_param.each { |k,v| @labels.append(Label.new(k, v)) }
+     end
+
+     def set_samples(samples_param)
+       # samples_param is a [timestamp, value] pair
+       @samples.append(Sample.new(samples_param[1], samples_param[0]))
+     end
+
+     def WriteRequest(uri, token)
+       set_labels(label_set_for({ '__name__': 'vsphere_vm', app: 'apmt_metric_test', env: 'test', product_id: 'apmt_test', provider: 'onprem', region: 'westeurope', site_code: 'USTIS01' }))
+       set_samples([Time.now.to_f, 10.785])
+       # NOTE: Marshal is a Ruby-specific serialization; a Prometheus remote-write
+       # endpoint expects a snappy-compressed protobuf WriteRequest body.
+       payload = Marshal.dump(self)
+       response = token.post(uri, {:body => payload, :headers => {'Content-Type' => 'application/x-protobuf', 'Content-Encoding' => 'snappy', 'User-Agent' => 'mop-edge-1.0.0', 'X-Prometheus-Remote-Write-Version' => '0.1.0'}})
+       return response
+     end
+
+     def Send(uri, token)
+       return WriteRequest(uri, token)
+     end
+   end
+
+   class Label
+     attr_reader :name, :value
+
+     def initialize(name_param, value_param)
+       @name = name_param
+       @value = value_param
+     end
+   end
+
+   class Sample
+     attr_reader :value, :timestamp
+
+     def initialize(value_param, timestamp_param)
+       @timestamp = timestamp_param
+       @value = value_param
+     end
+   end
+ end
data/lib/logstash/outputs/Edge/batch.rb ADDED
@@ -0,0 +1,63 @@
+ require 'time'
+ require 'json'
+
+ module Edge
+   class Batch
+     attr_reader :streams
+     def initialize(e)
+       @bytes = 0
+       @createdAt = Time.now
+       @streams = {}
+       add(e)
+     end
+
+     def size_bytes
+       return @bytes
+     end
+
+     def add(e)
+       @bytes = @bytes + e.entry['line'].length
+
+       # Append the entry to an already existing stream (if any)
+       labels = e.labels.sort.to_h
+       labelkey = labels.to_s
+       if @streams.has_key?(labelkey)
+         stream = @streams[labelkey]
+         stream['entries'].append(e.entry)
+         return
+       else
+         # Add the entry as a new stream
+         @streams[labelkey] = {
+           "labels" => labels,
+           "entries" => [e.entry],
+         }
+       end
+     end
+
+     def size_bytes_after(line)
+       return @bytes + line.length
+     end
+
+     def age()
+       return Time.now - @createdAt
+     end
+
+     def to_json
+       streams = []
+       @streams.each { |_, stream|
+         streams.append(build_stream(stream))
+       }
+       return {"streams" => streams}.to_json
+     end
+
+     def build_stream(stream)
+       values = []
+       stream['entries'].each { |entry|
+         values.append([entry['ts'].to_s, entry['line']])
+       }
+       return {
+         'stream' => stream['labels'],
+         'values' => values
+       }
+     end
+   end
+ end
data/lib/logstash/outputs/Edge/entry.rb ADDED
@@ -0,0 +1,26 @@
+ module Edge
+   def to_ns(s)
+     (s.to_f * (10**9)).to_i
+   end
+   class Entry
+     include Edge
+     attr_reader :labels, :entry
+     def initialize(event, message_field, include_fields)
+       @entry = {
+         "ts" => to_ns(event.get("@timestamp")),
+         "line" => event.get(message_field).to_s
+       }
+       event = event.clone()
+       event.remove(message_field)
+       event.remove("@timestamp")
+
+       @labels = {}
+       event.to_hash.each { |key, value|
+         next if key.start_with?('@')
+         next if value.is_a?(Hash)
+         next if include_fields.length() > 0 and not include_fields.include?(key)
+         @labels[key] = value.to_s
+       }
+     end
+   end
+ end
data/lib/logstash/outputs/edge_prom.rb ADDED
@@ -0,0 +1,303 @@
+ # encoding: utf-8
+ require "logstash/outputs/base"
+ require "logstash/outputs/Edge/entry"
+ require "logstash/outputs/Edge/batch"
+ require "logstash/outputs/Edge/TimeSeries"
+ require "logstash/namespace"
+ require 'net/http'
+ require 'openssl'
+ require 'time'
+ require 'uri'
+ require 'json'
+ require 'prometheus/client'
+ require 'prometheus/client/push'
+ require 'oauth2'
+
+ class LogStash::Outputs::Edge_Prom < LogStash::Outputs::Base
+   include Edge
+   config_name "edge_prom"
+
+   ## 'A single instance of the Output will be shared among the pipeline worker threads'
+   concurrency :single
+
+   ## 'Prom URL'
+   config :url, :validate => :string, :required => true
+
+   ## 'BasicAuth credentials'
+   config :client_id, :validate => :string, :required => true
+   config :client_secret, :validate => :string, secret: true, :required => true
+
+   ## 'Prom Token URL Domain'
+   config :tokenurl_domain, :validate => :string, :required => true
+
+   ## 'Prom Token URL Endpoint'
+   config :tokenurl_endpoint, :validate => :string, :default => "oauth2/v2.0/token", :required => false
+
+   ## 'Scopes'
+   config :scopes, :validate => :string, :required => true
+
+   ## 'Proxy URL'
+   config :proxy_url, :validate => :string, :required => false
+
+   ## 'Disable server certificate verification'
+   config :insecure_skip_verify, :validate => :boolean, :default => false, :required => false
+
+   ## 'Client certificate'
+   config :cert, :validate => :path, :required => false
+   config :key, :validate => :path, :required => false
+
+   ## 'TLS'
+   config :ca_cert, :validate => :path, :required => false
+
+   ## 'Maximum batch size to accrue before pushing to the endpoint. Defaults to 102400 bytes'
+   config :batch_size, :validate => :number, :default => 102400, :required => false
+
+   ## 'Interval in seconds to wait before pushing a batch of records to the endpoint. Defaults to 1 second'
+   config :batch_wait, :validate => :number, :default => 1, :required => false
+
+   ## 'Log line field to pick from logstash. Defaults to "message"'
+   config :message_field, :validate => :string, :default => "message", :required => false
+
+   ## 'Backoff configuration. Initial backoff time between retries. Default 1s'
+   config :min_delay, :validate => :number, :default => 1, :required => false
+
+   ## 'An array of fields to map to labels, if defined only fields in this list will be mapped.'
+   config :include_fields, :validate => :array, :default => [], :required => false
+
+   ## 'Backoff configuration. Maximum backoff time between retries. Default 300s'
+   config :max_delay, :validate => :number, :default => 300, :required => false
+
+   ## 'Backoff configuration. Maximum number of retries to do'
+   config :retries, :validate => :number, :default => 10, :required => false
+
+   attr_reader :batch
+   public
+   def register
+     @uri = URI.parse(@url)
+     unless @uri.is_a?(URI::HTTP) || @uri.is_a?(URI::HTTPS)
+       raise LogStash::ConfigurationError, "url parameter must be valid HTTP, currently '#{@url}'"
+     end
+
+     if @min_delay > @max_delay
+       raise LogStash::ConfigurationError, "Min delay should be less than Max delay, currently 'Min delay is #{@min_delay} and Max delay is #{@max_delay}'"
+     end
+
+     @logger.info("Prom output plugin", :class => self.class.name)
+
+     # initialize Queue and Mutex
+     @entries = Queue.new
+     @mutex = Mutex.new
+     @stop = false
+
+     # create nil batch object.
+     @batch = nil
+
+     # validate certs
+     if ssl_cert?
+       load_ssl
+       validate_ssl_key
+     end
+
+     # start batch_max_wait and batch_max_size threads
+     @batch_wait_thread = Thread.new{max_batch_wait()}
+     @batch_size_thread = Thread.new{max_batch_size()}
+   end
+
+   def max_batch_size
+     #@logger.info("max_batch_size: started method")
+     loop do
+       @mutex.synchronize do
+         return if @stop
+       end
+
+       e = @entries.deq
+       return if e.nil?
+
+       @mutex.synchronize do
+         if !add_entry_to_batch(e)
+           @logger.debug("Max batch_size is reached. Sending batch to edge-loki")
+           send(@batch)
+           @batch = Batch.new(e)
+         end
+       end
+     end
+   end
+
+   def max_batch_wait
+     # minimum wait frequency is 10 milliseconds
+     min_wait_checkfrequency = 1.0 / 100
+     max_wait_checkfrequency = @batch_wait
+     if max_wait_checkfrequency < min_wait_checkfrequency
+       max_wait_checkfrequency = min_wait_checkfrequency
+     end
+
+     loop do
+       @mutex.synchronize do
+         return if @stop
+       end
+
+       sleep(max_wait_checkfrequency)
+       if is_batch_expired
+         @mutex.synchronize do
+           @logger.debug("Max batch_wait time is reached. Sending batch to loki")
+           send(@batch)
+           @batch = nil
+         end
+       end
+     end
+   end
+
+   def ssl_cert?
+     !@key.nil? && !@cert.nil?
+   end
+
+   def load_ssl
+     @cert = OpenSSL::X509::Certificate.new(File.read(@cert)) if @cert
+     @key = OpenSSL::PKey.read(File.read(@key)) if @key
+   end
+
+   def validate_ssl_key
+     if !@key.is_a?(OpenSSL::PKey::RSA) && !@key.is_a?(OpenSSL::PKey::DSA)
+       raise LogStash::ConfigurationError, "Unsupported private key type '#{@key.class}'"
+     end
+   end
+
+   def ssl_opts(uri)
+     opts = {
+       use_ssl: uri.scheme == 'https'
+     }
+
+     # disable server certificate verification
+     if @insecure_skip_verify
+       opts = opts.merge(
+         verify_mode: OpenSSL::SSL::VERIFY_NONE
+       )
+     end
+
+     if !@cert.nil? && !@key.nil?
+       opts = opts.merge(
+         verify_mode: OpenSSL::SSL::VERIFY_PEER,
+         cert: @cert,
+         key: @key
+       )
+     end
+
+     unless @ca_cert.nil?
+       opts = opts.merge(
+         ca_file: @ca_cert
+       )
+     end
+     opts
+   end
+
+   # Add an entry to the current batch; returns false if the batch is full and the entry can't be added.
+   def add_entry_to_batch(e)
+     line = e.entry['line']
+     # we don't want to send empty lines.
+     return true if line.to_s.strip.empty?
+
+     if @batch.nil?
+       @batch = Batch.new(e)
+       return true
+     end
+
+     if @batch.size_bytes_after(line) > @batch_size
+       return false
+     end
+     @batch.add(e)
+     return true
+   end
+
+   def is_batch_expired
+     return !@batch.nil? && @batch.age() >= @batch_wait
+   end
+
+   ## Receives logstash events
+   public
+   def receive(event)
+     @entries << Entry.new(event, @message_field, @include_fields)
+   end
+
+   def close
+     @entries.close
+     @mutex.synchronize do
+       @stop = true
+     end
+     @batch_wait_thread.join
+     @batch_size_thread.join
+
+     # if by any chance we still have a forming batch, we need to send it.
+     send(@batch) if !@batch.nil?
+     @batch = nil
+   end
+
+   def send(batch)
+     @logger.info("send: started method")
+     payload = batch.to_json
+     client = createClient()
+     access = getToken(client)
+     series = Edge::TimeSeries.new()
+     series.Send(@uri, access)
+   end
+
+   def createClient()
+     @logger.info("createClient: started method")
+     client = OAuth2::Client.new(@client_id, @client_secret, auth_scheme: :basic_auth, site: @tokenurl_domain, :token_url => @tokenurl_endpoint)
+     return client
+   end
+
+   def createClientwithProxy()
+     #@logger.info("createClientwithProxy: started method")
+     conn_opts = {}
+     conn_opts = conn_opts.merge(:ssl => {verify: false})
+     conn_opts = conn_opts.merge(:proxy => @proxy_url)
+     client = OAuth2::Client.new(@client_id, @client_secret, auth_scheme: :basic_auth, site: @tokenurl_domain, :token_url => @tokenurl_endpoint, :connection_opts => conn_opts)
+     return client
+   end
+
+   def getToken(client)
+     params = {}
+     opts = {}
+     params = params.merge('scope' => @scopes)
+     access = client.client_credentials.get_token(params, opts)
+     return access
+   end
+
+   def edge_http_request(token, payload)
+     @logger.info("edge_http_request: started method")
+     cntLines = JSON.parse(payload)["streams"][0]["values"].size
+     @logger.info("processing #{cntLines} lines to edge-prom")
+     retry_count = 0
+     delay = @min_delay
+     # headers for the push request; Batch#to_json produces a JSON payload
+     headers = { 'Content-Type' => 'application/json' }
+     begin
+       res = token.post(@uri, {:body => payload, :headers => headers})
+       status_code = "#{res.status}"
+       @logger.info("send: status_code", :status_code => status_code)
+       begin
+         @logger.info("send: res_body", :res_body => res)
+         #JSON.parse(res.body)
+         #@logger.info("send res parsed", :res.parsed => res.parsed)
+       rescue StandardError => err
+         @logger.warn("Failed to send batch, attempt: ", :error_inspect => err.inspect, :error => err)
+       end
+       # success or non-retryable status: return; 429 and 5xx fall through and are retried
+       return status_code if !status_code.nil? && status_code.to_i != 429 && status_code.to_i.div(100) != 5
+       raise StandardError.new res
+     rescue StandardError => e
+       retry_count += 1
+       @logger.warn("Failed to send batch, attempt: #{retry_count}/#{@retries}", :error_inspect => e.inspect, :error => e)
+       if retry_count < @retries
+         sleep delay
+         if delay * 2 <= @max_delay
+           delay = delay * 2
+         else
+           delay = @max_delay
+         end
+         retry
+       else
+         @logger.error("Failed to send batch", :error_inspect => e.inspect, :error => e)
+         return res
+       end
+     end
+   end
+
+ end
data/logstash-output-edge_prom.gemspec ADDED
@@ -0,0 +1,33 @@
+ Gem::Specification.new do |s|
+   s.name = 'logstash-output-edge_prom'
+   s.version = '1.0.0'
+   s.authors = ['Britto Prabhu']
+   s.email = ['britto.prabhu@apmterminals.com']
+
+   s.summary = "Sends events to a generic HTTP or HTTPS endpoint with additional oauth2 token support"
+   s.description = "This gem is a Logstash plugin required to be installed on top of the Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This gem is not a stand-alone program. This plugin supports oauth2 token."
+   s.homepage = 'https://github.com/Maersk-Global/apmt-observability-deployment'
+   s.license = 'Apache-2.0'
+   s.require_paths = ["lib"]
+
+   # Files
+   s.files = Dir["lib/**/*","spec/**/*","*.gemspec","*.md","CONTRIBUTORS","Gemfile","LICENSE","NOTICE.TXT", "vendor/jar-dependencies/**/*.jar", "vendor/jar-dependencies/**/*.rb", "VERSION", "docs/**/*"]
+
+   # Tests
+   s.test_files = s.files.grep(%r{^(test|spec|features)/})
+
+   # Special flag to let us know this is actually a logstash plugin
+   s.metadata = { "logstash_plugin" => "true", "logstash_group" => "output" }
+
+   # Gem dependencies
+   s.add_runtime_dependency "oauth2", "~> 2.0.9"
+   s.add_runtime_dependency "logstash-core-plugin-api", ">= 1.60", "<= 2.99"
+   s.add_runtime_dependency "logstash-mixin-http_client", ">= 7.2.0", "< 8.0.0"
+   s.add_runtime_dependency "prometheus-client", "1.0.0"
+
+   s.add_development_dependency 'logstash-devutils'
+   s.add_development_dependency 'sinatra'
+   s.add_development_dependency 'webrick'
+ end
metadata ADDED
@@ -0,0 +1,165 @@
+ --- !ruby/object:Gem::Specification
+ name: logstash-output-edge_prom
+ version: !ruby/object:Gem::Version
+   version: 1.0.0
+ platform: ruby
+ authors:
+ - Britto Prabhu
+ autorequire:
+ bindir: bin
+ cert_chain: []
+ date: 2023-09-23 00:00:00.000000000 Z
+ dependencies:
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.0.9
+   name: oauth2
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - "~>"
+       - !ruby/object:Gem::Version
+         version: 2.0.9
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+   name: logstash-core-plugin-api
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '1.60'
+     - - "<="
+       - !ruby/object:Gem::Version
+         version: '2.99'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 7.2.0
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 8.0.0
+   name: logstash-mixin-http_client
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: 7.2.0
+     - - "<"
+       - !ruby/object:Gem::Version
+         version: 8.0.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.0.0
+   name: prometheus-client
+   prerelease: false
+   type: :runtime
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.0.0
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: logstash-devutils
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: sinatra
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   name: webrick
+   prerelease: false
+   type: :development
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ description: This gem is a Logstash plugin required to be installed on top of the
+   Logstash core pipeline using $LS_HOME/bin/logstash-plugin install gemname. This
+   gem is not a stand-alone program. This plugin supports oauth2 token.
+ email:
+ - britto.prabhu@apmterminals.com
+ executables: []
+ extensions: []
+ extra_rdoc_files: []
+ files:
+ - Gemfile
+ - README.md
+ - lib/logstash/outputs/Edge/TimeSeries.rb
+ - lib/logstash/outputs/Edge/batch.rb
+ - lib/logstash/outputs/Edge/entry.rb
+ - lib/logstash/outputs/edge_prom.rb
+ - logstash-output-edge_prom.gemspec
+ homepage: https://github.com/Maersk-Global/apmt-observability-deployment
+ licenses:
+ - Apache-2.0
+ metadata:
+   logstash_plugin: 'true'
+   logstash_group: output
+ post_install_message:
+ rdoc_options: []
+ require_paths:
+ - lib
+ required_ruby_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ required_rubygems_version: !ruby/object:Gem::Requirement
+   requirements:
+   - - ">="
+     - !ruby/object:Gem::Version
+       version: '0'
+ requirements: []
+ rubygems_version: 3.2.33
+ signing_key:
+ specification_version: 4
+ summary: Sends events to a generic HTTP or HTTPS endpoint with additional oauth2 token
+   support
+ test_files: []