fluent-plugin-jfrog-metrics 0.2.8 → 0.2.9

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: be1f993ce6a17cbbd55addf9bd1c2c987861373b86d0c9e5bf57fba2391b89c6
- data.tar.gz: 42caf47f85ef0005eae2b35b35c6c4f7cf9c0cb39bd8f8b361cdf282f8e5ea2c
+ metadata.gz: 4858e796a287f5c256acde626e8932c805f3276403fadd70c9a2300681b3c056
+ data.tar.gz: 9ef7a4f3e8a4db027658f16ef2444cdb2c41fc9e6831a6cbd4cfd37d55c85a23
  SHA512:
- metadata.gz: 459aa4c98d67051b19e5c3947277ade652eaaea9fb868bd41ac9ba1d0126bc15247d9b1f4076c21abe526ae74e9e0f68d565578916077ffcf3385717354f7b64
- data.tar.gz: 41008a0e14481e8b1b0a9881ada4559cc4d2edbc13d5af4d43d294b26ebebab58710cd0dfcda55815541b77a129cc68f5fc3b668abcb460a0b8069d0465f5091
+ metadata.gz: ffbaa211813a9c1b7f24204608745d1088213d7140bff0188a2ed9f6aaea7d07296877b9e02809d89ba6ba151124028d578c58b4d1a63244a39328ea452b7015
+ data.tar.gz: 506953ce84e5ec6f2b3510e9485870ccd90ee92fc87d4dad234ff635bbdd80e7731ee0349fd5bbfc78e34e892108a76c94c6a326360413b5efc3c081f9880dc3
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
- fluent-plugin-jfrog-metrics (0.2.8)
+ fluent-plugin-jfrog-metrics (0.2.9)
    fluentd (>= 0.14.10, < 2)
    rest-client (~> 2.1.0)
 
data/README.md CHANGED
@@ -86,15 +86,21 @@ Integration is done by setting up Xray. Obtain JPD url and access token for API.
  - **apikey** (string) (required if token is not used, do refer Note section for specifics): API Key is the [Artifactory API Key](https://www.jfrog.com/confluence/display/JFROG/User+Profile#UserProfile-APIKey) for authentication
  - **token** (string) (required if apikey is not used, do refer Note section for specifics): Admin token is the [Artifactory Scoped Tokens](https://www.jfrog.com/confluence/display/JFROG/Access+Tokens#AccessTokens-GeneratingAdminTokens) for authentication
  - **metric_prefix** (string) (required): This values pulls the specific metrics. Values can be - jfrog.artifactory, jfrog.xray
- - **interval** (integer) (optional): Wait interval between pulling new events
+ - **execution_interval** (integer) (optional): Wait interval between pulling new events (scheduler)
    - Default value: `60`
+ - **timeout_interval** (integer) (optional): Timeout interval for pulling new events (scheduler)
+   - Default value: `60`
+ - **request_timeout** (integer) (optional): Http request timeout when calling Artifactory/Xray to pull new events (http client)
+   - Default value: `20`
  - **common_jpd** (true / false) (optional): This flag should be set as true only for non-kubernetes installations or installations where JPD base URL is same to access both Artifactory and Xray,
    - ex: https://sample_base_url/artifactory or https://sample_base_url/xray
    - Default value: false
- - **target_platform** (string) (optional): Output format of target platform allowed values SPLUNK and ELASTIC
+ - **target_platform** (string) (optional): The target log-vendor ("SPLUNK", "NEWRELIC" or "DATADOG")
    - Default value: `SPLUNK`
  - **verify_ssl** (true / false) (optional): This flag should be set as false in order to bypass client's ssl certificate verification. When false, sets ssl_opts['verify_ssl'] to OpenSSL::SSL::VERIFY_NONE. Otherwise, sets ssl_opts['verify_ssl'] to OpenSSL::SSL::VERIFY_PEER
    - Default value: true
+ - **gzip_compression** (true / false) (optional): This flag should be set as true for compressing (gzip) the metrics payload on outbound posts. This parameter is set to false by default for backwards compatibility.
+   - Default value: false
 
  Note:
 
@@ -104,6 +110,7 @@ Note:
 
  ## Copyright
 
- - Copyright(c) 2020 - JFrog
- - License
+ * Copyright(c) 2024 - JFrog
+ * Maintainers - MahithaB, BenHarosh
+ * License
    - Apache License, Version 2.0
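The README hunks above rename `interval` to `execution_interval` and introduce `timeout_interval`, `request_timeout`, and `gzip_compression`. As a hedged illustration only, a source block wired with the 0.2.9 options might look like the sketch below; the `@type jfrog_metrics` directive is inferred from Fluentd's file-naming convention for `in_jfrog_metrics.rb`, and the URL, tag, and credential values are placeholders, not taken from the package:

```
<source>
  @type jfrog_metrics                # assumed plugin type name
  tag jfrog.artifactory.metrics
  jpd_url https://jpd.example.com    # hypothetical JPD base URL
  username admin
  token <admin_scoped_token>
  metric_prefix jfrog.artifactory
  target_platform DATADOG
  execution_interval 60              # scheduler: seconds between collection runs
  timeout_interval 60                # scheduler: per-run timeout
  request_timeout 20                 # HTTP client: per-request timeout
  gzip_compression true              # gzip the outbound metrics payload
  verify_ssl true
  common_jpd false
</source>
```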
data/fluent-plugin-jfrog-metrics.gemspec CHANGED
@@ -3,12 +3,12 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
 
  Gem::Specification.new do |spec|
    spec.name = 'fluent-plugin-jfrog-metrics'
-   spec.version = '0.2.8'
+   spec.version = '0.2.9'
    spec.authors = ['MahithaB, BenHarosh']
    spec.email = ['cpe-support@jfrog.com']
 
-   spec.summary = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, Elastic)}
-   spec.description = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, Elastic)}
+   spec.summary = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, DataDog)}
+   spec.description = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, DataDog)}
 
    spec.homepage = 'https://github.com/jfrog/jfrog-fluentd-plugins/tree/main/fluent-plugin-jfrog-metrics'
    spec.license = 'Apache-2.0'
data/lib/fluent/plugin/in_jfrog_metrics.rb CHANGED
@@ -21,8 +21,8 @@ require_relative 'base_metrics_parser'
  require_relative 'metrics_helper'
  require_relative 'newrelic_metrics_parser'
  require_relative 'splunk_metrics_parser'
- require_relative 'elastic_metrics_parser'
  require_relative 'datadog_metrics_parser'
+ require_relative 'utility'
 
  module Fluent
    module Plugin
@@ -37,8 +37,9 @@ module Fluent
      config_param :username, :string, default: ''
      config_param :apikey, :string, default: '', :secret => true
      config_param :token, :string, default: '', :secret => true
-     config_param :execution_interval, :time, default: 10
-     config_param :timeout_interval, :time, default: 30
+     config_param :execution_interval, :time, default: 60
+     config_param :timeout_interval, :time, default: 60
+     config_param :request_timeout, :time, default: 20
      config_param :metric_prefix, :string, default: ''
      config_param :target_platform, :string, default: 'SPLUNK'
      config_param :common_jpd, :bool, default: false
@@ -59,7 +60,9 @@ module Fluent
 
      raise Fluent::ConfigError, 'Must define the metric_prefix to use for getting the metrics.' if @metric_prefix == ''
 
-     raise Fluent::ConfigError, 'Must define the vendor to use for getting the metrics.' if @target_platform == ''
+     raise Fluent::ConfigError, 'Must define the target_platform to use for getting the metrics.' if @target_platform == ''
+
+     raise Fluent::ConfigError, 'Must define the target_platform to be fone of the following (DATADOG, NEWRELIC, SPLUNK).' if !(['DATADOG', 'NEWRELIC', 'SPLUNK'].include?(@target_platform))
    end
 
    # `start` is called when starting and after `configure` is successfully completed.
@@ -76,9 +79,9 @@ module Fluent
    end
 
    def run
-     puts('Preparing metrics collection, creating timer task')
+     puts "#{Utility.get_time} Preparing metrics collection, creating timer task"
      timer_task = Concurrent::TimerTask.new(execution_interval: @execution_interval, timeout_interval: @timeout_interval, run_now: true) do
-       puts('Timer task execution')
+       puts "#{Utility.get_time} Timer task execution"
        do_execute
      end
      timer_task.execute
@@ -86,25 +89,39 @@ module Fluent
    end
 
    def do_execute
-     puts('Metrics collection started')
-     metrics_helper = MetricsHelper.new(@metric_prefix, @jpd_url, @username, @apikey, @token, @common_jpd, @verify_ssl)
-     platform_metrics = metrics_helper.get_metrics
+     begin
+       puts "#{Utility.get_time} Metrics collection started"
+       metrics_helper = MetricsHelper.new(@metric_prefix, @jpd_url, @username, @apikey, @token, @common_jpd, @verify_ssl, @request_timeout)
+       platform_metrics = metrics_helper.get_metrics
 
-     additional_metrics = metrics_helper.get_additional_metrics
-     if !additional_metrics.nil? && additional_metrics != ''
-       platform_metrics += additional_metrics.to_s
-     end
-     if @target_platform == 'SPLUNK'
-       parser = SplunkMetricsParser.new(@metric_prefix, router, @tag)
-     elsif @target_platform == 'NEWRELIC'
-       parser = NewRelicMetricsParser.new(@metric_prefix, router, @tag)
-     elsif @target_platform == 'DATADOG'
-       parser = DatadogMetricsParser.new(@metric_prefix, router, @tag)
-     else
-       raise 'Parser Type is not valid.Should be SPLUNK or NEWRELIC or DATADOG'
-     end
-     parser.emit_parsed_metrics(platform_metrics)
-     puts('Metrics collection finished')
+       additional_metrics = metrics_helper.get_additional_metrics
+       if !additional_metrics.nil? && additional_metrics != ''
+         platform_metrics += additional_metrics.to_s
+       end
+       puts "#{Utility.get_time} Metrics collection finished"
+
+       if @target_platform == 'SPLUNK'
+         parser = SplunkMetricsParser.new(@metric_prefix, router, @tag)
+       elsif @target_platform == 'NEWRELIC'
+         parser = NewRelicMetricsParser.new(@metric_prefix, router, @tag)
+       elsif @target_platform == 'DATADOG'
+         parser = DatadogMetricsParser.new(@metric_prefix, router, @tag)
+       else
+         raise 'Parser Type is not valid. target_platform Should be SPLUNK or NEWRELIC or DATADOG'
+       end
+       parser.emit_parsed_metrics(platform_metrics)
+
+     rescue RestClient::Exceptions::OpenTimeout
+       puts "#{Utility.get_time} The request timed out while trying to open a connection. The configured request timeout is: #{@request_timeout}"
+     rescue RestClient::Exceptions::ReadTimeout
+       puts "#{Utility.get_time} The request timed out while waiting for a response. The configured request timeout is: #{@request_timeout}"
+     rescue RestClient::Exceptions::RequestTimeout
+       puts "#{Utility.get_time} The request timed out. The configured request timeout is: #{@request_timeout}"
+     rescue RestClient::ExceptionWithResponse => e
+       puts "#{Utility.get_time} HTTP request failed: #{e.response}"
+     rescue StandardError => e
+       puts "#{Utility.get_time} An unexpected error occurred: #{e.message}"
+     end
    end
  end
end
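Both scheduler defaults move to 60 seconds in the hunks above. For reference, here is a minimal standalone sketch of the `Concurrent::TimerTask` pattern the plugin relies on, assuming the concurrent-ruby gem; the intervals and log line are illustrative, and note that newer concurrent-ruby releases deprecate `timeout_interval`:

```ruby
# Sketch: execution_interval is the delay between runs, timeout_interval bounds
# a single run (ignored by recent concurrent-ruby), run_now: true fires immediately.
require 'concurrent'

task = Concurrent::TimerTask.new(execution_interval: 60,
                                 timeout_interval: 60,
                                 run_now: true) do
  puts "#{Time.now} collecting metrics..." # stand-in for do_execute
end

task.execute    # start the background timer
sleep 130       # keep the process alive while the task fires a couple of times
task.shutdown   # stop scheduling
```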
data/lib/fluent/plugin/metrics_helper.rb CHANGED
@@ -1,11 +1,12 @@
  # frozen_string_literal: true
 
  require 'rest-client'
+ require_relative 'utility'
 
  class MetricsHelper
    @@obs_endpoint_exists = false
 
-   def initialize(metric_prefix, jpd_url, username, apikey, token, common_jpd, verify_ssl)
+   def initialize(metric_prefix, jpd_url, username, apikey, token, common_jpd, verify_ssl, request_timeout)
      @metric_prefix = metric_prefix
      @jpd_url = jpd_url
      @username = username
@@ -13,6 +14,7 @@ class MetricsHelper
      @token = token
      @common_jpd = common_jpd
      @verify_ssl = verify_ssl
+     @request_timeout = request_timeout
    end
 
    def get_metrics
@@ -26,62 +28,73 @@ class MetricsHelper
        "#{@jpd_url}/artifactory/api/v1/metrics"
      end
 
-     puts "Executing #{@metric_prefix} metrics collection from: #{url}"
+     puts "#{Utility.get_time} Executing #{@metric_prefix} metrics collection from: #{url}"
      if !@token.nil? && @token != ''
-       execute_rest_call(url, @username, nil, @token, true, @verify_ssl)
+       execute_rest_call(url, @username, nil, @token, true, @verify_ssl, @request_timeout)
      elsif !@apikey.nil? && @apikey != ''
-       execute_rest_call(url, @username, @apikey, nil, false, @verify_ssl)
+       execute_rest_call(url, @username, @apikey, nil, false, @verify_ssl, @request_timeout)
      end
 
    end
 
    def get_additional_metrics
+     puts "#{Utility.get_time} Aadditional metrics collection started"
      if (@metric_prefix == 'jfrog.artifactory' || @common_jpd == false) && !@token.nil? && @token != ''
        url = "#{@jpd_url}/observability/api/v1/metrics"
-       puts "Executing additional metrics collection from: #{url}"
-       check_endpoint(url, @token, @verify_ssl) if @@obs_endpoint_exists == nil? || !@@obs_endpoint_exists
-       execute_rest_call(url, @username, nil, @token, true, @verify_ssl) if @@obs_endpoint_exists
+       puts "#{Utility.get_time} Collecting additional metrics from: #{url}"
+       check_endpoint(url, @token, @verify_ssl, @request_timeout) if @@obs_endpoint_exists == nil? || !@@obs_endpoint_exists
+       execute_rest_call(url, @username, nil, @token, true, @verify_ssl, @request_timeout) if @@obs_endpoint_exists
      end
    end
 
-   def check_endpoint(url, token, verify_ssl)
+   def check_endpoint(url, token, verify_ssl, request_timeout)
+     puts "#{Utility.get_time} Checking connectivity to endpoint: #{url}"
      response = RestClient::Request.new(
        method: :get,
        url: url,
        headers: { Authorization: "Bearer #{token}"},
-       verify_ssl: verify_ssl
+       verify_ssl: verify_ssl,
+       timeout: request_timeout
      ).execute do |response, request, result|
-       @@obs_endpoint_exists = true if response.code == 200
-       puts "#{url} exists? -> #{@@obs_endpoint_exists}, storing the result for next executions"
+       if response.code == 200
+         @@obs_endpoint_exists = true
+         puts "#{Utility.get_time} #{url} exists: #{@@obs_endpoint_exists}. Storing the result for next executions"
+       else
+         @@obs_endpoint_exists = false
+         puts "#{Utility.get_time} Cannot verify #{url} endpoint. Received response code: #{response.code}, Response body:\n#{response.body}"
+         raise "Unexpected response code: #{response.code} when calling #{url}"
+       end
      end
    end
 
-   def execute_rest_call(url, user, password, token, use_token, verify_ssl)
+   def execute_rest_call(url, user, password, token, use_token, verify_ssl, request_timeout)
      request = if use_token == true
-                 RestClient::Request.new(
-                   method: :get,
-                   url: url,
-                   headers: { Authorization: "Bearer #{token}" },
-                   verify_ssl: verify_ssl
-                 )
-               else
-                 RestClient::Request.new(
-                   method: :get,
-                   url: url,
-                   user: user,
-                   password: password,
-                   verify_ssl: verify_ssl
-                 )
-               end
-
+                 RestClient::Request.new(
+                   method: :get,
+                   url: url,
+                   headers: { Authorization: "Bearer #{token}" },
+                   verify_ssl: verify_ssl,
+                   timeout: request_timeout
+                 )
+               else
+                 RestClient::Request.new(
+                   method: :get,
+                   url: url,
+                   user: user,
+                   password: password,
+                   verify_ssl: verify_ssl,
+                   timeout: request_timeout,
+                 )
+               end
+
      request.execute do |response, request, result|
        case response.code
        when 200
-         puts "#{@metric_prefix} metrics were successfully collected from url: #{url}"
+         puts "#{Utility.get_time} #{@metric_prefix} metrics were successfully collected from url: #{url}"
          return response.body
        else
-         puts "Cannot fetch #{@metric_prefix} metrics from url: #{url}. Received response code: #{response.code}, Response body:\n#{response.body}"
-         raise Fluent::ConfigError, 'Cannot fetch #{@metric_prefix} metrics'
+         puts "#{Utility.get_time} Cannot fetch #{@metric_prefix} metrics from url: #{url}. Received response code: #{response.code}, Response body:\n#{response.body}"
+         raise "Unexpected response code: #{response.code} when calling #{url}"
        end
      end
    end
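The substance of this hunk is threading `request_timeout` into every `RestClient::Request`. Below is a self-contained sketch of what that option does, under the assumption of the rest-client 2.x API; the URL and token are placeholders:

```ruby
# rest-client's :timeout applies to both opening the connection and reading the
# response; failures surface as RestClient::Exceptions subclasses, which the
# plugin's do_execute now rescues.
require 'rest-client'

begin
  response = RestClient::Request.new(
    method: :get,
    url: 'https://jpd.example.com/artifactory/api/v1/metrics', # hypothetical JPD URL
    headers: { Authorization: 'Bearer <token>' },
    timeout: 20 # seconds, matching the request_timeout default
  ).execute
  puts response.body
rescue RestClient::Exceptions::OpenTimeout, RestClient::Exceptions::ReadTimeout => e
  puts "timed out after 20s: #{e.message}"
rescue RestClient::ExceptionWithResponse => e
  puts "HTTP error: #{e.response.code}"
end
```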
data/lib/fluent/plugin/newrelic_metrics_parser.rb CHANGED
@@ -13,14 +13,12 @@ class NewRelicMetricsParser < BaseMetricsParser
    hash_data_array = []
    data_hash = {}
    data_array = []
-   puts cleaned_data
    cleaned_data.each do |interim_data|
      metrics_hash = {}
      if interim_data =~ /{/ && interim_data =~ /}/
        attributes = {}
        metric_name, additional_dims, metric_val_and_time = interim_data.match(/(.*){(.*)}(.*)/i).captures
        additional_dims.split("\",").each do |interim_data|
-         puts interim_data
          pair_data = interim_data.gsub("\"", "").gsub("{", "").gsub("}", "")
          interim_data_value = pair_data.split("=", 2)[1]
          interim_data_key = pair_data.split("=", 2)[0]
data/lib/fluent/plugin/utility.rb ADDED
@@ -0,0 +1,7 @@
+ class Utility
+   # one place to control time format for logginggit
+   def self.get_time()
+     return Time.now.strftime("%Y-%m-%d %H:%M:%S")
+   end
+
+ end
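A small usage sketch of the new helper (the log line and output are illustrative):

```ruby
require_relative 'utility' # the seven-line helper added above

# Every plugin log line now carries a uniform timestamp prefix:
puts "#{Utility.get_time} Metrics collection started"
# => e.g. "2024-10-24 12:00:00 Metrics collection started"
```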
data/spec/spec_helper.rb CHANGED
@@ -25,7 +25,6 @@ end
  require 'metrics_helper'
  require 'splunk_metrics_parser'
  require 'newrelic_metrics_parser'
- require 'elastic_metrics_parser'
 
  RSpec.configure do |config|
    # rspec-expectations config goes here. You can use an alternate
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: fluent-plugin-jfrog-metrics
  version: !ruby/object:Gem::Version
-   version: 0.2.8
+   version: 0.2.9
  platform: ruby
  authors:
  - MahithaB, BenHarosh
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2024-09-12 00:00:00.000000000 Z
+ date: 2024-10-24 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: bundler
@@ -102,7 +102,7 @@ dependencies:
      version: 2.1.0
  description: Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics
    (Prometheus Exposition Format) to target observability platform format (Splunk HEC,
-   New Relic, Elastic)
+   New Relic, DataDog)
  email:
  - cpe-support@jfrog.com
  executables: []
@@ -119,15 +119,14 @@ files:
  - fluent-plugin-jfrog-metrics.gemspec
  - lib/fluent/plugin/base_metrics_parser.rb
  - lib/fluent/plugin/datadog_metrics_parser.rb
- - lib/fluent/plugin/elastic_metrics_parser.rb
  - lib/fluent/plugin/in_jfrog_metrics.rb
  - lib/fluent/plugin/metrics_helper.rb
  - lib/fluent/plugin/newrelic_metrics_parser.rb
  - lib/fluent/plugin/splunk_metrics_parser.rb
+ - lib/fluent/plugin/utility.rb
  - spec/fixtures/files/creds.rb
  - spec/fixtures/files/sample_artifactory_metrics.txt
  - spec/fixtures/files/sample_xray_metrics.txt
- - spec/lib/elastic_metrics_parser_spec.rb
  - spec/lib/metrics_helper_spec.rb
  - spec/lib/newrelic_metrics_parser_spec.rb
  - spec/lib/splunk_metrics_parser_spec.rb
@@ -158,12 +157,11 @@ signing_key:
  specification_version: 4
  summary: Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus
    Exposition Format) to target observability platform format (Splunk HEC, New Relic,
-   Elastic)
+   DataDog)
  test_files:
  - spec/fixtures/files/creds.rb
  - spec/fixtures/files/sample_artifactory_metrics.txt
  - spec/fixtures/files/sample_xray_metrics.txt
- - spec/lib/elastic_metrics_parser_spec.rb
  - spec/lib/metrics_helper_spec.rb
  - spec/lib/newrelic_metrics_parser_spec.rb
  - spec/lib/splunk_metrics_parser_spec.rb
data/lib/fluent/plugin/elastic_metrics_parser.rb DELETED
@@ -1,33 +0,0 @@
- # frozen_string_literal: true
- require 'json'
- require_relative 'base_metrics_parser'
-
- class ElasticMetricsParser < BaseMetricsParser
-   def initialize(metric_prefix, router, tag)
-     @metric_prefix = metric_prefix
-     @router = router
-     @tag = tag
-   end
-
-   def format_data(cleaned_data = [], prefix = '', separator = '')
-     hash_data_array = []
-     hash_data = {}
-     hash_data_with_labels = []
-     cleaned_data.each do |interim_data|
-       if interim_data =~ /{/ && interim_data =~ /}/
-         hash_data_with_dim_fields = {}
-         metric_name, additional_dims, metric_val_and_time = interim_data.match(/(.*){(.*)}(.*)/i).captures
-         additional_dims.split(/,/).map do |interim_dim_data|
-           hash_data_with_dim_fields[prefix + separator + "label" + separator + interim_dim_data.split(/=/)[0]] = interim_dim_data.split(/=/)[1].gsub(/"/, '') if interim_dim_data =~ /=/
-         end
-         hash_data_with_dim_fields[prefix + separator + metric_name] = metric_val_and_time.strip.split[0].to_f
-         hash_data_with_labels << hash_data_with_dim_fields
-       else
-         metric_name, value, = interim_data.split
-         hash_data[prefix + separator + metric_name] = value.to_f
-       end
-     end
-     hash_data_array << hash_data
-     (hash_data_array << hash_data_with_labels).flatten!
-   end
- end
data/spec/lib/elastic_metrics_parser_spec.rb DELETED
@@ -1,55 +0,0 @@
- # frozen_string_literal: true
- [
-   File.join(File.dirname(__FILE__), '..'),
-   File.join(File.dirname(__FILE__), '..', 'lib/fluent/plugin'),
-   File.join(File.dirname(__FILE__), '..', 'spec'),
- ].each do |dir|
-   $LOAD_PATH.unshift(dir) unless $LOAD_PATH.include?(dir)
- end
-
- require 'elastic_metrics_parser'
- require 'date'
- require 'rspec'
-
-
- RSpec.describe ElasticMetricsParser do
-   describe "#emit_parsed_metrics" do
-     it 'should read sample Artifactory metrics data and verify the size of parsed data > 1' do
-       platform_metrics = File.read('./spec/fixtures/files/sample_artifactory_metrics.txt')
-       expect(platform_metrics.size).to be > 1
-
-       parser = ElasticMetricsParser.new('jfrog.artifactory', '', 'jfrog.artifactory.metrics')
-
-       normalized_data = parser.normalise_data(platform_metrics)
-       expect(normalized_data.size).to be > 1
-
-       cleaned_data = parser.clean_data(normalized_data)
-       expect(cleaned_data.size).to be > 1
-
-       hash_data_array = parser.extract_metrics_in_hash(cleaned_data, 'jfrog.artifactory', '.')
-       expect(hash_data_array.size).to be > 1
-
-       serialized_data = parser.serialize_data(hash_data_array)
-       expect(serialized_data.size).to be > 1
-     end
-
-     it 'should read sample Xray metrics data and verify the size of parsed data > 1' do
-       platform_metrics = File.read('./spec/fixtures/files/sample_xray_metrics.txt')
-       expect(platform_metrics.size).to be > 1
-
-       parser = ElasticMetricsParser.new('jfrog.xray', '', 'jfrog.xray.metrics')
-
-       normalized_data = parser.normalise_data(platform_metrics)
-       expect(normalized_data.size).to be > 1
-
-       cleaned_data = parser.clean_data(normalized_data)
-       expect(cleaned_data.size).to be > 1
-
-       hash_data_array = parser.extract_metrics_in_hash(cleaned_data, 'jfrog.xray', '.')
-       expect(hash_data_array.size).to be > 1
-
-       serialized_data = parser.serialize_data(hash_data_array)
-       expect(serialized_data.size).to be > 1
-     end
-   end
- end