fluent-plugin-jfrog-metrics 0.2.7 → 0.2.9

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: bc48b6c9f8a51df6c59f938a6283e17751649320e9e02ec811ad05f2dd58fc0e
4
- data.tar.gz: 8996298f2223575ae661e115a9eedf695d5fb14732bedc29e4b634bc69c20fa4
3
+ metadata.gz: 4858e796a287f5c256acde626e8932c805f3276403fadd70c9a2300681b3c056
4
+ data.tar.gz: 9ef7a4f3e8a4db027658f16ef2444cdb2c41fc9e6831a6cbd4cfd37d55c85a23
5
5
  SHA512:
6
- metadata.gz: eee23ebb31284a6a358f618f30a2e6a735e12fc6956c92a22b347db1f128d1b27b908f508a110b4ee93372032f017a4b397c04a98d83662643275ea0137d0915
7
- data.tar.gz: 30837a031c859b1a3882b42e7ba20665d097531bb116ef63434315476a6141d338cc11c7d5f3373d03131d6062987ff219e73e3d7d6abe3b431fac15052ea087
6
+ metadata.gz: ffbaa211813a9c1b7f24204608745d1088213d7140bff0188a2ed9f6aaea7d07296877b9e02809d89ba6ba151124028d578c58b4d1a63244a39328ea452b7015
7
+ data.tar.gz: 506953ce84e5ec6f2b3510e9485870ccd90ee92fc87d4dad234ff635bbdd80e7731ee0349fd5bbfc78e34e892108a76c94c6a326360413b5efc3c081f9880dc3
data/Gemfile.lock CHANGED
@@ -1,9 +1,9 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- fluent-plugin-jfrog-metrics (0.2.5)
4
+ fluent-plugin-jfrog-metrics (0.2.9)
5
5
  fluentd (>= 0.14.10, < 2)
6
- rest-client (~> 2.0)
6
+ rest-client (~> 2.1.0)
7
7
 
8
8
  GEM
9
9
  remote: https://rubygems.org/
data/README.md CHANGED
@@ -86,22 +86,31 @@ Integration is done by setting up Xray. Obtain JPD url and access token for API.
86
86
  - **apikey** (string) (required if token is not used, do refer Note section for specifics): API Key is the [Artifactory API Key](https://www.jfrog.com/confluence/display/JFROG/User+Profile#UserProfile-APIKey) for authentication
87
87
  - **token** (string) (required if apikey is not used, do refer Note section for specifics): Admin token is the [Artifactory Scoped Tokens](https://www.jfrog.com/confluence/display/JFROG/Access+Tokens#AccessTokens-GeneratingAdminTokens) for authentication
88
88
  - **metric_prefix** (string) (required): This values pulls the specific metrics. Values can be - jfrog.artifactory, jfrog.xray
89
- - **interval** (integer) (optional): Wait interval between pulling new events
89
+ - **execution_interval** (integer) (optional): Wait interval between pulling new events (scheduler)
90
90
  - Default value: `60`
91
+ - **timeout_interval** (integer) (optional): Timeout interval for pulling new events (scheduler)
92
+ - Default value: `60`
93
+ - **request_timeout** (integer) (optional): Http request timeout when calling Artifactory/Xray to pull new events (http client)
94
+ - Default value: `20`
91
95
  - **common_jpd** (true / false) (optional): This flag should be set as true only for non-kubernetes installations or installations where JPD base URL is same to access both Artifactory and Xray,
92
96
  - ex: https://sample_base_url/artifactory or https://sample_base_url/xray
93
97
  - Default value: false
94
- - **target_platform** (string) (optional): Output format of target platform allowed values SPLUNK and ELASTIC
98
+ - **target_platform** (string) (optional): The target log-vendor ("SPLUNK", "NEWRELIC" or "DATADOG")
95
99
  - Default value: `SPLUNK`
100
+ - **verify_ssl** (true / false) (optional): This flag should be set as false in order to bypass client's ssl certificate verification. When false, sets ssl_opts['verify_ssl'] to OpenSSL::SSL::VERIFY_NONE. Otherwise, sets ssl_opts['verify_ssl'] to OpenSSL::SSL::VERIFY_PEER
101
+ - Default value: true
102
+ - **gzip_compression** (true / false) (optional): This flag should be set as true for compressing (gzip) the metrics payload on outbound posts. This parameter is set to false by default for backwards compatibility.
103
+ - Default value: false
96
104
 
97
105
  Note:
98
106
 
99
107
  - For Artifactory v7.4 and below only API Key must be used,
100
- - For Artifactory v7.4 to 7.29 either Token or API Key can be used,
101
- - For Artifactory v7.30 and above token only must be used.
108
+ - For Artifactory v7.4 to 7.46 either Token or API Key can be used,
109
+ - For Artifactory v7.47 and above token only must be used.
102
110
 
103
111
  ## Copyright
104
112
 
105
- - Copyright(c) 2020 - JFrog
106
- - License
113
+ * Copyright(c) 2024 - JFrog
114
+ * Maintainers - MahithaB, BenHarosh
115
+ * License
107
116
  - Apache License, Version 2.0
@@ -3,12 +3,12 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
3
3
 
4
4
  Gem::Specification.new do |spec|
5
5
  spec.name = 'fluent-plugin-jfrog-metrics'
6
- spec.version = '0.2.7'
6
+ spec.version = '0.2.9'
7
7
  spec.authors = ['MahithaB, BenHarosh']
8
8
  spec.email = ['cpe-support@jfrog.com']
9
9
 
10
- spec.summary = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, Elastic)}
11
- spec.description = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, Elastic)}
10
+ spec.summary = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, DataDog)}
11
+ spec.description = %q{Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus Exposition Format) to target observability platform format (Splunk HEC, New Relic, DataDog)}
12
12
 
13
13
  spec.homepage = 'https://github.com/jfrog/jfrog-fluentd-plugins/tree/main/fluent-plugin-jfrog-metrics'
14
14
  spec.license = 'Apache-2.0'
@@ -24,7 +24,7 @@ Gem::Specification.new do |spec|
24
24
  spec.add_development_dependency 'bundler', '~> 1.14'
25
25
  spec.add_development_dependency 'rake', '~> 12.0'
26
26
  spec.add_development_dependency 'test-unit', '~> 3.0'
27
- spec.add_development_dependency "rest-client", "~> 2.0"
27
+ spec.add_development_dependency "rest-client", "~> 2.1.0"
28
28
  spec.add_runtime_dependency 'fluentd', ['>= 0.14.10', '< 2']
29
- spec.add_runtime_dependency "rest-client", "~> 2.0"
29
+ spec.add_runtime_dependency "rest-client", "~> 2.1.0"
30
30
  end
@@ -21,8 +21,8 @@ require_relative 'base_metrics_parser'
21
21
  require_relative 'metrics_helper'
22
22
  require_relative 'newrelic_metrics_parser'
23
23
  require_relative 'splunk_metrics_parser'
24
- require_relative 'elastic_metrics_parser'
25
24
  require_relative 'datadog_metrics_parser'
25
+ require_relative 'utility'
26
26
 
27
27
  module Fluent
28
28
  module Plugin
@@ -37,11 +37,13 @@ module Fluent
37
37
  config_param :username, :string, default: ''
38
38
  config_param :apikey, :string, default: '', :secret => true
39
39
  config_param :token, :string, default: '', :secret => true
40
- config_param :execution_interval, :time, default: 10
41
- config_param :timeout_interval, :time, default: 30
40
+ config_param :execution_interval, :time, default: 60
41
+ config_param :timeout_interval, :time, default: 60
42
+ config_param :request_timeout, :time, default: 20
42
43
  config_param :metric_prefix, :string, default: ''
43
44
  config_param :target_platform, :string, default: 'SPLUNK'
44
45
  config_param :common_jpd, :bool, default: false
46
+ config_param :verify_ssl, :bool, default: true
45
47
 
46
48
  # `configure` is called before `start`.
47
49
  # 'conf' is a `Hash` that includes the configuration parameters.
@@ -58,7 +60,9 @@ module Fluent
58
60
 
59
61
  raise Fluent::ConfigError, 'Must define the metric_prefix to use for getting the metrics.' if @metric_prefix == ''
60
62
 
61
- raise Fluent::ConfigError, 'Must define the vendor to use for getting the metrics.' if @target_platform == ''
63
+ raise Fluent::ConfigError, 'Must define the target_platform to use for getting the metrics.' if @target_platform == ''
64
+
65
+ raise Fluent::ConfigError, 'Must define the target_platform to be one of the following (DATADOG, NEWRELIC, SPLUNK).' if !(['DATADOG', 'NEWRELIC', 'SPLUNK'].include?(@target_platform))
62
66
  end
63
67
 
64
68
  # `start` is called when starting and after `configure` is successfully completed.
@@ -75,9 +79,9 @@ module Fluent
75
79
  end
76
80
 
77
81
  def run
78
- puts('Preparing metrics collection, creating timer task')
82
+ puts "#{Utility.get_time} Preparing metrics collection, creating timer task"
79
83
  timer_task = Concurrent::TimerTask.new(execution_interval: @execution_interval, timeout_interval: @timeout_interval, run_now: true) do
80
- puts('Timer task execution')
84
+ puts "#{Utility.get_time} Timer task execution"
81
85
  do_execute
82
86
  end
83
87
  timer_task.execute
@@ -85,26 +89,39 @@ module Fluent
85
89
  end
86
90
 
87
91
  def do_execute
88
- puts('Executing metrics collection')
89
- metrics_helper = MetricsHelper.new(@metric_prefix, @jpd_url, @username, @apikey, @token, @common_jpd)
90
- platform_metrics = metrics_helper.get_metrics
92
+ begin
93
+ puts "#{Utility.get_time} Metrics collection started"
94
+ metrics_helper = MetricsHelper.new(@metric_prefix, @jpd_url, @username, @apikey, @token, @common_jpd, @verify_ssl, @request_timeout)
95
+ platform_metrics = metrics_helper.get_metrics
91
96
 
92
- additional_metrics = metrics_helper.get_additional_metrics
93
- if !additional_metrics.nil? && additional_metrics != ''
94
- platform_metrics += additional_metrics.to_s
95
- end
96
- if @target_platform == 'SPLUNK'
97
- parser = SplunkMetricsParser.new(@metric_prefix, router, @tag)
98
- elsif @target_platform == 'ELASTIC'
99
- parser = ElasticMetricsParser.new(@metric_prefix, router, @tag)
100
- elsif @target_platform == 'NEWRELIC'
101
- parser = NewRelicMetricsParser.new(@metric_prefix, router, @tag)
102
- elsif @target_platform == 'DATADOG'
103
- parser = DatadogMetricsParser.new(@metric_prefix, router, @tag)
104
- else
105
- raise 'Parser Type is not valid.Should be SPLUNK or ELASTIC or NEWRELIC'
106
- end
107
- parser.emit_parsed_metrics(platform_metrics)
97
+ additional_metrics = metrics_helper.get_additional_metrics
98
+ if !additional_metrics.nil? && additional_metrics != ''
99
+ platform_metrics += additional_metrics.to_s
100
+ end
101
+ puts "#{Utility.get_time} Metrics collection finished"
102
+
103
+ if @target_platform == 'SPLUNK'
104
+ parser = SplunkMetricsParser.new(@metric_prefix, router, @tag)
105
+ elsif @target_platform == 'NEWRELIC'
106
+ parser = NewRelicMetricsParser.new(@metric_prefix, router, @tag)
107
+ elsif @target_platform == 'DATADOG'
108
+ parser = DatadogMetricsParser.new(@metric_prefix, router, @tag)
109
+ else
110
+ raise 'Parser Type is not valid. target_platform should be SPLUNK or NEWRELIC or DATADOG'
111
+ end
112
+ parser.emit_parsed_metrics(platform_metrics)
113
+
114
+ rescue RestClient::Exceptions::OpenTimeout
115
+ puts "#{Utility.get_time} The request timed out while trying to open a connection. The configured request timeout is: #{@request_timeout}"
116
+ rescue RestClient::Exceptions::ReadTimeout
117
+ puts "#{Utility.get_time} The request timed out while waiting for a response. The configured request timeout is: #{@request_timeout}"
118
+ rescue RestClient::Exceptions::RequestTimeout
119
+ puts "#{Utility.get_time} The request timed out. The configured request timeout is: #{@request_timeout}"
120
+ rescue RestClient::ExceptionWithResponse => e
121
+ puts "#{Utility.get_time} HTTP request failed: #{e.response}"
122
+ rescue StandardError => e
123
+ puts "#{Utility.get_time} An unexpected error occurred: #{e.message}"
124
+ end
108
125
  end
109
126
  end
110
127
  end
@@ -1,17 +1,20 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require 'rest-client'
4
+ require_relative 'utility'
4
5
 
5
6
  class MetricsHelper
6
7
  @@obs_endpoint_exists = false
7
8
 
8
- def initialize(metric_prefix, jpd_url, username, apikey, token, common_jpd)
9
+ def initialize(metric_prefix, jpd_url, username, apikey, token, common_jpd, verify_ssl, request_timeout)
9
10
  @metric_prefix = metric_prefix
10
11
  @jpd_url = jpd_url
11
12
  @username = username
12
13
  @apikey = apikey
13
14
  @token = token
14
15
  @common_jpd = common_jpd
16
+ @verify_ssl = verify_ssl
17
+ @request_timeout = request_timeout
15
18
  end
16
19
 
17
20
  def get_metrics
@@ -24,60 +27,74 @@ class MetricsHelper
24
27
  else
25
28
  "#{@jpd_url}/artifactory/api/v1/metrics"
26
29
  end
30
+
31
+ puts "#{Utility.get_time} Executing #{@metric_prefix} metrics collection from: #{url}"
27
32
  if !@token.nil? && @token != ''
28
- execute_rest_call(url, @username, nil, @token, false, true)
33
+ execute_rest_call(url, @username, nil, @token, true, @verify_ssl, @request_timeout)
29
34
  elsif !@apikey.nil? && @apikey != ''
30
- execute_rest_call(url, @username, @apikey, nil, false, false)
35
+ execute_rest_call(url, @username, @apikey, nil, false, @verify_ssl, @request_timeout)
31
36
  end
32
37
 
33
38
  end
34
39
 
35
40
  def get_additional_metrics
41
+ puts "#{Utility.get_time} Additional metrics collection started"
36
42
  if (@metric_prefix == 'jfrog.artifactory' || @common_jpd == false) && !@token.nil? && @token != ''
37
- puts 'Executing additional metrics collection'
38
43
  url = "#{@jpd_url}/observability/api/v1/metrics"
39
- check_endpoint(url, @token) if @@obs_endpoint_exists == nil? || !@@obs_endpoint_exists
40
- execute_rest_call(url, @username, nil, @token, true, true) if @@obs_endpoint_exists
44
+ puts "#{Utility.get_time} Collecting additional metrics from: #{url}"
45
+ check_endpoint(url, @token, @verify_ssl, @request_timeout) if @@obs_endpoint_exists == nil? || !@@obs_endpoint_exists
46
+ execute_rest_call(url, @username, nil, @token, true, @verify_ssl, @request_timeout) if @@obs_endpoint_exists
41
47
  end
42
48
  end
43
49
 
44
- def check_endpoint(url, token)
50
+ def check_endpoint(url, token, verify_ssl, request_timeout)
51
+ puts "#{Utility.get_time} Checking connectivity to endpoint: #{url}"
45
52
  response = RestClient::Request.new(
46
53
  method: :get,
47
54
  url: url,
48
- headers: { Authorization: "Bearer #{token}" }
55
+ headers: { Authorization: "Bearer #{token}"},
56
+ verify_ssl: verify_ssl,
57
+ timeout: request_timeout
49
58
  ).execute do |response, request, result|
50
- @@obs_endpoint_exists = true if response.code == 200
51
- puts "#{url} exists? -> #{@@obs_endpoint_exists}, storing the result for next executions"
59
+ if response.code == 200
60
+ @@obs_endpoint_exists = true
61
+ puts "#{Utility.get_time} #{url} exists: #{@@obs_endpoint_exists}. Storing the result for next executions"
62
+ else
63
+ @@obs_endpoint_exists = false
64
+ puts "#{Utility.get_time} Cannot verify #{url} endpoint. Received response code: #{response.code}, Response body:\n#{response.body}"
65
+ raise "Unexpected response code: #{response.code} when calling #{url}"
66
+ end
52
67
  end
53
68
  end
54
69
 
55
- def execute_rest_call(url, user, password, token, ignore_exception, use_token)
70
+ def execute_rest_call(url, user, password, token, use_token, verify_ssl, request_timeout)
56
71
  request = if use_token == true
57
- RestClient::Request.new(
58
- method: :get,
59
- url: url,
60
- headers: { Authorization: "Bearer #{token}" }
61
- )
62
- else
63
- RestClient::Request.new(
64
- method: :get,
65
- url: url,
66
- user: user,
67
- password: password
68
- )
69
- end
70
-
72
+ RestClient::Request.new(
73
+ method: :get,
74
+ url: url,
75
+ headers: { Authorization: "Bearer #{token}" },
76
+ verify_ssl: verify_ssl,
77
+ timeout: request_timeout
78
+ )
79
+ else
80
+ RestClient::Request.new(
81
+ method: :get,
82
+ url: url,
83
+ user: user,
84
+ password: password,
85
+ verify_ssl: verify_ssl,
86
+ timeout: request_timeout,
87
+ )
88
+ end
89
+
71
90
  request.execute do |response, request, result|
72
91
  case response.code
73
92
  when 200
93
+ puts "#{Utility.get_time} #{@metric_prefix} metrics were successfully collected from url: #{url}"
74
94
  return response.body
75
95
  else
76
- if ignore_exception == true
77
- return ''
78
- else
79
- raise Fluent::ConfigError, 'Cannot fetch #{@metric_prefix} metrics'
80
- end
96
+ puts "#{Utility.get_time} Cannot fetch #{@metric_prefix} metrics from url: #{url}. Received response code: #{response.code}, Response body:\n#{response.body}"
97
+ raise "Unexpected response code: #{response.code} when calling #{url}"
81
98
  end
82
99
  end
83
100
  end
@@ -13,14 +13,12 @@ class NewRelicMetricsParser < BaseMetricsParser
13
13
  hash_data_array = []
14
14
  data_hash = {}
15
15
  data_array = []
16
- puts cleaned_data
17
16
  cleaned_data.each do |interim_data|
18
17
  metrics_hash = {}
19
18
  if interim_data =~ /{/ && interim_data =~ /}/
20
19
  attributes = {}
21
20
  metric_name, additional_dims, metric_val_and_time = interim_data.match(/(.*){(.*)}(.*)/i).captures
22
21
  additional_dims.split("\",").each do |interim_data|
23
- puts interim_data
24
22
  pair_data = interim_data.gsub("\"", "").gsub("{", "").gsub("}", "")
25
23
  interim_data_value = pair_data.split("=", 2)[1]
26
24
  interim_data_key = pair_data.split("=", 2)[0]
@@ -0,0 +1,7 @@
1
+ class Utility
2
+ # one place to control time format for logging
3
+ def self.get_time()
4
+ return Time.now.strftime("%Y-%m-%d %H:%M:%S")
5
+ end
6
+
7
+ end
data/spec/spec_helper.rb CHANGED
@@ -25,7 +25,6 @@ end
25
25
  require 'metrics_helper'
26
26
  require 'splunk_metrics_parser'
27
27
  require 'newrelic_metrics_parser'
28
- require 'elastic_metrics_parser'
29
28
 
30
29
  RSpec.configure do |config|
31
30
  # rspec-expectations config goes here. You can use an alternate
metadata CHANGED
@@ -1,14 +1,14 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: fluent-plugin-jfrog-metrics
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.2.7
4
+ version: 0.2.9
5
5
  platform: ruby
6
6
  authors:
7
7
  - MahithaB, BenHarosh
8
8
  autorequire:
9
9
  bindir: bin
10
10
  cert_chain: []
11
- date: 2024-07-16 00:00:00.000000000 Z
11
+ date: 2024-10-24 00:00:00.000000000 Z
12
12
  dependencies:
13
13
  - !ruby/object:Gem::Dependency
14
14
  name: bundler
@@ -58,14 +58,14 @@ dependencies:
58
58
  requirements:
59
59
  - - "~>"
60
60
  - !ruby/object:Gem::Version
61
- version: '2.0'
61
+ version: 2.1.0
62
62
  type: :development
63
63
  prerelease: false
64
64
  version_requirements: !ruby/object:Gem::Requirement
65
65
  requirements:
66
66
  - - "~>"
67
67
  - !ruby/object:Gem::Version
68
- version: '2.0'
68
+ version: 2.1.0
69
69
  - !ruby/object:Gem::Dependency
70
70
  name: fluentd
71
71
  requirement: !ruby/object:Gem::Requirement
@@ -92,17 +92,17 @@ dependencies:
92
92
  requirements:
93
93
  - - "~>"
94
94
  - !ruby/object:Gem::Version
95
- version: '2.0'
95
+ version: 2.1.0
96
96
  type: :runtime
97
97
  prerelease: false
98
98
  version_requirements: !ruby/object:Gem::Requirement
99
99
  requirements:
100
100
  - - "~>"
101
101
  - !ruby/object:Gem::Version
102
- version: '2.0'
102
+ version: 2.1.0
103
103
  description: Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics
104
104
  (Prometheus Exposition Format) to target observability platform format (Splunk HEC,
105
- New Relic, Elastic)
105
+ New Relic, DataDog)
106
106
  email:
107
107
  - cpe-support@jfrog.com
108
108
  executables: []
@@ -119,15 +119,14 @@ files:
119
119
  - fluent-plugin-jfrog-metrics.gemspec
120
120
  - lib/fluent/plugin/base_metrics_parser.rb
121
121
  - lib/fluent/plugin/datadog_metrics_parser.rb
122
- - lib/fluent/plugin/elastic_metrics_parser.rb
123
122
  - lib/fluent/plugin/in_jfrog_metrics.rb
124
123
  - lib/fluent/plugin/metrics_helper.rb
125
124
  - lib/fluent/plugin/newrelic_metrics_parser.rb
126
125
  - lib/fluent/plugin/splunk_metrics_parser.rb
126
+ - lib/fluent/plugin/utility.rb
127
127
  - spec/fixtures/files/creds.rb
128
128
  - spec/fixtures/files/sample_artifactory_metrics.txt
129
129
  - spec/fixtures/files/sample_xray_metrics.txt
130
- - spec/lib/elastic_metrics_parser_spec.rb
131
130
  - spec/lib/metrics_helper_spec.rb
132
131
  - spec/lib/newrelic_metrics_parser_spec.rb
133
132
  - spec/lib/splunk_metrics_parser_spec.rb
@@ -158,12 +157,11 @@ signing_key:
158
157
  specification_version: 4
159
158
  summary: Fluentd Plugin for converting JFrog Artifactory, Xray generated metrics (Prometheus
160
159
  Exposition Format) to target observability platform format (Splunk HEC, New Relic,
161
- Elastic)
160
+ DataDog)
162
161
  test_files:
163
162
  - spec/fixtures/files/creds.rb
164
163
  - spec/fixtures/files/sample_artifactory_metrics.txt
165
164
  - spec/fixtures/files/sample_xray_metrics.txt
166
- - spec/lib/elastic_metrics_parser_spec.rb
167
165
  - spec/lib/metrics_helper_spec.rb
168
166
  - spec/lib/newrelic_metrics_parser_spec.rb
169
167
  - spec/lib/splunk_metrics_parser_spec.rb
@@ -1,33 +0,0 @@
1
- # frozen_string_literal: true
2
- require 'json'
3
- require_relative 'base_metrics_parser'
4
-
5
- class ElasticMetricsParser < BaseMetricsParser
6
- def initialize(metric_prefix, router, tag)
7
- @metric_prefix = metric_prefix
8
- @router = router
9
- @tag = tag
10
- end
11
-
12
- def format_data(cleaned_data = [], prefix = '', separator = '')
13
- hash_data_array = []
14
- hash_data = {}
15
- hash_data_with_labels = []
16
- cleaned_data.each do |interim_data|
17
- if interim_data =~ /{/ && interim_data =~ /}/
18
- hash_data_with_dim_fields = {}
19
- metric_name, additional_dims, metric_val_and_time = interim_data.match(/(.*){(.*)}(.*)/i).captures
20
- additional_dims.split(/,/).map do |interim_dim_data|
21
- hash_data_with_dim_fields[prefix + separator + "label" + separator + interim_dim_data.split(/=/)[0]] = interim_dim_data.split(/=/)[1].gsub(/"/, '') if interim_dim_data =~ /=/
22
- end
23
- hash_data_with_dim_fields[prefix + separator + metric_name] = metric_val_and_time.strip.split[0].to_f
24
- hash_data_with_labels << hash_data_with_dim_fields
25
- else
26
- metric_name, value, = interim_data.split
27
- hash_data[prefix + separator + metric_name] = value.to_f
28
- end
29
- end
30
- hash_data_array << hash_data
31
- (hash_data_array << hash_data_with_labels).flatten!
32
- end
33
- end
@@ -1,55 +0,0 @@
1
- # frozen_string_literal: true
2
- [
3
- File.join(File.dirname(__FILE__), '..'),
4
- File.join(File.dirname(__FILE__), '..', 'lib/fluent/plugin'),
5
- File.join(File.dirname(__FILE__), '..', 'spec'),
6
- ].each do |dir|
7
- $LOAD_PATH.unshift(dir) unless $LOAD_PATH.include?(dir)
8
- end
9
-
10
- require 'elastic_metrics_parser'
11
- require 'date'
12
- require 'rspec'
13
-
14
-
15
- RSpec.describe ElasticMetricsParser do
16
- describe "#emit_parsed_metrics" do
17
- it 'should read sample Artifactory metrics data and verify the size of parsed data > 1' do
18
- platform_metrics = File.read('./spec/fixtures/files/sample_artifactory_metrics.txt')
19
- expect(platform_metrics.size).to be > 1
20
-
21
- parser = ElasticMetricsParser.new('jfrog.artifactory', '', 'jfrog.artifactory.metrics')
22
-
23
- normalized_data = parser.normalise_data(platform_metrics)
24
- expect(normalized_data.size).to be > 1
25
-
26
- cleaned_data = parser.clean_data(normalized_data)
27
- expect(cleaned_data.size).to be > 1
28
-
29
- hash_data_array = parser.extract_metrics_in_hash(cleaned_data, 'jfrog.artifactory', '.')
30
- expect(hash_data_array.size).to be > 1
31
-
32
- serialized_data = parser.serialize_data(hash_data_array)
33
- expect(serialized_data.size).to be > 1
34
- end
35
-
36
- it 'should read sample Xray metrics data and verify the size of parsed data > 1' do
37
- platform_metrics = File.read('./spec/fixtures/files/sample_xray_metrics.txt')
38
- expect(platform_metrics.size).to be > 1
39
-
40
- parser = ElasticMetricsParser.new('jfrog.xray', '', 'jfrog.xray.metrics')
41
-
42
- normalized_data = parser.normalise_data(platform_metrics)
43
- expect(normalized_data.size).to be > 1
44
-
45
- cleaned_data = parser.clean_data(normalized_data)
46
- expect(cleaned_data.size).to be > 1
47
-
48
- hash_data_array = parser.extract_metrics_in_hash(cleaned_data, 'jfrog.xray', '.')
49
- expect(hash_data_array.size).to be > 1
50
-
51
- serialized_data = parser.serialize_data(hash_data_array)
52
- expect(serialized_data.size).to be > 1
53
- end
54
- end
55
- end