scout_apm_logging 0.0.0.1
- checksums.yaml +7 -0
- data/.github/workflows/test.yml +37 -0
- data/.gitignore +13 -0
- data/.rubocop.yml +65 -0
- data/Dockerfile +18 -0
- data/Gemfile +5 -0
- data/README.md +58 -0
- data/Rakefile +35 -0
- data/bin/scout_apm_logging_monitor +5 -0
- data/gems/rails.gemfile +3 -0
- data/lib/scout_apm/logging/config.rb +265 -0
- data/lib/scout_apm/logging/context.rb +58 -0
- data/lib/scout_apm/logging/logger.rb +26 -0
- data/lib/scout_apm/logging/loggers/capture.rb +46 -0
- data/lib/scout_apm/logging/loggers/formatter.rb +86 -0
- data/lib/scout_apm/logging/loggers/logger.rb +82 -0
- data/lib/scout_apm/logging/loggers/proxy.rb +39 -0
- data/lib/scout_apm/logging/loggers/swap.rb +82 -0
- data/lib/scout_apm/logging/monitor/collector/checksum.rb +51 -0
- data/lib/scout_apm/logging/monitor/collector/configuration.rb +148 -0
- data/lib/scout_apm/logging/monitor/collector/downloader.rb +78 -0
- data/lib/scout_apm/logging/monitor/collector/extractor.rb +37 -0
- data/lib/scout_apm/logging/monitor/collector/manager.rb +57 -0
- data/lib/scout_apm/logging/monitor/monitor.rb +214 -0
- data/lib/scout_apm/logging/monitor_manager/manager.rb +150 -0
- data/lib/scout_apm/logging/state.rb +70 -0
- data/lib/scout_apm/logging/utils.rb +86 -0
- data/lib/scout_apm/logging/version.rb +7 -0
- data/lib/scout_apm_logging.rb +35 -0
- data/scout_apm_logging.gemspec +27 -0
- data/spec/data/config_test_1.yml +27 -0
- data/spec/data/empty_logs_config.yml +0 -0
- data/spec/data/logs_config.yml +3 -0
- data/spec/data/mock_config.yml +29 -0
- data/spec/data/state_file.json +3 -0
- data/spec/integration/loggers/capture_spec.rb +78 -0
- data/spec/integration/monitor/collector/downloader/will_verify_checksum.rb +47 -0
- data/spec/integration/monitor/collector_healthcheck_spec.rb +27 -0
- data/spec/integration/monitor/continuous_state_collector_spec.rb +29 -0
- data/spec/integration/monitor/previous_collector_setup_spec.rb +42 -0
- data/spec/integration/monitor_manager/disable_agent_spec.rb +28 -0
- data/spec/integration/monitor_manager/monitor_pid_file_spec.rb +36 -0
- data/spec/integration/monitor_manager/single_monitor_spec.rb +53 -0
- data/spec/integration/rails/lifecycle_spec.rb +29 -0
- data/spec/spec_helper.rb +65 -0
- data/spec/unit/config_spec.rb +25 -0
- data/spec/unit/loggers/capture_spec.rb +64 -0
- data/spec/unit/monitor/collector/configuration_spec.rb +64 -0
- data/spec/unit/state_spec.rb +20 -0
- data/tooling/checksums.rb +106 -0
- metadata +167 -0
data/lib/scout_apm/logging/loggers/formatter.rb
@@ -0,0 +1,86 @@
# frozen_string_literal: true

require 'json'
require 'logger'

require 'scout_apm'

module ScoutApm
  module Logging
    module Loggers
      # A simple JSON formatter which we can add a couple attributes to.
      class Formatter < ::Logger::Formatter
        DATETIME_FORMAT = '%Y-%m-%dT%H:%M:%S.%LZ'

        def call(severity, time, progname, msg) # rubocop:disable Metrics/AbcSize
          attributes_to_log[:severity] = severity
          attributes_to_log[:time] = format_datetime(time)
          attributes_to_log[:progname] = progname if progname
          attributes_to_log[:pid] = Process.pid.to_s
          attributes_to_log[:msg] = msg2str(msg)
          attributes_to_log['service.name'] = service_name

          attributes_to_log.merge!(scout_layer)
          attributes_to_log.merge!(scout_context)
          # Naive local benchmarks show this takes around 200 microseconds. As such, we only apply it to WARN and above.
          attributes_to_log.merge!(local_log_location) if ::Logger::Severity.const_get(severity) >= ::Logger::Severity::WARN

          "#{attributes_to_log.to_json}\n"
        end

        private

        def attributes_to_log
          @attributes_to_log ||= {}
        end

        def format_datetime(time)
          time.utc.strftime(DATETIME_FORMAT)
        end

        def scout_layer # rubocop:disable Metrics/AbcSize
          req = ScoutApm::RequestManager.lookup
          layer = req.instance_variable_get('@layers').find { |lay| lay.type == 'Controller' || lay.type == 'Job' }

          return {} unless layer

          name, action = layer.name.split('/')

          return {} unless name && action

          updated_name = name.split('_').map(&:capitalize).join

          derived_key = "#{layer.type.downcase}_entrypoint".to_sym
          derived_value_of_scout_name = "#{updated_name}#{layer.type.capitalize}##{action.capitalize}"

          { derived_key => derived_value_of_scout_name }
        end

        def scout_context
          req = ScoutApm::RequestManager.lookup
          extra_context = req.context.instance_variable_get('@extra')
          user_context = req.context.instance_variable_get('@user')
          # We may want to make this a configuration option in the future, as well as capturing
          # the URI from the request annotations, but this may include PII.
          user_context.delete(:ip)

          user_context.transform_keys { |key| "user.#{key}" }.merge(extra_context)
        end

        def local_log_location
          # Should give us the last local stack which called the log within just the last couple frames.
          last_local_location = caller[0..15].find { |path| path.include?(Rails.root.to_s) }

          return {} unless last_local_location

          { 'log_location' => last_local_location }
        end

        # We may need to clean this up a bit.
        def service_name
          $PROGRAM_NAME
        end
      end
    end
  end
end
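The formatter above emits one JSON object per line. As a rough illustration of the pattern (not the gem's full formatter, which also pulls in Scout request attributes), here is a minimal sketch of a JSON Logger::Formatter wired to a plain stdlib Logger; the class name and output values are illustrative only.

require 'json'
require 'logger'

# Stripped-down JSON formatter in the same shape as Formatter above,
# without the Scout-specific attributes.
class TinyJsonFormatter < ::Logger::Formatter
  def call(severity, time, progname, msg)
    { severity: severity,
      time: time.utc.strftime('%Y-%m-%dT%H:%M:%S.%LZ'),
      progname: progname,
      pid: Process.pid.to_s,
      msg: msg2str(msg) }.compact.to_json + "\n"
  end
end

logger = Logger.new($stdout)
logger.formatter = TinyJsonFormatter.new
logger.warn('payment failed')
# Prints something like:
# {"severity":"WARN","time":"2024-01-01T00:00:00.000Z","pid":"4242","msg":"payment failed"}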
data/lib/scout_apm/logging/loggers/logger.rb
@@ -0,0 +1,82 @@
# frozen_string_literal: true

require 'logger'

module ScoutApm
  module Logging
    module Loggers
      class FileLogger < ::Logger
      end

      # The newly created logger which we can configure, and will log to a filepath.
      class Logger
        attr_reader :context, :log_instance

        # 1 MiB
        LOG_SIZE = 1024 * 1024
        # 1 log file
        LOG_AGE = 1

        def initialize(context, log_instance)
          @context = context
          @log_instance = log_instance
        end

        def create_logger!
          # Defaults are 7 files with 10 MiB.
          # We create the file in order to prevent a creation header log.
          File.new(determine_file_path, 'w+') unless File.exist?(determine_file_path)
          new_logger = FileLogger.new(determine_file_path, LOG_AGE, LOG_SIZE)
          # Ruby's Logger handles a lot of the coercion itself.
          new_logger.level = context.config.value('logs_capture_level')
          new_logger
        end

        def determine_file_path # rubocop:disable Metrics/AbcSize
          log_directory = context.config.value('logs_proxy_log_dir')

          original_basename = File.basename(log_destination) if log_destination.is_a?(String)

          file_basename = if original_basename
                            original_basename
                          elsif defined?(::ActiveSupport::Logger) && log_instance.is_a?(::ActiveSupport::Logger)
                            'rails.log'
                          elsif defined?(::ActiveSupport::BroadcastLogger) && log_instance.is_a?(::ActiveSupport::BroadcastLogger)
                            'rails.log'
                          elsif defined?(::Sidekiq::Logger) && log_instance.is_a?(::Sidekiq::Logger)
                            'sidekiq.log'
                          elsif defined?(::ScoutTestLogger) && log_instance.is_a?(::ScoutTestLogger)
                            'test.log'
                          else
                            'mix.log'
                          end

          File.join(log_directory, file_basename)
        end

        private

        def find_log_destination(logdev)
          dev = try(logdev, :filename) || try(logdev, :dev)
          if dev.is_a?(String)
            dev
          elsif dev.respond_to?(:path)
            dev.path
          elsif dev.respond_to?(:filename) || dev.respond_to?(:dev)
            find_log_destination(dev)
          else
            dev
          end
        end

        def log_destination
          @log_destination ||= find_log_destination(log_instance.instance_variable_get(:@logdev))
        end

        def try(obj, method)
          obj.respond_to?(method) ? obj.send(method) : nil
        end
      end
    end
  end
end
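FileLogger passes LOG_AGE and LOG_SIZE straight through to Ruby's ::Logger, so size-based rotation is handled entirely by the stdlib. A minimal sketch of those rotation arguments, independent of the gem's context object (the path here is arbitrary):

require 'logger'

# Logger.new(path, shift_age, shift_size): keep 1 old file and roll the log
# once it reaches 1 MiB, mirroring LOG_AGE / LOG_SIZE above.
logger = Logger.new('/tmp/scout_demo.log', 1, 1024 * 1024)
logger.info('rotated by the stdlib once the file exceeds 1 MiB')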
data/lib/scout_apm/logging/loggers/proxy.rb
@@ -0,0 +1,39 @@
# frozen_string_literal: true

require 'logger'

module ScoutApm
  module Logging
    module Loggers
      # Holds both the original application logger and the new one. Relays commands to both.
      class Proxy
        def initialize
          @loggers = []
        end

        def add(logger)
          @loggers << logger
        end

        def remove(logger)
          @loggers.reject! { |inst_log| inst_log == logger }

          @loggers
        end

        def method_missing(name, *args, &block)
          # Some libraries will do stuff like Library.logger.formatter = Rails.logger.formatter
          # As such, we should return the first logger's (the original logger) return value.
          return_value = @loggers.first.send(name, *args, &block)
          @loggers[1..].each { |logger| logger.send(name, *args, &block) }

          return_value
        end

        def respond_to_missing?(name, *args)
          @loggers.first.respond_to?(name, *args) || super
        end
      end
    end
  end
end
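Because Proxy relays through method_missing, any Logger API call fans out to every registered logger while returning the first logger's result. A small usage sketch with two stdlib loggers (the file path is arbitrary):

require 'logger'

proxy = ScoutApm::Logging::Loggers::Proxy.new
proxy.add(Logger.new($stdout))               # original application logger
proxy.add(Logger.new('/tmp/scout_demo.log')) # Scout's file logger

# Both loggers receive the call; the return value comes from the first one.
proxy.info('handled by every registered logger')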
data/lib/scout_apm/logging/loggers/swap.rb
@@ -0,0 +1,82 @@
# frozen_string_literal: true

require 'logger'

require_relative './formatter'
require_relative './logger'

module ScoutApm
  module Logging
    module Loggers
      # Swaps in our logger for the application's logger.
      class Swap
        attr_reader :context, :log_instance, :new_file_logger

        def initialize(context, log_instance)
          @context = context
          @log_instance = log_instance
        end

        def update_logger!
          create_proxy_log_dir!

          # In Rails 7.1, broadcast logger was added which allows sinking to multiple IO devices.
          if defined?(::ActiveSupport::BroadcastLogger) && log_instance.is_a?(::ActiveSupport::BroadcastLogger)
            add_logger_to_broadcast!
          else
            swap_in_proxy_logger!
          end
        end

        def log_location
          new_file_logger.instance_variable_get(:@logdev).filename
        end

        private

        def add_logger_to_broadcast!
          @new_file_logger = create_file_logger
          @new_file_logger.formatter = Loggers::Formatter.new

          log_instance.broadcast_to(new_file_logger)
        end

        def swap_in_proxy_logger! # rubocop:disable Metrics/AbcSize
          proxy_logger = Proxy.new
          # We can use the previous logdev. log_device will continuously call write
          # through the devices until the logdev (@dev) is an IO device other than logdev:
          # https://github.com/ruby/ruby/blob/master/lib/logger/log_device.rb#L42
          # Log device holds the configurations around shifting too.
          original_logdevice = log_instance.instance_variable_get(:@logdev)
          updated_original_logger = ::Logger.new(original_logdevice)
          updated_original_logger.formatter = log_instance.formatter

          @new_file_logger = create_file_logger
          @new_file_logger.formatter = Loggers::Formatter.new

          # First logger needs to be the original logger for the return value of relayed calls.
          proxy_logger.add(updated_original_logger)
          proxy_logger.add(new_file_logger)

          if defined?(::ActiveSupport::Logger) && log_instance.is_a?(::ActiveSupport::Logger)
            Rails.logger = proxy_logger
          elsif defined?(::Sidekiq::Logger) && log_instance.is_a?(::Sidekiq::Logger)
            Sidekiq.configure_server do |config|
              config.logger = proxy_logger
            end
          elsif defined?(::ScoutTestLogger) && log_instance.is_a?(::ScoutTestLogger)
            TestLoggerWrapper.logger = proxy_logger
          end
        end

        def create_file_logger
          Loggers::Logger.new(context, log_instance).create_logger!
        end

        def create_proxy_log_dir!
          Utils.ensure_directory_exists(context.config.value('logs_proxy_log_dir'))
        end
      end
    end
  end
end
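On Rails 7.1+ the swap does not replace Rails.logger at all; it simply attaches the JSON file logger as an extra broadcast sink. A rough sketch of what add_logger_to_broadcast! amounts to, assuming a Rails 7.1 app where Rails.logger is an ActiveSupport::BroadcastLogger (illustrative only; the gem performs this automatically and the file path is arbitrary):

file_logger = Logger.new('/tmp/scout_demo.log')
file_logger.formatter = ScoutApm::Logging::Loggers::Formatter.new

# BroadcastLogger#broadcast_to adds another sink; existing sinks keep working.
Rails.logger.broadcast_to(file_logger) if Rails.logger.respond_to?(:broadcast_to)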
data/lib/scout_apm/logging/monitor/collector/checksum.rb
@@ -0,0 +1,51 @@
# frozen_string_literal: true

module ScoutApm
  module Logging
    module Collector
      # Contains logic around verifying the checksum of the otelcol-contrib binary.
      class Checksum
        attr_reader :context

        KNOWN_CHECKSUMS = {
          'darwin_amd64' => '5456734e124221e7ff775c52bd3693d05b3fac43ebe06b22aa5f220f1962ed8c',
          'darwin_arm64' => 'f9564560798ac5c099885903f303fcda97b7ea649ec299e075b72f3805873879',
          'linux_amd64' => '326772622016f7ff7e966a7ae8a0f439dc49a3d80b6d79a82b62608af447e851',
          'linux_arm64' => '73d797817540363a37f27e32270f98053ed17b1df36df2d30db1715ce40f4cff'
        }.freeze

        def initialize(context)
          @context = context
        end

        def verified_checksum?(should_log_failures: false)
          return false unless File.exist?(collector_tar_path)

          checksum = `sha256sum #{collector_tar_path}`.split(' ').first
          same_checksum_result = checksum == KNOWN_CHECKSUMS[double]

          log_failed_checksum if !same_checksum_result && should_log_failures
          same_checksum_result
        end

        def log_failed_checksum
          if KNOWN_CHECKSUMS.key?(double)
            context.logger.error('Checksum verification failed for otelcol.tar.gz.')
          else
            context.logger.error("Checksum verification failed for otelcol.tar.gz. Unknown architecture: #{double}")
          end
        end

        private

        def double
          "#{Utils.get_host_os}_#{Utils.get_architecture}"
        end

        def collector_tar_path
          "#{context.config.value('collector_download_dir')}/otelcol.tar.gz"
        end
      end
    end
  end
end
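The class shells out to sha256sum; the comparison against KNOWN_CHECKSUMS boils down to a plain SHA-256 digest match, which could equally be computed in-process with Ruby's stdlib. A sketch under that assumption (the tar path is hypothetical):

require 'digest'

tar_path = '/tmp/scout_apm_logging/otelcol.tar.gz' # hypothetical download location
expected = ScoutApm::Logging::Collector::Checksum::KNOWN_CHECKSUMS['linux_amd64']

# Digest::SHA256.file streams the file, so large tarballs are not read into memory at once.
actual = Digest::SHA256.file(tar_path).hexdigest
puts actual == expected ? 'checksum OK' : 'checksum mismatch'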
data/lib/scout_apm/logging/monitor/collector/configuration.rb
@@ -0,0 +1,148 @@
# frozen_string_literal: true

module ScoutApm
  module Logging
    module Collector
      # Adds a method to Hash similar to that of the Rails deep_merge.
      module HashDeepMerge
        refine Hash do
          def deep_merge(second)
            merger = proc { |_, v1, v2|
              if v1.is_a?(Hash) && v2.is_a?(Hash)
                v1.merge(v2, &merger)
              elsif v1.is_a?(Array) && v2.is_a?(Array)
                v1 | v2
              else
                [:undefined, nil, :nil].include?(v2) ? v1 : v2
              end
            }
            merge(second.to_h, &merger)
          end
        end
      end

      # Creates the configuration to be used when launching the collector.
      class Configuration
        using HashDeepMerge

        attr_reader :context

        def initialize(context)
          @context = context
        end

        def setup!
          create_storage_directories

          create_config_file
        end

        def create_config_file
          contents = YAML.dump(combined_contents)
          File.write(config_file, contents)
        end

        private

        def create_storage_directories
          # Sending queue storage directory
          Utils.ensure_directory_exists(context.config.value('collector_sending_queue_storage_dir'))
          # Offset storage directory
          Utils.ensure_directory_exists(context.config.value('collector_offset_storage_dir'))
        end

        def combined_contents
          default_contents = YAML.safe_load(config_contents)

          default_contents.deep_merge(loaded_config_contents)
        end

        def loaded_config_contents
          config_path = context.config.value('logs_config')

          if config_path && File.exist?(config_path)
            YAML.load_file(config_path) || {}
          elsif File.exist?(assumed_config_file_path)
            YAML.load_file(assumed_config_file_path) || {}
          else
            {}
          end
        end

        def config_file
          context.config.value('collector_config_file')
        end

        def config_contents # rubocop:disable Metrics/AbcSize
          <<~CONFIG
            receivers:
              filelog:
                include: [#{context.config.value('logs_monitored').join(',')}]
                storage: file_storage/filelogreceiver
                operators:
                  - type: json_parser
                    severity:
                      parse_from: attributes.severity
                    timestamp:
                      parse_from: attributes.time
                      layout: "%Y-%m-%dT%H:%M:%S.%LZ"
            processors:
              transform:
                log_statements:
                  - context: log
                    statements:
                      # Copy original body to raw_bytes attribute.
                      - 'set(attributes["raw_bytes"], body)'
                      # Replace the body with the log message.
                      - 'set(body, attributes["msg"])'
                      # Move service.name attribute to resource attribute.
                      - 'set(resource.attributes["service.name"], attributes["service.name"])'
              batch:
            exporters:
              otlp:
                endpoint: #{context.config.value('logs_reporting_endpoint')}
                headers:
                  x-telemetryhub-key: #{context.config.value('logs_ingest_key')}
                sending_queue:
                  storage: file_storage/otc
            extensions:
              health_check:
                endpoint: #{health_check_endpoint}
              file_storage/filelogreceiver:
                directory: #{context.config.value('collector_offset_storage_dir')}
              file_storage/otc:
                directory: #{context.config.value('collector_sending_queue_storage_dir')}
                timeout: 10s
            service:
              extensions:
                - health_check
                - file_storage/filelogreceiver
                - file_storage/otc
              pipelines:
                logs:
                  receivers:
                    - filelog
                  processors:
                    - transform
                    - batch
                  exporters:
                    - otlp
              telemetry:
                metrics:
                  level: none
                logs:
                  level: #{context.config.value('collector_log_level')}
          CONFIG
        end

        def health_check_endpoint
          "localhost:#{context.config.value('health_check_port')}"
        end

        def assumed_config_file_path
          "#{context.application_root}/config/scout_logs_config.yml"
        end
      end
    end
  end
end
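The deep_merge refinement is what lets a user-supplied logs_config YAML override individual keys of the generated collector config without clobbering sibling keys. A small sketch of the merge semantics (the endpoint values are made up):

using ScoutApm::Logging::Collector::HashDeepMerge

defaults = { 'exporters' => { 'otlp' => { 'endpoint' => 'ingest.example.com:4317', 'timeout' => '10s' } } }
override = { 'exporters' => { 'otlp' => { 'endpoint' => 'collector.internal:4317' } } }

# Nested hashes are merged key by key; only the overridden leaf changes.
defaults.deep_merge(override)
# => {"exporters"=>{"otlp"=>{"endpoint"=>"collector.internal:4317", "timeout"=>"10s"}}}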
data/lib/scout_apm/logging/monitor/collector/downloader.rb
@@ -0,0 +1,78 @@
# frozen_string_literal: true

module ScoutApm
  module Logging
    module Collector
      # Downloads the collector-contrib binary from the OpenTelemetry project.
      class Downloader
        attr_accessor :failed_count
        attr_reader :context, :checksum

        def initialize(context)
          @context = context
          @checksum = Checksum.new(context)
        end

        def download!
          # Already downloaded the collector. Noop.
          return if checksum.verified_checksum?

          # Account for issues such as failed extractions or download corruptions.
          download_collector
          verify_checksum
        rescue StandardError => e
          # Bypass the Rubocop useless assignment rule.
          failed_count ||= 0

          if failed_count < 3
            context.logger.error("Failed to download or extract otelcol-contrib: #{e}. Retrying...")
            failed_count += 1
            retry
          end
        end

        def download_collector(url = nil, redirect: false) # rubocop:disable Metrics/AbcSize
          # Prevent double logging.
          unless redirect
            context.logger.debug("Downloading otelcol-contrib for version #{context.config.value('collector_version')}")
          end

          url_to_download = url || collector_url
          uri = URI(url_to_download)

          Net::HTTP.start(uri.host, uri.port, use_ssl: uri.scheme == 'https') do |http|
            request = Net::HTTP::Get.new(uri)
            http.request(request) do |response|
              return download_collector(response['location'], redirect: true) if response.code == '302'

              File.open(destination, 'wb') do |file|
                response.read_body do |chunk|
                  file.write(chunk)
                end
              end
            end
          end
        end

        private

        def verify_checksum
          raise 'Invalid checksum on download.' unless checksum.verified_checksum?
        end

        def collector_url
          collector_version = context.config.value('collector_version')
          architecture = Utils.get_architecture
          host_os = Utils.get_host_os

          # https://opentelemetry.io/docs/collector/installation/#manual-linux-installation
          "https://github.com/open-telemetry/opentelemetry-collector-releases/releases/download/v#{collector_version}/otelcol-contrib_#{collector_version}_#{host_os}_#{architecture}.tar.gz"
        end

        def destination
          "#{context.config.value('collector_download_dir')}/otelcol.tar.gz"
        end
      end
    end
  end
end
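The rescue block in download! relies on a plain begin/rescue/retry counter: the local variable survives each `retry` of the method body, so `failed_count ||= 0` only initializes it on the first failure. A standalone sketch of that idiom (not the gem's exact control flow; the flaky body here is a stand-in for the download-and-verify step):

# Retry-counter idiom: the local survives `retry`, so ||= runs only once.
def fetch_with_retries
  @calls = (@calls || 0) + 1
  raise 'network hiccup' if @calls < 3 # stand-in for a flaky download
  'downloaded on attempt 3'
rescue StandardError => e
  attempts ||= 0
  attempts += 1
  retry if attempts < 3
  raise e
end

puts fetch_with_retries # => "downloaded on attempt 3"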
data/lib/scout_apm/logging/monitor/collector/extractor.rb
@@ -0,0 +1,37 @@
# frozen_string_literal: true

module ScoutApm
  module Logging
    module Collector
      # Extracts the contents of the collector tar file.
      class Extractor
        attr_reader :context

        def initialize(context)
          @context = context
        end

        def extract!
          # Already extracted. Noop.
          return if has_been_extracted?

          system("tar -xzf #{tar_path} -C #{context.config.value('collector_download_dir')}")
        end

        def has_been_extracted?
          File.exist?(binary_path)
        end

        private

        def tar_path
          "#{context.config.value('collector_download_dir')}/otelcol.tar.gz"
        end

        def binary_path
          "#{context.config.value('collector_download_dir')}/otelcol-contrib"
        end
      end
    end
  end
end
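extract! shells out to tar and the Manager later checks has_been_extracted? to confirm the binary landed. Ruby's `system` also reports failure directly (false for a non-zero exit, nil if the command could not run), so a caller could check it on the spot; a hedged sketch with hypothetical paths:

download_dir = '/tmp/scout_apm_logging' # hypothetical collector_download_dir
ok = system("tar -xzf #{download_dir}/otelcol.tar.gz -C #{download_dir}")

# system returns true on exit status 0, false on non-zero, nil if tar could not run.
warn 'collector extraction failed' unless ok && File.exist?("#{download_dir}/otelcol-contrib")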
data/lib/scout_apm/logging/monitor/collector/manager.rb
@@ -0,0 +1,57 @@
# frozen_string_literal: true

require_relative './checksum'
require_relative './configuration'
require_relative './downloader'
require_relative './extractor'

module ScoutApm
  module Logging
    module Collector
      # Manager class for the downloading, configuring, and starting of the collector.
      class Manager
        attr_reader :context

        def initialize(context)
          @context = context

          @checksum = Checksum.new(@context)
          @configuration = Configuration.new(@context)
          @downloader = Downloader.new(@context)
          @extractor = Extractor.new(@context)
        end

        def setup!
          @configuration.setup!
          @downloader.download!
          @extractor.extract!

          start_collector if verified_checksum_and_extracted?
        end

        def start_collector
          context.logger.info('Starting otelcol-contrib')
          collector_process = Process.spawn("#{extracted_collector_path}/otelcol-contrib --config #{config_file}")
          File.write(context.config.value('collector_pid_file'), collector_process)
        end

        private

        def verified_checksum_and_extracted?
          has_verified_checksum = @checksum.verified_checksum?(should_log_failures: true)
          has_extracted_content = @extractor.has_been_extracted?

          has_verified_checksum && has_extracted_content
        end

        def extracted_collector_path
          context.config.value('collector_download_dir')
        end

        def config_file
          context.config.value('collector_config_file')
        end
      end
    end
  end
end
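Process.spawn returns the child PID immediately, which is what start_collector writes to collector_pid_file. A minimal sketch of that spawn-and-record step outside the gem (binary path and pid file are hypothetical; the Process.detach call is an addition for this standalone sketch, since the gem tracks the collector process through its monitor instead):

# Launch a long-running process and record its PID, as start_collector does above.
pid = Process.spawn('/tmp/scout_apm_logging/otelcol-contrib --config /tmp/scout_apm_logging/config.yml')
File.write('/tmp/otelcol.pid', pid)

# Without a matching Process.wait, detach so the child is reaped automatically.
Process.detach(pid)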