activerabbit-ai 0.1.0
This diff shows the content of publicly available package versions as they were released to their public registries; it is provided for informational purposes only.
- checksums.yaml +7 -0
- data/.rspec +3 -0
- data/.standard.yml +3 -0
- data/CHANGELOG.md +49 -0
- data/IMPLEMENTATION_SUMMARY.md +220 -0
- data/README.md +317 -0
- data/Rakefile +10 -0
- data/TESTING_GUIDE.md +585 -0
- data/examples/rails_app_testing.rb +437 -0
- data/examples/rails_integration.rb +243 -0
- data/examples/standalone_usage.rb +309 -0
- data/lib/active_rabbit/client/configuration.rb +162 -0
- data/lib/active_rabbit/client/event_processor.rb +131 -0
- data/lib/active_rabbit/client/exception_tracker.rb +157 -0
- data/lib/active_rabbit/client/http_client.rb +137 -0
- data/lib/active_rabbit/client/n_plus_one_detector.rb +188 -0
- data/lib/active_rabbit/client/performance_monitor.rb +150 -0
- data/lib/active_rabbit/client/pii_scrubber.rb +169 -0
- data/lib/active_rabbit/client/railtie.rb +328 -0
- data/lib/active_rabbit/client/sidekiq_middleware.rb +130 -0
- data/lib/active_rabbit/client/version.rb +7 -0
- data/lib/active_rabbit/client.rb +119 -0
- data/lib/active_rabbit.rb +3 -0
- data/script/test_production_readiness.rb +403 -0
- data/sig/active_rabbit/client.rbs +6 -0
- metadata +155 -0

data/lib/active_rabbit/client/exception_tracker.rb
@@ -0,0 +1,157 @@
# frozen_string_literal: true

require "digest"

module ActiveRabbit
  module Client
    class ExceptionTracker
      attr_reader :configuration, :http_client

      def initialize(configuration, http_client)
        @configuration = configuration
        @http_client = http_client
      end

      def track_exception(exception:, context: {}, user_id: nil, tags: {})
        return unless exception
        return if should_ignore_exception?(exception)

        exception_data = build_exception_data(
          exception: exception,
          context: context,
          user_id: user_id,
          tags: tags
        )

        # Apply before_send callback if configured
        if configuration.before_send_exception
          exception_data = configuration.before_send_exception.call(exception_data)
          return unless exception_data # Callback can filter out exceptions by returning nil
        end

        http_client.post_exception(exception_data)
      end

      def flush
        # Exception tracker sends immediately, no batching needed
      end

      private

      def build_exception_data(exception:, context:, user_id:, tags:)
        backtrace = parse_backtrace(exception.backtrace || [])

        data = {
          type: exception.class.name,
          message: exception.message,
          backtrace: backtrace,
          fingerprint: generate_fingerprint(exception),
          timestamp: Time.now.iso8601(3),
          environment: configuration.environment,
          release: configuration.release,
          server_name: configuration.server_name,
          context: scrub_pii(context || {}),
          tags: tags || {}
        }

        data[:user_id] = user_id if user_id
        data[:project_id] = configuration.project_id if configuration.project_id

        # Add runtime context
        data[:runtime_context] = build_runtime_context

        # Add request context if available
        if defined?(Thread) && Thread.current[:active_rabbit_request_context]
          data[:request_context] = Thread.current[:active_rabbit_request_context]
        end

        data
      end

      def parse_backtrace(backtrace)
        backtrace.map do |line|
          if match = line.match(/^(.+?):(\d+)(?::in `(.+?)')?$/)
            {
              filename: match[1],
              lineno: match[2].to_i,
              method: match[3],
              line: line
            }
          else
            { line: line }
          end
        end
      end

      def generate_fingerprint(exception)
        # Create a consistent fingerprint for grouping similar exceptions
        parts = [
          exception.class.name,
          clean_message_for_fingerprint(exception.message),
          extract_relevant_backtrace_for_fingerprint(exception.backtrace)
        ].compact

        Digest::SHA256.hexdigest(parts.join("|"))
      end

      def clean_message_for_fingerprint(message)
        return "" unless message

        # Remove dynamic content that would prevent proper grouping
        message
          .gsub(/\d+/, "N") # Replace numbers with N
          .gsub(/0x[a-f0-9]+/i, "0xHEX") # Replace hex addresses
          .gsub(/'[^']+'/, "'STRING'") # Replace quoted strings
          .gsub(/"[^"]+"/, '"STRING"') # Replace double-quoted strings
          .gsub(/\/[^\/\s]+\/[^\/\s]*/, "/PATH/") # Replace file paths
      end

      def extract_relevant_backtrace_for_fingerprint(backtrace)
        return "" unless backtrace

        # Take the first few frames from the application (not gems/stdlib)
        app_frames = backtrace
          .select { |line| line.include?(Dir.pwd) } # Only app files
          .first(3) # First 3 frames
          .map { |line| line.gsub(/:\d+/, ":LINE") } # Remove line numbers

        app_frames.join("|")
      end

      def build_runtime_context
        context = {
          ruby_version: RUBY_VERSION,
          ruby_platform: RUBY_PLATFORM,
          gem_version: VERSION
        }

        # Add framework information
        if defined?(Rails)
          context[:rails_version] = Rails.version
          context[:rails_env] = Rails.env if Rails.respond_to?(:env)
        end

        # Add memory usage if available
        begin
          if defined?(GC)
            context[:gc_stats] = GC.stat
          end
        rescue
          # Ignore if GC.stat is not available
        end

        context
      end

      def should_ignore_exception?(exception)
        configuration.should_ignore_exception?(exception)
      end

      def scrub_pii(data)
        return data unless configuration.enable_pii_scrubbing

        PiiScrubber.new(configuration).scrub(data)
      end
    end
  end
end
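
The tracker builds a fingerprinted, PII-scrubbed payload and hands it to the HTTP client immediately. Below is a minimal sketch of driving it outside Rails; the Struct-based configuration and the printing HTTP client are hypothetical stand-ins that only implement the methods ExceptionTracker actually calls, not the gem's real Configuration or HttpClient classes.

require "active_rabbit"  # assumes the gem is installed and loadable
require "time"           # ExceptionTracker calls Time#iso8601 outside Rails

# Hypothetical stand-ins: only the methods ExceptionTracker calls are provided.
StubConfig = Struct.new(:environment, :release, :server_name, :project_id,
                        :enable_pii_scrubbing, :before_send_exception,
                        keyword_init: true) do
  def should_ignore_exception?(_exception)
    false
  end
end

class PrintingHttpClient
  def post_exception(data)
    puts data.inspect  # print instead of POSTing to the ActiveRabbit API
  end
end

config  = StubConfig.new(environment: "development", enable_pii_scrubbing: false)
tracker = ActiveRabbit::Client::ExceptionTracker.new(config, PrintingHttpClient.new)

begin
  raise ArgumentError, "user 42 not found"
rescue => e
  tracker.track_exception(exception: e, tags: { feature: "signup" })
end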

data/lib/active_rabbit/client/http_client.rb
@@ -0,0 +1,137 @@
# frozen_string_literal: true

require "faraday"
require "faraday/retry"
require "json"
require "concurrent"

module ActiveRabbit
  module Client
    class HttpClient
      attr_reader :configuration

      def initialize(configuration)
        @configuration = configuration
        @connection = build_connection
        @request_queue = Concurrent::Array.new
        @batch_timer = nil
        @shutdown = false
      end

      def post_event(event_data)
        enqueue_request(:post, "api/v1/events", event_data)
      end

      def post_exception(exception_data)
        # Add event_type for batch processing
        exception_data_with_type = exception_data.merge(event_type: 'error')
        enqueue_request(:post, "api/v1/events/errors", exception_data_with_type)
      end

      def post_performance(performance_data)
        # Add event_type for batch processing
        performance_data_with_type = performance_data.merge(event_type: 'performance')
        enqueue_request(:post, "api/v1/events/performance", performance_data_with_type)
      end

      def post_batch(batch_data)
        make_request(:post, "api/v1/events/batch", { events: batch_data })
      end

      def flush
        return if @request_queue.empty?

        batch = @request_queue.shift(@request_queue.length)
        return if batch.empty?

        begin
          post_batch(batch)
        rescue => e
          configuration.logger&.error("[ActiveRabbit] Failed to send batch: #{e.message}")
          raise APIError, "Failed to send batch: #{e.message}"
        end
      end

      def shutdown
        @shutdown = true
        @batch_timer&.shutdown
        flush
      end

      private

      def build_connection
        Faraday.new(url: configuration.api_url) do |conn|
          conn.request :json
          conn.request :retry,
                       max: configuration.retry_count,
                       interval: configuration.retry_delay,
                       backoff_factor: 2,
                       retry_statuses: [429, 500, 502, 503, 504]

          conn.response :json
          conn.response :raise_error

          conn.options.timeout = configuration.timeout
          conn.options.open_timeout = configuration.open_timeout

          conn.headers["User-Agent"] = "ActiveRabbit-Ruby/#{VERSION}"
          conn.headers["X-Project-Token"] = configuration.api_key
          conn.headers["Content-Type"] = "application/json"

          if configuration.project_id
            conn.headers["X-Project-ID"] = configuration.project_id
          end
        end
      end

      def enqueue_request(method, path, data)
        return if @shutdown

        @request_queue << {
          method: method,
          path: path,
          data: data,
          timestamp: Time.now.to_f
        }

        # Start batch timer if not already running
        start_batch_timer if @batch_timer.nil? || @batch_timer.shutdown?

        # Flush if queue is full
        flush if @request_queue.length >= configuration.queue_size
      end

      def start_batch_timer
        @batch_timer = Concurrent::TimerTask.new(
          execution_interval: configuration.flush_interval,
          timeout_interval: configuration.flush_interval + 5
        ) do
          flush unless @request_queue.empty?
        end

        @batch_timer.execute
      end

      def make_request(method, path, data)
        response = @connection.public_send(method, path, data)

        case response.status
        when 200..299
          response.body
        when 429
          raise RateLimitError, "Rate limit exceeded"
        else
          raise APIError, "API request failed with status #{response.status}: #{response.body}"
        end
      rescue Faraday::TimeoutError => e
        raise APIError, "Request timeout: #{e.message}"
      rescue Faraday::ConnectionFailed => e
        raise APIError, "Connection failed: #{e.message}"
      rescue Faraday::Error => e
        raise APIError, "Request failed: #{e.message}"
      end
    end
  end
end
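
Payloads are not sent one at a time: they accumulate in a Concurrent::Array and are flushed to the batch endpoint either when a Concurrent::TimerTask fires every flush_interval seconds or when the queue reaches queue_size. The sketch below drives the client directly; the OpenStruct configuration and the localhost URL are placeholders, not values taken from the gem.

require "active_rabbit"  # assumes the gem is installed and loadable
require "ostruct"
require "logger"

config = OpenStruct.new(
  api_url: "http://localhost:9292",  # placeholder endpoint, not a real ActiveRabbit URL
  api_key: "test-token",
  project_id: nil,
  retry_count: 2,
  retry_delay: 0.5,
  timeout: 5,
  open_timeout: 2,
  queue_size: 50,      # flush as soon as this many events are queued
  flush_interval: 10,  # otherwise the timer flushes every 10 seconds
  logger: Logger.new($stdout)
)

client = ActiveRabbit::Client::HttpClient.new(config)
client.post_event(name: "checkout.completed", value: 1)

begin
  client.shutdown  # stops the batch timer and flushes whatever is still queued
rescue => e
  warn "delivery failed: #{e.message}"  # flush re-raises delivery failures as APIError
end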

data/lib/active_rabbit/client/n_plus_one_detector.rb
@@ -0,0 +1,188 @@
# frozen_string_literal: true

require "securerandom"

module ActiveRabbit
  module Client
    class NPlusOneDetector
      attr_reader :configuration

      def initialize(configuration)
        @configuration = configuration
        @query_patterns = Concurrent::Hash.new { |h, k| h[k] = [] }
        @request_queries = Concurrent::Hash.new { |h, k| h[k] = [] }
      end

      def track_query(sql, bindings = nil, name = nil, duration = nil)
        return unless configuration.enable_n_plus_one_detection
        return unless current_request_id

        query_info = {
          sql: normalize_sql(sql),
          bindings: bindings,
          name: name,
          duration: duration,
          timestamp: Time.now,
          backtrace: caller(2, 10) # Skip this method and the AR method
        }

        @request_queries[current_request_id] << query_info
      end

      def analyze_request_queries(request_id = nil)
        request_id ||= current_request_id
        return unless request_id

        queries = @request_queries.delete(request_id) || []
        return if queries.empty?

        n_plus_one_issues = detect_n_plus_one_patterns(queries)

        n_plus_one_issues.each do |issue|
          report_n_plus_one_issue(issue)
        end
      end

      def start_request(request_id = nil)
        request_id ||= SecureRandom.uuid
        Thread.current[:active_rabbit_request_id] = request_id
        @request_queries[request_id] = []
        request_id
      end

      def finish_request(request_id = nil)
        request_id ||= current_request_id
        return unless request_id

        analyze_request_queries(request_id)
        Thread.current[:active_rabbit_request_id] = nil
      end

      private

      def current_request_id
        Thread.current[:active_rabbit_request_id]
      end

      def normalize_sql(sql)
        return sql unless sql.is_a?(String)

        # Remove specific values to group similar queries
        normalized = sql.dup

        # Replace string literals
        normalized.gsub!(/'[^']*'/, '?')
        normalized.gsub!(/"[^"]*"/, '?')

        # Replace numbers
        normalized.gsub(/\b\d+\b/, '?')

        # Replace IN clauses with multiple values
        normalized.gsub(/IN\s*\([^)]*\)/i, 'IN (?)')

        # Normalize whitespace
        normalized.gsub(/\s+/, ' ').strip
      end

      def detect_n_plus_one_patterns(queries)
        issues = []

        # Group queries by normalized SQL
        grouped_queries = queries.group_by { |q| q[:sql] }

        grouped_queries.each do |normalized_sql, query_group|
          next if query_group.size < 3 # Need at least 3 similar queries to consider N+1

          # Check if queries are executed in quick succession
          if queries_in_quick_succession?(query_group)
            issues << build_n_plus_one_issue(normalized_sql, query_group)
          end
        end

        issues
      end

      def queries_in_quick_succession?(query_group)
        return false if query_group.size < 2

        # Check if queries are within a short time window (1 second)
        first_query_time = query_group.first[:timestamp]
        last_query_time = query_group.last[:timestamp]

        (last_query_time - first_query_time) < 1.0
      end

      def build_n_plus_one_issue(normalized_sql, query_group)
        # Find the most common backtrace pattern
        backtrace_patterns = query_group.map { |q| extract_app_backtrace(q[:backtrace]) }
        common_backtrace = find_most_common_backtrace(backtrace_patterns)

        total_duration = query_group.sum { |q| q[:duration] || 0 }

        {
          type: "n_plus_one_query",
          normalized_sql: normalized_sql,
          query_count: query_group.size,
          total_duration_ms: total_duration,
          average_duration_ms: total_duration / query_group.size,
          backtrace: common_backtrace,
          first_query_time: query_group.first[:timestamp],
          last_query_time: query_group.last[:timestamp],
          sample_bindings: query_group.first(3).map { |q| q[:bindings] }.compact
        }
      end

      def extract_app_backtrace(backtrace)
        return [] unless backtrace

        # Only include application code, not gems or stdlib
        app_root = defined?(Rails) ? Rails.root.to_s : Dir.pwd

        backtrace.select do |line|
          line.start_with?(app_root) && !line.include?('/vendor/') && !line.include?('/gems/')
        end.first(5) # Limit to first 5 app frames
      end

      def find_most_common_backtrace(backtrace_patterns)
        return [] if backtrace_patterns.empty?

        # Find the backtrace pattern that appears most frequently
        backtrace_counts = backtrace_patterns.each_with_object(Hash.new(0)) do |backtrace, counts|
          key = backtrace.join("|")
          counts[key] += 1
        end

        most_common_key = backtrace_counts.max_by { |_, count| count }&.first
        return [] unless most_common_key

        most_common_key.split("|")
      end

      def report_n_plus_one_issue(issue)
        # Create a structured exception for the N+1 issue
        exception_data = {
          type: "NPlusOneQueryIssue",
          message: "N+1 query detected: #{issue[:query_count]} similar queries executed",
          details: issue,
          timestamp: Time.now.iso8601(3),
          environment: configuration.environment,
          release: configuration.release,
          server_name: configuration.server_name
        }

        exception_data[:project_id] = configuration.project_id if configuration.project_id

        # Send as a performance issue rather than an exception
        Client.track_event(
          "n_plus_one_detected",
          {
            normalized_sql: issue[:normalized_sql],
            query_count: issue[:query_count],
            total_duration_ms: issue[:total_duration_ms],
            average_duration_ms: issue[:average_duration_ms]
          }
        )
      end
    end
  end
end
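
Detection is per request: track_query records normalized SQL plus a backtrace against the request id stored in Thread.current, and finish_request groups the recorded queries, flagging any group of three or more similar statements issued within one second. The wiring below is one plausible way to feed it from ActiveSupport::Notifications in a Rails app; it is illustrative only and is not necessarily how this gem's Railtie hooks things up. Reporting goes through Client.track_event, so the top-level client is assumed to be configured already.

# config: any object with enable_n_plus_one_detection, environment, release,
# server_name and project_id readers (an OpenStruct works, as in the sketch above).
detector = ActiveRabbit::Client::NPlusOneDetector.new(config)

# Record every ActiveRecord query against the current request.
ActiveSupport::Notifications.subscribe("sql.active_record") do |*, payload|
  detector.track_query(payload[:sql], payload[:binds], payload[:name])
end

# Around each request (for example in a small Rack middleware):
detector.start_request   # tags this thread with a request id
# ... run the request; each query above is appended to this request's list ...
detector.finish_request  # groups the recorded queries and reports any N+1 patterns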

data/lib/active_rabbit/client/performance_monitor.rb
@@ -0,0 +1,150 @@
# frozen_string_literal: true

module ActiveRabbit
  module Client
    class PerformanceMonitor
      attr_reader :configuration, :http_client

      def initialize(configuration, http_client)
        @configuration = configuration
        @http_client = http_client
        @active_transactions = Concurrent::Hash.new
      end

      def track_performance(name:, duration_ms:, metadata: {})
        return unless configuration.enable_performance_monitoring

        performance_data = build_performance_data(
          name: name,
          duration_ms: duration_ms,
          metadata: metadata
        )

        http_client.post_performance(performance_data)
      end

      def start_transaction(name, metadata: {})
        return unless configuration.enable_performance_monitoring

        transaction_id = SecureRandom.uuid
        @active_transactions[transaction_id] = {
          name: name,
          start_time: Time.now,
          metadata: metadata
        }

        transaction_id
      end

      def finish_transaction(transaction_id, additional_metadata: {})
        return unless configuration.enable_performance_monitoring
        return unless @active_transactions.key?(transaction_id)

        transaction = @active_transactions.delete(transaction_id)
        duration_ms = ((Time.now - transaction[:start_time]) * 1000).round(2)

        track_performance(
          name: transaction[:name],
          duration_ms: duration_ms,
          metadata: transaction[:metadata].merge(additional_metadata)
        )
      end

      def measure(name, metadata: {})
        return yield unless configuration.enable_performance_monitoring

        start_time = Time.now
        result = yield
        end_time = Time.now

        duration_ms = ((end_time - start_time) * 1000).round(2)

        track_performance(
          name: name,
          duration_ms: duration_ms,
          metadata: metadata
        )

        result
      end

      def flush
        # Performance monitor sends immediately, no batching needed
      end

      private

      def build_performance_data(name:, duration_ms:, metadata:)
        data = {
          name: name.to_s,
          duration_ms: duration_ms.to_f,
          metadata: scrub_pii(metadata || {}),
          timestamp: Time.now.iso8601(3),
          environment: configuration.environment,
          release: configuration.release,
          server_name: configuration.server_name
        }

        data[:project_id] = configuration.project_id if configuration.project_id

        # Add performance context
        data[:performance_context] = build_performance_context

        # Add request context if available
        if defined?(Thread) && Thread.current[:active_rabbit_request_context]
          data[:request_context] = Thread.current[:active_rabbit_request_context]
        end

        data
      end

      def build_performance_context
        context = {}

        # Memory usage
        begin
          if defined?(GC)
            gc_stats = GC.stat
            context[:memory] = {
              heap_allocated_pages: gc_stats[:heap_allocated_pages],
              heap_sorted_length: gc_stats[:heap_sorted_length],
              heap_allocatable_pages: gc_stats[:heap_allocatable_pages],
              heap_available_slots: gc_stats[:heap_available_slots],
              heap_live_slots: gc_stats[:heap_live_slots],
              heap_free_slots: gc_stats[:heap_free_slots],
              total_allocated_pages: gc_stats[:total_allocated_pages]
            }
          end
        rescue
          # Ignore if GC stats are not available
        end

        # Process information
        begin
          context[:process] = {
            pid: Process.pid
          }
        rescue
          # Ignore if process info is not available
        end

        # Thread information
        begin
          context[:threading] = {
            active_threads: Thread.list.size
          }
        rescue
          # Ignore if thread info is not available
        end

        context
      end

      def scrub_pii(data)
        return data unless configuration.enable_pii_scrubbing

        PiiScrubber.new(configuration).scrub(data)
      end
    end
  end
end
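
PerformanceMonitor offers three entry points: a fire-and-forget track_performance, a start_transaction/finish_transaction pair keyed by a UUID, and a block-based measure that times the yield. A minimal sketch is below, reusing the same kind of hypothetical stand-ins as the ExceptionTracker example (a Struct configuration and an HTTP client that prints instead of posting); neither is the gem's real Configuration or HttpClient.

require "active_rabbit"   # assumes the gem is installed and loadable
require "time"            # iso8601 outside Rails
require "securerandom"    # start_transaction uses SecureRandom.uuid

PerfConfig = Struct.new(:environment, :release, :server_name, :project_id,
                        :enable_performance_monitoring, :enable_pii_scrubbing,
                        keyword_init: true)

class PrintingPerfClient
  def post_performance(data)
    puts data.inspect  # print instead of POSTing to the ActiveRabbit API
  end
end

config  = PerfConfig.new(environment: "development",
                         enable_performance_monitoring: true,
                         enable_pii_scrubbing: false)
monitor = ActiveRabbit::Client::PerformanceMonitor.new(config, PrintingPerfClient.new)

# Block form: times the yield and reports the duration in milliseconds.
monitor.measure("reports.generate", metadata: { format: "csv" }) { sleep 0.05 }

# Explicit form: useful when start and finish happen in different places.
tx = monitor.start_transaction("checkout.payment", metadata: { gateway: "stripe" })
sleep 0.01
monitor.finish_transaction(tx, additional_metadata: { status: "captured" })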