catpm 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/MIT-LICENSE +20 -0
- data/README.md +222 -0
- data/Rakefile +6 -0
- data/app/assets/stylesheets/catpm/application.css +15 -0
- data/app/controllers/catpm/application_controller.rb +6 -0
- data/app/controllers/catpm/endpoints_controller.rb +63 -0
- data/app/controllers/catpm/errors_controller.rb +63 -0
- data/app/controllers/catpm/events_controller.rb +89 -0
- data/app/controllers/catpm/samples_controller.rb +13 -0
- data/app/controllers/catpm/status_controller.rb +79 -0
- data/app/controllers/catpm/system_controller.rb +17 -0
- data/app/helpers/catpm/application_helper.rb +264 -0
- data/app/jobs/catpm/application_job.rb +6 -0
- data/app/mailers/catpm/application_mailer.rb +8 -0
- data/app/models/catpm/application_record.rb +7 -0
- data/app/models/catpm/bucket.rb +45 -0
- data/app/models/catpm/error_record.rb +37 -0
- data/app/models/catpm/event_bucket.rb +12 -0
- data/app/models/catpm/event_sample.rb +22 -0
- data/app/models/catpm/sample.rb +26 -0
- data/app/views/catpm/endpoints/_sample_table.html.erb +36 -0
- data/app/views/catpm/endpoints/show.html.erb +124 -0
- data/app/views/catpm/errors/index.html.erb +66 -0
- data/app/views/catpm/errors/show.html.erb +107 -0
- data/app/views/catpm/events/index.html.erb +73 -0
- data/app/views/catpm/events/show.html.erb +86 -0
- data/app/views/catpm/samples/show.html.erb +113 -0
- data/app/views/catpm/shared/_page_nav.html.erb +6 -0
- data/app/views/catpm/shared/_segments_waterfall.html.erb +147 -0
- data/app/views/catpm/status/index.html.erb +124 -0
- data/app/views/catpm/system/index.html.erb +454 -0
- data/app/views/layouts/catpm/application.html.erb +381 -0
- data/config/routes.rb +19 -0
- data/db/migrate/20250601000001_create_catpm_tables.rb +104 -0
- data/lib/catpm/adapter/base.rb +85 -0
- data/lib/catpm/adapter/postgresql.rb +186 -0
- data/lib/catpm/adapter/sqlite.rb +159 -0
- data/lib/catpm/adapter.rb +28 -0
- data/lib/catpm/auto_instrument.rb +145 -0
- data/lib/catpm/buffer.rb +59 -0
- data/lib/catpm/circuit_breaker.rb +60 -0
- data/lib/catpm/collector.rb +320 -0
- data/lib/catpm/configuration.rb +103 -0
- data/lib/catpm/custom_event.rb +37 -0
- data/lib/catpm/engine.rb +39 -0
- data/lib/catpm/errors.rb +6 -0
- data/lib/catpm/event.rb +75 -0
- data/lib/catpm/fingerprint.rb +52 -0
- data/lib/catpm/flusher.rb +462 -0
- data/lib/catpm/lifecycle.rb +76 -0
- data/lib/catpm/middleware.rb +75 -0
- data/lib/catpm/middleware_probe.rb +28 -0
- data/lib/catpm/patches/httpclient.rb +44 -0
- data/lib/catpm/patches/net_http.rb +39 -0
- data/lib/catpm/request_segments.rb +101 -0
- data/lib/catpm/segment_subscribers.rb +242 -0
- data/lib/catpm/span_helpers.rb +51 -0
- data/lib/catpm/stack_sampler.rb +226 -0
- data/lib/catpm/subscribers.rb +47 -0
- data/lib/catpm/tdigest.rb +174 -0
- data/lib/catpm/trace.rb +165 -0
- data/lib/catpm/version.rb +5 -0
- data/lib/catpm.rb +66 -0
- data/lib/generators/catpm/install_generator.rb +36 -0
- data/lib/generators/catpm/templates/initializer.rb.tt +77 -0
- data/lib/tasks/catpm_seed.rake +79 -0
- data/lib/tasks/catpm_tasks.rake +6 -0
- metadata +123 -0
|
@@ -0,0 +1,174 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Catpm
  # Minimal TDigest implementation for percentile estimation.
  # Based on the merging digest variant of the t-digest algorithm
  # by Ted Dunning (https://github.com/tdunning/t-digest).
  #
  # Provides accurate percentile estimates (p50, p95, p99) using
  # fixed memory (~1-3 KB serialized). Supports lossless merging
  # of two digests.
  class TDigest
    # A cluster of nearby samples: running mean and total weight.
    Centroid = Struct.new(:mean, :weight)

    COMPRESSION = 100 # Controls accuracy vs. memory trade-off

    # Fixed sizes of the binary layout produced by #serialize.
    HEADER_BYTES = 8 + 8 + 4 + 8 + 8 # compression + count + n_centroids + min + max
    CENTROID_BYTES = 8 + 4           # mean + weight

    attr_reader :count

    def initialize(compression: COMPRESSION)
      @compression = compression
      @centroids = []
      @count = 0
      @min = Float::INFINITY
      @max = -Float::INFINITY
      # Incoming values are staged here and merged in batches to
      # amortize the sort inside flush_buffer.
      @buffer = []
      @buffer_limit = @compression * 5
    end

    # Record +value+ with the given +weight+. Returns self for chaining.
    def add(value, weight = 1)
      value = value.to_f
      @buffer << Centroid.new(value, weight)
      @count += weight
      @min = value if value < @min
      @max = value if value > @max
      flush_buffer if @buffer.size >= @buffer_limit
      self
    end

    # Estimate the value at quantile +p+ (0.0..1.0). Returns nil when the
    # digest is empty. Interpolates linearly between neighboring centroid
    # means; both tails interpolate against the exact tracked @min/@max,
    # so percentile(0.0) == min and percentile(1.0) == max.
    def percentile(p)
      raise ArgumentError, 'percentile must be between 0 and 1' unless (0..1).cover?(p)
      return nil if @count == 0

      flush_buffer unless @buffer.empty?
      return @centroids.first.mean if @centroids.size == 1

      target = p * @count
      cumulative = 0.0

      @centroids.each_with_index do |centroid, i|
        lower = cumulative

        if target < lower + centroid.weight / 2.0
          if i == 0
            # Lower tail: blend from the exact minimum to the first mean.
            return interpolate(@min, centroid.mean, target / (centroid.weight / 2.0))
          else
            prev = @centroids[i - 1]
            prev_mid = lower - prev.weight / 2.0
            curr_mid = lower + centroid.weight / 2.0
            return interpolate(prev.mean, centroid.mean,
                               (target - prev_mid) / (curr_mid - prev_mid))
          end
        end

        cumulative = lower + centroid.weight
      end

      # Upper tail (FIX): past the last centroid's midpoint, blend toward
      # the exact maximum. Previously this returned the last centroid's
      # mean, so percentile(1.0) under-reported the true max even though
      # @max was tracked precisely for this purpose.
      last = @centroids.last
      half = last.weight / 2.0
      interpolate(last.mean, @max, (target - (@count - half)) / half)
    end

    # Fold all centroids of +other+ into this digest. Counts and weights
    # are preserved exactly. Flushes other's staging buffer first.
    def merge(other)
      return self if other.nil? || other.count == 0

      other.send(:flush_buffer) unless other.send(:buffer).empty?
      other.send(:centroids).each do |c|
        add(c.mean, c.weight)
      end
      self
    end

    def empty?
      @count == 0
    end

    # Binary serialization: [compression(f64), count(u64), n_centroids(u32), min(f64), max(f64), (mean(f64), weight(u32))...]
    def serialize
      flush_buffer unless @buffer.empty?

      parts = []
      parts << [@compression].pack('E')    # f64 little-endian
      parts << [@count].pack('Q<')         # u64 little-endian
      parts << [@centroids.size].pack('V') # u32 little-endian
      parts << [@min].pack('E')            # f64
      parts << [@max].pack('E')            # f64

      @centroids.each do |c|
        parts << [c.mean].pack('E')        # f64
        parts << [c.weight.to_i].pack('V') # u32
      end

      parts.join.b
    end

    # Rebuild a digest from the binary form produced by #serialize.
    # FIX: validates blob length up front and raises ArgumentError on
    # truncated/corrupt input instead of crashing with NoMethodError
    # when a slice past the end returns nil.
    def self.deserialize(blob)
      return new if blob.nil? || blob.empty?

      blob = blob.b
      raise ArgumentError, 'truncated t-digest blob' if blob.bytesize < HEADER_BYTES

      offset = 0
      compression = blob[offset, 8].unpack1('E'); offset += 8
      count = blob[offset, 8].unpack1('Q<'); offset += 8
      n = blob[offset, 4].unpack1('V'); offset += 4
      min = blob[offset, 8].unpack1('E'); offset += 8
      max = blob[offset, 8].unpack1('E'); offset += 8

      if blob.bytesize < HEADER_BYTES + n * CENTROID_BYTES
        raise ArgumentError, 'truncated t-digest blob'
      end

      digest = new(compression: compression.to_i)
      digest.instance_variable_set(:@count, count)
      digest.instance_variable_set(:@min, min)
      digest.instance_variable_set(:@max, max)

      centroids = []
      n.times do
        mean = blob[offset, 8].unpack1('E'); offset += 8
        weight = blob[offset, 4].unpack1('V'); offset += 4
        centroids << Centroid.new(mean, weight)
      end
      digest.instance_variable_set(:@centroids, centroids)

      digest
    end

    private

    attr_reader :centroids, :buffer

    # Merge the staging buffer into @centroids, collapsing neighbors whose
    # combined weight stays under the t-digest size bound
    # 4 * count * q * (1 - q) / compression — small near the tails, large
    # in the middle, which is what keeps tail percentiles accurate.
    def flush_buffer
      return if @buffer.empty?

      all = @centroids + @buffer
      @buffer = []
      all.sort_by!(&:mean)

      merged = []
      weight_so_far = 0

      all.each do |centroid|
        if merged.empty?
          merged << Centroid.new(centroid.mean, centroid.weight)
        else
          last = merged.last
          q = (weight_so_far + last.weight / 2.0) / @count
          limit = 4.0 * @count * q * (1 - q) / @compression

          if last.weight + centroid.weight <= limit
            # Merge into existing centroid via weighted running mean.
            new_weight = last.weight + centroid.weight
            last.mean = (last.mean * last.weight + centroid.mean * centroid.weight) / new_weight
            last.weight = new_weight
          else
            weight_so_far += last.weight
            merged << Centroid.new(centroid.mean, centroid.weight)
          end
        end
      end

      @centroids = merged
    end

    # Linear blend between a and b; fraction is clamped to [0, 1] so
    # floating-point drift can never extrapolate outside the pair.
    def interpolate(a, b, fraction)
      fraction = fraction.clamp(0.0, 1.0)
      a + (b - a) * fraction
    end
  end
end
|
data/lib/catpm/trace.rb
ADDED
|
@@ -0,0 +1,165 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Catpm
  # Record a nested span inside the current request's segment tree.
  # When no request is being tracked on this thread, falls back to
  # .trace, which records a flat custom segment/event instead.
  def self.span(name, type: :custom, &block)
    unless enabled?
      return block.call if block
      return nil
    end

    req_segments = Thread.current[:catpm_request_segments]
    unless req_segments
      return trace(name, &block)
    end

    start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    index = req_segments.push_span(type: type, detail: name, started_at: start_time)

    begin
      block.call
    ensure
      # Close the span even when the block raises.
      req_segments.pop_span(index)
    end
  end

  # Time a block and record it either as a segment of the current request
  # or, outside a request, as a standalone custom event via the Collector.
  # Exceptions from the block are recorded and re-raised.
  def self.trace(name, metadata: {}, context: {}, &block)
    unless enabled?
      return block.call if block
      return nil
    end

    start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    error = nil

    begin
      result = block.call
    rescue => e
      error = e
      raise
    ensure
      duration_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time) * 1000.0

      req_segments = Thread.current[:catpm_request_segments]
      if req_segments
        # Only pay for caller_locations when the span is slow enough to matter.
        source = duration_ms >= config.segment_source_threshold ? extract_trace_source : nil
        req_segments.add(
          type: :custom, duration: duration_ms, detail: name,
          source: source, started_at: start_time
        )
      elsif buffer
        Collector.process_custom(
          name: name, duration: duration_ms,
          metadata: metadata, error: error, context: context
        )
      end
    end

    result
  end

  # Begin a manually-finished span; the caller must invoke Span#finish.
  def self.start_trace(name, metadata: {}, context: {})
    Span.new(name: name, metadata: metadata, context: context)
  end

  # Instrument a block as a full request — creates a controller span,
  # collects all segments (SQL, cache, etc.), and pushes a complete Event.
  # Use this for non-ActionController contexts (webhooks, custom endpoints).
  #
  #   Catpm.track_request(kind: :custom, target: "WebhookController#message") do
  #     process_update(...)
  #   end
  #
  def self.track_request(kind: :http, target:, operation: '', context: {}, metadata: {})
    return yield unless enabled?

    req_segments = Thread.current[:catpm_request_segments]
    owns_segments = false

    # Install segment collection on this thread unless an outer tracker
    # already owns it (nested track_request reuses the outer collector).
    if req_segments.nil? && config.instrument_segments
      req_segments = RequestSegments.new(
        max_segments: config.max_segments_per_request,
        request_start: Process.clock_gettime(Process::CLOCK_MONOTONIC),
        stack_sample: config.instrument_stack_sampler
      )
      Thread.current[:catpm_request_segments] = req_segments
      owns_segments = true
    end

    if req_segments
      started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      ctrl_idx = req_segments.push_span(type: :controller, detail: target, started_at: started_at)
    end

    error = nil
    start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)

    begin
      yield
    rescue => e
      error = e
      raise
    ensure
      duration = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - start_time) * 1000.0
      req_segments&.pop_span(ctrl_idx) if ctrl_idx
      req_segments&.stop_sampler

      Collector.process_tracked(
        kind: kind, target: target, operation: operation,
        duration: duration, context: context, metadata: metadata,
        error: error, req_segments: req_segments
      )

      # Only clear thread-local state that we installed ourselves.
      if owns_segments
        Thread.current[:catpm_request_segments] = nil
      end
    end
  end

  # Handle for a manually-timed operation created by Catpm.start_trace.
  # Call #finish (optionally with the error that occurred) to record it.
  class Span
    def initialize(name:, metadata: {}, context: {})
      @name = name
      @metadata = metadata
      @context = context
      @start_time = Process.clock_gettime(Process::CLOCK_MONOTONIC)
      @finished = false
    end

    # Record the span. Idempotent — repeat calls are no-ops.
    def finish(error: nil)
      return if @finished

      @finished = true
      duration_ms = (Process.clock_gettime(Process::CLOCK_MONOTONIC) - @start_time) * 1000.0

      req_segments = Thread.current[:catpm_request_segments]
      if req_segments
        source = duration_ms >= Catpm.config.segment_source_threshold ? Catpm.send(:extract_trace_source) : nil
        req_segments.add(
          type: :custom, duration: duration_ms, detail: @name,
          source: source, started_at: @start_time
        )
      elsif Catpm.enabled? && Catpm.buffer
        Collector.process_custom(
          name: @name, duration: duration_ms,
          metadata: @metadata, error: error, context: @context
        )
      end
    end

    def finished?
      @finished
    end
  end

  # Walk the caller stack (skipping 3 instrumentation frames, scanning at
  # most 50) and return the first application frame as "path:lineno", or
  # nil when only gem/framework frames are found.
  def self.extract_trace_source
    locations = caller_locations(3, 50)
    locations&.each do |loc|
      path = loc.path.to_s
      if Fingerprint.app_frame?(path)
        return "#{path}:#{loc.lineno}"
      end
    end
    nil
  end
  # FIX: the previous bare `private` had no effect on singleton methods
  # defined with `def self.`, so extract_trace_source was unintentionally
  # public. Internal callers already use Catpm.send(:extract_trace_source),
  # so enforcing the intended visibility is safe.
  private_class_method :extract_trace_source
end
|
data/lib/catpm.rb
ADDED
|
@@ -0,0 +1,66 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'catpm/version'
|
|
4
|
+
require 'catpm/configuration'
|
|
5
|
+
require 'catpm/event'
|
|
6
|
+
require 'catpm/custom_event'
|
|
7
|
+
require 'catpm/tdigest'
|
|
8
|
+
require 'catpm/errors'
|
|
9
|
+
require 'catpm/buffer'
|
|
10
|
+
require 'catpm/circuit_breaker'
|
|
11
|
+
require 'catpm/adapter'
|
|
12
|
+
require 'catpm/fingerprint'
|
|
13
|
+
require 'catpm/stack_sampler'
|
|
14
|
+
require 'catpm/request_segments'
|
|
15
|
+
require 'catpm/flusher'
|
|
16
|
+
require 'catpm/collector'
|
|
17
|
+
require 'catpm/middleware'
|
|
18
|
+
require 'catpm/middleware_probe'
|
|
19
|
+
require 'catpm/subscribers'
|
|
20
|
+
require 'catpm/segment_subscribers'
|
|
21
|
+
require 'catpm/lifecycle'
|
|
22
|
+
require 'catpm/trace'
|
|
23
|
+
require 'catpm/span_helpers'
|
|
24
|
+
require 'catpm/auto_instrument'
|
|
25
|
+
require 'catpm/engine'
|
|
26
|
+
|
|
27
|
+
module Catpm
  class << self
    # In-memory sinks wired up by the engine at boot.
    attr_accessor :buffer, :flusher

    # Yields the singleton Configuration so host apps can tweak settings
    # in an initializer: Catpm.configure { |c| c.enabled = true }.
    def configure
      yield(config)
    end

    # Lazily-built singleton configuration object.
    def config
      @config ||= Configuration.new
    end

    # Drop all runtime state: fresh config, no buffer, no flusher.
    def reset_config!
      @config = Configuration.new
      @buffer = nil
      @flusher = nil
    end

    # Master switch — every instrumented path checks this first.
    def enabled?
      config.enabled
    end

    # Lightweight in-process counters (memoized, mutated in place).
    def stats
      @stats ||= blank_stats
    end

    # Zero out all counters.
    def reset_stats!
      @stats = blank_stats
    end

    # Record a named custom event into the buffer. No-op when the gem
    # or the events feature is disabled, or no buffer exists yet.
    def event(name, **payload)
      return unless enabled? && config.events_enabled

      buffer&.push(CustomEvent.new(name: name, payload: payload))
    end

    private

    # Fresh counter hash — one call site per memoization/reset.
    def blank_stats
      { dropped_events: 0, circuit_opens: 0, flushes: 0 }
    end
  end
end
|
|
@@ -0,0 +1,36 @@
|
|
|
1
|
+
# frozen_string_literal: true
|
|
2
|
+
|
|
3
|
+
require 'rails/generators'
|
|
4
|
+
|
|
5
|
+
module Catpm
  module Generators
    # `rails generate catpm:install`
    #
    # One-shot setup for host applications: copies the engine's
    # migrations, renders config/initializers/catpm.rb from the bundled
    # template, mounts the dashboard engine, and prints next steps.
    # Thor runs the public methods below in definition order.
    class InstallGenerator < Rails::Generators::Base
      source_root File.expand_path('templates', __dir__)

      desc 'Install catpm: copy migrations, create initializer, mount engine'

      # Pull the engine's migrations into the host app (Rails provides
      # the catpm:install:migrations task for mounted engines).
      def copy_migrations
        rake 'catpm:install:migrations'
      end

      # Render templates/initializer.rb.tt into the host app.
      def create_initializer
        template 'initializer.rb.tt', 'config/initializers/catpm.rb'
      end

      # Expose the dashboard at /catpm in the host's routes file.
      def mount_engine
        route 'mount Catpm::Engine => "/catpm"'
      end

      # Friendly post-install summary; runs last by definition order.
      def show_post_install
        say ''
        say 'catpm installed successfully!', :green
        say ''
        say 'Next steps:'
        say ' 1. Run: rails db:migrate'
        say ' 2. Review config/initializers/catpm.rb'
        say ' 3. Visit /catpm in your browser'
        say ''
      end
    end
  end
end
|
|
@@ -0,0 +1,77 @@
|
|
|
1
|
+
# frozen_string_literal: true

Catpm.configure do |config|
  # === Security ===
  # Protect the dashboard with HTTP Basic Auth or a custom policy.
  # If neither is configured, the dashboard is accessible to everyone.
  #
  # config.http_basic_auth_user = ENV["CATPM_USER"]
  # config.http_basic_auth_password = ENV["CATPM_PASSWORD"]
  # config.access_policy = ->(request) { request.env["warden"].user&.admin? }

  # === PII Filtering ===
  # Rails' filter_parameters are inherited automatically.
  # Add extra patterns here:
  #
  # config.additional_filter_parameters = [:card_number, :ssn]

  # === Instrumentation ===
  #
  # config.instrument_http = true # Track HTTP requests (default: true)
  # config.instrument_jobs = false # Track ActiveJob (default: false)
  # config.instrument_segments = true # Track SQL/view/cache segments (default: true)
  # config.instrument_net_http = false # Patch Net::HTTP for outbound tracking (default: false)
  # config.instrument_middleware_stack = false # Decompose middleware into per-middleware segments (default: false)
  # config.max_segments_per_request = 50 # Cap segments per request (keeps slowest)
  # config.segment_source_threshold = 0.0 # ms — capture caller_locations above this (raise to reduce overhead)
  # config.max_sql_length = 200 # Truncate SQL queries to this length
  # config.slow_threshold = 500 # ms — threshold for slow samples (all kinds)
  # config.slow_threshold_per_kind = { # Override per kind
  #   http: 500,
  #   job: 5_000,
  #   custom: 1_000
  # }
  # config.ignored_targets = [ # Patterns to exclude from tracking
  #   "HealthcheckController#index",
  #   "/assets/*",
  # ]

  # === Sampling ===
  #
  # config.random_sample_rate = 20 # 1 in N requests sampled randomly
  # config.max_random_samples_per_endpoint = 5 # Random samples kept per endpoint
  # config.max_slow_samples_per_endpoint = 5 # Slow samples kept per endpoint

  # === Tuning ===
  #
  # config.retention_period = nil # nil = keep forever (default); set to e.g. 1.year to delete older data
  # config.max_buffer_memory = 32.megabytes # Maximum in-memory buffer size
  # config.flush_interval = 30 # seconds
  # config.flush_jitter = 5 # +/- seconds random offset per cycle
  # config.max_error_contexts = 5 # Contexts kept per error fingerprint
  # config.cleanup_interval = 1.hour # How often to run downsampling
  # config.persistence_batch_size = 100 # Records per DB batch operation

  # === Downsampling ===
  # Data is kept forever with progressively coarser resolution:
  #   - Last hour: 1-minute buckets
  #   - 1h–24h: 5-minute buckets
  #   - 1d–1w: 1-hour buckets
  #   - 1w–3mo: 1-day buckets
  #   - Older than 3 months: 1-week buckets
  #
  # config.bucket_sizes = { recent: 1.minute, medium: 5.minutes, hourly: 1.hour, daily: 1.day, weekly: 1.week }

  # === Advanced ===
  #
  # config.circuit_breaker_failure_threshold = 5 # Failures before circuit opens
  # config.circuit_breaker_recovery_timeout = 60 # seconds before retry
  # config.sqlite_busy_timeout = 5_000 # ms — SQLite lock wait
  # config.backtrace_lines = 10 # Backtrace lines in error contexts
  # config.shutdown_timeout = 5 # seconds — graceful shutdown wait
  # config.error_handler = ->(e) { Rails.logger.error("[catpm] #{e.message}") }

  # === Enable/Disable ===
  #
  # config.enabled = Rails.env.production? || Rails.env.staging?
end
|
|
@@ -0,0 +1,79 @@
|
|
|
1
|
+
# frozen_string_literal: true

namespace :catpm do
  desc 'Seed realistic custom event data (7 days, 9 event types)'
  task seed_events: :environment do
    # Event catalog: relative frequency (weight) plus a lambda that
    # fabricates a plausible payload for each sample.
    #
    # FIX: use a frozen local instead of a constant — assigning
    # EVENT_TYPES inside the task block defined a top-level constant,
    # leaking into the global namespace and triggering "already
    # initialized constant" warnings when the task ran more than once
    # in a single process.
    event_types = {
      'allowed' => { weight: 35, payload: -> { { user_id: rand(1..5000), chat_id: rand(1..2000) } } },
      'gpt' => { weight: 15, payload: -> { { model: %w[gpt-4o gpt-4o-mini].sample, tokens: rand(50..2000), user_id: rand(1..5000) } } },
      'cache' => { weight: 15, payload: -> { { hit: [true, false].sample, key: "msg:#{rand(1..10000)}" } } },
      'spam' => { weight: 10, payload: -> { { user_id: rand(1..5000), score: rand(0.7..1.0).round(2), action: 'blocked' } } },
      'moderation' => { weight: 10, payload: -> { { user_id: rand(1..5000), category: %w[hate violence self-harm].sample, flagged: [true, false].sample } } },
      'ratelimit' => { weight: 5, payload: -> { { user_id: rand(1..5000), limit: %w[messages commands media].sample } } },
      'command' => { weight: 4, payload: -> { { command: %w[/start /help /settings /stats /premium].sample, user_id: rand(1..5000) } } },
      'payment' => { weight: 3, payload: -> { { amount: [299, 499, 999].sample, currency: 'USD', user_id: rand(1..5000) } } },
      'error' => { weight: 3, payload: -> { { error_class: %w[Timeout::Error Net::ReadTimeout Redis::CannotConnectError].sample, context: 'background' } } }
    }.freeze

    # Weighted pool: each name appears `weight` times, so Array#sample
    # draws event types at the configured relative frequencies.
    pool = event_types.flat_map { |name, cfg| [name] * cfg[:weight] }

    days = 7
    start_time = days.days.ago.beginning_of_hour
    end_time = Time.current

    puts "Seeding event buckets from #{start_time} to #{end_time}..."

    # Generate 1-minute buckets
    bucket_records = []
    current = start_time

    while current < end_time
      hour = current.hour
      # Lower activity at night (2am-7am)
      base_rate = (hour >= 2 && hour < 7) ? rand(30..80) : rand(200..500)

      # Distribute events across types for this minute
      counts = Hash.new(0)
      base_rate.times { counts[pool.sample] += 1 }

      counts.each do |name, count|
        bucket_records << {
          name: name,
          bucket_start: current,
          count: count
        }
      end

      current += 1.minute

      # Batch insert every ~hour of data to bound memory use
      if bucket_records.size >= 500
        Catpm::EventBucket.insert_all(bucket_records)
        bucket_records.clear
        print '.'
      end
    end

    Catpm::EventBucket.insert_all(bucket_records) if bucket_records.any?
    puts "\nInserted #{Catpm::EventBucket.count} event buckets."

    # Generate samples (20 per event type)
    puts 'Seeding event samples...'
    sample_records = []
    event_types.each do |name, cfg|
      20.times do
        # Uniformly random timestamp within the seeded window.
        recorded_at = start_time + rand((end_time - start_time).to_i).seconds
        sample_records << {
          name: name,
          payload: cfg[:payload].call.to_json,
          recorded_at: recorded_at
        }
      end
    end

    Catpm::EventSample.insert_all(sample_records) if sample_records.any?
    puts "Inserted #{Catpm::EventSample.count} event samples."
    puts 'Done!'
  end
end
|