brainzlab-rails 0.1.1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/CLAUDE.md +144 -0
- data/IMPLEMENTATION_PLAN.md +370 -0
- data/Rakefile +8 -0
- data/brainzlab-rails.gemspec +42 -0
- data/lib/brainzlab/rails/analyzers/cache_efficiency.rb +123 -0
- data/lib/brainzlab/rails/analyzers/n_plus_one_detector.rb +90 -0
- data/lib/brainzlab/rails/analyzers/slow_query_analyzer.rb +118 -0
- data/lib/brainzlab/rails/collectors/action_cable.rb +212 -0
- data/lib/brainzlab/rails/collectors/action_controller.rb +299 -0
- data/lib/brainzlab/rails/collectors/action_mailer.rb +187 -0
- data/lib/brainzlab/rails/collectors/action_view.rb +176 -0
- data/lib/brainzlab/rails/collectors/active_job.rb +374 -0
- data/lib/brainzlab/rails/collectors/active_record.rb +250 -0
- data/lib/brainzlab/rails/collectors/active_storage.rb +306 -0
- data/lib/brainzlab/rails/collectors/base.rb +129 -0
- data/lib/brainzlab/rails/collectors/cache.rb +384 -0
- data/lib/brainzlab/rails/configuration.rb +121 -0
- data/lib/brainzlab/rails/event_router.rb +67 -0
- data/lib/brainzlab/rails/railtie.rb +98 -0
- data/lib/brainzlab/rails/subscriber.rb +164 -0
- data/lib/brainzlab/rails/version.rb +7 -0
- data/lib/brainzlab-rails.rb +72 -0
- metadata +178 -0
|
# frozen_string_literal: true

module BrainzLab
  module Rails
    module Collectors
      # Collects Active Storage instrumentation events for file operation
      # observability and fans them out to the BrainzLab products:
      # Pulse (spans), Flux (metrics), Recall (structured logs) and
      # Reflex (breadcrumbs). Routing helpers come from Collectors::Base.
      class ActiveStorage < Base
        # Dispatches a normalized notification event to the matching handler.
        # Unknown event names are silently ignored.
        #
        # event_data - Hash with :name (the ActiveSupport::Notifications event
        #              name), :payload, :duration_ms and :timestamp.
        def process(event_data)
          case event_data[:name]
          when 'service_upload.active_storage'
            handle_upload(event_data)
          when 'service_download.active_storage', 'service_streaming_download.active_storage'
            handle_download(event_data)
          when 'service_download_chunk.active_storage'
            handle_download_chunk(event_data)
          when 'service_delete.active_storage'
            handle_delete(event_data)
          when 'service_delete_prefixed.active_storage'
            handle_delete_prefixed(event_data)
          when 'service_exist.active_storage'
            handle_exist(event_data)
          when 'service_url.active_storage'
            handle_url(event_data)
          when 'service_update_metadata.active_storage'
            handle_update_metadata(event_data)
          when 'preview.active_storage'
            handle_preview(event_data)
          when 'transform.active_storage'
            handle_transform(event_data)
          when 'analyze.active_storage'
            handle_analyze(event_data)
          end
        end

        private

        # Upload: span + counter/timing metrics + info log + breadcrumb.
        def handle_upload(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          service = payload[:service]
          checksum = payload[:checksum]
          duration_ms = event_data[:duration_ms]

          # === PULSE: Upload span ===
          send_to_pulse(event_data, {
            name: 'storage.upload',
            category: 'storage.upload',
            attributes: {
              key: key,
              service: service,
              checksum: checksum
            }
          })

          # === FLUX: Metrics ===
          send_to_flux(:increment, 'rails.storage.uploads', 1, { service: service })
          send_to_flux(:timing, 'rails.storage.upload_ms', duration_ms, { service: service })

          # === RECALL: Log upload ===
          send_to_recall(:info, "File uploaded", {
            key: key,
            service: service,
            duration_ms: duration_ms
          })

          # === REFLEX: Breadcrumb ===
          add_breadcrumb(
            "Storage upload: #{key}",
            category: 'storage.upload',
            level: :info,
            data: { key: key, service: service, duration_ms: duration_ms }
          )
        end

        # Download (plain or streaming): span + metrics + debug breadcrumb.
        # Streaming is detected from the event name, and selects both the span
        # name and the counter metric.
        def handle_download(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          service = payload[:service]
          duration_ms = event_data[:duration_ms]
          streaming = event_data[:name].include?('streaming')

          # === PULSE: Download span ===
          send_to_pulse(event_data, {
            name: streaming ? 'storage.stream' : 'storage.download',
            category: 'storage.download',
            attributes: {
              key: key,
              service: service,
              streaming: streaming
            }
          })

          # === FLUX: Metrics ===
          metric_name = streaming ? 'rails.storage.streams' : 'rails.storage.downloads'
          send_to_flux(:increment, metric_name, 1, { service: service })
          send_to_flux(:timing, 'rails.storage.download_ms', duration_ms, {
            service: service,
            streaming: streaming
          })

          # === REFLEX: Breadcrumb ===
          add_breadcrumb(
            "Storage #{streaming ? 'stream' : 'download'}: #{key}",
            category: 'storage.download',
            level: :debug,
            data: { key: key, service: service, duration_ms: duration_ms }
          )
        end

        # Chunked download: counter metric + debug breadcrumb only (chunks are
        # high-volume, so no span or log line is emitted).
        def handle_download_chunk(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          service = payload[:service]
          range = payload[:range]

          # === FLUX: Chunk metrics ===
          send_to_flux(:increment, 'rails.storage.chunks', 1, { service: service })

          # === REFLEX: Breadcrumb (debug level for chunks) ===
          add_breadcrumb(
            "Storage chunk: #{key}",
            category: 'storage.chunk',
            level: :debug,
            data: { key: key, range: range.to_s }
          )
        end

        # Delete: span + counter + info log + breadcrumb.
        def handle_delete(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          service = payload[:service]
          duration_ms = event_data[:duration_ms]

          # === PULSE: Delete span ===
          send_to_pulse(event_data, {
            name: 'storage.delete',
            category: 'storage.delete',
            attributes: { key: key, service: service }
          })

          # === FLUX: Metrics ===
          send_to_flux(:increment, 'rails.storage.deletes', 1, { service: service })

          # === RECALL: Log deletion ===
          send_to_recall(:info, "File deleted", {
            key: key,
            service: service,
            duration_ms: duration_ms
          })

          # === REFLEX: Breadcrumb ===
          add_breadcrumb(
            "Storage delete: #{key}",
            category: 'storage.delete',
            level: :info,
            data: { key: key, service: service }
          )
        end

        # Prefix (bulk) delete: counter + info log. The payload carries a
        # :prefix instead of a :key.
        def handle_delete_prefixed(event_data)
          payload = event_data[:payload]
          prefix = payload[:prefix]
          service = payload[:service]

          # === FLUX: Bulk delete metrics ===
          send_to_flux(:increment, 'rails.storage.bulk_deletes', 1, { service: service })

          # === RECALL: Log bulk deletion ===
          send_to_recall(:info, "Bulk file deletion", {
            prefix: prefix,
            service: service
          })
        end

        # Existence check: counter only, tagged with the boolean result.
        def handle_exist(event_data)
          payload = event_data[:payload]
          service = payload[:service]
          exist = payload[:exist]

          # === FLUX: Existence check metrics ===
          send_to_flux(:increment, 'rails.storage.exist_checks', 1, {
            service: service,
            exists: exist
          })
        end

        # URL generation: counter + timing metrics only.
        def handle_url(event_data)
          payload = event_data[:payload]
          service = payload[:service]
          duration_ms = event_data[:duration_ms]

          # === FLUX: URL generation metrics ===
          send_to_flux(:increment, 'rails.storage.url_generations', 1, { service: service })
          send_to_flux(:timing, 'rails.storage.url_ms', duration_ms, { service: service })
        end

        # Metadata update: counter + debug breadcrumb with the new
        # content_type/disposition from the payload.
        def handle_update_metadata(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          service = payload[:service]

          # === FLUX: Metadata update metrics ===
          send_to_flux(:increment, 'rails.storage.metadata_updates', 1, { service: service })

          # === REFLEX: Breadcrumb ===
          add_breadcrumb(
            "Storage metadata update: #{key}",
            category: 'storage.metadata',
            level: :debug,
            data: {
              key: key,
              content_type: payload[:content_type],
              disposition: payload[:disposition]
            }
          )
        end

        # Preview generation: span + counter/timing metrics (no service tag in
        # this payload).
        def handle_preview(event_data)
          payload = event_data[:payload]
          key = payload[:key]
          duration_ms = event_data[:duration_ms]

          # === PULSE: Preview generation span ===
          send_to_pulse(event_data, {
            name: 'storage.preview',
            category: 'storage.preview',
            attributes: { key: key }
          })

          # === FLUX: Preview metrics ===
          send_to_flux(:increment, 'rails.storage.previews', 1)
          send_to_flux(:timing, 'rails.storage.preview_ms', duration_ms)
        end

        # Variant transform: span + counter/timing metrics. The transform
        # payload carries no stable identifying fields we use, so the span
        # attributes are empty.
        def handle_transform(event_data)
          duration_ms = event_data[:duration_ms]

          # === PULSE: Transform span ===
          send_to_pulse(event_data, {
            name: 'storage.transform',
            category: 'storage.transform',
            attributes: {}
          })

          # === FLUX: Transform metrics ===
          send_to_flux(:increment, 'rails.storage.transforms', 1)
          send_to_flux(:timing, 'rails.storage.transform_ms', duration_ms)
        end

        # Blob analysis: span named after the analyzer + analyzer-tagged
        # counter/timing metrics.
        def handle_analyze(event_data)
          payload = event_data[:payload]
          analyzer = payload[:analyzer]
          duration_ms = event_data[:duration_ms]

          # === PULSE: Analyze span ===
          send_to_pulse(event_data, {
            name: "storage.analyze.#{analyzer}",
            category: 'storage.analyze',
            attributes: { analyzer: analyzer }
          })

          # === FLUX: Analyze metrics ===
          send_to_flux(:increment, 'rails.storage.analyzes', 1, { analyzer: analyzer })
          send_to_flux(:timing, 'rails.storage.analyze_ms', duration_ms, { analyzer: analyzer })
        end
      end
    end
  end
end
|
# frozen_string_literal: true

module BrainzLab
  module Rails
    module Collectors
      # Base class for all event collectors.
      # Provides common helpers for routing events to the BrainzLab products
      # (Pulse, Recall, Reflex, Flux, Nerve). Each helper is guarded by the
      # corresponding "effectively enabled" flag on the configuration, so
      # disabled products are cheap no-ops.
      class Base
        # Param-key substrings considered sensitive; matching values are
        # replaced by '[FILTERED]' in #sanitize_params. Frozen constant so the
        # array is not rebuilt on every call.
        SENSITIVE_KEYS = %w[password password_confirmation token api_key secret].freeze

        attr_reader :configuration

        def initialize(configuration)
          @configuration = configuration
        end

        # Subclasses implement event dispatch here.
        #
        # Raises NotImplementedError when called on a subclass that has not
        # overridden it.
        def process(event_data)
          raise NotImplementedError, "#{self.class} must implement #process"
        end

        protected

        # Send to Pulse (APM) - performance tracing.
        # event_data supplies :duration_ms and :timestamp; span_data supplies
        # :name, :category and optional :attributes.
        def send_to_pulse(event_data, span_data)
          return unless @configuration.pulse_effectively_enabled?

          BrainzLab::Pulse.record_span(
            name: span_data[:name],
            duration_ms: event_data[:duration_ms],
            category: span_data[:category],
            attributes: span_data[:attributes] || {},
            timestamp: event_data[:timestamp]
          )
        end

        # Send to Recall (Logs) - structured logging.
        # level must be one of :debug, :info, :warn, :error; any other value
        # is silently ignored.
        def send_to_recall(level, message, data = {})
          return unless @configuration.recall_effectively_enabled?

          case level
          when :debug
            BrainzLab::Recall.debug(message, **data)
          when :info
            BrainzLab::Recall.info(message, **data)
          when :warn
            BrainzLab::Recall.warn(message, **data)
          when :error
            BrainzLab::Recall.error(message, **data)
          end
        end

        # Send to Reflex (Errors) - error tracking with context.
        def send_to_reflex(exception, context = {})
          return unless @configuration.reflex_effectively_enabled?

          BrainzLab::Reflex.capture(exception, context: context)
        end

        # Add a breadcrumb to Reflex (trail shown alongside captured errors).
        def add_breadcrumb(message, category:, level: :info, data: {})
          return unless @configuration.reflex_effectively_enabled?

          BrainzLab::Reflex.add_breadcrumb(
            message,
            category: category,
            level: level,
            data: data
          )
        end

        # Send to Flux (Metrics) - counters, gauges, histograms, timings.
        # metric_type must be :increment, :gauge, :histogram or :timing; any
        # other value is silently ignored.
        def send_to_flux(metric_type, metric_name, value, tags = {})
          return unless @configuration.flux_effectively_enabled?

          case metric_type
          when :increment
            BrainzLab::Flux.increment(metric_name, value, tags: tags)
          when :gauge
            BrainzLab::Flux.gauge(metric_name, value, tags: tags)
          when :histogram
            BrainzLab::Flux.histogram(metric_name, value, tags: tags)
          when :timing
            BrainzLab::Flux.timing(metric_name, value, tags: tags)
          end
        end

        # Send to Nerve (Jobs) - job-specific tracking.
        # Nerve integration for job monitoring; currently only debug-logged.
        # This will be expanded as the Nerve product develops.
        def send_to_nerve(job_event_type, job_data)
          return unless @configuration.nerve_effectively_enabled?

          BrainzLab.debug_log("[Nerve] #{job_event_type}: #{job_data.inspect}")
        end

        # Extract common request context from an instrumentation payload.
        # Returns {} when the payload has no :request; nil-valued fields are
        # dropped via #compact.
        def extract_request_context(payload)
          request = payload[:request]
          return {} unless request

          {
            request_id: request.request_id,
            method: request.method,
            path: request.path,
            format: request.format&.to_s,
            remote_ip: request.remote_ip,
            user_agent: request.user_agent
          }.compact
        end

        # Sanitize sensitive data from params.
        #
        # Replaces the value of any key whose name contains one of
        # SENSITIVE_KEYS (case-insensitive) with '[FILTERED]', recursing into
        # nested hashes. Returns {} for non-Hash input or on any unexpected
        # error (sanitization must never break event processing).
        #
        # BUGFIX: the previous implementation used
        # `params.transform_values.with_index { |(key, value), _| ... }`,
        # but Hash#transform_values yields only the values — `(key, value)`
        # destructured the value itself, so the sensitive-key check never saw
        # the actual keys and filtering silently failed.
        def sanitize_params(params)
          return {} unless params.is_a?(Hash)

          params.to_h do |key, value|
            if SENSITIVE_KEYS.any? { |sk| key.to_s.downcase.include?(sk) }
              [key, '[FILTERED]']
            elsif value.is_a?(Hash)
              [key, sanitize_params(value)]
            else
              [key, value]
            end
          end
        rescue StandardError
          {}
        end
      end
    end
  end
end
|