dontbugme 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/LICENSE +21 -0
- data/README.md +174 -0
- data/app/controllers/dontbugme/traces_controller.rb +45 -0
- data/app/views/dontbugme/traces/diff.html.erb +18 -0
- data/app/views/dontbugme/traces/index.html.erb +30 -0
- data/app/views/dontbugme/traces/show.html.erb +15 -0
- data/app/views/layouts/dontbugme/application.html.erb +56 -0
- data/bin/dontbugme +5 -0
- data/lib/dontbugme/cleanup_job.rb +17 -0
- data/lib/dontbugme/cli.rb +171 -0
- data/lib/dontbugme/config/routes.rb +7 -0
- data/lib/dontbugme/configuration.rb +147 -0
- data/lib/dontbugme/context.rb +25 -0
- data/lib/dontbugme/correlation.rb +25 -0
- data/lib/dontbugme/engine.rb +11 -0
- data/lib/dontbugme/formatters/diff.rb +187 -0
- data/lib/dontbugme/formatters/json.rb +11 -0
- data/lib/dontbugme/formatters/timeline.rb +119 -0
- data/lib/dontbugme/middleware/rack.rb +37 -0
- data/lib/dontbugme/middleware/sidekiq.rb +31 -0
- data/lib/dontbugme/middleware/sidekiq_client.rb +14 -0
- data/lib/dontbugme/railtie.rb +47 -0
- data/lib/dontbugme/recorder.rb +70 -0
- data/lib/dontbugme/source_location.rb +44 -0
- data/lib/dontbugme/span.rb +70 -0
- data/lib/dontbugme/span_collection.rb +40 -0
- data/lib/dontbugme/store/async.rb +45 -0
- data/lib/dontbugme/store/base.rb +23 -0
- data/lib/dontbugme/store/memory.rb +61 -0
- data/lib/dontbugme/store/postgresql.rb +186 -0
- data/lib/dontbugme/store/sqlite.rb +148 -0
- data/lib/dontbugme/subscribers/action_mailer.rb +53 -0
- data/lib/dontbugme/subscribers/active_job.rb +44 -0
- data/lib/dontbugme/subscribers/active_record.rb +81 -0
- data/lib/dontbugme/subscribers/base.rb +19 -0
- data/lib/dontbugme/subscribers/cache.rb +54 -0
- data/lib/dontbugme/subscribers/net_http.rb +87 -0
- data/lib/dontbugme/subscribers/redis.rb +63 -0
- data/lib/dontbugme/trace.rb +142 -0
- data/lib/dontbugme/version.rb +5 -0
- data/lib/dontbugme.rb +118 -0
- data/lib/generators/dontbugme/install/install_generator.rb +17 -0
- data/lib/generators/dontbugme/install/templates/dontbugme.rb +17 -0
- metadata +164 -0
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Dontbugme
  # Entry point for recording traces. `record` wraps a unit of work
  # (request, job, ...) in a Trace; `add_span` attaches spans to the
  # trace currently held in Context.
  class Recorder
    class << self
      # Runs +block+ inside a new Trace when recording is enabled and the
      # call is sampled; otherwise runs the block untouched.
      #
      # Returns the Trace on the recorded path (the block's own return
      # value is discarded — NOTE(review): confirm callers expect this).
      #
      # Fix: the guard-clause fast paths are kept OUTSIDE the
      # begin/rescue/ensure. In the original, the implicit method-level
      # `ensure` ran even on `return yield unless ...`, clearing Context
      # and Correlation state owned by an enclosing trace.
      def record(kind:, identifier:, metadata: {}, &block)
        return yield unless Dontbugme.config.recording?
        return yield unless should_sample?

        trace = nil
        begin
          metadata = metadata.dup
          # Reuse an inherited correlation id (e.g. request -> enqueued job)
          # or mint a fresh one.
          metadata[:correlation_id] ||= Correlation.current || Correlation.generate
          Correlation.current = metadata[:correlation_id]

          trace = Trace.new(kind: kind, identifier: identifier, metadata: metadata)
          Context.current = trace

          yield
          trace.finish!
          persist(trace)
          trace
        rescue StandardError => e
          # Finish and (optionally) persist the failed trace, then re-raise
          # so the caller still sees the exception.
          trace&.finish!(error: e)
          persist(trace) if trace && Dontbugme.config.record_on_error
          raise
        ensure
          Context.clear!
          # Requests keep their correlation id alive so work they enqueue
          # can inherit it; every other kind cleans up after itself.
          Correlation.clear! unless kind == :request
        end
      end

      # Appends a Span to the trace currently in Context; no-op when no
      # trace is being recorded.
      #
      # started_at may be a Time (from ActiveSupport::Notifications); when
      # absent the start offset is derived from the monotonic clock by
      # subtracting duration_ms from "now".
      def add_span(category:, operation:, detail:, payload: {}, duration_ms: 0, started_at: nil)
        trace = Context.current
        return unless trace

        started_offset =
          if started_at
            ((started_at - trace.started_at_time) * 1000).round(2)
          else
            (Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond) - trace.started_at_monotonic - duration_ms).round(2)
          end
        source = SourceLocation.capture

        span = Span.new(
          category: category,
          operation: operation,
          detail: detail,
          payload: payload,
          started_at: started_offset,
          duration_ms: duration_ms,
          source: source
        )

        trace.add_span(span)
      end

      private

      # True when this call falls inside the configured sample rate.
      # A rate >= 1.0 short-circuits without consuming randomness.
      def should_sample?
        rate = Dontbugme.config.sample_rate.to_f
        return true if rate >= 1.0

        Random.rand < rate
      end

      # Hands the finished trace to the configured store, if any.
      def persist(trace)
        store = Dontbugme.store
        store&.save_trace(trace)
      end
    end
  end
end
|
|
@@ -0,0 +1,44 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'pathname'

module Dontbugme
  # Captures the nearest application frames from the call stack so spans
  # can point back at the app code that triggered them.
  class SourceLocation
    # Frames from this gem, installed gems, or bundler are never treated
    # as application frames.
    EXCLUDE_PATTERNS = %w[dontbugme /gems/ bundler].freeze

    class << self
      # Returns a "path:line in `method`" string for the nearest app
      # frame(s), joined with ' <- ' when depth > 1, or nil when capture
      # is disabled or no frame matches the configured filters.
      def capture
        config = Dontbugme.config
        return nil unless config.recording?
        return nil if config.source_mode == :off

        limit = config.source_stack_limit
        depth = config.source_mode == :shallow ? 1 : config.source_depth
        filters = config.source_filter

        locations = caller_locations(1, limit)
        return nil if locations.nil? || locations.empty?

        app_frames = locations.select do |loc|
          path = loc.absolute_path || loc.path.to_s
          next false if EXCLUDE_PATTERNS.any? { |pattern| path.include?(pattern) }

          filters.any? { |filter| path.include?(filter) }
        end
        return nil if app_frames.empty?

        app_frames.first(depth).map { |loc| format_location(loc) }.join(' <- ')
      end

      private

      # Formats one backtrace location, preferring a Rails.root-relative
      # path for shorter output when running inside Rails.
      def format_location(loc)
        path = loc.absolute_path || loc.path.to_s
        base = path
        if defined?(Rails) && Rails.root
          begin
            base = Pathname.new(path).relative_path_from(Rails.root).to_s
          rescue ArgumentError
            # relative_path_from raises when the path cannot be expressed
            # relative to Rails.root; keep the absolute path. (Replaces the
            # original inline `rescue` modifier, which swallowed ALL errors.)
            base = path
          end
        end
        "#{base}:#{loc.lineno} in `#{loc.label}`"
      end
    end
  end
end
|
|
@@ -0,0 +1,70 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'securerandom'

module Dontbugme
  # One recorded operation (SQL query, HTTP call, cache hit, ...) inside
  # a Trace. Immutable after construction.
  class Span
    # Byte cap applied to @detail when Dontbugme.config is unavailable
    # (e.g. during early load).
    DEFAULT_MAX_DETAIL_SIZE = 8192

    attr_reader :id, :category, :operation, :detail, :payload, :started_at, :duration_ms, :source

    # started_at is the millisecond offset from the trace start;
    # duration_ms is the span's own duration.
    def initialize(
      category:,
      operation:,
      detail:,
      payload: {},
      started_at:,
      duration_ms:,
      source: nil
    )
      @id = "sp_#{SecureRandom.hex(4)}"
      @category = category.to_sym
      @operation = operation.to_s
      # Config may not be set up yet (load order); fall back to the default cap.
      max_size = defined?(Dontbugme) && Dontbugme.respond_to?(:config) ? Dontbugme.config&.max_span_detail_size : DEFAULT_MAX_DETAIL_SIZE
      @detail = truncate_string(detail.to_s, max_size)
      @payload = payload
      @started_at = started_at
      @duration_ms = duration_ms
      @source = source
    end

    # Serializable representation (inverse of .from_h, except the id is
    # regenerated on rehydration).
    def to_h
      {
        id: id,
        category: category,
        operation: operation,
        detail: detail,
        payload: payload,
        started_at: started_at,
        duration_ms: duration_ms,
        source: source
      }
    end

    # Builds a Span from a hash with either symbol or string keys.
    def self.from_h(hash)
      new(
        category: hash[:category] || hash['category'],
        operation: hash[:operation] || hash['operation'],
        detail: hash[:detail] || hash['detail'],
        payload: (hash[:payload] || hash['payload'] || {}).transform_keys(&:to_sym),
        started_at: hash[:started_at] || hash['started_at'],
        duration_ms: hash[:duration_ms] || hash['duration_ms'],
        source: hash[:source] || hash['source']
      )
    end

    private

    # Caps +str+ at +max_size+ bytes, appending a note with the original
    # size. Returns +str+ unchanged when nil, uncapped, or small enough.
    def truncate_string(str, max_size)
      return str if str.nil? || max_size.nil?
      return str if str.bytesize <= max_size

      # byteslice can cut a multibyte character in half; scrub the tail so
      # the result stays valid in the string's encoding (the original could
      # produce an invalid-encoding string here).
      truncated = str.byteslice(0, max_size).scrub('')
      "#{truncated}[truncated, #{format_bytes(str.bytesize)} original]"
    end

    # Human-readable byte count (B / KB / MB).
    def format_bytes(bytes)
      return "#{bytes}B" if bytes < 1024
      return "#{(bytes / 1024.0).round(1)}KB" if bytes < 1024 * 1024

      "#{(bytes / (1024.0 * 1024)).round(1)}MB"
    end
  end
end
|
|
@@ -0,0 +1,40 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Dontbugme
  # Read-only, Enumerable view over a trace's spans with per-category
  # convenience accessors (collection.sql, collection.http, ...).
  class SpanCollection
    include Enumerable

    # Category names available as dynamic accessor methods.
    KNOWN_CATEGORIES = %i[sql http redis cache mailer enqueue custom snapshot].freeze

    def initialize(spans)
      @spans = spans.to_a.freeze
    end

    # Yields each span in insertion order.
    def each(&block)
      @spans.each(&block)
    end

    # Number of spans in the collection.
    def count
      @spans.count
    end

    # All spans whose category matches +cat+ (symbol or string).
    def category(cat)
      wanted = cat.to_sym
      @spans.select { |span| span.category == wanted }
    end

    # collection.sql / collection.http / ... delegate to #category;
    # anything outside KNOWN_CATEGORIES falls through to super.
    def method_missing(method_name, *args, &block)
      cat = method_name.to_s.downcase.to_sym
      KNOWN_CATEGORIES.include?(cat) ? category(cat) : super
    end

    def respond_to_missing?(method_name, include_private = false)
      KNOWN_CATEGORIES.include?(method_name.to_s.downcase.to_sym) || super
    end
  end
end
|
|
@@ -0,0 +1,45 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Dontbugme
  module Store
    # Decorator that moves writes (save/cleanup) onto a background thread
    # while delegating reads synchronously to the wrapped backend.
    class Async < Base
      def initialize(backend)
        @backend = backend
        @queue = Queue.new
        @thread = start_worker
      end

      # Enqueues the trace; the worker thread performs the actual write.
      def save_trace(trace)
        @queue << [:save, trace]
      end

      # Reads go straight to the backend.
      def find_trace(trace_id)
        @backend.find_trace(trace_id)
      end

      def search(filters = {})
        @backend.search(filters)
      end

      # Enqueued so cleanup is serialized with any pending writes.
      def cleanup(before:)
        @queue << [:cleanup, before]
      end

      private

      # Single consumer thread draining the queue forever.
      def start_worker
        Thread.new do
          loop do
            action, arg = @queue.pop
            begin
              case action
              when :save
                @backend.save_trace(arg)
              when :cleanup
                @backend.cleanup(before: arg)
              end
            rescue StandardError => e
              # Fix: a raising backend call must not kill the worker.
              # Previously one failed save terminated the thread and every
              # later trace sat in the queue forever, silently unsaved.
              warn("[dontbugme] async store error: #{e.class}: #{e.message}")
            end
          end
        end
      end
    end
  end
end
|
|
@@ -0,0 +1,23 @@
|
|
|
1
|
+
# frozen_string_literal: true

module Dontbugme
  module Store
    # Abstract storage interface. Concrete stores (Memory, Sqlite,
    # Postgresql, Async) implement all four operations.
    class Base
      # Persists a finished trace.
      def save_trace(_trace)
        raise NotImplementedError
      end

      # Returns the trace with the given id, or nil when absent.
      def find_trace(_trace_id)
        raise NotImplementedError
      end

      # Returns traces matching the filter hash.
      def search(_filters = {})
        raise NotImplementedError
      end

      # Deletes traces started before the given cutoff.
      def cleanup(before:)
        raise NotImplementedError
      end
    end
  end
end
|
|
@@ -0,0 +1,61 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'time'

module Dontbugme
  module Store
    # In-process, mutex-guarded trace store. Traces are kept as plain
    # hashes (trace.to_h) and rehydrated via Trace.from_h on read.
    class Memory < Base
      def initialize
        @traces = {}
        @mutex = Mutex.new
      end

      def save_trace(trace)
        @mutex.synchronize do
          @traces[trace.id] = trace.to_h
        end
      end

      def find_trace(trace_id)
        data = @mutex.synchronize { @traces[trace_id] }
        return nil unless data

        Trace.from_h(data)
      end

      # Returns matching traces, newest first, capped by :limit (default 100).
      def search(filters = {})
        traces = @mutex.synchronize { @traces.values.dup }
        traces = apply_filters(traces, filters)
        # Fix: sort newest-first BEFORE applying the limit so the MOST
        # RECENT N matches are returned (matching the SQL stores'
        # ORDER BY started_at DESC LIMIT n). The original limited an
        # arbitrary insertion-order prefix and only then sorted it.
        traces = traces.sort_by { |t| t[:started_at] || '' }.reverse
        limit = filters[:limit] || filters['limit'] || 100
        traces.first(limit).map { |h| Trace.from_h(h) }
      end

      # Deletes traces whose started_at is before the cutoff (Time or
      # parseable string).
      def cleanup(before:)
        cutoff = before.is_a?(Time) ? before : Time.parse(before.to_s)
        @mutex.synchronize do
          @traces.delete_if { |_, t| parse_time(t[:started_at]) < cutoff }
        end
      end

      private

      # Applies status/kind/identifier/correlation_id filters. The limit is
      # intentionally NOT applied here — #search applies it after sorting.
      def apply_filters(traces, filters)
        traces = traces.select { |t| t[:status] == filters[:status].to_s } if filters[:status]
        traces = traces.select { |t| t[:status] == filters['status'].to_s } if filters['status']
        traces = traces.select { |t| t[:kind].to_s == filters[:kind].to_s } if filters[:kind]
        if filters[:identifier]
          # Case-insensitive substring match; user input is escaped.
          pattern = /#{Regexp.escape(filters[:identifier].to_s)}/i
          traces = traces.select { |t| t[:identifier].to_s.match?(pattern) }
        end
        if filters[:correlation_id]
          cid = filters[:correlation_id].to_s
          traces = traces.select { |t| (t[:correlation_id] || t.dig(:metadata, :correlation_id)).to_s == cid }
        end
        traces
      end

      # Parses a stored started_at value; traces without one sort earliest.
      def parse_time(val)
        return Time.at(0) unless val

        val.is_a?(Time) ? val : Time.parse(val.to_s)
      end
    end
  end
end
|
|
@@ -0,0 +1,186 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'json'
require 'time'

module Dontbugme
  module Store
    # PostgreSQL-backed trace store. Works with an ActiveRecord connection
    # adapter (preferred) or a raw connection responding to #exec_params.
    class Postgresql < Base
      # Falls back to the configured connection, then to
      # ActiveRecord::Base.connection when AR is loaded.
      def initialize(connection: nil)
        @connection = connection || Dontbugme.config.postgresql_connection || default_connection
        raise ArgumentError, 'PostgreSQL store requires a database connection' unless @connection

        ensure_schema
      end

      # Upserts the trace keyed by id; metadata/spans/error are serialized
      # into JSONB columns.
      def save_trace(trace)
        data = trace.to_h
        correlation_id = data[:correlation_id] || data[:metadata]&.dig(:correlation_id)
        # Fix: bind values are passed as ONE Array — exec_params has arity
        # (sql, params). The original splatted ten positional values into
        # it, which raised ArgumentError at runtime.
        exec_params(
          <<~SQL,
            INSERT INTO dontbugme_traces
              (id, kind, identifier, status, started_at, duration_ms, correlation_id, metadata_json, spans_json, error_json)
            VALUES ($1, $2, $3, $4, $5, $6, $7, $8, $9, $10)
            ON CONFLICT (id) DO UPDATE SET
              kind = EXCLUDED.kind,
              identifier = EXCLUDED.identifier,
              status = EXCLUDED.status,
              started_at = EXCLUDED.started_at,
              duration_ms = EXCLUDED.duration_ms,
              correlation_id = EXCLUDED.correlation_id,
              metadata_json = EXCLUDED.metadata_json,
              spans_json = EXCLUDED.spans_json,
              error_json = EXCLUDED.error_json
          SQL
          [
            data[:id],
            data[:kind].to_s,
            data[:identifier],
            data[:status].to_s,
            data[:started_at],
            data[:duration_ms],
            correlation_id,
            data[:metadata].to_json,
            data[:spans].to_json,
            data[:error]&.to_json
          ]
        )
      end

      # Returns the Trace with the given id, or nil.
      def find_trace(trace_id)
        result = query_result('SELECT * FROM dontbugme_traces WHERE id = $1', [trace_id])
        return nil unless result

        # Plain-Ruby emptiness handling: the original used ActiveSupport's
        # #blank?, which is unavailable when running with a raw PG connection.
        row = result.is_a?(Array) ? result.first : result.to_a.first
        return nil unless row

        row_to_trace(row)
      end

      # Returns traces matching the filters, newest first, capped by :limit
      # (default 100). All user values go through bind parameters.
      def search(filters = {})
        sql = 'SELECT * FROM dontbugme_traces WHERE 1=1'
        params = []
        i = 1

        if filters[:status]
          params << filters[:status].to_s
          sql += " AND status = $#{i}"
          i += 1
        end
        if filters[:kind]
          params << filters[:kind].to_s
          sql += " AND kind = $#{i}"
          i += 1
        end
        if filters[:identifier]
          # Substring match; interpolated into the LIKE *parameter*, not the SQL.
          params << "%#{filters[:identifier]}%"
          sql += " AND identifier LIKE $#{i}"
          i += 1
        end
        if filters[:correlation_id]
          params << filters[:correlation_id].to_s
          sql += " AND correlation_id = $#{i}"
          i += 1
        end

        params << (filters[:limit] || filters['limit'] || 100)
        sql += " ORDER BY started_at DESC LIMIT $#{i}"

        result = query_result(sql, params)
        rows = result.respond_to?(:to_a) ? result.to_a : Array(result)
        rows.map { |row| row_to_trace(normalize_row(row)) }
      end

      # Deletes traces started before the cutoff (Time or ISO-8601 string).
      def cleanup(before:)
        cutoff = before.is_a?(Time) ? before.utc.iso8601 : before.to_s
        exec_params('DELETE FROM dontbugme_traces WHERE started_at < $1', [cutoff])
      end

      # Runs a read query, preferring the AR adapter's #exec_query (rows as
      # hashes) and falling back to a raw parameterized execute.
      def query_result(sql, params)
        if conn.respond_to?(:exec_query)
          conn.exec_query(sql, 'Dontbugme', params)
        else
          exec_params(sql, params)
        end
      end

      # Kept for API compatibility; both AR rows (Hash) and PG result rows
      # already support #[] access, so no normalization is needed.
      def normalize_row(row)
        row
      end

      private

      def conn
        @connection
      end

      # ActiveRecord's connection when AR is loaded; nil otherwise.
      def default_connection
        return nil unless defined?(ActiveRecord::Base)

        ActiveRecord::Base.connection
      end

      # Executes a parameterized statement against whichever connection
      # flavor we were given (AR adapter, raw PG, or execute-only object).
      def exec_params(sql, params)
        if conn.respond_to?(:raw_connection)
          conn.raw_connection.exec_params(sql, params)
        elsif conn.respond_to?(:exec_params)
          conn.exec_params(sql, params)
        else
          # Fallback: sanitize the statement and run it via #execute.
          sanitized = ActiveRecord::Base.sanitize_sql_array([sql] + params)
          conn.execute(sanitized)
        end
      end

      # Creates the table and indexes if missing. Deliberately best-effort:
      # concurrent boot or restricted DDL permissions must not crash startup.
      def ensure_schema
        return unless conn

        conn.execute(<<~SQL)
          CREATE TABLE IF NOT EXISTS dontbugme_traces (
            id VARCHAR(32) PRIMARY KEY,
            kind VARCHAR(32),
            identifier VARCHAR(512),
            status VARCHAR(32),
            started_at TIMESTAMP,
            duration_ms REAL,
            correlation_id VARCHAR(64),
            metadata_json JSONB,
            spans_json JSONB,
            error_json JSONB
          )
        SQL
        conn.execute('CREATE INDEX IF NOT EXISTS idx_dontbugme_started_at ON dontbugme_traces(started_at)')
        conn.execute('CREATE INDEX IF NOT EXISTS idx_dontbugme_status ON dontbugme_traces(status)')
        conn.execute('CREATE INDEX IF NOT EXISTS idx_dontbugme_correlation_id ON dontbugme_traces(correlation_id)')
      rescue StandardError
        # Schema might already exist.
      end

      # Builds a Trace from a row supporting ['col'] or [:col] access.
      def row_to_trace(row)
        hash = {
          id: row['id'] || row[:id],
          kind: row['kind'] || row[:kind],
          identifier: row['identifier'] || row[:identifier],
          status: row['status'] || row[:status],
          started_at: (row['started_at'] || row[:started_at])&.to_s,
          duration_ms: (row['duration_ms'] || row[:duration_ms])&.to_f,
          correlation_id: row['correlation_id'] || row[:correlation_id],
          metadata: parse_json_column(row['metadata_json'] || row[:metadata_json], {}),
          spans: parse_json_column(row['spans_json'] || row[:spans_json], []),
          error: parse_json_column(row['error_json'] || row[:error_json], nil)
        }
        Trace.from_h(hash)
      end

      # JSONB values come back as a String (raw PG) or pre-parsed (adapter
      # typemaps); handle both, falling back to +default+ when absent.
      def parse_json_column(value, default)
        return default unless value

        value.is_a?(String) ? JSON.parse(value, symbolize_names: true) : value
      end
    end
  end
end
|
|
@@ -0,0 +1,148 @@
|
|
|
1
|
+
# frozen_string_literal: true

require 'fileutils'
require 'json'
require 'time'

module Dontbugme
  module Store
    # SQLite-backed trace store. The sqlite3 gem is required lazily so it
    # stays an optional dependency.
    class Sqlite < Base
      def initialize(path: nil)
        @path = path || Dontbugme.config.sqlite_path
        ensure_directory
        ensure_schema
      end

      # Upserts the trace keyed by id.
      #
      # Fix: bind values are passed as a single Array — the sqlite3 gem
      # deprecated (1.6) and removed (2.x) splatting them as separate
      # positional arguments. The Array form works on both series.
      def save_trace(trace)
        data = trace.to_h
        correlation_id = data[:correlation_id] || data[:metadata]&.dig(:correlation_id)
        db.execute(
          'INSERT OR REPLACE INTO traces (id, kind, identifier, status, started_at, duration_ms, correlation_id, metadata_json, spans_json, error_json) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)',
          [
            data[:id],
            data[:kind].to_s,
            data[:identifier],
            data[:status].to_s,
            data[:started_at],
            data[:duration_ms],
            correlation_id,
            data[:metadata].to_json,
            data[:spans].to_json,
            data[:error]&.to_json
          ]
        )
      end

      # Returns the Trace with the given id, or nil.
      def find_trace(trace_id)
        row = db.execute('SELECT id, kind, identifier, status, started_at, duration_ms, correlation_id, metadata_json, spans_json, error_json FROM traces WHERE id = ?', [trace_id]).first
        return nil unless row

        row_to_trace(row)
      end

      # Returns traces matching the filters, newest first, capped by :limit
      # (default 100). All user values go through bind parameters.
      def search(filters = {})
        sql = 'SELECT id, kind, identifier, status, started_at, duration_ms, correlation_id, metadata_json, spans_json, error_json FROM traces WHERE 1=1'
        params = []

        if filters[:status]
          sql += ' AND status = ?'
          params << filters[:status].to_s
        end
        if filters[:kind]
          sql += ' AND kind = ?'
          params << filters[:kind].to_s
        end
        if filters[:identifier]
          sql += ' AND identifier LIKE ?'
          params << "%#{filters[:identifier]}%"
        end
        if filters[:correlation_id]
          sql += ' AND correlation_id = ?'
          params << filters[:correlation_id].to_s
        end

        sql += ' ORDER BY started_at DESC LIMIT ?'
        params << (filters[:limit] || filters['limit'] || 100)

        db.execute(sql, params).map { |row| row_to_trace(row) }
      end

      # Deletes traces started before the cutoff.
      #
      # Fix: Time cutoffs are normalized to UTC ISO-8601 so the
      # lexicographic comparison against stored timestamps matches the
      # Postgresql store (the original used the local-offset iso8601).
      def cleanup(before:)
        cutoff = before.is_a?(Time) ? before.utc.iso8601 : before.to_s
        db.execute('DELETE FROM traces WHERE started_at < ?', [cutoff])
      end

      private

      # Lazily opens the database (and loads the sqlite3 gem).
      def db
        @db ||= begin
          require 'sqlite3'
          SQLite3::Database.new(@path)
        end
      end

      def ensure_directory
        dir = File.dirname(@path)
        FileUtils.mkdir_p(dir) unless Dir.exist?(dir)
      end

      def ensure_schema
        db.execute(<<~SQL)
          CREATE TABLE IF NOT EXISTS traces (
            id TEXT PRIMARY KEY,
            kind TEXT,
            identifier TEXT,
            status TEXT,
            started_at TEXT,
            duration_ms REAL,
            correlation_id TEXT,
            metadata_json TEXT,
            spans_json TEXT,
            error_json TEXT
          )
        SQL
        migrate_add_correlation_id
        db.execute('CREATE INDEX IF NOT EXISTS idx_traces_started_at ON traces(started_at)')
        db.execute('CREATE INDEX IF NOT EXISTS idx_traces_status ON traces(status)')
        db.execute('CREATE INDEX IF NOT EXISTS idx_traces_correlation_id ON traces(correlation_id)')
      end

      # Adds correlation_id to databases created before the column existed.
      # PRAGMA table_info rows are [cid, name, type, ...]; index 1 is the name.
      def migrate_add_correlation_id
        return if db.execute('PRAGMA table_info(traces)').any? { |col| col[1] == 'correlation_id' }

        db.execute('ALTER TABLE traces ADD COLUMN correlation_id TEXT')
      end

      # Rehydrates a positional row into a Trace. The SELECTs in this class
      # always yield 10 columns; the shorter branch is defensive support for
      # 9-column rows from a pre-correlation_id schema.
      def row_to_trace(row)
        if row.size >= 10
          hash = {
            id: row[0],
            kind: row[1],
            identifier: row[2],
            status: row[3],
            started_at: row[4],
            duration_ms: row[5],
            metadata: row[7] ? JSON.parse(row[7], symbolize_names: true) : {},
            spans: row[8] ? JSON.parse(row[8], symbolize_names: true) : [],
            error: row[9] ? JSON.parse(row[9], symbolize_names: true) : nil
          }
          hash[:correlation_id] = row[6] if row[6]
          hash[:metadata][:correlation_id] ||= row[6] if row[6]
        else
          hash = {
            id: row[0],
            kind: row[1],
            identifier: row[2],
            status: row[3],
            started_at: row[4],
            duration_ms: row[5],
            metadata: row[6] ? JSON.parse(row[6], symbolize_names: true) : {},
            spans: row[7] ? JSON.parse(row[7], symbolize_names: true) : [],
            error: row[8] ? JSON.parse(row[8], symbolize_names: true) : nil
          }
        end
        Trace.from_h(hash)
      end
    end
  end
end
|