rails_observatory 0.1.0
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/README.md +42 -0
- data/Rakefile +8 -0
- data/app/assets/config/rails_observatory_manifest.js +2 -0
- data/app/assets/images/rails_observatory/logo.svg +8 -0
- data/app/assets/js/application.js +88 -0
- data/app/assets/js/controllers/chart_controller.js +176 -0
- data/app/assets/js/controllers/event_details_controller.js +15 -0
- data/app/assets/js/controllers/index.js +9 -0
- data/app/assets/js/controllers/sparkline_controller.js +72 -0
- data/app/assets/stylesheets/application/card.css +51 -0
- data/app/assets/stylesheets/application/chart.css +34 -0
- data/app/assets/stylesheets/application/dropdown.css +62 -0
- data/app/assets/stylesheets/application/global_modifiers.css +10 -0
- data/app/assets/stylesheets/application/query_table.css +68 -0
- data/app/assets/stylesheets/application/side_nav.css +62 -0
- data/app/assets/stylesheets/application/side_panel.css +35 -0
- data/app/assets/stylesheets/application/tab_nav.css +64 -0
- data/app/assets/stylesheets/application/table_chart.css +66 -0
- data/app/assets/stylesheets/application/tbd.css +70 -0
- data/app/assets/stylesheets/application/top_nav.css +33 -0
- data/app/assets/stylesheets/application.css +42 -0
- data/app/assets/stylesheets/elements/a.css +8 -0
- data/app/assets/stylesheets/elements/button.css +21 -0
- data/app/assets/stylesheets/elements/details.css +12 -0
- data/app/assets/stylesheets/elements/root.css +26 -0
- data/app/assets/stylesheets/elements/section.css +9 -0
- data/app/assets/stylesheets/errors/show/details.css +13 -0
- data/app/assets/stylesheets/layout/app.css +23 -0
- data/app/assets/stylesheets/layout/details-side-panel.css +15 -0
- data/app/assets/stylesheets/layout/requests.css +45 -0
- data/app/assets/stylesheets/layout/two-column.css +17 -0
- data/app/assets/stylesheets/mixins/nav_button.css +19 -0
- data/app/assets/stylesheets/requests/stats.css +35 -0
- data/app/controllers/rails_observatory/application_controller.rb +24 -0
- data/app/controllers/rails_observatory/errors_controller.rb +27 -0
- data/app/controllers/rails_observatory/jobs_controller.rb +25 -0
- data/app/controllers/rails_observatory/mailers_controller.rb +11 -0
- data/app/controllers/rails_observatory/requests_controller.rb +33 -0
- data/app/helpers/rails_observatory/application_helper.rb +110 -0
- data/app/jobs/rails_observatory/application_job.rb +4 -0
- data/app/mailers/rails_observatory/application_mailer.rb +6 -0
- data/app/views/layouts/rails_observatory/application.html.erb +93 -0
- data/app/views/new_user_mailer/greeting.html.erb +1 -0
- data/app/views/posts/index.html.erb +1 -0
- data/app/views/rails_observatory/application/_chart.html.erb +23 -0
- data/app/views/rails_observatory/application/_events_table.html.erb +24 -0
- data/app/views/rails_observatory/application/_sparkline.html.erb +17 -0
- data/app/views/rails_observatory/application/_trace.html.erb +122 -0
- data/app/views/rails_observatory/errors/index.html.erb +87 -0
- data/app/views/rails_observatory/errors/show.html.erb +193 -0
- data/app/views/rails_observatory/jobs/_table_chart.html.erb +29 -0
- data/app/views/rails_observatory/jobs/index.html.erb +20 -0
- data/app/views/rails_observatory/jobs/show.html.erb +8 -0
- data/app/views/rails_observatory/logs/index.html.erb +18 -0
- data/app/views/rails_observatory/mailers/index.html.erb +11 -0
- data/app/views/rails_observatory/mailers/show.html.erb +10 -0
- data/app/views/rails_observatory/requests/_text_gauge.html.erb +4 -0
- data/app/views/rails_observatory/requests/index.html.erb +56 -0
- data/app/views/rails_observatory/requests/show.html.erb +16 -0
- data/config/routes.rb +7 -0
- data/lib/rails_observatory/action_mailer_subscriber.rb +14 -0
- data/lib/rails_observatory/engine.rb +49 -0
- data/lib/rails_observatory/event_collector.rb +43 -0
- data/lib/rails_observatory/log_collector.rb +46 -0
- data/lib/rails_observatory/mailer_previews/delivered_mail_preview.rb +9 -0
- data/lib/rails_observatory/middleware.rb +77 -0
- data/lib/rails_observatory/models/error.rb +67 -0
- data/lib/rails_observatory/models/event_collection.rb +137 -0
- data/lib/rails_observatory/models/events.rb +22 -0
- data/lib/rails_observatory/models/job_trace.rb +28 -0
- data/lib/rails_observatory/models/logs.rb +9 -0
- data/lib/rails_observatory/models/mail_delivery.rb +33 -0
- data/lib/rails_observatory/models/redis_model.rb +112 -0
- data/lib/rails_observatory/models/request_trace.rb +29 -0
- data/lib/rails_observatory/railties/active_job_instrumentation.rb +48 -0
- data/lib/rails_observatory/railties/redis_runtime.rb +11 -0
- data/lib/rails_observatory/redis/logging_middleware.rb +22 -0
- data/lib/rails_observatory/redis/redis_client_instrumentation.rb +18 -0
- data/lib/rails_observatory/redis/time_series/increment_script.lua +67 -0
- data/lib/rails_observatory/redis/time_series/insertion.rb +73 -0
- data/lib/rails_observatory/redis/time_series/query_builder.rb +149 -0
- data/lib/rails_observatory/redis/time_series/timing_script.lua +89 -0
- data/lib/rails_observatory/redis/time_series.rb +91 -0
- data/lib/rails_observatory/serializers/event_serializer.rb +19 -0
- data/lib/rails_observatory/serializers/headers_serializer.rb +12 -0
- data/lib/rails_observatory/serializers/job_serializer.rb +11 -0
- data/lib/rails_observatory/serializers/mail_delivery_job_serializer.rb +14 -0
- data/lib/rails_observatory/serializers/request_serializer.rb +17 -0
- data/lib/rails_observatory/serializers/response_serializer.rb +14 -0
- data/lib/rails_observatory/serializers/serializer.rb +51 -0
- data/lib/rails_observatory/version.rb +3 -0
- data/lib/rails_observatory.rb +3 -0
- data/public/assets/js/application.js +11186 -0
- data/public/assets/logo_with_text.svg +21 -0
- data/public/assets/stylesheets/application.css +757 -0
- metadata +197 -0
@@ -0,0 +1,67 @@
|
|
1
|
+
require_relative './redis_model'
require 'digest'
module RailsObservatory
  # A captured application exception, persisted to Redis and deduplicated by
  # a SHA-256 fingerprint built from the exception class chain and the
  # file:line portion of each backtrace frame.
  class Error < RedisModel

    attribute :time, :float
    attribute :fingerprint, :string
    attribute :has_causes, :boolean, indexed: false
    attribute :causes, compressed: true, indexed: false
    attribute :location, :string
    attribute :class_name, :string
    attribute :message, :string
    attribute :source_extracts, compressed: true, indexed: false
    attribute :trace, compressed: true, indexed: false

    alias_attribute :id, :fingerprint

    # Raw exception most recently assigned through #exception=.
    attr_reader :exception

    # Assigns an exception and derives all persisted attributes from it.
    #
    # FIX: the original declared `attr_accessor :exception` but its custom
    # writer never stored the value, so the generated reader always returned
    # nil. The raw exception is now kept in @exception.
    def exception=(ex)
      @exception = ex
      ex_wrapper = ActionDispatch::ExceptionWrapper.new(Rails.backtrace_cleaner, ex)
      payload = payload_for_wrapped_exception(ex_wrapper)
      assign_attributes(payload)
      self.has_causes = ex_wrapper.has_cause?
      self.causes = ex_wrapper.wrapped_causes.map { payload_for_wrapped_exception(_1) }
      self.fingerprint = build_fingerprint(ex_wrapper)
    end

    private

    # Backtrace frames for one wrapped exception, each tagged with whether
    # the frame belongs to the application portion of the trace.
    def trace_for_ex(wrapped_ex)
      wrapped_ex.full_trace.each_with_index.map do |trace, idx|
        {
          exception_object_id: wrapped_ex.exception.object_id,
          id: idx,
          trace: trace,
          is_application_trace: wrapped_ex.application_trace.include?(trace),
        }
      end
    end

    # Attribute hash derived from an ActionDispatch::ExceptionWrapper.
    def payload_for_wrapped_exception(wrapped_ex)
      {
        class_name: wrapped_ex.exception_class_name,
        message: wrapped_ex.message,
        source_extracts: wrapped_ex.source_extracts,
        trace: trace_for_ex(wrapped_ex),
      }
    end

    # Request id of the controller/job currently executing, if any.
    # NOTE(review): currently unused inside this class — kept for callers.
    def contextual_request_id
      context = ActiveSupport::ExecutionContext.to_h
      context[:controller]&.request&.request_id || context[:job]&.request_id
    end

    # Stable string used for fingerprinting: class name plus "file:line" for
    # each backtrace frame (slice(0..1) drops the method-name portion).
    def exception_string(wrapped_ex)
      wrapped_ex.exception_class_name + wrapped_ex.exception.backtrace.map { _1.split(":").slice(0..1).join(":") }.join("\n")
    end

    # SHA-256 over the exception and all of its wrapped causes.
    def build_fingerprint(wrapped_ex)
      exceptions = [wrapped_ex]
      exceptions.push(*wrapped_ex.wrapped_causes) if wrapped_ex.has_cause?
      Digest::SHA256.hexdigest(exceptions.map { exception_string(_1) }.join("\n"))
    end

  end
end
|
@@ -0,0 +1,137 @@
|
|
1
|
+
module RailsObservatory

  # Wraps the serialized instrumentation events of a single trace and lazily
  # decorates them (ordering, self-time, middleware merging, nesting depth)
  # the first time they are iterated.
  class EventCollection
    include Enumerable

    attr_accessor :events

    delegate :push, :<<, :size, to: :events
    delegate :empty?, to: :to_a

    def initialize(events)
      @events = events
    end

    # Returns a copy of the collection that skips events with the given names.
    def without(*names)
      copy = self.clone
      copy.instance_exec { @without = names }
      copy
    end

    # Returns a copy of the collection restricted to the given event names.
    def only(*names)
      copy = self.clone
      copy.instance_exec { @only = names }
      copy
    end

    # Yields each decorated, filtered event. Relative timestamps are computed
    # against the first event of the *filtered* set.
    #
    # FIX: returns an Enumerator when called without a block, as Enumerable
    # consumers expect (the original raised LocalJumpError).
    def each
      return to_enum(:each) unless block_given?
      decorate_events unless @decorated
      scoped = @events
      scoped = scoped.reject { _1['name'].in?(@without) } if @without
      scoped = scoped.select { _1['name'].in?(@only) } if @only
      decorate_with_relative_time(scoped).each { yield _1 }
    end

    # Builds chart range-series grouped by event namespace, with one category
    # per nesting depth.
    #
    # FIX: returns [] for an empty collection instead of crashing on
    # minmax_by over no elements.
    def to_series
      all_events = to_a
      return [] if all_events.empty?

      min, max = all_events.minmax_by { _1['depth'] }.pluck('depth')

      # Placeholder points so every depth category exists in every series.
      category_primer = (min..max).map do |depth|
        {
          x: depth.to_s,
          y: nil,
          event_self_time: 0,
        }
      end

      grouped_events = all_events.group_by { _1['name'].split('.').last }.sort_by { _1.first }
      grouped_events.map do |name, events|
        {
          name: name,
          data: category_primer + events.map do |ev|
            {
              x: ev['depth'].to_s,
              y: [ev['relative_start_at'], ev['relative_end_at']],
              event_self_time: ev['self_time'],
              event_name: ev['name'].split('.').first,
              start_at: ev['start_at'],
            }
          end
        }
      end
    end

    private

    # One-time decoration pipeline over the full event set.
    def decorate_events
      @events = @events.then(&method(:sort_events))
                       .then(&method(:decorate_with_self_time))
                       .then(&method(:merge_middleware))
                       .then(&method(:decorate_with_depth))
      @decorated = true
    end

    def sort_events(events)
      events.sort_by { _1['start_at'] }
    end

    # Collapses per-middleware events into a single synthetic event whose
    # self_time is the sum over the stack; originals kept under
    # 'middleware_stack'.
    def merge_middleware(events)
      middleware_events = events.select { _1['name'] == 'process_middleware.action_dispatch' }
      return events if middleware_events.empty?
      merged_middleware = middleware_events.reduce(middleware_events.first.without('self_time')) do |merged, event|
        merged['self_time'] ||= 0
        merged['self_time'] += event['self_time']
        merged['middleware_stack'] ||= []
        merged['middleware_stack'] << event
        merged
      end
      [merged_middleware] + events.excluding(middleware_events)
    end

    # Nesting depth = number of still-open enclosing events whose time range
    # covers this one (events must already be sorted by start_at).
    def decorate_with_depth(events)
      depth_stack = []
      events.each do |e|
        event_range = (e['start_at']..e['end_at'])
        depth_stack.select! { _1.cover?(event_range) }
        e['depth'] = depth_stack.size
        depth_stack << event_range
      end
    end

    # self_time = duration minus time covered by (non-overlapping) child events.
    def decorate_with_self_time(events)
      events.each do |ev|
        ev_range = (ev['start_at']..ev['end_at'])
        sub_events = events.excluding(ev).select { ev_range.cover?(_1['start_at'].._1['end_at']) }
        sub_event_time = non_overlapping_ranges(sub_events).reduce(0) { |sum, range| sum + (range.end - range.begin) }
        ev['self_time'] = ev['duration'] - sub_event_time
      end
    end

    # Keeps only ranges not already covered by an earlier-kept range, so
    # nested children are not double-counted.
    def non_overlapping_ranges(events)
      events.reduce([]) do |arr, event|
        event_range = (event['start_at']..event['end_at'])
        if arr.any? { |r| r.cover?(event_range) }
          arr
        else
          arr << event_range
        end
      end
    end

    # Adds start/end offsets relative to the first event in the set.
    def decorate_with_relative_time(events)
      return events if events.empty?
      origin = events.first['start_at']
      events.each do |ev|
        ev['relative_start_at'] = ev['start_at'] - origin
        ev['relative_end_at'] = ev['end_at'] - origin
      end
    end

  end
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
module RailsObservatory
  # Mixin for trace models that persist a compressed list of instrumentation
  # events; exposes them wrapped in an EventCollection.
  module Events
    extend ActiveSupport::Concern

    included do
      attribute :events, indexed: false, compressed: true

      # Wraps the raw deserialized event array; nil when nothing was stored.
      def events
        attr_value = super
        return nil if attr_value.nil?
        EventCollection.new(attr_value)
      end
    end

    # Events related to mail handling: direct deliveries plus enqueues of
    # ActionMailer::MailDeliveryJob.
    #
    # FIX: ActiveJob instrumentation events are named "*.active_job", not
    # "*.action_job" — the original filter could never match an enqueue.
    def mail_events
      events.only('enqueue.active_job', 'deliver.action_mailer')
            .reject { _1['name'] == 'enqueue.active_job' && _1.dig('payload', 'job', 'class') != 'ActionMailer::MailDeliveryJob' }
    end
  end
end
|
@@ -0,0 +1,28 @@
|
|
1
|
+
require_relative './redis_model'
require_relative './events'
require_relative './logs'

module RailsObservatory
  # Persisted trace of a single ActiveJob execution, including the
  # instrumentation events and log lines captured while it ran.
  class JobTrace < RedisModel
    include Events
    include Logs

    # Short Redis key prefix ("jt:<job_id>") keeps key sizes down.
    def self.key_prefix
      "jt"
    end

    # Searchable fields.
    attribute :job_id, :string
    attribute :queue_name, :string
    attribute :queue_adapter, :string
    attribute :job_class, :string
    attribute :executions, :integer
    attribute :error, :boolean
    attribute :time, :float

    # Stored but excluded from the search index.
    attribute :allocations, :integer, indexed: false
    attribute :queue_latency, :float, indexed: false
    attribute :duration, :float, indexed: false

    alias_attribute :id, :job_id
    alias_attribute :name, :job_class
  end
end
|
@@ -0,0 +1,33 @@
|
|
1
|
+
require_relative './redis_model'

module RailsObservatory
  # Record of a single delivered email, keyed by the message id; the full
  # serialized mail object is stored gzip-compressed out of the index.
  class MailDelivery < RedisModel
    attribute :message_id, :string
    attribute :time, :float
    attribute :duration, :float
    attribute :mailer, :string
    attribute :to, :string
    attribute :from, :string
    attribute :subject, :string
    attribute :mail, compressed: true, indexed: false

    alias_attribute :id, :message_id

    # Recipient address(es); arrays are flattened to a comma-separated string.
    def to=(val)
      super(normalize_address_list(val))
    end

    # Sender address(es); arrays are flattened to a comma-separated string.
    def from=(val)
      super(normalize_address_list(val))
    end

    private

    # Shared coercion for address fields (previously duplicated in both
    # writers). Non-array values pass through unchanged.
    def normalize_address_list(val)
      val.is_a?(Array) ? val.join(', ') : val
    end
  end
end
|
@@ -0,0 +1,112 @@
|
|
1
|
+
require_relative '../serializers/serializer'
require_relative './event_collection'
require_relative '../redis/time_series'
require 'zlib'
module RailsObservatory
  # Lightweight ActiveModel persistence on top of RedisJSON + RediSearch.
  # The document lives at "<key_prefix>:<id>"; attributes declared with
  # compressed: true are gzipped and stored in a sidecar string key
  # "<key_prefix>_<attr>:<id>" to keep the searchable JSON small.
  class RedisModel
    include ActiveModel::Model
    include ActiveModel::Attributes
    include ActiveModel::Serializers::JSON

    class NotFound < StandardError; end

    class << self
      attr_writer :indexed_attributes, :compressed_attributes

      # FIX: default both registries to [] so models that declare no
      # indexed/compressed attributes no longer hit NoMethodError on nil in
      # .find, #save and #attribute_names_for_serialization.
      def indexed_attributes
        @indexed_attributes ||= []
      end

      def compressed_attributes
        @compressed_attributes ||= []
      end
    end

    # DSL: declares an ActiveModel attribute, recording whether it belongs
    # to the RediSearch schema and/or is stored gzip-compressed.
    def self.attribute(name, *args, indexed: true, compressed: false, **rest)
      indexed_attributes << name if indexed
      compressed_attributes << name.to_s if compressed
      super(name, *args, **rest)
    end

    def self.redis
      Rails.configuration.rails_observatory.redis
    end

    def redis
      self.class.redis
    end

    # Key prefix derived from the class name; subclasses may override with a
    # shorter token.
    def self.key_prefix
      name.demodulize.underscore
    end

    def self.key_name(id)
      "#{key_prefix}:#{id}"
    end

    # Number of documents in the index.
    # PERF: LIMIT 0 0 makes RediSearch return only the total, instead of
    # fetching (and discarding) every sorted document as before.
    def self.count
      total, * = redis.call("FT.SEARCH", index_name, '*', "LIMIT", "0", "0")
      total
    end

    # All documents, newest first.
    def self.all
      total, *results = redis.call("FT.SEARCH", index_name, '*', "SORTBY", "time", "DESC")
      Hash[*results].values.map(&:last).map { JSON.parse(_1) }.map { new(_1) }
    end

    # Loads one document plus its compressed sidecar attributes.
    # Raises NotFound when the JSON document does not exist.
    def self.find(id)
      result = redis.call("JSON.GET", key_name(id), "$") || raise(NotFound, "Could not find #{name} with id #{id}")
      attrs = JSON.parse(result).first

      compressed_attributes.each do |attr|
        val = redis.call("GET", [key_prefix, attr].join("_") + ":#{id}")
        next if val.nil? # tolerate a missing/expired sidecar blob
        attrs.merge!(attr => JSON.parse(Zlib.gunzip(val)))
      end

      new(attrs)
    end

    # Maps ActiveModel attribute types to RediSearch field types.
    ATTRIBUTE_TYPE_TO_REDIS_TYPE = {
      string: "TEXT",
      integer: "NUMERIC",
      float: "NUMERIC",
      boolean: "TAG"
    }.freeze

    def self.index_name
      "#{key_prefix}-idx"
    end

    # Creates the RediSearch index over this model's indexed attributes.
    def self.create_redis_index
      schema = indexed_attributes.flat_map do |attr|
        ["$.#{attr}", "AS", "#{attr}", ATTRIBUTE_TYPE_TO_REDIS_TYPE[attribute_types[attr.to_s].type]]
      end
      redis.call("FT.CREATE", index_name, "ON", "JSON", "PREFIX", "1", key_prefix, "SCHEMA", *schema)
    end

    # FT.INFO output massaged into nested hashes for easier inspection.
    def self.index_info
      info = Hash[*redis.call("FT.INFO", index_name)]
      info['attributes'] = info['attributes'].map { Hash[*_1] }
      info['index_definition'] = Hash[*info['index_definition']]
      info
    end

    # Creates the index unless one already exists.
    def self.ensure_index
      redis.call("FT._LIST").include?(index_name) || create_redis_index
    end

    # Compressed attributes are persisted separately, so exclude them from
    # the JSON document serialization.
    def attribute_names_for_serialization
      attributes.keys - self.class.compressed_attributes
    end

    # Atomically writes the JSON document and all compressed sidecar keys.
    def save
      redis.multi do |r|
        r.call("JSON.SET", self.class.key_name(id), "$", JSON.generate(as_json))
        self.class.compressed_attributes.each do |attr|
          compressed_value = Zlib.gzip(JSON.generate(@attributes.fetch_value(attr)), level: Zlib::BEST_COMPRESSION)
          r.call("SET", [self.class.key_prefix, attr].join("_") + ":#{id}", compressed_value)
        end
      end
    end

  end
end
|
@@ -0,0 +1,29 @@
|
|
1
|
+
require_relative './redis_model'
require_relative './events'
require_relative './logs'
module RailsObservatory
  # Persisted trace of a single HTTP request, including the instrumentation
  # events and log lines captured while it was served.
  class RequestTrace < RedisModel
    include Events
    include Logs

    # Short Redis key prefix ("rt:<request_id>") keeps key sizes down.
    def self.key_prefix
      "rt"
    end

    # Searchable fields.
    attribute :request_id, :string
    attribute :status, :integer
    attribute :http_method, :string
    attribute :path, :string
    attribute :action, :string
    attribute :format, :string
    attribute :error, :boolean
    attribute :route_pattern, :string
    attribute :time, :float
    attribute :duration, :float

    # Stored but excluded from the search index.
    attribute :allocations, :integer, indexed: false

    alias_attribute :id, :request_id
    alias_attribute :name, :action
  end
end
|
@@ -0,0 +1,48 @@
|
|
1
|
+
require_relative '../event_collector'
require_relative '../models/job_trace'
module RailsObservatory
  module Railties
    # Prepended around ActiveJob's #perform_now to record time-series metrics
    # and persist a JobTrace with captured events and logs.
    module ActiveJobInstrumentation

      def perform_now
        # FIX: capture timestamps before any TimeSeries call. Previously an
        # exception raised by the metric calls reached `ensure` with
        # start_at_mono/events unset, producing a nil-arithmetic or nil.map
        # error that masked the original failure.
        start_at = Time.now
        start_at_mono = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)

        TimeSeries.distribution("job.queue_latency", Time.now - enqueued_at, labels: { queue_name: }) unless enqueued_at.nil?
        labels = { job_class: self.class.name, queue_name: }
        TimeSeries.increment("job.count", labels:)
        TimeSeries.increment("job.retry_count", labels:) if executions > 1

        result = nil
        logs = []
        events = EventCollector.instance.collect_events do
          logs = LogCollector.collect_logs do
            result = super
          end
        end
        end_at_mono = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
        result
      rescue Exception => error
        # Intentionally rescues Exception: this is instrumentation that must
        # observe every failure, and it always re-raises below.
        events = error.instance_variable_get(:@_trace_events)
        end_at_mono = Process.clock_gettime(Process::CLOCK_MONOTONIC, :float_millisecond)
        TimeSeries.increment("job.error_count", labels:)
        raise
      ensure
        duration = end_at_mono - start_at_mono
        TimeSeries.distribution("job.latency", duration, labels:)
        JobTrace.new(
          job_id: job_id,
          time: start_at.to_f,
          duration:,
          queue_adapter: ActiveJob.adapter_name(queue_adapter),
          executions:,
          job_class: self.class.name,
          queue_name:,
          # Array() guards against a nil @_trace_events on the error path.
          events: Array(events).map { Serializer.serialize(_1) },
          logs:,
          error: !error.nil?
        ).save
      end

    end
  end
end
|
@@ -0,0 +1,22 @@
|
|
1
|
+
module RailsObservatory
  # RedisClient middleware that echoes every connection and command to
  # stdout — intended for development debugging only.
  module LoggingMiddleware
    def connect(redis_config)
      puts "[Redis] CONNECT"
      super
    end

    def call(command, redis_config)
      verb = command.first
      if verb == "SCRIPT"
        # Script bodies are huge Lua sources; log the verb only.
        puts "[Redis] #{verb}"
      else
        puts "[Redis] #{verb} #{command.drop(1).join(" ")}"
      end
      super
    end

    def call_pipelined(commands, redis_config)
      puts "[Redis] [Pipelined] #{commands.join("\n")}"
      super
    end
  end
end
|
@@ -0,0 +1,18 @@
|
|
1
|
+
module RailsObservatory
  # RedisClient middleware that publishes a "call.redis" ActiveSupport
  # notification around every command.
  module RedisClientInstrumentation
    def call(command, redis_config)
      payload_command = command
      # SCRIPT LOAD carries an entire Lua source as its argument — strip the
      # args so the notification payload stays small.
      payload_command = [payload_command.first] if payload_command.first == "SCRIPT"
      ActiveSupport::Notifications.instrument("call.redis", { command: payload_command.join(' ') }) do
        super
      end
    end

    # FIX: removed the Benchmark.realtime wrapper. Its result was assigned to
    # an unused local whose only consumer was commented out, and 'benchmark'
    # was never required by this file, so the dead code risked a NameError.
    def call_pipelined(commands, redis_config)
      super
    end
  end
end
|
@@ -0,0 +1,67 @@
|
|
1
|
+
-- Helper function to get all combinations (the power set) of a table of keys
local function generate_key_combinations(keys)
  local n = #keys
  local combs = {}
  table.insert(combs, {})

  local function helper(curr_comb, start_idx)
    if start_idx <= n then
      for i = start_idx, n do
        local new_comb = {}
        for _, v in ipairs(curr_comb) do
          table.insert(new_comb, v)
        end
        table.insert(new_comb, keys[i])
        table.insert(combs, new_comb)
        helper(new_comb, i + 1)
      end
    end
  end

  helper({}, 1)
  return combs
end

-- Main script begins here
local metric_name = tostring(ARGV[1]) -- Ensure it's a string
-- FIX(comment): retention values are in milliseconds; 10000 ms is
-- 10 seconds, not "10ms" as the original comment claimed.
local raw_retention = 10000 -- Raw samples kept for 10 seconds (10,000 ms)
local compaction_retention = 31536000000 -- 1 year in ms (365*24*60*60*1000)

-- Extracting labels: ARGV[2..] is a flat key/value list
---@type table
local labels = {}
local keys = {}
for i = 2, #ARGV, 2 do
  local key = tostring(ARGV[i])
  local value = tostring(ARGV[i + 1])
  labels[key] = value
  -- Track every label key ever seen for this metric
  redis.call("SADD", metric_name .. ':labels', key)
  table.insert(keys, key)
end

local key_combinations = generate_key_combinations(keys)

-- For each label combination, upsert the raw series and its "sum"
-- compaction, then record the increment
for _, comb_keys in ipairs(key_combinations) do
  local ts_name = metric_name
  local label_set = {}

  for _, key in ipairs(comb_keys) do
    ts_name = ts_name .. ":" .. labels[key]
    table.insert(label_set, key)
    table.insert(label_set, labels[key])
  end

  if redis.call("EXISTS", ts_name) == 0 then
    redis.call("TS.CREATE", ts_name, "RETENTION", raw_retention, "CHUNK_SIZE", 48)
  end

  local compaction_key = ts_name .. "_" .. "sum"
  if redis.call("EXISTS", compaction_key) == 0 then
    redis.call("TS.CREATE", compaction_key, "RETENTION", compaction_retention, "CHUNK_SIZE", 48, "LABELS", "name", metric_name, "compaction", "sum", unpack(label_set))
    redis.call("TS.CREATERULE", ts_name, compaction_key, "AGGREGATION", "sum", 10000)
  end
  redis.call("TS.ADD", ts_name, "*", 1, 'ON_DUPLICATE', 'SUM')
end

return "OK"
|
@@ -0,0 +1,73 @@
|
|
1
|
+
require 'benchmark'

SCRIPT = File.read(File.join(File.dirname(__FILE__), 'timing_script.lua'))
INCREMENT_SCRIPT = File.read(File.join(File.dirname(__FILE__), 'increment_script.lua'))

# Small wrapper around a server-side Lua script: loads it once, invokes it
# via EVALSHA, and transparently reloads it if Redis' script cache was
# flushed (NOSCRIPT).
class RedisScript

  def self.redis
    Rails.configuration.rails_observatory.redis
  end

  def redis
    self.class.redis
  end

  def initialize(lua_string)
    @script = lua_string
  end

  # Invokes the script with the given ARGV values (no KEYS are used).
  #
  # FIX: NOSCRIPT recovery is now bounded to a single reload attempt (the
  # original could retry indefinitely), and re-raising uses bare `raise` to
  # preserve the original exception and backtrace.
  def call(*args)
    reloaded = false
    begin
      @sha1 ||= load_script
      redis.call("EVALSHA", @sha1, 0, *args)
    rescue => e
      raise unless e.message =~ /NOSCRIPT/ && !reloaded
      reloaded = true
      @sha1 = load_script
      retry
    end
  end

  # Loads the script into the Redis script cache; returns its SHA-1 digest.
  def load_script
    redis.call('SCRIPT', 'LOAD', @script)
  end

end

TIMING_SCRIPT = RedisScript.new(SCRIPT)
INCREMENT_CALL = RedisScript.new(INCREMENT_SCRIPT)

module RailsObservatory

  class TimeSeries
    # Write-side API, extended onto TimeSeries (and PREFIX-carrying
    # subclasses).
    module Insertion

      # TODO: These need to take in a timestamp

      # Records a timing/distribution sample for +name+.
      def distribution(name, value, labels: {})
        TIMING_SCRIPT.call(prefixed_metric_name(name), value, labels.to_a.flatten.map(&:to_s))
      end
      alias_method :record_timing, :distribution

      # Increments the counter series for +name+.
      def increment(name, labels: {})
        INCREMENT_CALL.call(prefixed_metric_name(name), labels.to_a.flatten.map(&:to_s))
      end
      alias_method :record_occurrence, :increment

      private

      # Applies the extender's PREFIX constant when present (this logic was
      # previously duplicated in both public methods).
      def prefixed_metric_name(name)
        defined?(self::PREFIX) ? [self::PREFIX, name].join('.') : name
      end
    end
  end
end
|