karafka-web 0.5.2 → 0.6.0

Sign up to get free protection for your applications and to get access to all the features.
Files changed (57) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +60 -6
  4. data/Gemfile.lock +14 -14
  5. data/karafka-web.gemspec +3 -3
  6. data/lib/karafka/web/config.rb +11 -5
  7. data/lib/karafka/web/installer.rb +2 -3
  8. data/lib/karafka/web/tracking/consumers/contracts/consumer_group.rb +1 -1
  9. data/lib/karafka/web/tracking/consumers/contracts/job.rb +4 -1
  10. data/lib/karafka/web/tracking/consumers/contracts/partition.rb +1 -1
  11. data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
  12. data/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb +1 -1
  13. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +3 -1
  14. data/lib/karafka/web/tracking/consumers/listeners/base.rb +2 -2
  15. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +8 -44
  16. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +5 -0
  17. data/lib/karafka/web/tracking/consumers/reporter.rb +151 -0
  18. data/lib/karafka/web/tracking/contracts/base.rb +34 -0
  19. data/lib/karafka/web/tracking/contracts/error.rb +31 -0
  20. data/lib/karafka/web/tracking/helpers/error_info.rb +50 -0
  21. data/lib/karafka/web/tracking/memoized_shell.rb +1 -1
  22. data/lib/karafka/web/tracking/producers/listeners/base.rb +33 -0
  23. data/lib/karafka/web/tracking/producers/listeners/errors.rb +66 -0
  24. data/lib/karafka/web/tracking/producers/listeners/reporter.rb +21 -0
  25. data/lib/karafka/web/tracking/producers/reporter.rb +101 -0
  26. data/lib/karafka/web/tracking/producers/sampler.rb +42 -0
  27. data/lib/karafka/web/ui/controllers/consumers.rb +2 -4
  28. data/lib/karafka/web/ui/models/counters.rb +51 -0
  29. data/lib/karafka/web/ui/pro/controllers/consumers.rb +2 -3
  30. data/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb +6 -6
  31. data/lib/karafka/web/ui/pro/views/consumers/index.erb +25 -21
  32. data/lib/karafka/web/ui/pro/views/consumers/jobs.erb +1 -1
  33. data/lib/karafka/web/ui/pro/views/errors/_breadcrumbs.erb +1 -2
  34. data/lib/karafka/web/ui/pro/views/errors/_error.erb +8 -6
  35. data/lib/karafka/web/ui/pro/views/errors/show.erb +3 -2
  36. data/lib/karafka/web/ui/public/stylesheets/application.css +4 -0
  37. data/lib/karafka/web/ui/views/consumers/_no_consumers.erb +9 -0
  38. data/lib/karafka/web/ui/views/consumers/index.erb +24 -20
  39. data/lib/karafka/web/ui/views/errors/_breadcrumbs.erb +1 -2
  40. data/lib/karafka/web/ui/views/errors/_detail.erb +9 -1
  41. data/lib/karafka/web/ui/views/errors/_error.erb +8 -6
  42. data/lib/karafka/web/ui/views/errors/show.erb +50 -2
  43. data/lib/karafka/web/ui/views/shared/_feature_pro.erb +4 -0
  44. data/lib/karafka/web/ui/views/shared/_pagination.erb +8 -2
  45. data/lib/karafka/web/ui/views/shared/exceptions/pro_only.erb +0 -4
  46. data/lib/karafka/web/version.rb +1 -1
  47. data.tar.gz.sig +0 -0
  48. metadata +26 -16
  49. metadata.gz.sig +0 -0
  50. data/lib/karafka/web/tracking/base_contract.rb +0 -31
  51. data/lib/karafka/web/tracking/reporter.rb +0 -144
  52. data/lib/karafka/web/ui/pro/views/consumers/_summary.erb +0 -81
  53. data/lib/karafka/web/ui/pro/views/errors/_cleaned.erb +0 -3
  54. data/lib/karafka/web/ui/pro/views/errors/_detail.erb +0 -31
  55. data/lib/karafka/web/ui/pro/views/errors/_no_errors.erb +0 -3
  56. data/lib/karafka/web/ui/pro/views/jobs/_breadcrumbs.erb +0 -5
  57. data/lib/karafka/web/ui/views/consumers/_breadcrumbs.erb +0 -27
@@ -0,0 +1,50 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ # Namespace for tracking related helpers
7
+ module Helpers
8
+ # Module containing some helper methods useful for extracting extra errors info
9
+ module ErrorInfo
10
+ # Extracts the basic error info
11
+ #
12
+ # @param error [StandardError] error that occurred
13
+ # @return [Array<String, String, String>] array with error name, message and backtrace
14
+ def extract_error_info(error)
15
+ app_root = "#{::Karafka.root}/"
16
+
17
+ gem_home = if ENV.key?('GEM_HOME')
18
+ ENV['GEM_HOME']
19
+ else
20
+ File.expand_path(File.join(Karafka.gem_root.to_s, '../'))
21
+ end
22
+
23
+ gem_home = "#{gem_home}/"
24
+
25
+ backtrace = error.backtrace || []
26
+ backtrace.map! { |line| line.gsub(app_root, '') }
27
+ backtrace.map! { |line| line.gsub(gem_home, '') }
28
+
29
+ [
30
+ error.class.name,
31
+ extract_error_message(error),
32
+ backtrace.join("\n")
33
+ ]
34
+ end
35
+
36
+ # @param error [StandardError] error that occurred
37
+ # @return [String] formatted exception message
38
+ def extract_error_message(error)
39
+ error_message = error.message.to_s[0, 10_000]
40
+ error_message.force_encoding('utf-8')
41
+ error_message.scrub! if error_message.respond_to?(:scrub!)
42
+ error_message
43
+ rescue StandardError
44
+ '!!! Error message extraction failed !!!'
45
+ end
46
+ end
47
+ end
48
+ end
49
+ end
50
+ end
@@ -6,7 +6,7 @@ module Karafka
6
6
  # processes
7
7
  module Tracking
8
8
  # Class used to run shell command that also returns previous result in case of a failure
9
- # This is used because children cat get signals when performing stat fetches and then
9
+ # This is used because children can get signals when performing stat fetches and then
10
10
  # fetch is stopped. This can cause invalid results from sub-shell commands.
11
11
  #
12
12
  This will return last result as long as there was one.
@@ -0,0 +1,33 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ module Producers
7
+ # Namespace for producers listeners
8
+ module Listeners
9
+ # Base listener for producer related listeners
10
+ class Base
11
+ include ::Karafka::Core::Helpers::Time
12
+ extend Forwardable
13
+
14
+ def_delegators :sampler, :track
15
+ def_delegators :reporter, :report, :report!
16
+
17
+ private
18
+
19
+ # @return [Object] sampler in use
20
+ def sampler
21
+ @sampler ||= ::Karafka::Web.config.tracking.producers.sampler
22
+ end
23
+
24
+ # @return [Object] reporter in use
25
+ def reporter
26
+ @reporter ||= ::Karafka::Web.config.tracking.producers.reporter
27
+ end
28
+ end
29
+ end
30
+ end
31
+ end
32
+ end
33
+ end
@@ -0,0 +1,66 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ module Producers
7
+ module Listeners
8
+ # Listener for tracking producers published errors
9
+ class Errors < Base
10
+ include Tracking::Helpers::ErrorInfo
11
+
12
+ # Schema used by producers error reporting
13
+ SCHEMA_VERSION = '1.0.0'
14
+
15
+ private_constant :SCHEMA_VERSION
16
+
17
+ # Tracks any producer related errors
18
+ #
19
+ # @param event [Karafka::Core::Monitoring::Event]
20
+ def on_error_occurred(event)
21
+ track do |sampler|
22
+ sampler.errors << build_error_details(event)
23
+ end
24
+ end
25
+
26
+ private
27
+
28
+ # @param event [Karafka::Core::Monitoring::Event]
29
+ # @return [Hash] hash with error data for the sampler
30
+ def build_error_details(event)
31
+ type = event[:type]
32
+
33
+ error_class, error_message, backtrace = extract_error_info(event[:error])
34
+
35
+ {
36
+ schema_version: SCHEMA_VERSION,
37
+ producer_id: event[:producer_id],
38
+ type: type,
39
+ error_class: error_class,
40
+ error_message: error_message,
41
+ backtrace: backtrace,
42
+ details: build_details(type, event.payload),
43
+ occurred_at: float_now,
44
+ process: {
45
+ name: sampler.process_name
46
+ }
47
+ }
48
+ end
49
+
50
+ # @param type [String] error type
51
+ # @param payload [Hash] error payload
52
+ # @return [Hash] hash with details
53
+ def build_details(type, payload)
54
+ case type
55
+ when 'librdkafka.dispatch_error'
56
+ payload.slice(:topic, :partition, :offset)
57
+ else
58
+ {}
59
+ end
60
+ end
61
+ end
62
+ end
63
+ end
64
+ end
65
+ end
66
+ end
@@ -0,0 +1,21 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ module Producers
7
+ module Listeners
8
+ # Special listener that we use to report data about producers states
9
+ # We don't have to have a separate thread for reporting, because producers have their
10
+ # own internal threads for changes polling and we can utilize this thread
11
+ class Reporter < Base
12
+ # @param _event [Karafka::Core::Monitoring::Event]
13
+ def on_statistics_emitted(_event)
14
+ reporter.report
15
+ end
16
+ end
17
+ end
18
+ end
19
+ end
20
+ end
21
+ end
@@ -0,0 +1,101 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ module Producers
7
+ # Reports the collected data about the producer and sends it, so we can use it in the UI
8
+ #
9
+ # @note Producer reporter does not have to operate with the `forced` dispatch mainly
10
+ # because there is no expectation on immediate status updates for producers and their
11
+ # dispatch flow is always periodic based.
12
+ class Reporter
13
+ include ::Karafka::Core::Helpers::Time
14
+
15
+ # Minimum number of messages needed for us to produce them in sync mode
16
+ # This acts as a small back-off not to overload the system in case we would have
17
+ # extremely big number of errors happening
18
+ PRODUCE_SYNC_THRESHOLD = 25
19
+
20
+ private_constant :PRODUCE_SYNC_THRESHOLD
21
+
22
+ # This mutex is shared between tracker and samplers so there is no case where metrics
23
+ # would be collected same time tracker reports
24
+ MUTEX = Mutex.new
25
+
26
+ def initialize
27
+ # If there are any errors right after we started sampling, dispatch them immediately
28
+ @tracked_at = monotonic_now - 10_000
29
+ @error_contract = Tracking::Contracts::Error.new
30
+ end
31
+
32
+ # Dispatches the current state from sampler to appropriate topics
33
+ def report
34
+ MUTEX.synchronize do
35
+ return unless report?
36
+
37
+ @tracked_at = monotonic_now
38
+
39
+ # Report errors that occurred (if any)
40
+ messages = sampler.errors.map do |error|
41
+ @error_contract.validate!(error)
42
+
43
+ {
44
+ topic: Karafka::Web.config.topics.errors,
45
+ payload: error.to_json,
46
+ # Always dispatch errors from the same process to the same partition
47
+ key: error[:process][:name]
48
+ }
49
+ end
50
+
51
+ return if messages.empty?
52
+
53
+ produce(messages)
54
+
55
+ # Clear the sampler so it tracks new state changes without previous ones impacting
56
+ # the data
57
+ sampler.clear
58
+ end
59
+ end
60
+
61
+ private
62
+
63
+ # @return [Boolean] Should we report or is it not yet time to do so
64
+ def report?
65
+ return false unless ::Karafka.producer.status.active?
66
+
67
+ (monotonic_now - @tracked_at) >= ::Karafka::Web.config.tracking.interval
68
+ end
69
+
70
+ # @return [Object] sampler for the metrics
71
+ def sampler
72
+ @sampler ||= ::Karafka::Web.config.tracking.producers.sampler
73
+ end
74
+
75
+ # Produces messages to Kafka.
76
+ #
77
+ # @param messages [Array<Hash>]
78
+ #
79
+ # @note We pick either sync or async dependent on number of messages. The trick here is,
80
+ # that we do not want to end up overloading the internal queue with messages in case
81
+ # someone has a lot of errors from processing or other errors. Producing sync will wait
82
+ # for the delivery, hence will slow things down a little bit. On the other hand during
83
+ # normal operations we should not have that many messages to dispatch and it should not
84
+ # slowdown any processing.
85
+ def produce(messages)
86
+ if messages.count >= PRODUCE_SYNC_THRESHOLD
87
+ ::Karafka.producer.produce_many_sync(messages)
88
+ else
89
+ ::Karafka.producer.produce_many_async(messages)
90
+ end
91
+ # Since we run this in a background thread, there may be a case upon shutdown, where the
92
+ # producer is closed right before a potential dispatch. It is not worth dealing with this
93
+ # and we can just safely ignore this
94
+ rescue WaterDrop::Errors::ProducerClosedError
95
+ nil
96
+ end
97
+ end
98
+ end
99
+ end
100
+ end
101
+ end
@@ -0,0 +1,42 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Tracking
6
+ # Namespace for all the things related to tracking producers
7
+ module Producers
8
+ # Samples for collecting producers related data we're interested in
9
+ class Sampler < Tracking::Sampler
10
+ include ::Karafka::Core::Helpers::Time
11
+
12
+ attr_reader :errors
13
+
14
+ # Current schema version
15
+ # This can be used in the future for detecting incompatible changes and writing
16
+ # migrations
17
+ SCHEMA_VERSION = '1.0.0'
18
+
19
+ def initialize
20
+ super
21
+
22
+ @errors = []
23
+ @started_at = float_now
24
+ end
25
+
26
+ # We cannot report and track at the same time, that is why we use mutex here. To make sure
27
+ # that samples aggregations and counting does not interact with reporter flushing.
28
+ def track
29
+ Reporter::MUTEX.synchronize do
30
+ yield(self)
31
+ end
32
+ end
33
+
34
+ # Clears the sampler (for use after data dispatch)
35
+ def clear
36
+ @errors.clear
37
+ end
38
+ end
39
+ end
40
+ end
41
+ end
42
+ end
@@ -10,11 +10,9 @@ module Karafka
10
10
  # @note For now we load all and paginate over the squashed data.
11
11
  def index
12
12
  @current_state = Models::State.current!
13
- processes_total = Models::Processes.active(@current_state)
14
-
15
- @counters = Lib::HashProxy.new(@current_state[:stats])
13
+ @counters = Models::Counters.new(@current_state)
16
14
  @processes, @next_page = Lib::PaginateArray.new.call(
17
- processes_total,
15
+ Models::Processes.active(@current_state),
18
16
  @params.current_page
19
17
  )
20
18
 
@@ -0,0 +1,51 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Ui
6
+ module Models
7
+ # Represents the top counters bar values on the consumers view
8
+ class Counters < Lib::HashProxy
9
+ # Max errors partitions we support for estimations
10
+ MAX_ERROR_PARTITIONS = 100
11
+
12
+ private_constant :MAX_ERROR_PARTITIONS
13
+
14
+ # @param state [Hash]
15
+ def initialize(state)
16
+ super(state[:stats])
17
+ @hash[:errors] = estimate_errors_count
18
+ end
19
+
20
+ private
21
+
22
+ # Estimates the number of errors present in the errors topic.
23
+ def estimate_errors_count
24
+ estimated = 0
25
+
26
+ MAX_ERROR_PARTITIONS.times do |partition|
27
+ begin
28
+ offsets = Karafka::Admin.read_watermark_offsets(
29
+ ::Karafka::Web.config.topics.errors,
30
+ partition
31
+ )
32
+ # We estimate that way instead of using `#cluster_info` to get the partitions count
33
+ # inside the errors topic, because it is around 90x faster to query for invalid
34
+ # partition and get the error, instead of querying for all topics on a big cluster
35
+ #
36
+ # Most of the users use one or few error partitions at most, so this is fairly
37
+ # efficient and not problematic
38
+ rescue Rdkafka::RdkafkaError => e
39
+ e.code == :unknown_partition ? break : raise
40
+ end
41
+
42
+ estimated += offsets.last - offsets.first
43
+ end
44
+
45
+ estimated
46
+ end
47
+ end
48
+ end
49
+ end
50
+ end
51
+ end
@@ -21,10 +21,9 @@ module Karafka
21
21
  # Consumers list
22
22
  def index
23
23
  @current_state = Models::State.current!
24
- processes_total = Models::Processes.active(@current_state)
25
- @counters = Lib::HashProxy.new(@current_state[:stats])
24
+ @counters = Models::Counters.new(@current_state)
26
25
  @processes, @next_page = Lib::PaginateArray.new.call(
27
- processes_total,
26
+ Models::Processes.active(@current_state),
28
27
  @params.current_page
29
28
  )
30
29
 
@@ -1,4 +1,10 @@
1
1
  <tr>
2
+ <td>
3
+ <span class="badge bg-secondary badge-topic" title="Consumer group: <%= job.consumer_group %>">
4
+ <%= job.topic %>:
5
+ <%= job.partition %>
6
+ </span>
7
+ </td>
2
8
  <td>
3
9
  <code><%= job.consumer %></code>
4
10
 
@@ -7,12 +13,6 @@
7
13
  <%== tags(job.tags) %>
8
14
  <% end %>
9
15
  </td>
10
- <td>
11
- <span class="badge bg-secondary badge-topic" title="Consumer group: <%= job.consumer_group %>">
12
- <%= job.topic %>:
13
- <%= job.partition %>
14
- </span>
15
- </td>
16
16
  <td>
17
17
  <code>#<%= job.type %></code>
18
18
  </td>
@@ -4,27 +4,31 @@
4
4
  <div class="container">
5
5
  <div class="row">
6
6
  <div class="col-sm-12">
7
- <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
8
- <thead>
9
- <tr class="align-middle">
10
- <th>Name</th>
11
- <th class="col-sm-2">Started</th>
12
- <th class="col-sm-1">Memory</th>
13
- <th class="col-sm-1">Performance</th>
14
- <th class="col-sm-1">Load</th>
15
- <th class="col-sm-1">Total lag</th>
16
- </tr>
17
- </thead>
18
- <tbody>
19
- <%==
20
- render_each(
21
- @processes,
22
- 'consumers/_consumer',
23
- local: :process
24
- )
25
- %>
26
- </tbody>
27
- </table>
7
+ <% if @processes.empty? %>
8
+ <%== partial 'consumers/no_consumers' %>
9
+ <% else %>
10
+ <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
11
+ <thead>
12
+ <tr class="align-middle">
13
+ <th>Name</th>
14
+ <th class="col-sm-2">Started</th>
15
+ <th class="col-sm-1">Memory</th>
16
+ <th class="col-sm-1">Performance</th>
17
+ <th class="col-sm-1">Load</th>
18
+ <th class="col-sm-1">Total lag</th>
19
+ </tr>
20
+ </thead>
21
+ <tbody>
22
+ <%==
23
+ render_each(
24
+ @processes,
25
+ 'consumers/_consumer',
26
+ local: :process
27
+ )
28
+ %>
29
+ </tbody>
30
+ </table>
31
+ <% end %>
28
32
  </div>
29
33
  </div>
30
34
  </div>
@@ -17,8 +17,8 @@
17
17
  <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
18
18
  <thead>
19
19
  <tr class="align-middle">
20
- <th>Consumer</th>
21
20
  <th>Topic</th>
21
+ <th>Consumer</th>
22
22
  <th>Type</th>
23
23
  <th>First offset</th>
24
24
  <th>Last offset</th>
@@ -16,9 +16,8 @@
16
16
  <%=
17
17
  type = @error_message.payload[:type]
18
18
  error_class = @error_message.payload[:error_class]
19
- offset = @error_message.offset
20
19
 
21
- "#{type}: #{error_class} #{offset}"
20
+ "#{type}: #{error_class}"
22
21
  %>
23
22
  </a>
24
23
  </li>
@@ -9,14 +9,16 @@
9
9
 
10
10
  <tr>
11
11
  <td>
12
- <% if error[:details].key?(:topic) %>
13
- <%= error[:details][:topic] %>: <%= error[:details][:partition] %>
14
- <% else %>
15
- <%= error[:type] %>
16
- <% end %>
12
+ <span class="badge bg-secondary badge-topic">
13
+ <% if error[:details].key?(:topic) %>
14
+ <%= error[:details][:topic] %>: <%= error[:details][:partition] %>
15
+ <% else %>
16
+ <%= error[:type] %>
17
+ <% end %>
18
+ </span>
17
19
  </td>
18
20
  <td>
19
- <%== error[:process_name] %>
21
+ <%== error[:process][:name] %>
20
22
  </td>
21
23
  <td>
22
24
  <%= error[:error_class] %>:
@@ -1,9 +1,8 @@
1
1
  <%==
2
2
  type = @error_message.payload[:type]
3
3
  error_class = @error_message.payload[:error_class]
4
- offset = @error_message.offset
5
4
 
6
- view_title("#{type}: #{error_class} #{offset}")
5
+ view_title("#{type}: #{error_class}")
7
6
  %>
8
7
 
9
8
  <div class="container">
@@ -36,6 +35,7 @@
36
35
  </table>
37
36
  </div>
38
37
  </div>
38
+
39
39
  <div class="row mb-4">
40
40
  <div class="col-sm-12">
41
41
  <h5 class="mb-2">
@@ -44,6 +44,7 @@
44
44
  <hr/>
45
45
  </div>
46
46
  </div>
47
+
47
48
  <div class="row mb-4">
48
49
  <div class="col-sm-12">
49
50
  <div class="card">
@@ -108,3 +108,7 @@ code.wrapped {
108
108
  #metrics ul {
109
109
  padding-left: 0;
110
110
  }
111
+
112
+ .blurred {
113
+ filter: blur(4px)
114
+ }
@@ -0,0 +1,9 @@
1
+ <div class="alert alert-info" role="alert">
2
+ <p>
3
+ There are no Karafka consumer processes actively reporting to the Web UI.
4
+ </p>
5
+
6
+ <p class="mb-0">
7
+ If you are sure you are running at least one <code>karafka server</code> instance, please make sure that it can report to the <code><%= Karafka::Web.config.topics.consumers.reports %></code> topic.
8
+ </p>
9
+ </div>
@@ -4,26 +4,30 @@
4
4
  <div class="container">
5
5
  <div class="row">
6
6
  <div class="col-sm-12">
7
- <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
8
- <thead>
9
- <tr class="align-middle">
10
- <th>Name</th>
11
- <th class="col-sm-2">Started</th>
12
- <th class="col-sm-1">Memory</th>
13
- <th class="col-sm-1">Utilization</th>
14
- <th class="col-sm-1">Total lag</th>
15
- </tr>
16
- </thead>
17
- <tbody>
18
- <%==
19
- render_each(
20
- @processes,
21
- 'consumers/_consumer',
22
- local: :process
23
- )
24
- %>
25
- </tbody>
26
- </table>
7
+ <% if @processes.empty? %>
8
+ <%== partial 'consumers/no_consumers' %>
9
+ <% else %>
10
+ <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
11
+ <thead>
12
+ <tr class="align-middle">
13
+ <th>Name</th>
14
+ <th class="col-sm-2">Started</th>
15
+ <th class="col-sm-1">Memory</th>
16
+ <th class="col-sm-1">Utilization</th>
17
+ <th class="col-sm-1">Total lag</th>
18
+ </tr>
19
+ </thead>
20
+ <tbody>
21
+ <%==
22
+ render_each(
23
+ @processes,
24
+ 'consumers/_consumer',
25
+ local: :process
26
+ )
27
+ %>
28
+ </tbody>
29
+ </table>
30
+ <% end %>
27
31
  </div>
28
32
  </div>
29
33
  </div>
@@ -10,9 +10,8 @@
10
10
  <%=
11
11
  type = @error_message.payload[:type]
12
12
  error_class = @error_message.payload[:error_class]
13
- offset = @error_message.offset
14
13
 
15
- "#{type}: #{error_class} #{offset}"
14
+ "#{type}: #{error_class}"
16
15
  %>
17
16
  </a>
18
17
  </li>
@@ -21,8 +21,16 @@
21
21
  <%= k %>
22
22
  </td>
23
23
  <td>
24
- <% if k == :occurred_at %>
24
+ <% case k
25
+ when :occurred_at
26
+ %>
25
27
  <%== relative_time v %>
28
+ <% when :error_class %>
29
+ <code><%= v %></code>
30
+ <% when :type %>
31
+ <span class="badge bg-secondary">
32
+ <%= v %>
33
+ </span>
26
34
  <% else %>
27
35
  <%= v %>
28
36
  <% end %>