karafka-web 0.5.2 → 0.6.1

Sign up to get free protection for your applications and to get access to all the features.
Files changed (66)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +69 -6
  4. data/Gemfile.lock +14 -14
  5. data/karafka-web.gemspec +3 -3
  6. data/lib/karafka/web/config.rb +11 -5
  7. data/lib/karafka/web/installer.rb +2 -3
  8. data/lib/karafka/web/tracking/consumers/contracts/consumer_group.rb +1 -1
  9. data/lib/karafka/web/tracking/consumers/contracts/job.rb +4 -1
  10. data/lib/karafka/web/tracking/consumers/contracts/partition.rb +1 -1
  11. data/lib/karafka/web/tracking/consumers/contracts/report.rb +8 -4
  12. data/lib/karafka/web/tracking/consumers/contracts/subscription_group.rb +1 -1
  13. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +3 -1
  14. data/lib/karafka/web/tracking/consumers/listeners/base.rb +2 -2
  15. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +8 -44
  16. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +5 -0
  17. data/lib/karafka/web/tracking/consumers/reporter.rb +151 -0
  18. data/lib/karafka/web/tracking/consumers/sampler.rb +2 -1
  19. data/lib/karafka/web/tracking/contracts/base.rb +34 -0
  20. data/lib/karafka/web/tracking/contracts/error.rb +31 -0
  21. data/lib/karafka/web/tracking/helpers/error_info.rb +50 -0
  22. data/lib/karafka/web/tracking/memoized_shell.rb +1 -1
  23. data/lib/karafka/web/tracking/producers/listeners/base.rb +33 -0
  24. data/lib/karafka/web/tracking/producers/listeners/errors.rb +66 -0
  25. data/lib/karafka/web/tracking/producers/listeners/reporter.rb +21 -0
  26. data/lib/karafka/web/tracking/producers/reporter.rb +101 -0
  27. data/lib/karafka/web/tracking/producers/sampler.rb +42 -0
  28. data/lib/karafka/web/tracking/sampler.rb +5 -0
  29. data/lib/karafka/web/ui/controllers/consumers.rb +2 -4
  30. data/lib/karafka/web/ui/models/counters.rb +51 -0
  31. data/lib/karafka/web/ui/models/status.rb +31 -7
  32. data/lib/karafka/web/ui/pro/controllers/consumers.rb +2 -3
  33. data/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb +6 -6
  34. data/lib/karafka/web/ui/pro/views/consumers/consumer/_metrics.erb +6 -1
  35. data/lib/karafka/web/ui/pro/views/consumers/index.erb +25 -21
  36. data/lib/karafka/web/ui/pro/views/consumers/jobs.erb +1 -1
  37. data/lib/karafka/web/ui/pro/views/errors/_breadcrumbs.erb +1 -2
  38. data/lib/karafka/web/ui/pro/views/errors/_error.erb +8 -6
  39. data/lib/karafka/web/ui/pro/views/errors/show.erb +3 -2
  40. data/lib/karafka/web/ui/public/stylesheets/application.css +4 -0
  41. data/lib/karafka/web/ui/views/consumers/_no_consumers.erb +9 -0
  42. data/lib/karafka/web/ui/views/consumers/index.erb +24 -20
  43. data/lib/karafka/web/ui/views/errors/_breadcrumbs.erb +1 -2
  44. data/lib/karafka/web/ui/views/errors/_detail.erb +9 -1
  45. data/lib/karafka/web/ui/views/errors/_error.erb +8 -6
  46. data/lib/karafka/web/ui/views/errors/show.erb +50 -2
  47. data/lib/karafka/web/ui/views/shared/_feature_pro.erb +4 -0
  48. data/lib/karafka/web/ui/views/shared/_pagination.erb +8 -2
  49. data/lib/karafka/web/ui/views/shared/exceptions/pro_only.erb +0 -4
  50. data/lib/karafka/web/ui/views/status/failures/_initial_state.erb +1 -10
  51. data/lib/karafka/web/ui/views/status/info/_components.erb +6 -1
  52. data/lib/karafka/web/ui/views/status/show.erb +6 -1
  53. data/lib/karafka/web/ui/views/status/successes/_connection.erb +1 -0
  54. data/lib/karafka/web/ui/views/status/warnings/_connection.erb +11 -0
  55. data/lib/karafka/web/version.rb +1 -1
  56. data.tar.gz.sig +0 -0
  57. metadata +28 -16
  58. metadata.gz.sig +0 -0
  59. data/lib/karafka/web/tracking/base_contract.rb +0 -31
  60. data/lib/karafka/web/tracking/reporter.rb +0 -144
  61. data/lib/karafka/web/ui/pro/views/consumers/_summary.erb +0 -81
  62. data/lib/karafka/web/ui/pro/views/errors/_cleaned.erb +0 -3
  63. data/lib/karafka/web/ui/pro/views/errors/_detail.erb +0 -31
  64. data/lib/karafka/web/ui/pro/views/errors/_no_errors.erb +0 -3
  65. data/lib/karafka/web/ui/pro/views/jobs/_breadcrumbs.erb +0 -5
  66. data/lib/karafka/web/ui/views/consumers/_breadcrumbs.erb +0 -27
@@ -0,0 +1,31 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Contracts
        # Contract for error reporting
        # Since producers and consumers report their errors to the same topic, we need to have
        # a unified contract for both
        class Error < Base
          configure

          required(:schema_version) { |version| version.is_a?(String) }
          required(:type) { |type| type.is_a?(String) && !type.empty? }
          required(:error_class) { |klass| klass.is_a?(String) && !klass.empty? }
          required(:error_message) { |message| message.is_a?(String) }
          required(:backtrace) { |backtrace| backtrace.is_a?(String) }
          required(:details) { |details| details.is_a?(Hash) }
          required(:occurred_at) { |moment| moment.is_a?(Float) }

          nested(:process) do
            required(:name) { |name| name.is_a?(String) && !name.empty? }
            # Tags may not be present for producers because they may operate from outside of
            # a karafka taggable process
            optional(:tags) { |tags| tags.is_a?(Karafka::Core::Taggable::Tags) }
          end
        end
      end
    end
  end
end
@@ -0,0 +1,50 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      # Namespace for tracking related helpers
      module Helpers
        # Module containing some helper methods useful for extracting extra errors info
        module ErrorInfo
          # Extracts the basic error info
          #
          # @param error [StandardError] error that occurred
          # @return [Array<String, String, String>] array with error name, message and backtrace
          #   where app root and gem home prefixes are stripped from the backtrace lines
          def extract_error_info(error)
            app_root = "#{::Karafka.root}/"

            gem_home = if ENV.key?('GEM_HOME')
                         ENV['GEM_HOME']
                       else
                         File.expand_path(File.join(Karafka.gem_root.to_s, '../'))
                       end

            gem_home = "#{gem_home}/"

            # Operate on a copy of the backtrace. `error.backtrace` returns the exception's
            # own array, so mutating it in place (as `map!` would) corrupts the backtrace for
            # any other handler that later inspects the same exception
            backtrace = (error.backtrace || []).map do |line|
              line.gsub(app_root, '').gsub(gem_home, '')
            end

            [
              error.class.name,
              extract_error_message(error),
              backtrace.join("\n")
            ]
          end

          # @param error [StandardError] error that occurred
          # @return [String] formatted exception message, truncated to 10k characters and
          #   scrubbed of invalid UTF-8 sequences
          def extract_error_message(error)
            error_message = error.message.to_s[0, 10_000]
            error_message.force_encoding('utf-8')
            # Scrub may not be available on custom String-like message objects, hence the check
            error_message.scrub! if error_message.respond_to?(:scrub!)
            error_message
          rescue StandardError
            '!!! Error message extraction failed !!!'
          end
        end
      end
    end
  end
end
@@ -6,7 +6,7 @@ module Karafka
6
6
  # processes
7
7
  module Tracking
8
8
  # Class used to run shell command that also returns previous result in case of a failure
9
- # This is used because children cat get signals when performing stat fetches and then
9
+ # This is used because children can get signals when performing stat fetches and then
10
10
  # fetch is stopped. This can cause invalid results from sub-shell commands.
11
11
  #
12
12
  # This will return last result as long as there was one.
@@ -0,0 +1,33 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Producers
        # Namespace for producers listeners
        module Listeners
          # Base listener for producer related listeners
          class Base
            include ::Karafka::Core::Helpers::Time

            # Forwards tracking to the configured sampler
            def track(...)
              sampler.track(...)
            end

            # Forwards a periodic report request to the configured reporter
            def report(...)
              reporter.report(...)
            end

            # Forwards a forced report request to the configured reporter
            def report!(...)
              reporter.report!(...)
            end

            private

            # @return [Object] sampler in use
            def sampler
              @sampler ||= ::Karafka::Web.config.tracking.producers.sampler
            end

            # @return [Object] reporter in use
            def reporter
              @reporter ||= ::Karafka::Web.config.tracking.producers.reporter
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,66 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Producers
        module Listeners
          # Listener for tracking producers published errors
          class Errors < Base
            include Tracking::Helpers::ErrorInfo

            # Schema used by producers error reporting
            SCHEMA_VERSION = '1.0.0'

            private_constant :SCHEMA_VERSION

            # Tracks any producer related errors
            #
            # @param event [Karafka::Core::Monitoring::Event]
            def on_error_occurred(event)
              track { |sampler| sampler.errors << build_error_details(event) }
            end

            private

            # @param event [Karafka::Core::Monitoring::Event]
            # @return [Hash] hash with error data for the sampler
            def build_error_details(event)
              error_class, error_message, backtrace = extract_error_info(event[:error])
              error_type = event[:type]

              {
                schema_version: SCHEMA_VERSION,
                producer_id: event[:producer_id],
                type: error_type,
                error_class: error_class,
                error_message: error_message,
                backtrace: backtrace,
                details: build_details(error_type, event.payload),
                occurred_at: float_now,
                process: { name: sampler.process_name }
              }
            end

            # @param type [String] error type
            # @param payload [Hash] error payload
            # @return [Hash] details relevant for the given error type
            def build_details(type, payload)
              # Only dispatch errors carry delivery coordinates worth exposing
              return payload.slice(:topic, :partition, :offset) if type == 'librdkafka.dispatch_error'

              {}
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,21 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Producers
        module Listeners
          # Special listener that we use to report data about producers states
          #
          # No dedicated reporting thread is needed here: producers already run their own
          # internal polling thread for changes and we piggyback on it for dispatching
          class Reporter < Base
            # Requests a report dispatch on every statistics emission tick
            #
            # @param _event [Karafka::Core::Monitoring::Event] ignored, acts only as a trigger
            def on_statistics_emitted(_event)
              reporter.report
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,101 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Producers
        # Reports the collected data about the producer and sends it, so we can use it in the UI
        #
        # @note Producer reporter does not have to operate with the `forced` dispatch mainly
        #   because there is no expectation on immediate status updates for producers and their
        #   dispatch flow is always periodic based.
        class Reporter
          include ::Karafka::Core::Helpers::Time

          # Minimum number of messages from which we switch to a synchronous dispatch.
          # This acts as a small back-off not to overload the system in case we would have
          # an extremely big number of errors happening
          PRODUCE_SYNC_THRESHOLD = 25

          private_constant :PRODUCE_SYNC_THRESHOLD

          # This mutex is shared between tracker and samplers so there is no case where metrics
          # would be collected same time tracker reports
          MUTEX = Mutex.new

          def initialize
            # Start "in the past" so any errors that appear right after sampling started are
            # dispatched immediately instead of waiting a full interval
            @tracked_at = monotonic_now - 10_000
            @error_contract = Tracking::Contracts::Error.new
          end

          # Dispatches the current state from sampler to appropriate topics
          def report
            MUTEX.synchronize do
              return unless report?

              @tracked_at = monotonic_now

              # Report errors that occurred (if any)
              error_messages = build_error_messages

              return if error_messages.empty?

              produce(error_messages)

              # Wipe the sampler so new state changes are tracked without the already
              # dispatched ones impacting the data
              sampler.clear
            end
          end

          private

          # @return [Boolean] Should we report or is it not yet time to do so
          def report?
            return false unless ::Karafka.producer.status.active?

            (monotonic_now - @tracked_at) >= ::Karafka::Web.config.tracking.interval
          end

          # @return [Object] sampler for the metrics
          def sampler
            @sampler ||= ::Karafka::Web.config.tracking.producers.sampler
          end

          # Validates and maps the sampled errors into dispatchable Kafka messages
          #
          # @return [Array<Hash>] messages ready for production
          def build_error_messages
            sampler.errors.map do |error|
              @error_contract.validate!(error)

              {
                topic: Karafka::Web.config.topics.errors,
                payload: error.to_json,
                # Always dispatch errors from the same process to the same partition
                key: error[:process][:name]
              }
            end
          end

          # Produces messages to Kafka.
          #
          # @param messages [Array<Hash>]
          #
          # @note We pick either sync or async dependent on number of messages. The trick here
          #   is, that we do not want to end up overloading the internal queue with messages in
          #   case someone has a lot of errors from processing or other errors. Producing sync
          #   will wait for the delivery, hence will slow things down a little bit. On the other
          #   hand during normal operations we should not have that many messages to dispatch
          #   and it should not slowdown any processing.
          def produce(messages)
            if messages.count >= PRODUCE_SYNC_THRESHOLD
              ::Karafka.producer.produce_many_sync(messages)
            else
              ::Karafka.producer.produce_many_async(messages)
            end
          # Since we run this in a background thread, there may be a case upon shutdown, where
          # the producer is closed right before a potential dispatch. It is not worth dealing
          # with this and we can just safely ignore it
          rescue WaterDrop::Errors::ProducerClosedError
            nil
          end
        end
      end
    end
  end
end
@@ -0,0 +1,42 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      # Namespace for all the things related to tracking producers
      module Producers
        # Sampler collecting producers related data we're interested in
        class Sampler < Tracking::Sampler
          include ::Karafka::Core::Helpers::Time

          # Errors gathered since the last flush
          attr_reader :errors

          # Current schema version
          # This can be used in the future for detecting incompatible changes and writing
          # migrations
          SCHEMA_VERSION = '1.0.0'

          def initialize
            super

            @errors = []
            @started_at = float_now
          end

          # Runs the sampling block under the shared reporting mutex, so aggregation and
          # counting never interleave with the reporter flushing the very same data
          #
          # @yieldparam [Sampler] self
          def track
            Reporter::MUTEX.synchronize do
              yield(self)
            end
          end

          # Clears the sampler (for use after data dispatch)
          def clear
            errors.clear
          end
        end
      end
    end
  end
end
@@ -32,6 +32,11 @@ module Karafka
32
32
  ::Karafka::VERSION
33
33
  end
34
34
 
35
+ # @return [String] Karafka Web UI version
36
+ def karafka_web_version
37
+ ::Karafka::Web::VERSION
38
+ end
39
+
35
40
  # @return [String] Karafka::Core version
36
41
  def karafka_core_version
37
42
  ::Karafka::Core::VERSION
@@ -10,11 +10,9 @@ module Karafka
10
10
  # @note For now we load all and paginate over the squashed data.
11
11
  def index
12
12
  @current_state = Models::State.current!
13
- processes_total = Models::Processes.active(@current_state)
14
-
15
- @counters = Lib::HashProxy.new(@current_state[:stats])
13
+ @counters = Models::Counters.new(@current_state)
16
14
  @processes, @next_page = Lib::PaginateArray.new.call(
17
- processes_total,
15
+ Models::Processes.active(@current_state),
18
16
  @params.current_page
19
17
  )
20
18
 
@@ -0,0 +1,51 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Ui
      module Models
        # Represents the top counters bar values on the consumers view
        class Counters < Lib::HashProxy
          # Max errors partitions we support for estimations
          MAX_ERROR_PARTITIONS = 100

          private_constant :MAX_ERROR_PARTITIONS

          # @param state [Hash]
          def initialize(state)
            super(state[:stats])
            @hash[:errors] = estimate_errors_count
          end

          private

          # Estimates the number of errors present in the errors topic.
          #
          # @return [Integer] summed high/low watermark deltas across existing partitions
          def estimate_errors_count
            total = 0

            MAX_ERROR_PARTITIONS.times do |partition|
              begin
                low_offset, high_offset = Karafka::Admin.read_watermark_offsets(
                  ::Karafka::Web.config.topics.errors,
                  partition
                )
              rescue Rdkafka::RdkafkaError => e
                # We estimate that way instead of using `#cluster_info` to get the partitions
                # count inside the errors topic, because it is around 90x faster to query for
                # an invalid partition and get the error, instead of querying for all topics
                # on a big cluster
                #
                # Most of the users use one or few error partitions at most, so this is fairly
                # efficient and not problematic. The first unknown partition ends the scan
                break if e.code == :unknown_partition

                raise
              end

              total += high_offset - low_offset
            end

            total
          end
        end
      end
    end
  end
end
@@ -5,6 +5,7 @@ module Karafka
5
5
  module Ui
6
6
  module Models
7
7
  # Model that represents the general status of the Web UI.
8
+ #
8
9
  # We use this data to display a status page that helps with debugging on what is missing
9
10
  # in the overall setup of the Web UI.
10
11
  #
@@ -15,7 +16,18 @@ module Karafka
15
16
  Step = Struct.new(:status, :details) do
16
17
  # @return [Boolean] is the given step successfully configured and working
17
18
  def success?
18
- status == :success
19
+ status == :success || status == :warning
20
+ end
21
+
22
+ # @return [String] local namespace for partial of a given type
23
+ def partial_namespace
24
+ case status
25
+ when :success then 'successes'
26
+ when :warning then 'warnings'
27
+ when :failure then 'failures'
28
+ else
29
+ raise ::Karafka::Errors::UnsupportedCaseError, status
30
+ end
19
31
  end
20
32
 
21
33
  # @return [String] stringified status
@@ -29,11 +41,21 @@ module Karafka
29
41
  connect
30
42
  end
31
43
 
32
- # @return [Status::Step] were we able to connect to Kafka or not
44
+ # @return [Status::Step] were we able to connect to Kafka or not and how fast.
45
+ # Some people try to work with Kafka over the internet with really high latency and this
46
+ # should be highlighted in the UI as often the connection just becomes unstable
33
47
  def connection
48
+ level = if @connection_time < 1_000
49
+ :success
50
+ elsif @connection_time < 1_000_000
51
+ :warning
52
+ else
53
+ :failure
54
+ end
55
+
34
56
  Step.new(
35
- @connected ? :success : :failure,
36
- nil
57
+ level,
58
+ { time: @connection_time }
37
59
  )
38
60
  end
39
61
 
@@ -171,12 +193,14 @@ module Karafka
171
193
  topics
172
194
  end
173
195
 
174
- # Tries connecting with the cluster and sets the connection state
196
+ # Tries connecting with the cluster and saves the cluster info and the connection time
197
+ # @note If fails, `connection_time` will be 1_000_000
175
198
  def connect
199
+ started = Time.now.to_f
176
200
  @cluster_info = ::Karafka::Admin.cluster_info
177
- @connected = true
201
+ @connection_time = (Time.now.to_f - started) * 1_000
178
202
  rescue ::Rdkafka::RdkafkaError
179
- @connected = false
203
+ @connection_time = 1_000_000
180
204
  end
181
205
  end
182
206
  end
@@ -21,10 +21,9 @@ module Karafka
21
21
  # Consumers list
22
22
  def index
23
23
  @current_state = Models::State.current!
24
- processes_total = Models::Processes.active(@current_state)
25
- @counters = Lib::HashProxy.new(@current_state[:stats])
24
+ @counters = Models::Counters.new(@current_state)
26
25
  @processes, @next_page = Lib::PaginateArray.new.call(
27
- processes_total,
26
+ Models::Processes.active(@current_state),
28
27
  @params.current_page
29
28
  )
30
29
 
@@ -1,4 +1,10 @@
1
1
  <tr>
2
+ <td>
3
+ <span class="badge bg-secondary badge-topic" title="Consumer group: <%= job.consumer_group %>">
4
+ <%= job.topic %>:
5
+ <%= job.partition %>
6
+ </span>
7
+ </td>
2
8
  <td>
3
9
  <code><%= job.consumer %></code>
4
10
 
@@ -7,12 +13,6 @@
7
13
  <%== tags(job.tags) %>
8
14
  <% end %>
9
15
  </td>
10
- <td>
11
- <span class="badge bg-secondary badge-topic" title="Consumer group: <%= job.consumer_group %>">
12
- <%= job.topic %>:
13
- <%= job.partition %>
14
- </span>
15
- </td>
16
16
  <td>
17
17
  <code>#<%= job.type %></code>
18
18
  </td>
@@ -113,7 +113,12 @@
113
113
  </span>
114
114
 
115
115
  <span class="badge bg-secondary">
116
- karafka core
116
+ karafka-web
117
+ <%= @process.karafka_web %>
118
+ </span>
119
+
120
+ <span class="badge bg-secondary">
121
+ karafka-core
117
122
  <%= @process.karafka_core %>
118
123
  </span>
119
124
 
@@ -4,27 +4,31 @@
4
4
  <div class="container">
5
5
  <div class="row">
6
6
  <div class="col-sm-12">
7
- <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
8
- <thead>
9
- <tr class="align-middle">
10
- <th>Name</th>
11
- <th class="col-sm-2">Started</th>
12
- <th class="col-sm-1">Memory</th>
13
- <th class="col-sm-1">Performance</th>
14
- <th class="col-sm-1">Load</th>
15
- <th class="col-sm-1">Total lag</th>
16
- </tr>
17
- </thead>
18
- <tbody>
19
- <%==
20
- render_each(
21
- @processes,
22
- 'consumers/_consumer',
23
- local: :process
24
- )
25
- %>
26
- </tbody>
27
- </table>
7
+ <% if @processes.empty? %>
8
+ <%== partial 'consumers/no_consumers' %>
9
+ <% else %>
10
+ <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
11
+ <thead>
12
+ <tr class="align-middle">
13
+ <th>Name</th>
14
+ <th class="col-sm-2">Started</th>
15
+ <th class="col-sm-1">Memory</th>
16
+ <th class="col-sm-1">Performance</th>
17
+ <th class="col-sm-1">Load</th>
18
+ <th class="col-sm-1">Total lag</th>
19
+ </tr>
20
+ </thead>
21
+ <tbody>
22
+ <%==
23
+ render_each(
24
+ @processes,
25
+ 'consumers/_consumer',
26
+ local: :process
27
+ )
28
+ %>
29
+ </tbody>
30
+ </table>
31
+ <% end %>
28
32
  </div>
29
33
  </div>
30
34
  </div>
@@ -17,8 +17,8 @@
17
17
  <table class="processes bg-white table table-hover table-bordered table-striped mb-0 align-middle">
18
18
  <thead>
19
19
  <tr class="align-middle">
20
- <th>Consumer</th>
21
20
  <th>Topic</th>
21
+ <th>Consumer</th>
22
22
  <th>Type</th>
23
23
  <th>First offset</th>
24
24
  <th>Last offset</th>
@@ -16,9 +16,8 @@
16
16
  <%=
17
17
  type = @error_message.payload[:type]
18
18
  error_class = @error_message.payload[:error_class]
19
- offset = @error_message.offset
20
19
 
21
- "#{type}: #{error_class} #{offset}"
20
+ "#{type}: #{error_class}"
22
21
  %>
23
22
  </a>
24
23
  </li>
@@ -9,14 +9,16 @@
9
9
 
10
10
  <tr>
11
11
  <td>
12
- <% if error[:details].key?(:topic) %>
13
- <%= error[:details][:topic] %>: <%= error[:details][:partition] %>
14
- <% else %>
15
- <%= error[:type] %>
16
- <% end %>
12
+ <span class="badge bg-secondary badge-topic">
13
+ <% if error[:details].key?(:topic) %>
14
+ <%= error[:details][:topic] %>: <%= error[:details][:partition] %>
15
+ <% else %>
16
+ <%= error[:type] %>
17
+ <% end %>
18
+ </span>
17
19
  </td>
18
20
  <td>
19
- <%== error[:process_name] %>
21
+ <%== error[:process][:name] %>
20
22
  </td>
21
23
  <td>
22
24
  <%= error[:error_class] %>: