karafka-web 0.7.1 → 0.7.3
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/CHANGELOG.md +12 -1
- data/Gemfile.lock +1 -1
- data/lib/karafka/web/config.rb +5 -1
- data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +3 -3
- data/lib/karafka/web/processing/consumers/aggregators/state.rb +2 -2
- data/lib/karafka/web/ui/models/processes.rb +12 -5
- data/lib/karafka/web/ui/models/status.rb +41 -12
- data/lib/karafka/web/ui/views/consumers/_consumer.erb +5 -1
- data/lib/karafka/web/ui/views/dashboard/index.erb +1 -1
- data/lib/karafka/web/ui/views/status/failures/_consumers_reports.erb +11 -0
- data/lib/karafka/web/ui/views/status/failures/_initial_consumers_metrics.erb +23 -9
- data/lib/karafka/web/ui/views/status/failures/_initial_consumers_state.erb +23 -9
- data/lib/karafka/web/ui/views/status/show.erb +17 -2
- data/lib/karafka/web/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +3 -2
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: dc9572e3dfbb4565361fdbc9521fb7a9a7833664f42bd8e9f0e63e940a397034
+  data.tar.gz: 07fb369bcd0d1aa6c80ef20ff96289989c38ff934a1eb795ab85f39b7adf1228
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 51bc23c17be963c3b19143aa8034046c0bd85df4251dc9e2e911a8ab28ae194d9ae62e2ab1d3aa0b3e2f8dba33649ad51881d9e96fe669fffbdc971f406ce3eb
+  data.tar.gz: 332247644ed57d25687af816ed850f7847b6eb8e63646bf492460424f0d5855c8dace2f1127d0d84631bffa8f6a9fb71e42a64ece7141a6acb96d5b5f987896c
checksums.yaml.gz.sig
CHANGED
Binary file
data/CHANGELOG.md
CHANGED
@@ -1,7 +1,18 @@
 # Karafka Web changelog

+## 0.7.3 (2023-09-18)
+- [Improvement] Mitigate a case where a race-condition during upgrade would crash data.
+
+## 0.7.2 (2023-09-18)
+- [Improvement] Display hidden by accident errors for OSS metrics.
+- [Improvement] Use a five second cache for non-production environments to improve dev experience.
+- [Improvement] Limit number of partitions listed on the Consumers view if they exceed 10 to improve readability and indicate, that there are more in OSS similar to Pro.
+- [Improvement] Squash processes reports based on the key instead of payload skipping deserialization for duplicated reports.
+- [Improvement] Make sure, that the Karafka topics present data can be deserialized and report on the status page if not.
+- [Fix] Extensive data-poll on processes despite no processes being available.
+
 ## 0.7.1 (2023-09-15)
-- [Improvement] Limit number of partitions listed on the Consumers view if they exceed 10 to improve readability and indicate, that there are more.
+- [Improvement] Limit number of partitions listed on the Consumers view if they exceed 10 to improve readability and indicate, that there are more in Pro.
 - [Improvement] Make sure, that small messages size (less than 100 bytes) is correctly displayed.
 - [Fix] Validate refresh time.
 - [Fix] Fix invalid message payload size display (KB instead of B, etc).
data/Gemfile.lock
CHANGED
data/lib/karafka/web/config.rb
CHANGED
@@ -92,7 +92,11 @@ module Karafka
       end

       # UI cache to improve performance of views that reuse states that are not often changed
-      setting :cache, default: Ui::Lib::TtlCache.new(
+      setting :cache, default: Ui::Lib::TtlCache.new(
+        # Use the TTL for internal cache in prod but invalidate quickly in other environments,
+        # as for example in development things may change frequently
+        Karafka.env.production? ? 60_000 * 5 : 5_000
+      )

       # Should we display internal topics of Kafka. The once starting with `__`
       # By default we do not display them as they are not usable from regular users perspective
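Note on the hunk above: since 0.7.2 the Web UI cache TTL depends on the environment (5 minutes in production, 5 seconds elsewhere). As a rough sketch only, an application could tune this further from its own `karafka.rb`; the `config.ui.cache` path and the 1-second development TTL below are assumptions for illustration, not something this release changes.

```ruby
# A rough sketch, not part of this diff: overriding the Web UI cache TTL from an
# application's karafka.rb (assumes the setting is exposed as `config.ui.cache`).
Karafka::Web.setup do |config|
  # Keep production caching at 5 minutes but drop to 1 second locally so UI
  # state changes show up almost immediately during development
  ttl_ms = Karafka.env.production? ? 60_000 * 5 : 1_000
  config.ui.cache = Karafka::Web::Ui::Lib::TtlCache.new(ttl_ms)
end
```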
data/lib/karafka/web/processing/consumers/aggregators/metrics.rb
CHANGED
@@ -106,17 +106,17 @@ module Karafka
             .reject(&:negative?)

           lags_stored = partitions_data
-            .map { |p_details| p_details.fetch(:lag_stored) }
+            .map { |p_details| p_details.fetch(:lag_stored, -1) }
             .reject(&:negative?)

           offsets_hi = partitions_data
-            .map { |p_details| p_details.fetch(:hi_offset) }
+            .map { |p_details| p_details.fetch(:hi_offset, -1) }
             .reject(&:negative?)

           # Last stable offsets freeze durations - we pick the max freeze to indicate
           # the longest open transaction that potentially may be hanging
           ls_offsets_fd = partitions_data
-            .map { |p_details| p_details.fetch(:ls_offset_fd) }
+            .map { |p_details| p_details.fetch(:ls_offset_fd, 0) }
             .reject(&:negative?)

           # If there is no lag that would not be negative, it means we did not mark
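The only change in this hunk is switching `fetch(:key)` to `fetch(:key, default)`, so reports produced by an older Web UI that lack these per-partition fields no longer raise `KeyError`; the sentinel defaults are then dropped by the existing `reject(&:negative?)`. A minimal, self-contained illustration of that pattern (the sample data below is made up):

```ruby
# Minimal sketch of the pattern above: a missing per-partition field falls back to a
# sentinel and is then filtered out, instead of raising KeyError mid-aggregation.
partitions_data = [
  { lag_stored: 10, hi_offset: 100 },
  { hi_offset: 120 } # e.g. a report written by an older schema without :lag_stored
]

lags_stored = partitions_data
  .map { |p_details| p_details.fetch(:lag_stored, -1) }
  .reject(&:negative?)

puts lags_stored.sum # => 10, where the strict fetch would have raised KeyError
```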
data/lib/karafka/web/processing/consumers/aggregators/state.rb
CHANGED
@@ -152,8 +152,8 @@ module Karafka
             stats[:listeners] += report_process[:listeners] || 0
             stats[:processes] += 1
             stats[:rss] += report_process[:memory_usage]
-            stats[:lag] += lags.reject(&:negative?).sum
-            stats[:lag_stored] += lags_stored.reject(&:negative?).sum
+            stats[:lag] += lags.compact.reject(&:negative?).sum
+            stats[:lag_stored] += lags_stored.compact.reject(&:negative?).sum
             utilization += report_stats[:utilization]
           end

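The added `compact` covers the other side of the same upgrade race: a `nil` lag coming from a report without the field would previously receive `negative?` and crash state materialization, which is what the 0.7.3 changelog entry refers to. A tiny illustration:

```ruby
# Why the added `compact` matters: nil entries (fields absent in older reports)
# would otherwise blow up on `negative?` while summing lags.
lags = [5, nil, -1, 3]

# Pre-0.7.3 style raises NoMethodError for the nil entry:
#   lags.reject(&:negative?).sum
# 0.7.3 drops nils first, then filters the negative "unknown" sentinels:
puts lags.compact.reject(&:negative?).sum # => 8
```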
data/lib/karafka/web/ui/models/processes.rb
CHANGED
@@ -18,9 +18,10 @@ module Karafka
         # @param state [State] current system state from which we can get processes metadata
         # @return [Array<Process>]
         def active(state)
-
+          messages = fetch_reports(state)
+          messages = squash_processes_data(messages)
+          processes = messages.map(&:payload)
           evict_expired_processes(processes)
-          processes = squash_processes_data(processes)
           processes = sort_processes(processes)

           processes.map { |process_hash| Process.new(process_hash) }
@@ -32,7 +33,13 @@ module Karafka
         # @param state [State]
         # @return [Array<Hash>] array with deserialized processes reports
         def fetch_reports(state)
-
+          processes = state[:processes]
+
+          # Short track when no processes not to run a read when nothing will be given
+          # This allows us to handle a case where we would load 10k of reports for nothing
+          return [] if processes.empty?
+
+          offsets = processes
             .values
             .map { |process| process[:offset] }
             .sort
@@ -46,7 +53,7 @@ module Karafka
            # was bypassed by state changes in the processes
            10_000,
            offsets.first || -1
-          )
+          )
         end

         # Collapses processes data and only keeps the most recent report for give process
@@ -55,7 +62,7 @@ module Karafka
         def squash_processes_data(processes)
           processes
             .reverse
-            .uniq
+            .uniq(&:key)
             .reverse
         end

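Two behavioural changes stand out in this file: `fetch_reports` now short-circuits when the state lists no processes (the "extensive data-poll" fix), and duplicate reports are squashed by message key before any payload is deserialized, so only the newest report per process gets its JSON parsed. A rough sketch of the squash-by-key idea; `Msg` below is a made-up stand-in for a Karafka message, not part of the gem:

```ruby
# Rough sketch of squashing by key: keep only the newest message per key (process)
# without touching payloads. `Msg` is a stand-in for a Karafka message.
Msg = Struct.new(:key, :offset, :raw_payload)

messages = [
  Msg.new('worker-1', 10, '{"old": true}'),
  Msg.new('worker-2', 11, '{"old": true}'),
  Msg.new('worker-1', 12, '{"new": true}')
]

# reverse + uniq(&:key) + reverse keeps the LAST occurrence for each key
# while preserving the relative ordering of the survivors
squashed = messages.reverse.uniq(&:key).reverse

p squashed.map { |m| [m.key, m.offset] }
# => [["worker-2", 11], ["worker-1", 12]]
```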
data/lib/karafka/web/ui/models/status.rb
CHANGED
@@ -134,46 +134,75 @@ module Karafka
         )
       end

-      # @return [Status::Step] Is the initial consumers state present in Kafka
+      # @return [Status::Step] Is the initial consumers state present in Kafka and that they
+      # can be deserialized
       def initial_consumers_state
+        details = { issue_type: :presence }
+
         if replication.success?
-
-
+          begin
+            @current_state ||= Models::ConsumersState.current
+            status = @current_state ? :success : :failure
+          rescue JSON::ParserError
+            status = :failure
+            details[:issue_type] = :deserialization
+          end
         else
           status = :halted
         end

         Step.new(
           status,
-
+          details
         )
       end

-      # @return [Status::Step] Is the initial consumers metrics record present in Kafka
+      # @return [Status::Step] Is the initial consumers metrics record present in Kafka and
+      # that they can be deserialized
       def initial_consumers_metrics
+        details = { issue_type: :presence }
+
         if initial_consumers_state.success?
-
-
+          begin
+            @current_metrics ||= Models::ConsumersMetrics.current
+            status = @current_metrics ? :success : :failure
+          rescue JSON::ParserError
+            status = :failure
+            details[:issue_type] = :deserialization
+          end
         else
           status = :halted
         end

         Step.new(
           status,
-
+          details
         )
       end

-      # @return [Status::Step]
-
-      def live_reporting
+      # @return [Status::Step] could we read and operate on the current processes data (if any)
+      def consumers_reports
         if initial_consumers_metrics.success?
           @processes ||= Models::Processes.active(@current_state)
-          status =
+          status = :success
         else
           status = :halted
         end

+        Step.new(status, nil)
+      rescue JSON::ParserError
+        Step.new(:failure, nil)
+      end
+
+      # @return [Status::Step] Is there at least one active karafka server reporting to the
+      # Web UI
+      def live_reporting
+        status = if consumers_reports.success?
+          @processes.empty? ? :failure : :success
+        else
+          :halted
+        end
+
         Step.new(
           status,
           nil
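The status model now separates "record missing" from "record corrupted" by rescuing `JSON::ParserError` around the reads and storing an `issue_type` in the step details, which the failure partials further down branch on. A standalone sketch of that distinction; `classify_state` is a hypothetical helper for illustration, not karafka-web API:

```ruby
require 'json'

# Hypothetical helper illustrating the missing-vs-corrupted distinction used above.
# `raw` stands for whatever the latest message in the state topic contained.
def classify_state(raw)
  return [:failure, { issue_type: :presence }] if raw.nil?

  JSON.parse(raw)
  [:success, { issue_type: :presence }]
rescue JSON::ParserError
  [:failure, { issue_type: :deserialization }]
end

p classify_state(nil)            # => [:failure, {:issue_type=>:presence}]
p classify_state('{"ok":true}')  # => [:success, {:issue_type=>:presence}]
p classify_state('not-json')     # => [:failure, {:issue_type=>:deserialization}]
```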
data/lib/karafka/web/ui/views/consumers/_consumer.erb
CHANGED
@@ -14,7 +14,11 @@
   <% subscription_group.topics.each do |topic| %>
     <span class="badge bg-secondary badge-topic" title="Consumer group: <%= consumer_group.id %>">
       <%= topic.name %>:
-
+      <% if topic.partitions.size > 10 %>
+        <%= "#{topic.partitions.first(10).map(&:id).join(',')}..." %>
+      <% else %>
+        <%= topic.partitions.map(&:id).join(',') %>
+      <% end %>
     </span>
   <% end %>
 <% end %>
data/lib/karafka/web/ui/views/dashboard/index.erb
CHANGED
@@ -29,7 +29,7 @@

 <div class="tab-content">
   <div class="tab-pane show active" id="messages" role="tabpanel">
-    <% data = @aggregated_charts.with(:messages) %>
+    <% data = @aggregated_charts.with(:messages, :errors) %>
     <%== partial 'shared/chart', locals: { data: data, id: 'messages' } %>
   </div>

data/lib/karafka/web/ui/views/status/failures/_consumers_reports.erb
ADDED
@@ -0,0 +1,11 @@
+<p>
+  At least one consumer report appears to be corrupted.
+</p>
+
+<p>
+  This issue typically arises when invalid messages have been sent to the Karafka consumers' reports topic or when the topic has been populated with data from a newer Karafka Web UI without updating it.
+</p>
+
+<p class="mb-0">
+  To resolve this, please first attempt to upgrade the Karafka Web UI. If the problem persists, execute <code>bundle exec karafka-web reset</code> to reset the Web UI.
+</p>
data/lib/karafka/web/ui/views/status/failures/_initial_consumers_metrics.erb
CHANGED
@@ -1,11 +1,25 @@
-
-
-
+<% if details[:issue_type] == :deserialization %>
+  <p>
+    The initial state of the consumers metrics appears to be corrupted.
+  </p>

-<p>
-
-</p>
+  <p>
+    This issue typically arises when invalid messages have been sent to the Karafka consumers' metrics topic or when the topic has been populated with data from a newer Karafka Web UI without updating it.
+  </p>

-<p class="mb-0">
-
-</p>
+  <p class="mb-0">
+    To resolve this, please first attempt to upgrade the Karafka Web UI. If the problem persists, execute <code>bundle exec karafka-web reset</code> to reset the Web UI.
+  </p>
+<% else %>
+  <p>
+    The initial consumers metrics for the Web UI were not created.
+  </p>
+
+  <p>
+    It means that the <code>bundle exec karafka-web migrate</code> was not executed or failed.
+  </p>
+
+  <p class="mb-0">
+    To fix this, you need to ensure that the <code>bundle exec karafka-web migrate</code> runs successfully.
+  </p>
+<% end %>
data/lib/karafka/web/ui/views/status/failures/_initial_consumers_state.erb
CHANGED
@@ -1,11 +1,25 @@
-
-
-
+<% if details[:issue_type] == :deserialization %>
+  <p>
+    The initial state of the consumers appears to be corrupted.
+  </p>

-<p>
-
-</p>
+  <p>
+    This issue typically arises when invalid messages have been sent to the Karafka consumers' state topic or when the topic has been populated with data from a newer Karafka Web UI without updating it.
+  </p>

-<p class="mb-0">
-
-</p>
+  <p class="mb-0">
+    To resolve this, please first attempt to upgrade the Karafka Web UI. If the problem persists, execute <code>bundle exec karafka-web reset</code> to reset the Web UI.
+  </p>
+<% else %>
+  <p>
+    The initial consumers state for the Web UI was not created.
+  </p>
+
+  <p>
+    It means that the <code>bundle exec karafka-web migrate</code> was not executed or failed.
+  </p>
+
+  <p class="mb-0">
+    To fix this, you need to ensure that the <code>bundle exec karafka-web migrate</code> runs successfully.
+  </p>
+<% end %>
data/lib/karafka/web/ui/views/status/show.erb
CHANGED
@@ -82,7 +82,7 @@
     partial(
       "status/#{@status.initial_consumers_state.to_s}",
       locals: {
-        title: 'Initial consumers state
+        title: 'Initial consumers state',
         description: partial(
           'status/failures/initial_consumers_state',
           locals: {
@@ -97,7 +97,7 @@
     partial(
       "status/#{@status.initial_consumers_metrics.to_s}",
       locals: {
-        title: 'Initial consumers metrics
+        title: 'Initial consumers metrics',
         description: partial(
           'status/failures/initial_consumers_metrics',
           locals: {
@@ -108,6 +108,21 @@
     )
   %>

+  <%==
+    partial(
+      "status/#{@status.consumers_reports.to_s}",
+      locals: {
+        title: 'Consumers reports',
+        description: partial(
+          'status/failures/consumers_reports',
+          locals: {
+            details: @status.consumers_reports.details
+          }
+        )
+      }
+    )
+  %>
+
   <%==
     partial(
       "status/#{@status.live_reporting.to_s}",
data/lib/karafka/web/version.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-web
 version: !ruby/object:Gem::Version
-  version: 0.7.
+  version: 0.7.3
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2023-09-
+date: 2023-09-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: erubi
@@ -422,6 +422,7 @@ files:
 - lib/karafka/web/ui/views/status/_success.erb
 - lib/karafka/web/ui/views/status/_warning.erb
 - lib/karafka/web/ui/views/status/failures/_connection.erb
+- lib/karafka/web/ui/views/status/failures/_consumers_reports.erb
 - lib/karafka/web/ui/views/status/failures/_consumers_reports_schema_state.erb
 - lib/karafka/web/ui/views/status/failures/_enabled.erb
 - lib/karafka/web/ui/views/status/failures/_initial_consumers_metrics.erb
metadata.gz.sig
CHANGED
Binary file