karafka-web 0.11.3 → 0.11.4
This diff covers publicly available package versions released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as published in the public registry.
- checksums.yaml +4 -4
- data/CHANGELOG.md +14 -0
- data/Gemfile +0 -2
- data/Gemfile.lock +45 -32
- data/bin/integrations +44 -0
- data/bin/rspecs +6 -2
- data/docker-compose.yml +1 -1
- data/karafka-web.gemspec +1 -1
- data/lib/karafka/web/app.rb +2 -3
- data/lib/karafka/web/cli/help.rb +1 -1
- data/lib/karafka/web/config.rb +8 -0
- data/lib/karafka/web/contracts/base.rb +2 -4
- data/lib/karafka/web/contracts/config.rb +5 -5
- data/lib/karafka/web/management/actions/enable.rb +14 -1
- data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb +38 -0
- data/lib/karafka/web/management/migrator.rb +3 -2
- data/lib/karafka/web/pro/commanding/commands/base.rb +1 -1
- data/lib/karafka/web/pro/commanding/contracts/config.rb +2 -4
- data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb +2 -3
- data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb +1 -2
- data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb +1 -3
- data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb +2 -4
- data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb +2 -4
- data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb +3 -5
- data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb +3 -5
- data/lib/karafka/web/pro/ui/lib/search/runner.rb +14 -1
- data/lib/karafka/web/pro/ui/routes/errors.rb +3 -3
- data/lib/karafka/web/pro/ui/routes/explorer.rb +3 -3
- data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb +9 -0
- data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb +41 -0
- data/lib/karafka/web/pro/ui/views/health/changes.erb +12 -13
- data/lib/karafka/web/pro/ui/views/health/lags.erb +12 -13
- data/lib/karafka/web/pro/ui/views/health/offsets.erb +12 -13
- data/lib/karafka/web/pro/ui/views/health/overview.erb +15 -16
- data/lib/karafka/web/processing/consumer.rb +8 -3
- data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +1 -1
- data/lib/karafka/web/processing/consumers/aggregators/state.rb +5 -5
- data/lib/karafka/web/processing/consumers/contracts/state.rb +1 -1
- data/lib/karafka/web/processing/consumers/reports_migrator.rb +49 -0
- data/lib/karafka/web/processing/time_series_tracker.rb +1 -1
- data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
- data/lib/karafka/web/tracking/consumers/contracts/topic.rb +1 -0
- data/lib/karafka/web/tracking/consumers/listeners/errors.rb +2 -1
- data/lib/karafka/web/tracking/consumers/listeners/processing.rb +46 -0
- data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +1 -0
- data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb +20 -0
- data/lib/karafka/web/tracking/consumers/sampler/enrichers/consumer_groups.rb +116 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/base.rb +20 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb +113 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/jobs.rb +60 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/network.rb +48 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/os.rb +206 -0
- data/lib/karafka/web/tracking/consumers/sampler/metrics/server.rb +33 -0
- data/lib/karafka/web/tracking/consumers/sampler.rb +34 -215
- data/lib/karafka/web/tracking/contracts/error.rb +1 -0
- data/lib/karafka/web/tracking/helpers/ttls/hash.rb +2 -3
- data/lib/karafka/web/tracking/helpers/ttls/stats.rb +1 -2
- data/lib/karafka/web/tracking/producers/listeners/errors.rb +2 -1
- data/lib/karafka/web/tracking/ui/errors.rb +76 -0
- data/lib/karafka/web/ui/base.rb +19 -9
- data/lib/karafka/web/ui/controllers/requests/execution_wrapper.rb +2 -4
- data/lib/karafka/web/ui/controllers/requests/params.rb +1 -1
- data/lib/karafka/web/ui/helpers/application_helper.rb +1 -1
- data/lib/karafka/web/ui/helpers/paths_helper.rb +6 -9
- data/lib/karafka/web/ui/lib/sorter.rb +1 -1
- data/lib/karafka/web/ui/models/health.rb +14 -9
- data/lib/karafka/web/ui/models/jobs.rb +4 -6
- data/lib/karafka/web/ui/models/message.rb +7 -8
- data/lib/karafka/web/ui/models/metrics/aggregated.rb +4 -4
- data/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +1 -2
- data/lib/karafka/web/ui/models/metrics/charts/topics.rb +2 -2
- data/lib/karafka/web/ui/models/metrics/topics.rb +3 -4
- data/lib/karafka/web/ui/models/recurring_tasks/schedule.rb +1 -1
- data/lib/karafka/web/ui/public/javascripts/application.min.js.gz +0 -0
- data/lib/karafka/web/ui/public/stylesheets/application.min.css +3 -0
- data/lib/karafka/web/ui/public/stylesheets/application.min.css.br +0 -0
- data/lib/karafka/web/ui/public/stylesheets/application.min.css.gz +0 -0
- data/lib/karafka/web/ui/routes/errors.rb +3 -3
- data/lib/karafka/web/ui/views/shared/exceptions/unhandled_error.erb +42 -0
- data/lib/karafka/web/version.rb +1 -1
- data/lib/karafka/web.rb +2 -3
- data/package-lock.json +180 -236
- data/package.json +3 -3
- data/renovate.json +13 -0
- metadata +18 -3

data/lib/karafka/web/pro/ui/lib/search/runner.rb

@@ -77,8 +77,21 @@ module Karafka
 
       private
 
+      # Dynamically defines accessor methods for each search criteria field.
+      # For example, if SEARCH_CRITERIA_FIELDS includes :topic, this creates:
+      #   def topic
+      #     @topic ||= @search_criteria.fetch(:topic)
+      #   end
       SEARCH_CRITERIA_FIELDS.each do |q|
+        # Example for q = :topic:
+        #   def topic
+        #     @topic ||= @search_criteria.fetch(:topic)
+        #   end
         class_eval <<~RUBY, __FILE__, __LINE__ + 1
+          # def topic
+          #   @topic ||= @search_criteria.fetch(:topic)
+          # end
+
           def #{q}
             @#{q} ||= @search_criteria.fetch(:#{q})
           end

@@ -122,7 +135,7 @@ module Karafka
         end
 
         iterator_query = {
-          @topic => partitions_to_search.
+          @topic => partitions_to_search.to_h { |par| [par, start] }
         }
 
         @iterator = Karafka::Pro::Iterator.new(iterator_query)
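
For context on the `to_h` change above: it builds the per-partition starting-offset map for the iterator in a single pass. A minimal, standalone sketch (topic name, partitions and offset are made up for illustration):

```ruby
# Illustrative values only; the real ones come from the search form and topic metadata.
partitions_to_search = [0, 1, 2]
start = 1_000

iterator_query = { 'events' => partitions_to_search.to_h { |par| [par, start] } }
# => { "events" => { 0 => 1000, 1 => 1000, 2 => 1000 } }
```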

data/lib/karafka/web/pro/ui/routes/errors.rb

@@ -15,10 +15,10 @@ module Karafka
           controller = build(Controllers::ErrorsController)
 
           r.get :partition_id, Integer do |partition_id, offset|
-            if params.current_offset
-              r.redirect root_path('errors', partition_id, params.current_offset)
-            else
+            if params.current_offset == -1
               controller.show(partition_id, offset)
+            else
+              r.redirect root_path('errors', partition_id, params.current_offset)
             end
           end
 
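
The reordered branch reads as: when `current_offset` is the `-1` sentinel (no alternative offset resolved), render the error in place; otherwise redirect to the resolved offset. A plain-Ruby sketch of that decision with hypothetical values, not part of the gem's API:

```ruby
# Hypothetical helper mirroring the branching above, for illustration only.
def error_action(current_offset, partition_id, offset)
  if current_offset == -1
    [:show, partition_id, offset]
  else
    [:redirect, partition_id, current_offset]
  end
end

error_action(-1, 0, 42) # => [:show, 0, 42]
error_action(57, 0, 42) # => [:redirect, 0, 57]
```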

data/lib/karafka/web/pro/ui/routes/explorer.rb

@@ -81,14 +81,14 @@ module Karafka
             r.get String, :partition_id, Integer do |topic_id, partition_id, offset|
               # If when viewing given message we get an offset of different message, we should
               # redirect there. This allows us to support pagination with the current engine
-              if params.current_offset
+              if params.current_offset == -1
+                controller.show(topic_id, partition_id, offset)
+              else
                 r.redirect explorer_topics_path(
                   topic_id,
                   partition_id,
                   params.current_offset
                 )
-              else
-                controller.show(topic_id, partition_id, offset)
               end
             end
 

data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb (new file)

@@ -0,0 +1,9 @@
+<%# This code is part of Karafka Pro, a commercial component not licensed under LGPL. %>
+<%# See LICENSE for details. %>
+
+<tr class="text-muted">
+  <td><%= partition_id %></td>
+  <td colspan="<%= colspan %>" class="text-center">
+    No data available. Partition may not be assigned or is not being consumed by any active process.
+  </td>
+</tr>

data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb (new file)

@@ -0,0 +1,41 @@
+<%# This code is part of Karafka Pro, a commercial component not licensed under LGPL. %>
+<%# See LICENSE for details. %>
+
+<%#
+  Renders partitions with fallback for missing partition data
+
+  Required locals:
+  - topic_details: Hash containing :partitions and :partitions_count
+  - topic_name: Name of the topic
+  - partition_partial: Path to the partition partial to render
+  - colspan: Number of columns for the "no data" message
+%>
+
+<% partitions = topic_details[:partitions] %>
+
+<% partitions.each do |partition_id, details| %>
+  <%==
+    partial(
+      partition_partial,
+      locals: {
+        topic_name: topic_name,
+        partition_id: partition_id,
+        details: details
+      }
+    )
+  %>
+<% end %>
+
+<% topic_details[:partitions_count].times do |partition_id| %>
+  <% next if partitions.include?(partition_id) %>
+
+  <%==
+    partial(
+      'health/no_partition_data',
+      locals: {
+        partition_id: partition_id,
+        colspan: colspan
+      }
+    )
+  %>
+<% end %>
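
Stripped of ERB, the fallback logic of this partial boils down to: render a row for every partition with reported data, then a "no data" row for every remaining partition id up to the partition count. A rough Ruby sketch with made-up data:

```ruby
# Made-up topic details; only the shape (partitions hash + partitions_count) matters.
topic_details = { partitions: { 0 => { lag: 5 } }, partitions_count: 3 }

rows = topic_details[:partitions].map do |partition_id, details|
  [:partition_row, partition_id, details]
end

topic_details[:partitions_count].times do |partition_id|
  next if topic_details[:partitions].include?(partition_id)

  rows << [:no_partition_data_row, partition_id]
end

rows
# => [[:partition_row, 0, { lag: 5 }], [:no_partition_data_row, 1], [:no_partition_data_row, 2]]
```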

data/lib/karafka/web/pro/ui/views/health/changes.erb

@@ -14,7 +14,7 @@
   <h2 class="h2"><%= cg_name %></h2>
 
   <% topics = details[:topics] %>
-  <% topics.each_with_index do |(topic_name,
+  <% topics.each_with_index do |(topic_name, topic_details), index| %>
     <div class="data-table-wrapper">
       <table class="data-table">
         <thead>

@@ -38,18 +38,17 @@
           </tr>
         </thead>
         <tbody>
-
-
-
-
-
-
-
-
-
-
-
-          <% end %>
+          <%==
+            partial(
+              'health/partitions_with_fallback',
+              locals: {
+                topic_details: topic_details,
+                topic_name: topic_name,
+                partition_partial: 'health/partition_times',
+                colspan: 5
+              }
+            )
+          %>
         </tbody>
       </table>
 

data/lib/karafka/web/pro/ui/views/health/lags.erb

@@ -14,7 +14,7 @@
   <h2 class="h2"><%= cg_name %></h2>
 
   <% topics = details[:topics] %>
-  <% topics.each_with_index do |(topic_name,
+  <% topics.each_with_index do |(topic_name, topic_details), index| %>
     <div class="data-table-wrapper">
       <table class="data-table">
         <thead>

@@ -39,18 +39,17 @@
           </tr>
         </thead>
         <tbody>
-
-
-
-
-
-
-
-
-
-
-
-          <% end %>
+          <%==
+            partial(
+              'health/partitions_with_fallback',
+              locals: {
+                topic_details: topic_details,
+                topic_name: topic_name,
+                partition_partial: 'health/partition_lags',
+                colspan: 5
+              }
+            )
+          %>
         </tbody>
       </table>
 

data/lib/karafka/web/pro/ui/views/health/offsets.erb

@@ -14,7 +14,7 @@
   <h2 class="h2"><%= cg_name %></h2>
 
   <% topics = details[:topics] %>
-  <% topics.each_with_index do |(topic_name,
+  <% topics.each_with_index do |(topic_name, topic_details), index| %>
     <div class="data-table-wrapper">
       <table class="data-table">
         <thead>

@@ -42,18 +42,17 @@
           </tr>
         </thead>
         <tbody>
-
-
-
-
-
-
-
-
-
-
-
-          <% end %>
+          <%==
+            partial(
+              'health/partitions_with_fallback',
+              locals: {
+                topic_details: topic_details,
+                topic_name: topic_name,
+                partition_partial: 'health/partition_offset',
+                colspan: 11
+              }
+            )
+          %>
         </tbody>
       </table>
 

data/lib/karafka/web/pro/ui/views/health/overview.erb

@@ -10,11 +10,11 @@
   <%== partial 'health/no_data' %>
 <% end %>
 
-<% @stats.each_with_index do |(cg_name,
+<% @stats.each_with_index do |(cg_name, cg_details), index| %>
   <h2 class="h2"><%= cg_name %></h2>
 
-  <% topics =
-  <% topics.each_with_index do |(topic_name,
+  <% topics = cg_details[:topics] %>
+  <% topics.each_with_index do |(topic_name, topic_details), index| %>
     <div class="data-table-wrapper">
       <table class="data-table">
         <thead>

@@ -50,22 +50,21 @@
           </tr>
         </thead>
         <tbody>
-
-
-
-
-
-
-
-
-
-
-
-          <% end %>
+          <%==
+            partial(
+              'health/partitions_with_fallback',
+              locals: {
+                topic_details: topic_details,
+                topic_name: topic_name,
+                partition_partial: 'health/partition',
+                colspan: 7
+              }
+            )
+          %>
         </tbody>
       </table>
 
-      <%== partial 'health/table_metadata', locals: { details:
+      <%== partial 'health/table_metadata', locals: { details: cg_details } %>
     </div>
   <% end %>
 <% end %>

data/lib/karafka/web/processing/consumer.rb

@@ -19,6 +19,8 @@ module Karafka
 
         # If there is even one incompatible message, we need to stop
         consumers_messages.each do |message|
+          report = message.payload
+
           case @reports_schema_manager.call(message)
           when :current
             true

@@ -35,7 +37,9 @@ module Karafka
           # since in most cases this is intermediate due to rolling upgrades, this should not
           # significantly impact the state tracking and processing.
           when :older
-
+            # Migrate old report format to current schema expectations (in-place)
+            @reports_migrator.call(report)
+            @state_aggregator.add_state(report, message.offset)
 
             next
           else

@@ -44,8 +48,8 @@ module Karafka
 
           # We need to run the aggregations on each message in order to compensate for
           # potential lags.
-          @state_aggregator.add(
-          @metrics_aggregator.add_report(
+          @state_aggregator.add(report, message.offset)
+          @metrics_aggregator.add_report(report)
           @metrics_aggregator.add_stats(@state_aggregator.stats)
           # Indicates that we had at least one report we used to enrich data
          # If there were no state changes, there is no reason to flush data. This can occur

@@ -86,6 +90,7 @@ module Karafka
        @flush_interval = ::Karafka::Web.config.processing.interval
 
        @reports_schema_manager = Consumers::SchemaManager.new
+        @reports_migrator = Consumers::ReportsMigrator.new
        @state_aggregator = Consumers::Aggregators::State.new(@reports_schema_manager)
        @state_contract = Consumers::Contracts::State.new
 

data/lib/karafka/web/processing/consumers/aggregators/metrics.rb

@@ -68,7 +68,7 @@ module Karafka
          # care about what a stopped process was doing and we can also remove it from active
          # reports.
          def evict_expired_processes
-            max_ttl = @aggregated_from - ::Karafka::Web.config.ttl / 1_000
+            max_ttl = @aggregated_from - (::Karafka::Web.config.ttl / 1_000)
 
            @active_reports.delete_if do |_id, report|
              report[:dispatched_at] < max_ttl || report[:process][:status] == 'stopped'
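
The added parentheses (here and in the state aggregator below) are purely for readability; `/` already binds tighter than `-`, so the computed cut-off is unchanged. A quick check, assuming a millisecond TTL as the config name suggests:

```ruby
# Assumed values; Karafka::Web.config.ttl is expressed in milliseconds.
aggregated_from = 1_700_000_000.0 # seconds
ttl = 60_000                      # ms

(aggregated_from - ttl / 1_000) == (aggregated_from - (ttl / 1_000)) # => true
```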

data/lib/karafka/web/processing/consumers/aggregators/state.rb

@@ -107,7 +107,7 @@ module Karafka
          # stopped processes for extra time within the ttl limitations. This makes tracking of
          # things from UX perspective nicer.
          def evict_expired_processes
-            max_ttl = @aggregated_from - ::Karafka::Web.config.ttl / 1_000
+            max_ttl = @aggregated_from - (::Karafka::Web.config.ttl / 1_000)
 
            state[:processes].delete_if do |_id, details|
              details[:dispatched_at] < max_ttl

@@ -169,13 +169,13 @@ module Karafka
          end
 
          # @param report [Hash]
-
+          # @param block [Proc]
+          # @yieldparam partition_stats [Hash] statistics for a single partition
+          def iterate_partitions(report, &block)
            report[:consumer_groups].each_value do |consumer_group|
              consumer_group[:subscription_groups].each_value do |subscription_group|
                subscription_group[:topics].each_value do |topic|
-                  topic[:partitions].each_value
-                  yield(partition)
-                  end
+                  topic[:partitions].each_value(&block)
                end
              end
            end
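
The `each_value(&block)` form above forwards the caller's block straight to the hash iteration, which is what the removed explicit `yield` loop did. A self-contained sketch with fabricated data:

```ruby
# Standalone equivalent of the new iteration style; data is made up.
partitions = { 0 => { lag: 1 }, 1 => { lag: 2 } }

def iterate_partitions(partitions, &block)
  partitions.each_value(&block)
end

lags = []
iterate_partitions(partitions) { |partition_stats| lags << partition_stats[:lag] }
lags # => [1, 2]
```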

data/lib/karafka/web/processing/consumers/reports_migrator.rb (new file)

@@ -0,0 +1,49 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Processing
+      module Consumers
+        # Migrator for consumer reports that applies per-message transformations
+        #
+        # Unlike the Management::Migrator which operates on aggregated states,
+        # this migrator runs on each individual consumer report as it's processed.
+        #
+        # This is necessary because:
+        # - Reports are continuously published during upgrades
+        # - Reports are short-lived (TTL-based) and don't need persistent migrations
+        # - Old schema reports may remain in Kafka topics for extended periods
+        #
+        # Migrations are lightweight transformations that normalize old report formats
+        # to work with current processing code. Migrations are stored in
+        # lib/karafka/web/management/migrations/consumers_reports/ alongside other migrations.
+        class ReportsMigrator
+          # Applies all applicable migrations to a consumer report
+          #
+          # @param report [Hash] deserialized consumer report
+          # @return [Hash] the same report object, potentially modified in-place
+          def call(report)
+            # Apply each applicable migration in order
+            migrations.each do |migration_class|
+              next unless migration_class.applicable?(report[:schema_version])
+
+              migration_class.new.migrate(report)
+            end
+
+            report
+          end
+
+          private
+
+          # Lazy-initialized cache of report migrations
+          # Only computed when first needed to avoid memory overhead if no old reports exist
+          def migrations
+            @migrations ||= Management::Migrations::Base
+              .sorted_descendants
+              .select { |migration_class| migration_class.type == :consumers_reports }
+          end
+        end
+      end
+    end
+  end
+end
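
Based only on the calls visible in this file and in the consumer hunk above, usage looks roughly like the sketch below; the report contents and schema version are invented for illustration.

```ruby
# Hypothetical old-format report; real reports carry many more fields.
old_report = { schema_version: '1.4.0', process: { name: 'worker-1:1:1' } }

migrator = Karafka::Web::Processing::Consumers::ReportsMigrator.new

migrated = migrator.call(old_report)
migrated.equal?(old_report) # => true - applicable migrations mutate the report in place
```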

data/lib/karafka/web/processing/time_series_tracker.rb

@@ -53,7 +53,7 @@ module Karafka
      # @param existing [Hash] existing historical metrics (may be empty for the first state)
      def initialize(existing)
        # Builds an empty structure for potential time ranges we are interested in
-        @historicals = TIME_RANGES.keys.
+        @historicals = TIME_RANGES.keys.to_h { |name| [name, []] }
 
        # Fetch the existing (if any) historical values that we already have
        import_existing(existing)
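
The new line seeds `@historicals` with one empty series per configured time range. Illustratively (the actual `TIME_RANGES` keys may differ):

```ruby
# Assumed range names; only the to_h shape is the point here.
time_range_names = %i[days hours minutes seconds]

time_range_names.to_h { |name| [name, []] }
# => { days: [], hours: [], minutes: [], seconds: [] }
```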

data/lib/karafka/web/tracking/consumers/listeners/errors.rb

@@ -10,7 +10,7 @@ module Karafka
          include Tracking::Helpers::ErrorInfo
 
          # Schema used by consumers error reporting
-          SCHEMA_VERSION = '1.
+          SCHEMA_VERSION = '1.2.0'
 
          private_constant :SCHEMA_VERSION
 

@@ -38,6 +38,7 @@ module Karafka
            track do |sampler|
              sampler.errors << {
                schema_version: SCHEMA_VERSION,
+                id: SecureRandom.uuid,
                type: event[:type],
                error_class: error_class,
                error_message: error_message,
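
With the schema bump to 1.2.0, every tracked error now also carries a unique `id`. A sketch of the resulting record shape (remaining fields abbreviated; the type and message values are illustrative, not taken from the gem):

```ruby
require 'securerandom'

error_record = {
  schema_version: '1.2.0',
  id: SecureRandom.uuid,          # new in 1.2.0: stable identifier per error occurrence
  type: 'consumer.consume.error', # illustrative type value
  error_class: 'StandardError',
  error_message: 'boom'
}
```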

data/lib/karafka/web/tracking/consumers/listeners/processing.rb

@@ -25,7 +25,29 @@ module Karafka
            eofed
          ].each do |action|
            # Tracks the job that is going to be scheduled so we can also display pending jobs
+            # Dynamically creates methods like:
+            #   def on_consumer_before_schedule_consume(event)
+            #   def on_consumer_before_schedule_revoked(event)
+            #   etc.
+            # Example for action = :consume:
+            #   # @param event [Karafka::Core::Monitoring::Event]
+            #   def on_consumer_before_schedule_consume(event)
+            #     consumer = event.payload[:caller]
+            #     jid = job_id(consumer, 'consume')
+            #     job_details = job_details(consumer, 'consume')
+            #     job_details[:status] = 'pending'
+            #     track { |sampler| sampler.jobs[jid] = job_details }
+            #   end
            class_eval <<~RUBY, __FILE__, __LINE__ + 1
+              # @param event [Karafka::Core::Monitoring::Event]
+              # def on_consumer_before_schedule_consume(event)
+              #   consumer = event.payload[:caller]
+              #   jid = job_id(consumer, 'consume')
+              #   job_details = job_details(consumer, 'consume')
+              #   job_details[:status] = 'pending'
+              #   track { |sampler| sampler.jobs[jid] = job_details }
+              # end
+
              # @param event [Karafka::Core::Monitoring::Event]
              def on_consumer_before_schedule_#{action}(event)
                consumer = event.payload[:caller]

@@ -115,7 +137,31 @@ module Karafka
            [:tick, :ticked, 'tick'],
            [:eof, :eofed, 'eofed']
          ].each do |pre, post, action|
+            # Dynamically creates methods like:
+            #   def on_consumer_revoke(event)
+            #   def on_consumer_shutting_down(event)
+            #   etc.
+            # Example for pre = :revoke, action = :revoked:
+            #   # Stores this job details
+            #   #
+            #   # @param event [Karafka::Core::Monitoring::Event]
+            #   def on_consumer_revoke(event)
+            #     consumer = event.payload[:caller]
+            #     jid = job_id(consumer, 'revoked')
+            #     job_details = job_details(consumer, 'revoked')
+            #     track { |sampler| sampler.counters[:jobs] += 1; sampler.jobs[jid] = job_details }
+            #   end
            class_eval <<~METHOD, __FILE__, __LINE__ + 1
+              # Stores this job details
+              #
+              # @param event [Karafka::Core::Monitoring::Event]
+              # def on_consumer_revoke(event)
+              #   consumer = event.payload[:caller]
+              #   jid = job_id(consumer, 'revoked')
+              #   job_details = job_details(consumer, 'revoked')
+              #   track { |sampler| sampler.counters[:jobs] += 1; sampler.jobs[jid] = job_details }
+              # end
+
              # Stores this job details
              #
              # @param event [Karafka::Core::Monitoring::Event]

data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb (new file)

@@ -0,0 +1,20 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Tracking
+      module Consumers
+        class Sampler < Tracking::Sampler
+          # Namespace for data enrichers that augment sampler data with additional details
+          module Enrichers
+            # Base class for data enrichers
+            # This is an abstract base class that can be extended to create custom enrichers
+            class Base
+              # Placeholder for future common functionality
+            end
+          end
+        end
+      end
+    end
+  end
+end