karafka-web 0.7.6 → 0.7.8
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +2 -2
- data/CHANGELOG.md +11 -0
- data/Gemfile.lock +27 -17
- data/README.md +3 -0
- data/bin/karafka-web +2 -1
- data/docker-compose.yml +2 -0
- data/karafka-web.gemspec +2 -2
- data/lib/karafka/web/cli/base.rb +35 -0
- data/lib/karafka/web/cli/help.rb +25 -0
- data/lib/karafka/web/cli/install.rb +27 -0
- data/lib/karafka/web/cli/migrate.rb +33 -0
- data/lib/karafka/web/cli/reset.rb +27 -0
- data/lib/karafka/web/cli/uninstall.rb +17 -0
- data/lib/karafka/web/cli.rb +9 -76
- data/lib/karafka/web/config.rb +36 -2
- data/lib/karafka/web/processing/consumers/metrics.rb +4 -1
- data/lib/karafka/web/processing/consumers/state.rb +4 -1
- data/lib/karafka/web/tracking/consumers/reporter.rb +21 -30
- data/lib/karafka/web/tracking/producers/reporter.rb +6 -4
- data/lib/karafka/web/tracking/producers/sampler.rb +3 -0
- data/lib/karafka/web/tracking/reporter.rb +25 -0
- data/lib/karafka/web/tracking/scheduler.rb +46 -0
- data/lib/karafka/web/ui/lib/admin.rb +56 -0
- data/lib/karafka/web/ui/models/cluster_info.rb +2 -2
- data/lib/karafka/web/ui/models/consumers_metrics.rb +4 -2
- data/lib/karafka/web/ui/models/consumers_state.rb +4 -2
- data/lib/karafka/web/ui/models/counters.rb +1 -1
- data/lib/karafka/web/ui/models/message.rb +2 -2
- data/lib/karafka/web/ui/models/processes.rb +1 -1
- data/lib/karafka/web/ui/models/watermark_offsets.rb +1 -1
- data/lib/karafka/web/ui/pro/controllers/explorer.rb +16 -5
- data/lib/karafka/web/ui/pro/views/errors/_error.erb +1 -1
- data/lib/karafka/web/ui/pro/views/explorer/_message.erb +1 -1
- data/lib/karafka/web/ui/public/javascripts/live_poll.js +1 -1
- data/lib/karafka/web/ui/views/errors/_error.erb +1 -1
- data/lib/karafka/web/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +14 -6
- metadata.gz.sig +0 -0
- data/lib/karafka/web/tracking/producers/listeners/reporter.rb +0 -21
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2d46244304483184c3be59a0686497a9f948e9b666dccf00961b560e60db4655
+  data.tar.gz: 329686ce548063787fe06290d67650793ebef6c014f43b753780ad120671a8b1
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 5a18a3c23f63382c6c41dc5ac7f82f893bc14a55e7ec68af9cf16da0f47a45aee77ca9626973570cb9d6abe9355e0712f117424e00619b7b0198748c3bfbeb55
+  data.tar.gz: f639bd90ca21c09fcd798fc289079cafeb9c752d261ecf88df9762e347df738bf260c658f0f15b3b08c3708daf091217c0159d85ac975380463a65a45d876d0b
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
@@ -68,7 +68,7 @@ jobs:
       strategy:
         fail-fast: false
       steps:
-        - uses: actions/checkout@
+        - uses: actions/checkout@v4
          with:
            fetch-depth: 0

@@ -89,7 +89,7 @@ jobs:
       strategy:
         fail-fast: false
       steps:
-        - uses: actions/checkout@
+        - uses: actions/checkout@v4
          with:
            fetch-depth: 0
        - name: Run Coditsu
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,16 @@
 # Karafka Web changelog

+## 0.7.8 (2023-10-24)
+- [Enhancement] Support transactional producer usage with Web UI.
+- [Fix] Fix a bug where critical errors (like `IRB::Abort`) would not abort the ongoing transaction.
+- [Fix] Prevent a scenario where an ongoing transactional producer would have stats emitted and an error that could not have been dispatched because of the transaction, creating a dead-lock.
+- [Fix] Make sure that the `recent` displays the most recent non-compacted, non-system message.
+- [Fix] Improve the `recent` message display to compensate for aborted transactions.
+- [Fix] Fix `ReferenceError: response is not defined` that occurs when Web UI returns refresh non 200.
+
+## 0.7.7 (2023-10-20)
+- [Fix] Remove `thor` as a CLI engine due to breaking changes.
+
 ## 0.7.6 (2023-10-10)
 - [Fix] Fix nested SASL/SAML data visible in the routing details (#173)
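For context on the transactional producer enhancement listed above, this is roughly what a transactional WaterDrop producer looks like on the application side. It is a sketch only: the broker address and `transactional.id` values are illustrative assumptions, not values taken from this gem.

    # Sketch: a transactional WaterDrop producer that the 0.7.8 Web UI
    # reporters are now expected to coexist with
    require 'waterdrop'

    producer = WaterDrop::Producer.new do |config|
      config.kafka = {
        'bootstrap.servers': 'localhost:9092', # assumed broker address
        'transactional.id': 'my-app-producer'  # enables transactions (assumed id)
      }
    end

    # Messages produced inside the block are committed or aborted atomically
    producer.transaction do
      producer.produce_async(topic: 'events', payload: '{}')
    end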
data/Gemfile.lock
CHANGED
@@ -1,9 +1,9 @@
 PATH
   remote: .
   specs:
-    karafka-web (0.7.
+    karafka-web (0.7.8)
       erubi (~> 1.4)
-      karafka (>= 2.2.
+      karafka (>= 2.2.9, < 3.0.0)
       karafka-core (>= 2.2.2, < 3.0.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
@@ -11,35 +11,45 @@ PATH
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (7.
+    activesupport (7.1.1)
+      base64
+      bigdecimal
       concurrent-ruby (~> 1.0, >= 1.0.2)
+      connection_pool (>= 2.2.5)
+      drb
       i18n (>= 1.6, < 2)
       minitest (>= 5.1)
+      mutex_m
       tzinfo (~> 2.0)
+    base64 (0.1.1)
+    bigdecimal (3.1.4)
     byebug (11.1.3)
     concurrent-ruby (1.2.2)
+    connection_pool (2.4.1)
     diff-lcs (1.5.0)
     docile (1.4.0)
+    drb (2.1.1)
+      ruby2_keywords
     erubi (1.12.0)
     factory_bot (6.3.0)
       activesupport (>= 5.0.0)
-    ffi (1.
+    ffi (1.16.3)
     i18n (1.14.1)
       concurrent-ruby (~> 1.0)
-    karafka (2.2.
+    karafka (2.2.9)
       karafka-core (>= 2.2.2, < 2.3.0)
-
-      waterdrop (>= 2.6.6, < 3.0.0)
+      waterdrop (>= 2.6.10, < 3.0.0)
       zeitwerk (~> 2.3)
-    karafka-core (2.2.
+    karafka-core (2.2.3)
       concurrent-ruby (>= 1.1)
-      karafka-rdkafka (>= 0.13.
-    karafka-rdkafka (0.13.
+      karafka-rdkafka (>= 0.13.6, < 0.14.0)
+    karafka-rdkafka (0.13.6)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    mini_portile2 (2.8.
+    mini_portile2 (2.8.5)
     minitest (5.20.0)
+    mutex_m (0.1.2)
     rack (3.0.8)
     rack-test (2.1.0)
       rack (>= 1.3)
@@ -47,7 +57,7 @@ GEM
       rack (>= 3.0.0.beta1)
       webrick
     rake (13.0.6)
-    roda (3.
+    roda (3.73.0)
       rack
     rspec (3.12.0)
       rspec-core (~> 3.12.0)
@@ -62,21 +72,21 @@ GEM
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.12.0)
     rspec-support (3.12.1)
+    ruby2_keywords (0.0.5)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
       simplecov_json_formatter (~> 0.1)
     simplecov-html (0.12.3)
     simplecov_json_formatter (0.1.4)
-
-    tilt (2.2.0)
+    tilt (2.3.0)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    waterdrop (2.6.
-      karafka-core (>= 2.
+    waterdrop (2.6.10)
+      karafka-core (>= 2.2.3, < 3.0.0)
       zeitwerk (~> 2.3)
     webrick (1.8.1)
-    zeitwerk (2.6.
+    zeitwerk (2.6.12)

 PLATFORMS
   x86_64-linux
data/README.md
CHANGED
@@ -8,6 +8,9 @@ Karafka Web UI is a user interface for the [Karafka framework](https://github.co

 It allows for easy access to various metrics, such as the number of messages consumed, the number of errors, and the number of consumers operating. It also provides a way to view the different Kafka topics, consumers, and groups that are being used by the application.

+> [!IMPORTANT]
+> All of Karafka ecosystems components documentation, including the Web UI, can be found [here](https://karafka.io/docs/#web-ui).
+
 ## Getting started

 Karafka Web UI documentation is part of the Karafka framework documentation and can be found [here](https://karafka.io/docs).
data/bin/karafka-web
CHANGED
data/docker-compose.yml
CHANGED
data/karafka-web.gemspec
CHANGED
@@ -17,7 +17,7 @@ Gem::Specification.new do |spec|
   spec.licenses = %w[LGPL-3.0 Commercial]

   spec.add_dependency 'erubi', '~> 1.4'
-  spec.add_dependency 'karafka', '>= 2.2.
+  spec.add_dependency 'karafka', '>= 2.2.9', '< 3.0.0'
   spec.add_dependency 'karafka-core', '>= 2.2.2', '< 3.0.0'
   spec.add_dependency 'roda', '~> 3.68', '>= 3.69'
   spec.add_dependency 'tilt', '~> 2.0'
@@ -36,7 +36,7 @@ Gem::Specification.new do |spec|
   spec.metadata = {
     'funding_uri' => 'https://karafka.io/#become-pro',
     'homepage_uri' => 'https://karafka.io',
-    'changelog_uri' => 'https://
+    'changelog_uri' => 'https://karafka.io/docs/Changelog-Karafka-Web-UI',
     'bug_tracker_uri' => 'https://github.com/karafka/karafka-web/issues',
     'source_code_uri' => 'https://github.com/karafka/karafka-web',
     'documentation_uri' => 'https://karafka.io/docs',
data/lib/karafka/web/cli/base.rb
ADDED
@@ -0,0 +1,35 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Base command for all the Web Cli commands
+      class Base < Karafka::Cli::Base
+        include ::Karafka::Helpers::Colorize
+
+        class << self
+          # @return [Array<Class>] available commands
+          def commands
+            ObjectSpace
+              .each_object(Class)
+              .select { |klass| klass.superclass == Karafka::Web::Cli::Base }
+              .reject { |klass| klass.to_s.end_with?('::Base') }
+              .sort_by(&:name)
+          end
+        end
+
+        private
+
+        # Takes the CLI user provided replication factor but if not present, uses the brokers count
+        # to decide. For non-dev clusters (with one broker) we usually want to have replication of
+        # two, just to have some redundancy.
+        # @param cli_replication_factor [Integer, false] user requested replication factor or false
+        #   if we are supposed to compute the factor automatically
+        # @return [Integer] replication factor for Karafka Web UI topics
+        def compute_replication_factor(cli_replication_factor)
+          cli_replication_factor || (Ui::Models::ClusterInfo.fetch.brokers.size > 1 ? 2 : 1)
+        end
+      end
+    end
+  end
+end
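The `commands` method above discovers command classes through `ObjectSpace` instead of keeping an explicit registry. A minimal, standalone sketch of that discovery pattern (the `Command`, `Install` and `Reset` names below are illustrative, not part of the gem):

    # Sketch of ObjectSpace-based subclass discovery, as used by Cli::Base.commands
    class Command
      # Returns every loaded direct subclass of Command, sorted by class name
      def self.all
        ObjectSpace
          .each_object(Class)
          .select { |klass| klass.superclass == Command }
          .sort_by(&:name)
      end
    end

    class Install < Command; end
    class Reset < Command; end

    puts Command.all.inspect # => [Install, Reset]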
data/lib/karafka/web/cli/help.rb
ADDED
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Displays help
+      class Help < Base
+        desc 'Describes available commands'
+
+        # Print available commands
+        def call
+          # Find the longest command for alignment purposes
+          max_command_length = self.class.commands.map(&:name).map(&:size).max
+
+          puts 'Karafka Web UI commands:'
+
+          # Print each command formatted with its description
+          self.class.commands.each do |command|
+            puts "  #{command.name.ljust(max_command_length)} # #{command.desc}"
+          end
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/cli/install.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Installs Web UI
+      class Install < Base
+        desc 'Installs the Web UI'
+
+        option(
+          :replication_factor,
+          'Replication factor for created topics',
+          Integer,
+          ['--replication_factor [FACTOR]']
+        )
+
+        # Installs Karafka Web. Creates all needed topics, populates the data and adds the needed
+        # code to `karafka.rb`.
+        def call
+          Karafka::Web::Installer.new.install(
+            replication_factor: compute_replication_factor(options[:replication_factor])
+          )
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/cli/migrate.rb
ADDED
@@ -0,0 +1,33 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Migrates the Web UI topics and states if needed
+      class Migrate < Base
+        desc 'Runs necessary migrations of Web UI topics and states'
+
+        option(
+          :replication_factor,
+          'Replication factor for created topics',
+          Integer,
+          ['--replication_factor [FACTOR]']
+        )
+
+        # Creates new topics (if any) and populates missing data.
+        # It does **not** remove topics and will not populate data if it is already there.
+        #
+        # Useful in two scenarios:
+        # 1. When setting up Web-UI in a new environment, so the Web-UI has the proper initial
+        #    state.
+        # 2. When upgrading Web-UI in-between versions that would require extra topics and/or
+        #    extra states populated.
+        def call
+          Karafka::Web::Installer.new.migrate(
+            replication_factor: compute_replication_factor(options[:replication_factor])
+          )
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/cli/reset.rb
ADDED
@@ -0,0 +1,27 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Resets the Web UI
+      class Reset < Base
+        desc 'Resets the Web UI by removing all the Web topics and creating them again'
+
+        option(
+          :replication_factor,
+          'Replication factor for created topics',
+          Integer,
+          ['--replication_factor [FACTOR]']
+        )
+
+        # Resets Karafka Web. Removes the topics, creates them again and populates the initial
+        # state again. This is useful in case the Web-UI metrics or anything else got corrupted.
+        def call
+          Karafka::Web::Installer.new.reset(
+            replication_factor: compute_replication_factor(options[:replication_factor])
+          )
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/cli/uninstall.rb
ADDED
@@ -0,0 +1,17 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    class Cli
+      # Uninstalls the Web UI
+      class Uninstall < Base
+        desc 'Removes all the Web UI topics and the enabled code'
+
+        # Uninstalls Karafka Web
+        def call
+          Karafka::Web::Installer.new.uninstall
+        end
+      end
+    end
+  end
+end
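Each of the new command classes above delegates to `Karafka::Web::Installer`, so the same operations can also be triggered programmatically. A hedged sketch under that assumption (the replication factor value is an example, not a default):

    # Sketch: the Ruby calls the CLI commands above ultimately make
    require 'karafka/web'

    # Roughly equivalent to `karafka-web install --replication_factor 2`
    Karafka::Web::Installer.new.install(replication_factor: 2)

    # Roughly equivalent to `karafka-web migrate --replication_factor 2`
    Karafka::Web::Installer.new.migrate(replication_factor: 2)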
data/lib/karafka/web/cli.rb
CHANGED
@@ -2,82 +2,15 @@

 module Karafka
   module Web
-    #
-    class Cli <
-
-
-
-
-
-
-
-        desc: 'Replication factor for created topics',
-        default: false,
-        check_default_type: false,
-        type: :numeric
-      )
-      # Installs Karafka Web. Creates all needed topics, populates the data and adds the needed
-      # code to `karafka.rb`.
-      def install
-        Karafka::Web::Installer.new.install(
-          replication_factor: compute_replication_factor(options[:replication_factor])
-        )
-      end
-
-      desc 'migrate', 'Creates necessary topics if not present and populates state data'
-      method_option(
-        :replication_factor,
-        desc: 'Replication factor for created topics',
-        default: false,
-        check_default_type: false,
-        type: :numeric
-      )
-      # Creates new topics (if any) and populates missing data.
-      # It does **not** remove topics and will not populate data if it is already there.
-      #
-      # Useful in two scenarios:
-      # 1. When setting up Web-UI in a new environment, so the Web-UI has the proper initial
-      #    state.
-      # 2. When upgrading Web-UI in-between versions that would require extra topics and/or extra
-      #    states populated.
-      def migrate
-        Karafka::Web::Installer.new.migrate(
-          replication_factor: compute_replication_factor(options[:replication_factor])
-        )
-      end
-
-      desc 'reset', 'Resets the Web UI by removing all the Web topics and creating them again'
-      method_option(
-        :replication_factor,
-        desc: 'Replication factor for created topics',
-        default: false,
-        check_default_type: false,
-        type: :numeric
-      )
-      # Resets Karafka Web. Removes the topics, creates them again and populates the initial state
-      # again. This is useful in case the Web-UI metrics or anything else got corrupted.
-      def reset
-        Karafka::Web::Installer.new.reset(
-          replication_factor: compute_replication_factor(options[:replication_factor])
-        )
-      end
-
-      desc 'uninstall', 'Removes all the Web UI topics and the enabled code'
-      # Uninstalls Karafka Web
-      def uninstall
-        Karafka::Web::Installer.new.uninstall
-      end
-
-      private
-
-      # Takes the CLI user provided replication factor but if not present, uses the brokers count
-      # to decide. For non-dev clusters (with one broker) we usually want to have replication of
-      # two, just to have some redundancy.
-      # @param cli_replication_factor [Integer, false] user requested replication factor or false
-      #   if we are supposed to compute the factor automatically
-      # @return [Integer] replication factor for Karafka Web UI topics
-      def compute_replication_factor(cli_replication_factor)
-        cli_replication_factor || Ui::Models::ClusterInfo.fetch.brokers.size > 1 ? 2 : 1
+    # Web CLI
+    class Cli < Karafka::Cli
+      class << self
+        private
+
+        # @return [Array<Class>] command classes
+        def commands
+          Base.commands
+        end
       end
     end
   end
data/lib/karafka/web/config.rb
CHANGED
@@ -36,6 +36,10 @@ module Karafka
       # 5 seconds should be enough
       setting :interval, default: 5_000

+      # Main Web UI reporting scheduler that runs a background thread and reports periodically
+      # from the consumer reporter and producer reporter
+      setting :scheduler, default: Tracking::Scheduler.new
+
       setting :consumers do
         # Reports the metrics collected in the sampler
         setting :reporter, default: Tracking::Consumers::Reporter.new
@@ -58,8 +62,7 @@ module Karafka
         setting :sampler, default: Tracking::Producers::Sampler.new

         setting :listeners, default: [
-          Tracking::Producers::Listeners::Errors.new
-          Tracking::Producers::Listeners::Reporter.new
+          Tracking::Producers::Listeners::Errors.new
         ]
       end
     end
@@ -114,6 +117,37 @@ module Karafka
      # In some cases you may want to limit what is being displayed due to the type of data you
      # are dealing with
      setting :visibility_filter, default: Ui::Models::VisibilityFilter.new
+
+      # Specific kafka settings that are tuned to operate within the Web UI interface.
+      #
+      # Please do not change them unless you know what you are doing as their misconfiguration
+      # may cause Web UI to misbehave
+      #
+      # The settings are inherited as follows:
+      #   1. root routing level `kafka` settings
+      #   2. admin `kafka` settings
+      #   3. web ui `kafka` settings from here
+      #
+      # Those settings impact ONLY Web UI interface and do not affect other scopes. This is done
+      # on purpose as we want to improve responsiveness of the interface by tuning some of the
+      # settings and this is not that relevant for processing itself.
+      #
+      # option [Hash] extra changes to the default admin kafka settings
+      setting :kafka, default: {
+        # optimizes the responsiveness of the Web UI in three scenarios:
+        #   - topics to which writes happen only in transactions so EOF is yield faster
+        #   - heavily compacted topics
+        #   - Web UI topics read operations when using transactional producer
+        #
+        # This can be configured to be higher if you do not use transactional WaterDrop producer.
+        # This value is used when last message (first from the high watermark offset) is the
+        # transaction commit message. In cases like this the EOF gets propagated after this time
+        # so we have to wait. Default 500ms means, that for some views, where we take our data
+        # that might have been committed via transactional producer, we would wait for 1 second
+        # to get needed data. If you are experiencing timeouts or other issues with the Web IU
+        # interface, you can increase this.
+        'fetch.wait.max.ms': 100
+      }
     end
   end
 end
data/lib/karafka/web/processing/consumers/metrics.rb
CHANGED
@@ -14,7 +14,10 @@ module Karafka
           metrics_message = ::Karafka::Admin.read_topic(
             Karafka::Web.config.topics.consumers.metrics,
             0,
-
+            # We need to take more in case there would be transactions running.
+            # In theory we could take two but this compensates for any involuntary
+            # revocations and cases where two producers would write to the same state
+            5
           ).last

           return metrics_message.payload if metrics_message
data/lib/karafka/web/processing/consumers/state.rb
CHANGED
@@ -14,7 +14,10 @@ module Karafka
           state_message = ::Karafka::Admin.read_topic(
             Karafka::Web.config.topics.consumers.states,
             0,
-
+            # We need to take more in case there would be transactions running.
+            # In theory we could take two but this compensates for any involuntary
+            # revocations and cases where two producers would write to the same state
+            5
           ).last

           return state_message.payload if state_message
data/lib/karafka/web/tracking/consumers/reporter.rb
CHANGED
@@ -5,10 +5,7 @@ module Karafka
     module Tracking
       module Consumers
         # Reports the collected data about the process and sends it, so we can use it in the UI
-        class Reporter
-          include ::Karafka::Core::Helpers::Time
-          include ::Karafka::Helpers::Async
-
+        class Reporter < Tracking::Reporter
           # Minimum number of messages to produce to produce them in sync mode
           # This acts as a small back-off not to overload the system in case we would have
           # extremely big number of errors happening
@@ -21,12 +18,31 @@ module Karafka
           MUTEX = Mutex.new

           def initialize
+            super
             # Move back so first report is dispatched fast to indicate, that the process is alive
             @tracked_at = monotonic_now - 10_000
             @report_contract = Consumers::Contracts::Report.new
             @error_contract = Tracking::Contracts::Error.new
           end

+          # We never report in initializing phase because things are not yet fully configured
+          # We never report in the initialized because server is not yet ready until Karafka is
+          # fully running and some of the things like listeners are not yet available
+          #
+          # This method will also be `false` in case we are not running in `karafka server` or
+          # in embedding, because in those cases Karafka does not go beyond the `initialized` phase
+          #
+          # @return [Boolean] are we able to report consumer state
+          def active?
+            # If we do not have a producer that we could use to report or it was closed, we cannot
+            # and should not report
+            return false unless super
+            return false if ::Karafka::App.initializing?
+            return false if ::Karafka::App.initialized?
+
+            true
+          end
+
           # Dispatches the current state from sampler to appropriate topics
           #
           # @param forced [Boolean] should we report bypassing the time frequency or should we
@@ -41,11 +57,6 @@ module Karafka
             sampler.sample

             MUTEX.synchronize do
-              # Start background thread only when needed
-              # This prevents us from starting it too early or for non-consumer processes where
-              # Karafka is being included
-              async_call unless @running
-
               return unless report?(forced)

               @tracked_at = monotonic_now
@@ -97,31 +108,11 @@ module Karafka

           private

-          # Reports the process state once in a while
-          def call
-            @running = true
-
-            # We won't track more often anyhow but want to try frequently not to miss a window
-            # We need to convert the sleep interval into seconds for sleep
-            sleep_time = ::Karafka::Web.config.tracking.interval.to_f / 1_000 / 10
-
-            loop do
-              report
-
-              sleep(sleep_time)
-            end
-          end
-
           # @param forced [Boolean] is this report forced. Forced means that as long as we can
           #   flush we will flush
           # @return [Boolean] Should we report or is it not yet time to do so
           def report?(forced)
-
-            return false if ::Karafka::App.initializing?
-            # We never report in the initialized because server is not yet ready until Karafka is
-            # fully running and some of the things like listeners are not yet available
-            return false if ::Karafka::App.initialized?
-
+            return false unless active?
             return true if forced

             (monotonic_now - @tracked_at) >= ::Karafka::Web.config.tracking.interval
data/lib/karafka/web/tracking/producers/reporter.rb
CHANGED
@@ -9,9 +9,7 @@ module Karafka
       # @note Producer reported does not have to operate with the `forced` dispatch mainly
       #   because there is no expectation on immediate status updates for producers and their
       #   dispatch flow is always periodic based.
-      class Reporter
-        include ::Karafka::Core::Helpers::Time
-
+      class Reporter < Tracking::Reporter
         # Minimum number of messages to produce to produce them in sync mode
         # This acts as a small back-off not to overload the system in case we would have
         # extremely big number of errors happening
@@ -24,6 +22,7 @@ module Karafka
         MUTEX = Mutex.new

         def initialize
+          super
           # If there are any errors right after we started sampling, dispatch them immediately
           @tracked_at = monotonic_now - 10_000
           @error_contract = Tracking::Contracts::Error.new
@@ -62,7 +61,7 @@ module Karafka

         # @return [Boolean] Should we report or is it not yet time to do so
         def report?
-          return false unless
+          return false unless active?

           (monotonic_now - @tracked_at) >= ::Karafka::Web.config.tracking.interval
         end
@@ -93,6 +92,9 @@ module Karafka
         # and we can just safely ignore this
         rescue WaterDrop::Errors::ProducerClosedError
           nil
+        rescue StandardError => e
+          p '------------------------------------------------'
+          p e
         end
       end
     end
data/lib/karafka/web/tracking/producers/sampler.rb
CHANGED
@@ -26,6 +26,9 @@ module Karafka
         # We cannot report and track the same time, that is why we use mutex here. To make sure
         # that samples aggregations and counting does not interact with reporter flushing.
         def track
+          # Prevents deadlocks when something producer related fails in the Web UI reporter
+          return yield(self) if Reporter::MUTEX.owned?
+
           Reporter::MUTEX.synchronize do
             yield(self)
           end
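The `Reporter::MUTEX.owned?` guard above is a small re-entrancy check: if the current thread already holds the lock, synchronizing again would fail, so the sampler yields directly instead. A generic sketch of the same pattern (the names below are illustrative):

    # Sketch: re-entrant use of a non-reentrant Mutex via #owned?
    LOCK = Mutex.new

    def with_lock
      # Already holding the lock on this thread: calling synchronize again
      # would raise a recursive-locking ThreadError, so just yield
      return yield if LOCK.owned?

      LOCK.synchronize { yield }
    end

    LOCK.synchronize do
      with_lock { puts 'nested call is safe' }
    end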
data/lib/karafka/web/tracking/reporter.rb
ADDED
@@ -0,0 +1,25 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Tracking
+      # Base reporter from which all the reports should inherit
+      class Reporter
+        include ::Karafka::Core::Helpers::Time
+
+        # Can this reporter report. Since some reporters may report only in part of the processes
+        # where Karafka is used (like `karafka server`) each may implement more complex rules.
+        #
+        # The basic is not to report unless we have a producer and this producer is active
+        #
+        # @return [Boolean]
+        def active?
+          return false unless ::Karafka::App.producer
+          return false unless ::Karafka::App.producer.status.active?
+
+          true
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/tracking/scheduler.rb
ADDED
@@ -0,0 +1,46 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Tracking
+      # Triggers reporters to report in an async mode in a separate thread
+      # We report this way to prevent any potential dead-locks in cases we would be emitting
+      # statistics during transactions.
+      #
+      # We should never use the notifications thread for sensitive IO bound operations.
+      class Scheduler
+        include ::Karafka::Helpers::Async
+
+        # Creates the scheduler and runs its internal reporting
+        def initialize
+          async_call
+        end
+
+        private
+
+        # Reports the process state once in a while
+        def call
+          # We won't track more often anyhow but want to try frequently not to miss a window
+          # We need to convert the sleep interval into seconds for sleep
+          sleep_time = ::Karafka::Web.config.tracking.interval.to_f / 1_000 / 10
+
+          loop do
+            # Not every reporter may be active at a given stage or in a context of a given process
+            # We select only those that decided that they are active.
+            reporters.select(&:active?).each(&:report)
+
+            sleep(sleep_time)
+          end
+        end
+
+        # @return [Array] consumers and producers reporters
+        def reporters
+          @reporters ||= [
+            ::Karafka::Web.config.tracking.consumers.reporter,
+            ::Karafka::Web.config.tracking.producers.reporter
+          ].freeze
+        end
+      end
+    end
+  end
+end
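The scheduler above relies on the async-call helper: invoking `async_call` starts the object's private `call` loop in its own thread. A simplified illustration of that pattern follows; it is not the gem's actual `Karafka::Helpers::Async` implementation, just a sketch of the idea.

    # Simplified illustration of the async-call pattern the Scheduler relies on
    module Async
      # Starts the object's #call method in a background thread
      def async_call
        @thread ||= Thread.new { call }
      end
    end

    class Ticker
      include Async

      private

      def call
        loop do
          puts 'tick'
          sleep(1)
        end
      end
    end

    Ticker.new.async_call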
data/lib/karafka/web/ui/lib/admin.rb
ADDED
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Ui
+      module Lib
+        # Wrapper around Karafka Admin that alters its behaviours or injects Web UI interface
+        # specific settings that optimize the responsiveness of the UI when operating on topics
+        #
+        # @note Not all commands need those optimizations, hence we alter only those that need
+        #   that and we only expose those admin commands that are used in the Web-UI interface
+        #   component.
+        #
+        # @note We expose here only admin methods used in the Web UI interface. Processing uses the
+        #   `Karafka::Admin` with the defaults
+        class Admin
+          class << self
+            extend Forwardable
+
+            def_delegators ::Karafka::Admin, :read_watermark_offsets, :cluster_info
+
+            # Allows us to read messages from the topic
+            #
+            # @param name [String, Symbol] topic name
+            # @param partition [Integer] partition
+            # @param count [Integer] how many messages we want to get at most
+            # @param start_offset [Integer, Time] offset from which we should start. If -1 is provided
+            #   (default) we will start from the latest offset. If time is provided, the appropriate
+            #   offset will be resolved. If negative beyond -1 is provided, we move backwards more.
+            # @param settings [Hash] kafka extra settings (optional)
+            #
+            # @return [Array<Karafka::Messages::Message>] array with messages
+            def read_topic(name, partition, count, start_offset = -1, settings = {})
+              ::Karafka::Admin.read_topic(
+                name,
+                partition,
+                count,
+                start_offset,
+                # Merge our Web UI specific settings
+                config.merge(settings)
+              )
+            end
+
+            private
+
+            # @return [Hash] kafka config for Web UI interface.
+            # @note It does **not** affect tracking or processing
+            def config
+              ::Karafka::Web.config.ui.kafka
+            end
+          end
+        end
+      end
+    end
+  end
+end
data/lib/karafka/web/ui/models/cluster_info.rb
CHANGED
@@ -4,7 +4,7 @@ module Karafka
   module Web
     module Ui
       module Models
-        # Wraps around the `
+        # Wraps around the `Lib::Admin#cluster_info` with caching and some additional aliases
         # so we can reference relevant information easily
         class ClusterInfo
           class << self
@@ -18,7 +18,7 @@ module Karafka
             cluster_info = cache.read(:cluster_info)

             if cluster_info.nil? || !cached
-              cluster_info = cache.write(:cluster_info,
+              cluster_info = cache.write(:cluster_info, Lib::Admin.cluster_info)
             end

             cluster_info
data/lib/karafka/web/ui/models/consumers_metrics.rb
CHANGED
@@ -32,10 +32,12 @@ module Karafka

           # @return [::Karafka::Messages::Message, nil] most recent state or nil if none
           def fetch
-            ::
+            Lib::Admin.read_topic(
               Karafka::Web.config.topics.consumers.metrics,
               0,
-
+              # We need to take last two and not the last because in case of a transactional
+              # producer, the last one will match the transaction commit message
+              2
             ).last
           end
         end
data/lib/karafka/web/ui/models/consumers_state.rb
CHANGED
@@ -41,10 +41,12 @@ module Karafka

           # @return [::Karafka::Messages::Message, nil] most recent state or nil if none
           def fetch
-            ::
+            Lib::Admin.read_topic(
               Karafka::Web.config.topics.consumers.states,
               0,
-
+              # We need to take last two and not the last because in case of a transactional
+              # producer, the last one will match the transaction commit message
+              2
             ).last
           end

data/lib/karafka/web/ui/models/message.rb
CHANGED
@@ -17,7 +17,7 @@ module Karafka
           # @param offset [Integer]
           # @raise [::Karafka::Web::Errors::Ui::NotFoundError] when not found
           def find(topic_id, partition_id, offset)
-            message =
+            message = Lib::Admin.read_topic(
               topic_id,
               partition_id,
               1,
@@ -195,7 +195,7 @@ module Karafka
           # @return [Array<Karafka::Messages::Message>, false] topic partition messages or false
           #   in case we hit a non-existing offset
           def read_topic(*args)
-            ::
+            Lib::Admin.read_topic(*args)
           rescue Rdkafka::RdkafkaError => e
             return false if e.code == :auto_offset_reset

data/lib/karafka/web/ui/models/processes.rb
CHANGED
@@ -44,7 +44,7 @@ module Karafka
             .map { |process| process[:offset] }
             .sort

-          ::
+          Lib::Admin.read_topic(
             ::Karafka::Web.config.topics.consumers.reports,
             0,
             # We set 10k here because we start from the latest offset of the reports, hence
data/lib/karafka/web/ui/models/watermark_offsets.rb
CHANGED
@@ -13,7 +13,7 @@ module Karafka
         # @param partition_id [Integer]
         # @return [WatermarkOffsets]
         def find(topic_id, partition_id)
-          offsets = ::
+          offsets = Lib::Admin.read_watermark_offsets(topic_id, partition_id)

           new(
             low: offsets.first,
data/lib/karafka/web/ui/pro/controllers/explorer.rb
CHANGED
@@ -132,11 +132,22 @@ module Karafka
             active_partitions, = Paginators::Partitions.call(partitions_count, 1)
           end

-
-          messages, = Models::Message.topic_page(topic_id, active_partitions, 1)
+          recent = nil

-          #
-
+          # This selects first pages with most recent messages and moves to next if first
+          # contains only compacted data, etc.
+          #
+          # We do it until we find a message we could refer to (if doable) within first
+          # ten pages
+          10.times do |page|
+            messages, = Models::Message.topic_page(topic_id, active_partitions, page + 1)
+
+            # Selects newest out of all partitions
+            # Reject compacted messages and transaction-related once
+            recent = messages.reject { |message| message.is_a?(Array) }.max_by(&:timestamp)
+
+            break if recent
+          end

           recent || raise(::Karafka::Web::Errors::Ui::NotFoundError)

@@ -184,7 +195,7 @@ module Karafka
         # @param partition_id [Integer]
         # @param time [Time] time of the message
         def closest(topic_id, partition_id, time)
-          target = ::
+          target = Lib::Admin.read_topic(topic_id, partition_id, 1, time).first

           partition_path = "explorer/#{topic_id}/#{partition_id}"
           partition_path += "?offset=#{target.offset}" if target
data/lib/karafka/web/ui/pro/views/errors/_error.erb
CHANGED
@@ -1,7 +1,7 @@
 <% if error_msg.is_a?(Array) %>
   <tr>
     <td colspan="5" class="text-center text-muted">
-      This error
+      This offset does not contain error data. The message may have been compacted or is a system entry.
     </td>
   </tr>
 <% else %>
data/lib/karafka/web/ui/pro/views/explorer/_message.erb
CHANGED
@@ -7,7 +7,7 @@
     <%= message[1] %>
   </td>
   <td colspan="3" class="text-center text-muted">
-    This message
+    This offset does not contain any data. The message may have been compacted or is a system entry.
   </td>
 </tr>
 <% else %>
data/lib/karafka/web/ui/views/errors/_error.erb
CHANGED
@@ -1,7 +1,7 @@
 <% if error_msg.is_a?(Array) %>
   <tr>
     <td colspan="5" class="text-center text-muted">
-      This error
+      This offset does not contain error data. The message may have been compacted or is a system entry.
     </td>
   </tr>
 <% else %>
data/lib/karafka/web/version.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-web
 version: !ruby/object:Gem::Version
-  version: 0.7.
+  version: 0.7.8
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2023-10-
+date: 2023-10-24 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: erubi
@@ -57,7 +57,7 @@ dependencies:
     requirements:
     - - ">="
      - !ruby/object:Gem::Version
-        version: 2.2.
+        version: 2.2.9
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
@@ -67,7 +67,7 @@ dependencies:
    requirements:
    - - ">="
      - !ruby/object:Gem::Version
-        version: 2.2.
+        version: 2.2.9
    - - "<"
      - !ruby/object:Gem::Version
        version: 3.0.0
@@ -173,6 +173,12 @@ files:
 - lib/karafka/web.rb
 - lib/karafka/web/app.rb
 - lib/karafka/web/cli.rb
+- lib/karafka/web/cli/base.rb
+- lib/karafka/web/cli/help.rb
+- lib/karafka/web/cli/install.rb
+- lib/karafka/web/cli/migrate.rb
+- lib/karafka/web/cli/reset.rb
+- lib/karafka/web/cli/uninstall.rb
 - lib/karafka/web/config.rb
 - lib/karafka/web/contracts/base.rb
 - lib/karafka/web/contracts/config.rb
@@ -219,10 +225,11 @@ files:
 - lib/karafka/web/tracking/memoized_shell.rb
 - lib/karafka/web/tracking/producers/listeners/base.rb
 - lib/karafka/web/tracking/producers/listeners/errors.rb
-- lib/karafka/web/tracking/producers/listeners/reporter.rb
 - lib/karafka/web/tracking/producers/reporter.rb
 - lib/karafka/web/tracking/producers/sampler.rb
+- lib/karafka/web/tracking/reporter.rb
 - lib/karafka/web/tracking/sampler.rb
+- lib/karafka/web/tracking/scheduler.rb
 - lib/karafka/web/tracking/ttl_array.rb
 - lib/karafka/web/tracking/ttl_hash.rb
 - lib/karafka/web/ui/app.rb
@@ -241,6 +248,7 @@ files:
 - lib/karafka/web/ui/controllers/status.rb
 - lib/karafka/web/ui/helpers/application_helper.rb
 - lib/karafka/web/ui/helpers/paths_helper.rb
+- lib/karafka/web/ui/lib/admin.rb
 - lib/karafka/web/ui/lib/hash_proxy.rb
 - lib/karafka/web/ui/lib/paginations/base.rb
 - lib/karafka/web/ui/lib/paginations/offset_based.rb
@@ -447,7 +455,7 @@ licenses:
 metadata:
   funding_uri: https://karafka.io/#become-pro
   homepage_uri: https://karafka.io
-  changelog_uri: https://
+  changelog_uri: https://karafka.io/docs/Changelog-Karafka-Web-UI
   bug_tracker_uri: https://github.com/karafka/karafka-web/issues
   source_code_uri: https://github.com/karafka/karafka-web
   documentation_uri: https://karafka.io/docs
metadata.gz.sig
CHANGED
Binary file
data/lib/karafka/web/tracking/producers/listeners/reporter.rb
DELETED
@@ -1,21 +0,0 @@
-# frozen_string_literal: true
-
-module Karafka
-  module Web
-    module Tracking
-      module Producers
-        module Listeners
-          # Special listener that we use to report data about producers states
-          # We don't have to have a separate thread for reporting, because producers have their
-          # own internal threads for changes polling and we can utilize this thread
-          class Reporter < Base
-            # @param _event [Karafka::Core::Monitoring::Event]
-            def on_statistics_emitted(_event)
-              reporter.report
-            end
-          end
-        end
-      end
-    end
-  end
-end