karafka-web 0.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (178) hide show
  1. checksums.yaml +7 -0
  2. checksums.yaml.gz.sig +3 -0
  3. data/.coditsu/ci.yml +3 -0
  4. data/.diffend.yml +3 -0
  5. data/.github/FUNDING.yml +1 -0
  6. data/.github/ISSUE_TEMPLATE/bug_report.md +50 -0
  7. data/.github/ISSUE_TEMPLATE/feature_request.md +20 -0
  8. data/.github/workflows/ci.yml +49 -0
  9. data/.gitignore +69 -0
  10. data/.ruby-gemset +1 -0
  11. data/.ruby-version +1 -0
  12. data/CHANGELOG.md +9 -0
  13. data/CODE_OF_CONDUCT.md +46 -0
  14. data/Gemfile +7 -0
  15. data/Gemfile.lock +52 -0
  16. data/LICENSE +17 -0
  17. data/README.md +29 -0
  18. data/bin/karafka-web +33 -0
  19. data/certs/cert_chain.pem +26 -0
  20. data/config/locales/errors.yml +9 -0
  21. data/karafka-web.gemspec +44 -0
  22. data/lib/karafka/web/app.rb +17 -0
  23. data/lib/karafka/web/config.rb +80 -0
  24. data/lib/karafka/web/deserializer.rb +20 -0
  25. data/lib/karafka/web/errors.rb +25 -0
  26. data/lib/karafka/web/installer.rb +124 -0
  27. data/lib/karafka/web/processing/consumer.rb +66 -0
  28. data/lib/karafka/web/processing/consumers/aggregator.rb +130 -0
  29. data/lib/karafka/web/processing/consumers/state.rb +32 -0
  30. data/lib/karafka/web/tracking/base_contract.rb +31 -0
  31. data/lib/karafka/web/tracking/consumers/contracts/consumer_group.rb +33 -0
  32. data/lib/karafka/web/tracking/consumers/contracts/job.rb +26 -0
  33. data/lib/karafka/web/tracking/consumers/contracts/partition.rb +22 -0
  34. data/lib/karafka/web/tracking/consumers/contracts/report.rb +95 -0
  35. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +29 -0
  36. data/lib/karafka/web/tracking/consumers/listeners/base.rb +33 -0
  37. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +107 -0
  38. data/lib/karafka/web/tracking/consumers/listeners/pausing.rb +45 -0
  39. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +157 -0
  40. data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +123 -0
  41. data/lib/karafka/web/tracking/consumers/listeners/status.rb +58 -0
  42. data/lib/karafka/web/tracking/consumers/sampler.rb +216 -0
  43. data/lib/karafka/web/tracking/memoized_shell.rb +48 -0
  44. data/lib/karafka/web/tracking/reporter.rb +144 -0
  45. data/lib/karafka/web/tracking/ttl_array.rb +59 -0
  46. data/lib/karafka/web/tracking/ttl_hash.rb +16 -0
  47. data/lib/karafka/web/ui/app.rb +78 -0
  48. data/lib/karafka/web/ui/base.rb +77 -0
  49. data/lib/karafka/web/ui/controllers/base.rb +40 -0
  50. data/lib/karafka/web/ui/controllers/become_pro.rb +17 -0
  51. data/lib/karafka/web/ui/controllers/cluster.rb +24 -0
  52. data/lib/karafka/web/ui/controllers/consumers.rb +27 -0
  53. data/lib/karafka/web/ui/controllers/errors.rb +43 -0
  54. data/lib/karafka/web/ui/controllers/jobs.rb +33 -0
  55. data/lib/karafka/web/ui/controllers/requests/params.rb +30 -0
  56. data/lib/karafka/web/ui/controllers/responses/data.rb +26 -0
  57. data/lib/karafka/web/ui/controllers/routing.rb +30 -0
  58. data/lib/karafka/web/ui/helpers/application_helper.rb +144 -0
  59. data/lib/karafka/web/ui/lib/hash_proxy.rb +66 -0
  60. data/lib/karafka/web/ui/lib/paginate_array.rb +38 -0
  61. data/lib/karafka/web/ui/models/consumer_group.rb +20 -0
  62. data/lib/karafka/web/ui/models/health.rb +44 -0
  63. data/lib/karafka/web/ui/models/job.rb +13 -0
  64. data/lib/karafka/web/ui/models/message.rb +99 -0
  65. data/lib/karafka/web/ui/models/partition.rb +13 -0
  66. data/lib/karafka/web/ui/models/process.rb +56 -0
  67. data/lib/karafka/web/ui/models/processes.rb +86 -0
  68. data/lib/karafka/web/ui/models/state.rb +67 -0
  69. data/lib/karafka/web/ui/models/topic.rb +19 -0
  70. data/lib/karafka/web/ui/pro/app.rb +120 -0
  71. data/lib/karafka/web/ui/pro/controllers/cluster.rb +16 -0
  72. data/lib/karafka/web/ui/pro/controllers/consumers.rb +54 -0
  73. data/lib/karafka/web/ui/pro/controllers/dlq.rb +44 -0
  74. data/lib/karafka/web/ui/pro/controllers/errors.rb +57 -0
  75. data/lib/karafka/web/ui/pro/controllers/explorer.rb +79 -0
  76. data/lib/karafka/web/ui/pro/controllers/health.rb +33 -0
  77. data/lib/karafka/web/ui/pro/controllers/jobs.rb +26 -0
  78. data/lib/karafka/web/ui/pro/controllers/routing.rb +26 -0
  79. data/lib/karafka/web/ui/pro/views/consumers/_breadcrumbs.erb +27 -0
  80. data/lib/karafka/web/ui/pro/views/consumers/_consumer.erb +60 -0
  81. data/lib/karafka/web/ui/pro/views/consumers/_counters.erb +50 -0
  82. data/lib/karafka/web/ui/pro/views/consumers/_summary.erb +81 -0
  83. data/lib/karafka/web/ui/pro/views/consumers/consumer/_consumer_group.erb +109 -0
  84. data/lib/karafka/web/ui/pro/views/consumers/consumer/_job.erb +26 -0
  85. data/lib/karafka/web/ui/pro/views/consumers/consumer/_metrics.erb +126 -0
  86. data/lib/karafka/web/ui/pro/views/consumers/consumer/_no_jobs.erb +9 -0
  87. data/lib/karafka/web/ui/pro/views/consumers/consumer/_no_subscriptions.erb +9 -0
  88. data/lib/karafka/web/ui/pro/views/consumers/consumer/_partition.erb +32 -0
  89. data/lib/karafka/web/ui/pro/views/consumers/consumer/_stopped.erb +10 -0
  90. data/lib/karafka/web/ui/pro/views/consumers/consumer/_tabs.erb +20 -0
  91. data/lib/karafka/web/ui/pro/views/consumers/index.erb +30 -0
  92. data/lib/karafka/web/ui/pro/views/consumers/jobs.erb +42 -0
  93. data/lib/karafka/web/ui/pro/views/consumers/subscriptions.erb +23 -0
  94. data/lib/karafka/web/ui/pro/views/dlq/_breadcrumbs.erb +5 -0
  95. data/lib/karafka/web/ui/pro/views/dlq/_no_topics.erb +9 -0
  96. data/lib/karafka/web/ui/pro/views/dlq/_topic.erb +12 -0
  97. data/lib/karafka/web/ui/pro/views/dlq/index.erb +16 -0
  98. data/lib/karafka/web/ui/pro/views/errors/_breadcrumbs.erb +25 -0
  99. data/lib/karafka/web/ui/pro/views/errors/_detail.erb +29 -0
  100. data/lib/karafka/web/ui/pro/views/errors/_error.erb +26 -0
  101. data/lib/karafka/web/ui/pro/views/errors/_partition_option.erb +7 -0
  102. data/lib/karafka/web/ui/pro/views/errors/index.erb +58 -0
  103. data/lib/karafka/web/ui/pro/views/errors/show.erb +56 -0
  104. data/lib/karafka/web/ui/pro/views/explorer/_breadcrumbs.erb +29 -0
  105. data/lib/karafka/web/ui/pro/views/explorer/_detail.erb +21 -0
  106. data/lib/karafka/web/ui/pro/views/explorer/_encryption_enabled.erb +18 -0
  107. data/lib/karafka/web/ui/pro/views/explorer/_failed_deserialization.erb +4 -0
  108. data/lib/karafka/web/ui/pro/views/explorer/_message.erb +16 -0
  109. data/lib/karafka/web/ui/pro/views/explorer/_partition_option.erb +7 -0
  110. data/lib/karafka/web/ui/pro/views/explorer/_topic.erb +12 -0
  111. data/lib/karafka/web/ui/pro/views/explorer/index.erb +17 -0
  112. data/lib/karafka/web/ui/pro/views/explorer/partition.erb +56 -0
  113. data/lib/karafka/web/ui/pro/views/explorer/show.erb +65 -0
  114. data/lib/karafka/web/ui/pro/views/health/_breadcrumbs.erb +5 -0
  115. data/lib/karafka/web/ui/pro/views/health/_partition.erb +35 -0
  116. data/lib/karafka/web/ui/pro/views/health/index.erb +60 -0
  117. data/lib/karafka/web/ui/pro/views/jobs/_breadcrumbs.erb +5 -0
  118. data/lib/karafka/web/ui/pro/views/jobs/_job.erb +31 -0
  119. data/lib/karafka/web/ui/pro/views/jobs/_no_jobs.erb +9 -0
  120. data/lib/karafka/web/ui/pro/views/jobs/index.erb +34 -0
  121. data/lib/karafka/web/ui/pro/views/shared/_navigation.erb +57 -0
  122. data/lib/karafka/web/ui/public/images/favicon.ico +0 -0
  123. data/lib/karafka/web/ui/public/images/logo.svg +28 -0
  124. data/lib/karafka/web/ui/public/javascripts/application.js +41 -0
  125. data/lib/karafka/web/ui/public/javascripts/bootstrap.min.js +7 -0
  126. data/lib/karafka/web/ui/public/javascripts/highlight.min.js +337 -0
  127. data/lib/karafka/web/ui/public/javascripts/live_poll.js +124 -0
  128. data/lib/karafka/web/ui/public/javascripts/timeago.min.js +1 -0
  129. data/lib/karafka/web/ui/public/stylesheets/application.css +106 -0
  130. data/lib/karafka/web/ui/public/stylesheets/bootstrap.min.css +7 -0
  131. data/lib/karafka/web/ui/public/stylesheets/bootstrap.min.css.map +1 -0
  132. data/lib/karafka/web/ui/public/stylesheets/highlight.min.css +10 -0
  133. data/lib/karafka/web/ui/views/cluster/_breadcrumbs.erb +5 -0
  134. data/lib/karafka/web/ui/views/cluster/_broker.erb +5 -0
  135. data/lib/karafka/web/ui/views/cluster/_partition.erb +22 -0
  136. data/lib/karafka/web/ui/views/cluster/index.erb +72 -0
  137. data/lib/karafka/web/ui/views/consumers/_breadcrumbs.erb +27 -0
  138. data/lib/karafka/web/ui/views/consumers/_consumer.erb +43 -0
  139. data/lib/karafka/web/ui/views/consumers/_counters.erb +44 -0
  140. data/lib/karafka/web/ui/views/consumers/_summary.erb +81 -0
  141. data/lib/karafka/web/ui/views/consumers/consumer/_consumer_group.erb +109 -0
  142. data/lib/karafka/web/ui/views/consumers/consumer/_job.erb +26 -0
  143. data/lib/karafka/web/ui/views/consumers/consumer/_metrics.erb +126 -0
  144. data/lib/karafka/web/ui/views/consumers/consumer/_no_jobs.erb +9 -0
  145. data/lib/karafka/web/ui/views/consumers/consumer/_no_subscriptions.erb +9 -0
  146. data/lib/karafka/web/ui/views/consumers/consumer/_partition.erb +32 -0
  147. data/lib/karafka/web/ui/views/consumers/consumer/_stopped.erb +10 -0
  148. data/lib/karafka/web/ui/views/consumers/consumer/_tabs.erb +20 -0
  149. data/lib/karafka/web/ui/views/consumers/index.erb +29 -0
  150. data/lib/karafka/web/ui/views/errors/_breadcrumbs.erb +19 -0
  151. data/lib/karafka/web/ui/views/errors/_detail.erb +29 -0
  152. data/lib/karafka/web/ui/views/errors/_error.erb +26 -0
  153. data/lib/karafka/web/ui/views/errors/index.erb +38 -0
  154. data/lib/karafka/web/ui/views/errors/show.erb +30 -0
  155. data/lib/karafka/web/ui/views/jobs/_breadcrumbs.erb +5 -0
  156. data/lib/karafka/web/ui/views/jobs/_job.erb +22 -0
  157. data/lib/karafka/web/ui/views/jobs/_no_jobs.erb +9 -0
  158. data/lib/karafka/web/ui/views/jobs/index.erb +31 -0
  159. data/lib/karafka/web/ui/views/layout.erb +23 -0
  160. data/lib/karafka/web/ui/views/routing/_breadcrumbs.erb +15 -0
  161. data/lib/karafka/web/ui/views/routing/_consumer_group.erb +34 -0
  162. data/lib/karafka/web/ui/views/routing/_detail.erb +25 -0
  163. data/lib/karafka/web/ui/views/routing/_topic.erb +18 -0
  164. data/lib/karafka/web/ui/views/routing/index.erb +10 -0
  165. data/lib/karafka/web/ui/views/routing/show.erb +26 -0
  166. data/lib/karafka/web/ui/views/shared/_become_pro.erb +13 -0
  167. data/lib/karafka/web/ui/views/shared/_brand.erb +3 -0
  168. data/lib/karafka/web/ui/views/shared/_content.erb +31 -0
  169. data/lib/karafka/web/ui/views/shared/_header.erb +20 -0
  170. data/lib/karafka/web/ui/views/shared/_navigation.erb +57 -0
  171. data/lib/karafka/web/ui/views/shared/_pagination.erb +21 -0
  172. data/lib/karafka/web/ui/views/shared/exceptions/not_found.erb +39 -0
  173. data/lib/karafka/web/ui/views/shared/exceptions/pro_only.erb +52 -0
  174. data/lib/karafka/web/version.rb +8 -0
  175. data/lib/karafka/web.rb +60 -0
  176. data.tar.gz.sig +0 -0
  177. metadata +328 -0
  178. metadata.gz.sig +0 -0
@@ -0,0 +1,124 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    # Responsible for setup of the Web UI and Karafka Web-UI related components initialization.
    class Installer
      # Creates needed topics and the initial zero state, so even if no `karafka server`
      # processes are running, we can still display the empty UI
      #
      # @param replication_factor [Integer] replication factor we want to use (1 by default)
      def bootstrap!(replication_factor: 1)
        bootstrap_topics!(replication_factor)
        bootstrap_state!
      end

      # Adds the extra needed consumer group, topics and routes for Web UI to be able to operate
      def enable!
        ::Karafka::App.routes.draw do
          web_deserializer = ::Karafka::Web::Deserializer.new

          consumer_group ::Karafka::Web.config.processing.consumer_group do
            # Topic we listen on to materialize the states
            topic ::Karafka::Web.config.topics.consumers.reports do
              # Since we materialize state in intervals, we can poll for half of this time
              # without impacting the reporting responsiveness
              max_wait_time ::Karafka::Web.config.processing.interval / 2
              max_messages 1_000
              consumer ::Karafka::Web::Processing::Consumer
              deserializer web_deserializer
              manual_offset_management true
            end

            # We define those two here without consumption, so Web understands how to
            # deserialize them when used / viewed
            topic ::Karafka::Web.config.topics.consumers.states do
              active false
              deserializer web_deserializer
            end

            topic ::Karafka::Web.config.topics.errors do
              active false
              deserializer web_deserializer
            end
          end
        end

        # Installs all the consumer related listeners
        ::Karafka::Web.config.tracking.consumers.listeners.each do |listener|
          ::Karafka.monitor.subscribe(listener)
        end

        # Installs all the producer related listeners
        ::Karafka::Web.config.tracking.producers.listeners.each do |listener|
          ::Karafka.producer.monitor.subscribe(listener)
        end
      end

      private

      # Creates all the needed topics for the admin UI to work
      #
      # @param replication_factor [Integer]
      #
      # @note This method only creates topics. The initial state materialization happens in
      #   `#bootstrap_state!`, which `#bootstrap!` invokes separately. Previously it was also
      #   invoked from here, producing the initial state record twice per bootstrap.
      def bootstrap_topics!(replication_factor = 1)
        # This topic needs to have one partition
        ::Karafka::Admin.create_topic(
          ::Karafka::Web.config.topics.consumers.states,
          1,
          replication_factor,
          # We care only about the most recent state, previous are irrelevant
          { 'cleanup.policy': 'compact' }
        )

        # This topic needs to have one partition
        ::Karafka::Admin.create_topic(
          ::Karafka::Web.config.topics.consumers.reports,
          1,
          replication_factor,
          # We do not need to store this data for longer than 7 days as this data is only
          # used to materialize the end states
          # On the other hand we do not want to have it really short-living because in case
          # of a consumer crash, we may want to use this info to catch up and backfill the
          # state
          { 'retention.ms': 7 * 24 * 60 * 60 * 1_000 }
        )

        # All the errors will be dispatched here
        # This topic can have multiple partitions but we go with one by default. A single
        # Ruby process should not crash that often and if there is an expectation of a
        # higher volume of errors, this can be changed by the end user
        ::Karafka::Admin.create_topic(
          ::Karafka::Web.config.topics.errors,
          1,
          replication_factor
        )
      end

      # Creates the initial state record with all values being empty
      def bootstrap_state!
        ::Karafka.producer.produce_sync(
          topic: Karafka::Web.config.topics.consumers.states,
          # Keyed with the topic name so the compacted topic retains only the newest state
          key: Karafka::Web.config.topics.consumers.states,
          payload: {
            processes: {},
            stats: {
              batches: 0,
              messages: 0,
              errors: 0,
              retries: 0,
              dead: 0,
              busy: 0,
              enqueued: 0,
              threads_count: 0,
              processes: 0,
              rss: 0,
              listeners_count: 0,
              utilization: 0
            }
          }.to_json
        )
      end
    end
  end
end
@@ -0,0 +1,66 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    # Namespace used to encapsulate all the components needed to process the states data and
    # store it back in Kafka
    module Processing
      # Consumer used to squash and process statistics coming from particular processes, so this
      # data can be read and used. We consume this info overwriting the data we previously had
      # (if any)
      class Consumer < Karafka::BaseConsumer
        include ::Karafka::Core::Helpers::Time

        # @param args [Object] all the arguments `Karafka::BaseConsumer` accepts by default
        def initialize(*args)
          super

          @flush_interval = ::Karafka::Web.config.processing.interval / 1_000
          @consumers_aggregator = ::Karafka::Web.config.processing.consumers.aggregator
          # Shift the last-flush timestamp one full interval into the past, so the very first
          # batch always triggers a flush. This also keeps the development mode usable: there,
          # a fresh consumer instance is built per invocation, hence each instance needs to
          # report immediately for the Web UI to stay current.
          @flushed_at = monotonic_now - @flush_interval
        end

        # Aggregates consumers state into a single current state representation
        def consume
          consumer_reports = messages.select do |message|
            message.payload[:type] == 'consumer'
          end

          consumer_reports.each do |message|
            @consumers_aggregator.add(message.payload, message.offset)
          end

          return unless periodic_flush?

          flush

          mark_as_consumed(messages.last)
        end

        # Flush final state on shutdown
        def shutdown
          flush if @consumers_aggregator
        end

        private

        # @return [Boolean] is it time to persist the new current state
        def periodic_flush?
          (monotonic_now - @flushed_at) > @flush_interval
        end

        # Persists the new current state by flushing it to Kafka
        def flush
          @flushed_at = monotonic_now

          producer.produce_async(
            topic: Karafka::Web.config.topics.consumers.states,
            # Keying with the topic name ensures that the consumer states are compacted
            key: Karafka::Web.config.topics.consumers.states,
            payload: @consumers_aggregator.to_json
          )
        end
      end
    end
  end
end
@@ -0,0 +1,130 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Processing
      # Namespace for consumer sub-components
      module Consumers
        # Aggregator that tracks consumers processes states, aggregates the metrics and
        # converts data points into a materialized current state.
        class Aggregator
          include ::Karafka::Core::Helpers::Time

          def initialize
            # We keep whole reports for computation of active, current counters
            @active_reports = {}
          end

          # Uses provided process state report to update the current materialized state
          # @param report [Hash] consumer process state report
          # @param offset [Integer] offset of the message with the state report. This offset
          #   is needed as we need to be able to get all the consumers reports from a given
          #   offset.
          def add(report, offset)
            memoize_process_report(report)
            increment_total_counters(report)
            update_process_state(report, offset)
            # We always evict after counters updates because we want to use expired (stopped)
            # data for counters as it was valid previously. This can happen only when web
            # consumer had a lag and is catching up.
            evict_expired_processes
            # We could calculate this on a per request basis but this would require fetching
            # all the active processes for each view and we do not want that for performance
            # reasons
            refresh_current_stats
          end

          # @param _args [Object] extra parsing arguments (not used)
          # @return [String] json representation of the current processes state
          def to_json(*_args)
            state.to_json
          end

          private

          # @return [Hash] hash with current state from Kafka or an empty new initial state
          def state
            @state ||= State.current
          end

          # Updates the report for given process in memory
          # @param report [Hash]
          def memoize_process_report(report)
            @active_reports[report[:process][:name]] = report
          end

          # Increments the total counters based on the provided report
          # @param report [Hash]
          def increment_total_counters(report)
            report[:stats][:total].each do |key, value|
              state[:stats][key] ||= 0
              state[:stats][key] += value
            end
          end

          # Registers or updates the given process state based on the report
          #
          # @param report [Hash]
          # @param offset [Integer]
          def update_process_state(report, offset)
            process_name = report[:process][:name]

            state[:processes][process_name] = {
              dispatched_at: report[:dispatched_at],
              offset: offset
            }
          end

          # Evicts expired processes from the current state
          # We consider processes dead if they do not report often enough
          # @note We do not evict based on states (stopped), because we want to report the
          #   stopped processes for extra time within the ttl limitations. This makes
          #   tracking of things from UX perspective nicer.
          def evict_expired_processes
            max_ttl = float_now - ::Karafka::Web.config.ttl / 1_000

            state[:processes].delete_if do |_name, details|
              details[:dispatched_at] < max_ttl
            end

            @active_reports.delete_if do |_name, details|
              details[:dispatched_at] < max_ttl
            end
          end

          # Refreshes the counters that are computed based on incoming reports and not a
          # total sum. For this we use active reports we have in memory. It may not be
          # accurate for the first few seconds but it is much more optimal from performance
          # perspective than computing this fetching all data from Kafka for each view.
          def refresh_current_stats
            stats = state[:stats]

            stats[:busy] = 0
            stats[:enqueued] = 0
            stats[:threads_count] = 0
            stats[:processes] = 0
            stats[:rss] = 0
            stats[:listeners_count] = 0
            utilization = 0

            @active_reports
              .values
              .reject { |report| report[:process][:status] == 'stopped' }
              .each do |report|
                report_stats = report[:stats]
                report_process = report[:process]

                stats[:busy] += report_stats[:busy]
                stats[:enqueued] += report_stats[:enqueued]
                stats[:threads_count] += report_process[:concurrency]
                stats[:processes] += 1
                stats[:rss] += report_process[:memory_usage]
                stats[:listeners_count] += report_process[:listeners]
                utilization += report_stats[:utilization]
              end

            # Average utilization across active processes. We guard the zero-processes case
            # explicitly instead of padding the denominator with an epsilon, so the average
            # is exact and an empty cluster reports 0.0 rather than a near-zero artifact.
            active_processes = stats[:processes]
            stats[:utilization] =
              active_processes.zero? ? 0.0 : utilization / active_processes.to_f
          end
        end
      end
    end
  end
end
@@ -0,0 +1,32 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Processing
      module Consumers
        # Fetches the current consumer processes aggregated state
        class State
          extend ::Karafka::Core::Helpers::Time

          class << self
            # Try bootstrapping from the current state from Kafka if exists and if not, just
            # use a blank state. Blank state will not be flushed because materialization into
            # Kafka happens only after first report is received.
            #
            # @return [Hash] last (current) aggregated processes state from Kafka or a blank
            #   state hash (`{ processes: {}, stats: {} }`) when no state message exists yet.
            #   Note: this method never returns `false` - the blank hash is the fallback.
            def current
              # Read at most one message starting from offset 0 of the single-partition
              # states topic; `.last` yields that message or nil when the topic is empty
              state_message = ::Karafka::Admin.read_topic(
                Karafka::Web.config.topics.consumers.states,
                0,
                1
              ).last

              state_message ? state_message.payload : { processes: {}, stats: {} }
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,31 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      # Base for all the metric related contracts
      class BaseContract < ::Karafka::Core::Contractable::Contract
        # Configures the contract class with the shared web validation error messages.
        #
        # This layer is not for users extensive feedback, thus we can easily use the minimum
        # error messaging there is.
        def self.configure
          super do |config|
            errors_path = File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')

            config.error_messages = YAML
                                    .safe_load(File.read(errors_path))
                                    .fetch('en')
                                    .fetch('validations')
                                    .fetch('web')
          end
        end

        # @param data [Hash] data for validation
        # @return [Boolean] true if all good
        # @raise [Errors::Tracking::ContractError] invalid report
        def validate!(data)
          super(data, Errors::Tracking::ContractError)
        end
      end
    end
  end
end
@@ -0,0 +1,33 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        # Consumer tracking related contracts
        module Contracts
          # Expected data for each consumer group
          # It's mostly about topics details
          class ConsumerGroup < BaseContract
            configure

            required(:id) { |val| val.is_a?(String) && !val.empty? }
            required(:topics) { |val| val.is_a?(Hash) }

            # Validates each topic's details with the Topic contract. Runs only once the
            # basic structure checks above have passed, so `:topics` is known to be a Hash.
            virtual do |data, errors|
              next unless errors.empty?

              topic_contract = Topic.new

              # Topic name (key) is not validated here - presumably it is duplicated inside
              # the details themselves (TODO confirm against the reporting payload)
              data.fetch(:topics).each do |_topic_name, details|
                topic_contract.validate!(details)
              end

              nil
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,26 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        module Contracts
          # Contract for the job reporting details
          class Job < BaseContract
            configure

            required(:consumer) { |val| val.is_a?(String) }
            required(:consumer_group) { |val| val.is_a?(String) }
            required(:started_at) { |val| val.is_a?(Float) && val >= 0 }
            required(:topic) { |val| val.is_a?(String) }
            required(:partition) { |val| val.is_a?(Integer) && val >= 0 }
            required(:first_offset) { |val| val.is_a?(Integer) && val >= 0 }
            required(:last_offset) { |val| val.is_a?(Integer) && val >= 0 }
            # NOTE(review): key is misspelled ("comitted") but it must match the key emitted
            # by the reporting side, so renaming it only here would reject valid reports -
            # fix both sides together if ever corrected. No lower bound here, unlike the
            # offsets above - presumably it can be negative when nothing was committed yet
            # (confirm against the sampler).
            required(:comitted_offset) { |val| val.is_a?(Integer) }
            # Only these job types are expected to be reported
            required(:type) { |val| %w[consume revoked shutdown].include?(val) }
          end
        end
      end
    end
  end
end
@@ -0,0 +1,22 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        module Contracts
          # Partition metrics required for web to operate
          class Partition < BaseContract
            configure

            # Partition id within its topic
            required(:id) { |val| val.is_a?(Integer) && val >= 0 }
            # Only the type is validated for lags and offsets below - presumably negative
            # values (e.g. -1) are legitimate markers for "unknown / not yet established"
            # (TODO confirm against the sampler that produces these values)
            required(:lag_stored) { |val| val.is_a?(Integer) }
            required(:lag_stored_d) { |val| val.is_a?(Integer) }
            required(:committed_offset) { |val| val.is_a?(Integer) }
            required(:stored_offset) { |val| val.is_a?(Integer) }
          end
        end
      end
    end
  end
end
@@ -0,0 +1,95 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        module Contracts
          # Main consumer process related reporting schema
          #
          # Any outgoing reporting needs to match this format for it to work with the statuses
          # consumer.
          class Report < BaseContract
            configure

            required(:schema_version) { |val| val.is_a?(String) }
            required(:dispatched_at) { |val| val.is_a?(Numeric) && val.positive? }
            # We have consumers and producer reports and need to ensure that each is handled
            # in an expected fashion
            required(:type) { |val| val == 'consumer' }

            # Details of the reporting OS process and its runtime resources
            nested(:process) do
              required(:started_at) { |val| val.is_a?(Numeric) && val.positive? }
              # Name must contain at least two ":" separators - presumably a composite
              # host:pid:id style identifier (confirm against the sampler)
              required(:name) { |val| val.is_a?(String) && val.count(':') >= 2 }
              required(:memory_usage) { |val| val.is_a?(Integer) && val >= 0 }
              required(:memory_total_usage) { |val| val.is_a?(Integer) && val >= 0 }
              required(:memory_size) { |val| val.is_a?(Integer) && val >= 0 }
              # Status must be one of the Karafka process lifecycle states
              required(:status) { |val| ::Karafka::Status::STATES.key?(val.to_sym) }
              required(:listeners) { |val| val.is_a?(Integer) && val >= 0 }
              required(:concurrency) { |val| val.is_a?(Integer) && val.positive? }

              # Exactly three numeric values, each >= -1; presumably load averages where -1
              # marks platforms on which the value is unavailable (confirm in sampler)
              required(:cpu_usage) do |val|
                val.is_a?(Array) &&
                  val.all? { |key| key.is_a?(Numeric) } &&
                  val.all? { |key| key >= -1 } &&
                  val.size == 3
              end
            end

            # Versions of the relevant runtime components
            nested(:versions) do
              required(:karafka) { |val| val.is_a?(String) && !val.empty? }
              required(:waterdrop) { |val| val.is_a?(String) && !val.empty? }
              required(:ruby) { |val| val.is_a?(String) && !val.empty? }
            end

            nested(:stats) do
              required(:busy) { |val| val.is_a?(Integer) && val >= 0 }
              required(:enqueued) { |val| val.is_a?(Integer) && val >= 0 }
              required(:utilization) { |val| val.is_a?(Numeric) && val >= 0 }

              # Counters accumulated over the process lifetime
              nested(:total) do
                required(:batches) { |val| val.is_a?(Numeric) && val >= 0 }
                required(:messages) { |val| val.is_a?(Numeric) && val >= 0 }
                required(:errors) { |val| val.is_a?(Numeric) && val >= 0 }
                required(:retries) { |val| val.is_a?(Numeric) && val >= 0 }
                required(:dead) { |val| val.is_a?(Numeric) && val >= 0 }
              end
            end

            # Consumer groups have topics that have partitions
            required(:consumer_groups) { |val| val.is_a?(Hash) }

            required(:jobs) { |val| val.is_a?(Array) }

            # Validates that all the data about given consumer group is as expected
            virtual do |data, errors|
              next unless errors.empty?

              cg_contract = ConsumerGroup.new

              # Consumer group id (key) is irrelevant because it is also in the details
              data.fetch(:consumer_groups).each do |_, details|
                cg_contract.validate!(details)
              end

              nil
            end

            # Validates that job reference has all the needed info
            virtual do |data, errors|
              next unless errors.empty?

              job_contract = Job.new

              data.fetch(:jobs).each do |details|
                job_contract.validate!(details)
              end

              nil
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,29 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        module Contracts
          # Expected topic information that needs to go out
          class Topic < BaseContract
            # Loads the shared error messages backend. Every sibling tracking contract
            # (ConsumerGroup, Job, Partition, Report) invokes this; it was missing here,
            # leaving this contract as the only one without the configured error messages.
            configure

            required(:name) { |val| val.is_a?(String) && !val.empty? }
            required(:partitions) { |val| val.is_a?(Hash) }

            # Validates each partition's details with the Partition contract. Runs only once
            # the basic structure checks above have passed, so `:partitions` is a Hash.
            virtual do |data, errors|
              next unless errors.empty?

              partition_contract = Partition.new

              # Partition id (key) is not validated - the details carry the id themselves
              data.fetch(:partitions).each do |_partition_id, details|
                partition_contract.validate!(details)
              end

              nil
            end
          end
        end
      end
    end
  end
end
@@ -0,0 +1,33 @@
1
# frozen_string_literal: true

module Karafka
  module Web
    module Tracking
      module Consumers
        # Consumer monitoring related listeners
        module Listeners
          # Base consumers processes related listener
          class Base
            include ::Karafka::Core::Helpers::Time
            # Kept so subclasses can define their own delegators if they need to
            extend Forwardable

            # Forwards tracking to the configured sampler
            # @param args [Array] arguments accepted by the sampler `#track`
            # @param block [Proc] optional block forwarded to the sampler
            def track(*args, &block)
              sampler.track(*args, &block)
            end

            # Forwards a report request to the configured reporter
            # @param args [Array] arguments accepted by the reporter `#report`
            # @param block [Proc] optional block forwarded to the reporter
            def report(*args, &block)
              reporter.report(*args, &block)
            end

            # Forwards a forced report request to the configured reporter
            # @param args [Array] arguments accepted by the reporter `#report!`
            # @param block [Proc] optional block forwarded to the reporter
            def report!(*args, &block)
              reporter.report!(*args, &block)
            end

            private

            # @return [Object] sampler in use
            def sampler
              @sampler ||= ::Karafka::Web.config.tracking.consumers.sampler
            end

            # @return [Object] reporter in use
            def reporter
              @reporter ||= ::Karafka::Web.config.tracking.reporter
            end
          end
        end
      end
    end
  end
end