karafka-web 0.11.2 → 0.11.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +24 -0
  3. data/Gemfile +0 -2
  4. data/Gemfile.lock +78 -39
  5. data/bin/integrations +44 -0
  6. data/bin/rspecs +6 -2
  7. data/bin/verify_kafka_warnings +1 -1
  8. data/config/locales/errors.yml +1 -0
  9. data/docker-compose.yml +1 -3
  10. data/karafka-web.gemspec +2 -2
  11. data/lib/karafka/web/app.rb +2 -3
  12. data/lib/karafka/web/cli/help.rb +1 -1
  13. data/lib/karafka/web/config.rb +8 -0
  14. data/lib/karafka/web/contracts/base.rb +2 -4
  15. data/lib/karafka/web/contracts/config.rb +5 -5
  16. data/lib/karafka/web/deserializer.rb +6 -1
  17. data/lib/karafka/web/errors.rb +8 -5
  18. data/lib/karafka/web/management/actions/enable.rb +14 -1
  19. data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb +38 -0
  20. data/lib/karafka/web/management/migrator.rb +3 -2
  21. data/lib/karafka/web/pro/commanding/commands/base.rb +1 -1
  22. data/lib/karafka/web/pro/commanding/contracts/config.rb +2 -4
  23. data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb +2 -3
  24. data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb +1 -2
  25. data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb +1 -3
  26. data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb +2 -4
  27. data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb +2 -4
  28. data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb +3 -5
  29. data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb +3 -5
  30. data/lib/karafka/web/pro/ui/lib/search/runner.rb +14 -1
  31. data/lib/karafka/web/pro/ui/routes/errors.rb +3 -3
  32. data/lib/karafka/web/pro/ui/routes/explorer.rb +3 -3
  33. data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb +9 -0
  34. data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb +41 -0
  35. data/lib/karafka/web/pro/ui/views/health/changes.erb +12 -13
  36. data/lib/karafka/web/pro/ui/views/health/lags.erb +12 -13
  37. data/lib/karafka/web/pro/ui/views/health/offsets.erb +12 -13
  38. data/lib/karafka/web/pro/ui/views/health/overview.erb +15 -16
  39. data/lib/karafka/web/processing/consumer.rb +8 -3
  40. data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +1 -1
  41. data/lib/karafka/web/processing/consumers/aggregators/state.rb +10 -6
  42. data/lib/karafka/web/processing/consumers/contracts/state.rb +6 -1
  43. data/lib/karafka/web/processing/consumers/reports_migrator.rb +49 -0
  44. data/lib/karafka/web/processing/time_series_tracker.rb +1 -1
  45. data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
  46. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +1 -0
  47. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +2 -1
  48. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +46 -0
  49. data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +1 -0
  50. data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb +20 -0
  51. data/lib/karafka/web/tracking/consumers/sampler/enrichers/consumer_groups.rb +116 -0
  52. data/lib/karafka/web/tracking/consumers/sampler/metrics/base.rb +20 -0
  53. data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb +113 -0
  54. data/lib/karafka/web/tracking/consumers/sampler/metrics/jobs.rb +60 -0
  55. data/lib/karafka/web/tracking/consumers/sampler/metrics/network.rb +48 -0
  56. data/lib/karafka/web/tracking/consumers/sampler/metrics/os.rb +206 -0
  57. data/lib/karafka/web/tracking/consumers/sampler/metrics/server.rb +33 -0
  58. data/lib/karafka/web/tracking/consumers/sampler.rb +34 -215
  59. data/lib/karafka/web/tracking/contracts/error.rb +1 -0
  60. data/lib/karafka/web/tracking/helpers/ttls/hash.rb +2 -3
  61. data/lib/karafka/web/tracking/helpers/ttls/stats.rb +1 -2
  62. data/lib/karafka/web/tracking/producers/listeners/base.rb +1 -1
  63. data/lib/karafka/web/tracking/producers/listeners/errors.rb +2 -1
  64. data/lib/karafka/web/tracking/ui/errors.rb +76 -0
  65. data/lib/karafka/web/ui/base.rb +19 -9
  66. data/lib/karafka/web/ui/controllers/requests/execution_wrapper.rb +2 -4
  67. data/lib/karafka/web/ui/controllers/requests/params.rb +1 -1
  68. data/lib/karafka/web/ui/helpers/application_helper.rb +1 -1
  69. data/lib/karafka/web/ui/helpers/paths_helper.rb +6 -9
  70. data/lib/karafka/web/ui/lib/sorter.rb +1 -1
  71. data/lib/karafka/web/ui/models/health.rb +14 -9
  72. data/lib/karafka/web/ui/models/jobs.rb +4 -6
  73. data/lib/karafka/web/ui/models/message.rb +7 -8
  74. data/lib/karafka/web/ui/models/metrics/aggregated.rb +4 -4
  75. data/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +1 -2
  76. data/lib/karafka/web/ui/models/metrics/charts/topics.rb +2 -2
  77. data/lib/karafka/web/ui/models/metrics/topics.rb +3 -4
  78. data/lib/karafka/web/ui/models/recurring_tasks/schedule.rb +1 -1
  79. data/lib/karafka/web/ui/public/javascripts/application.min.js.gz +0 -0
  80. data/lib/karafka/web/ui/public/stylesheets/application.min.css +199 -105
  81. data/lib/karafka/web/ui/public/stylesheets/application.min.css.br +0 -0
  82. data/lib/karafka/web/ui/public/stylesheets/application.min.css.gz +0 -0
  83. data/lib/karafka/web/ui/public/stylesheets/libs/highlight_dark.min.css.gz +0 -0
  84. data/lib/karafka/web/ui/public/stylesheets/libs/highlight_light.min.css.gz +0 -0
  85. data/lib/karafka/web/ui/routes/errors.rb +3 -3
  86. data/lib/karafka/web/ui/views/shared/exceptions/unhandled_error.erb +42 -0
  87. data/lib/karafka/web/version.rb +1 -1
  88. data/lib/karafka/web.rb +10 -13
  89. data/package-lock.json +184 -240
  90. data/package.json +3 -3
  91. data/renovate.json +13 -0
  92. metadata +19 -4
@@ -146,9 +146,9 @@ module Karafka
146
146
  # @param page [Integer] which page we want to get
147
147
  def topic_page(topic_id, partitions_ids, page)
148
148
  # This is the bottleneck, for each partition we make one request :(
149
- offsets = partitions_ids.map do |partition_id|
149
+ offsets = partitions_ids.to_h do |partition_id|
150
150
  [partition_id, Models::WatermarkOffsets.find(topic_id, partition_id)]
151
- end.to_h
151
+ end
152
152
 
153
153
  # Count number of elements we have in each partition
154
154
  # This assumes linear presence until low. If not, gaps will be filled like we fill
@@ -158,7 +158,7 @@ module Karafka
158
158
  # Establish initial offsets for the iterator (where to start) per partition
159
159
  # We do not use the negative lookup iterator because we already can compute starting
160
160
  # offsets. This saves a lot of calls to Kafka
161
- ranges = Sets.call(counts, page).map do |partition_position, partition_range|
161
+ ranges = Sets.call(counts, page).to_h do |partition_position, partition_range|
162
162
  partition_id = partitions_ids.to_a[partition_position]
163
163
  watermarks = offsets[partition_id]
164
164
 
@@ -169,7 +169,7 @@ module Karafka
169
169
 
170
170
  # This range represents offsets we want to fetch
171
171
  [partition_id, lowest..highest]
172
- end.to_h
172
+ end
173
173
 
174
174
  # We start on our topic from the lowest offset for each expected partition
175
175
  iterator = Karafka::Pro::Iterator.new(
@@ -208,18 +208,17 @@ module Karafka
208
208
  end
209
209
 
210
210
  [
211
- aggregated.values.map(&:values).map(&:reverse).reduce(:+),
211
+ aggregated.values.sum([]) { |partition| partition.values.reverse },
212
212
  !Sets.call(counts, page + 1).empty?
213
213
  ]
214
214
  end
215
215
 
216
216
  private
217
217
 
218
- # @param args [Object] anything required by the admin `#read_topic`
219
218
  # @return [Array<Karafka::Messages::Message>, false] topic partition messages or false
220
219
  # in case we hit a non-existing offset
221
- def read_topic(*args)
222
- Lib::Admin.read_topic(*args)
220
+ def read_topic(*)
221
+ Lib::Admin.read_topic(*)
223
222
  rescue Rdkafka::RdkafkaError => e
224
223
  return false if e.code == :auto_offset_reset
225
224
 
@@ -103,10 +103,10 @@ module Karafka
103
103
  base = sample.last.dup
104
104
 
105
105
  DELTA_KEYS.each do |key|
106
- base[key] = previous.last[key] + (sample.last[key] - previous.last[key]) / 2
106
+ base[key] = previous.last[key] + ((sample.last[key] - previous.last[key]) / 2)
107
107
  end
108
108
 
109
- filled << [previous.first + (sample.first - previous.first) / 2, base]
109
+ filled << [previous.first + ((sample.first - previous.first) / 2), base]
110
110
  end
111
111
 
112
112
  filled << sample
@@ -182,12 +182,12 @@ module Karafka
182
182
  # @param current [Hash]
183
183
  # @return [Hash] delta computed values
184
184
  def compute_deltas(previous, current)
185
- DELTA_KEYS.map do |delta_key|
185
+ DELTA_KEYS.to_h do |delta_key|
186
186
  [
187
187
  delta_key,
188
188
  current.fetch(delta_key) - previous.fetch(delta_key)
189
189
  ]
190
- end.to_h
190
+ end
191
191
  end
192
192
  end
193
193
  end
@@ -38,8 +38,7 @@ module Karafka
38
38
  # @return [String] JSON with data about all the charts we were interested in
39
39
  def with(*args)
40
40
  args
41
- .map { |name| [name.to_sym, public_send(name)] }
42
- .to_h
41
+ .to_h { |name| [name.to_sym, public_send(name)] }
43
42
  .to_json
44
43
  end
45
44
 
@@ -36,11 +36,11 @@ module Karafka
36
36
  end
37
37
 
38
38
  # Extract the lag stored only from all the data
39
- per_topic = @data.to_h.map do |topic, metrics|
39
+ per_topic = @data.to_h.to_h do |topic, metrics|
40
40
  extracted = metrics.map { |metric| [metric.first, metric.last[:lag_hybrid]] }
41
41
 
42
42
  [topic, extracted]
43
- end.to_h
43
+ end
44
44
 
45
45
  # We name it with a space because someone may have a topic called "total" and we
46
46
  # want to avoid collisions
@@ -43,9 +43,9 @@ module Karafka
43
43
 
44
44
  # Always align the order of topics in hash based on their name so it is
45
45
  # independent from the reported order
46
- extracted[range] = range_extracted.keys.sort.map do |key|
46
+ extracted[range] = range_extracted.keys.sort.to_h do |key|
47
47
  [key, range_extracted[key]]
48
- end.to_h
48
+ end
49
49
  end
50
50
 
51
51
  extracted
@@ -67,8 +67,7 @@ module Karafka
67
67
  .select { |val| val.is_a?(Hash) }
68
68
  .flat_map(&:keys)
69
69
  .uniq
70
- .map { |key| [key, nil] }
71
- .to_h
70
+ .to_h { |key| [key, nil] }
72
71
  .freeze
73
72
 
74
73
  # Normalize data in between topics reportings
@@ -50,7 +50,7 @@ module Karafka
50
50
  new(candidate.payload)
51
51
  rescue Rdkafka::RdkafkaError => e
52
52
  # If any of "topic missing" is raised, we return false but other errors we re-raise
53
- raise(e) unless EXPECTED_RDKAFKA_ERRORS.any? { |code| e.code == code }
53
+ raise(e) unless EXPECTED_RDKAFKA_ERRORS.include?(e.code)
54
54
 
55
55
  false
56
56
  end