karafka-web 0.11.3 → 0.11.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (85)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +14 -0
  3. data/Gemfile +0 -2
  4. data/Gemfile.lock +45 -32
  5. data/bin/integrations +44 -0
  6. data/bin/rspecs +6 -2
  7. data/docker-compose.yml +1 -1
  8. data/karafka-web.gemspec +1 -1
  9. data/lib/karafka/web/app.rb +2 -3
  10. data/lib/karafka/web/cli/help.rb +1 -1
  11. data/lib/karafka/web/config.rb +8 -0
  12. data/lib/karafka/web/contracts/base.rb +2 -4
  13. data/lib/karafka/web/contracts/config.rb +5 -5
  14. data/lib/karafka/web/management/actions/enable.rb +14 -1
  15. data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb +38 -0
  16. data/lib/karafka/web/management/migrator.rb +3 -2
  17. data/lib/karafka/web/pro/commanding/commands/base.rb +1 -1
  18. data/lib/karafka/web/pro/commanding/contracts/config.rb +2 -4
  19. data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb +2 -3
  20. data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb +1 -2
  21. data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb +1 -3
  22. data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb +2 -4
  23. data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb +2 -4
  24. data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb +3 -5
  25. data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb +3 -5
  26. data/lib/karafka/web/pro/ui/lib/search/runner.rb +14 -1
  27. data/lib/karafka/web/pro/ui/routes/errors.rb +3 -3
  28. data/lib/karafka/web/pro/ui/routes/explorer.rb +3 -3
  29. data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb +9 -0
  30. data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb +41 -0
  31. data/lib/karafka/web/pro/ui/views/health/changes.erb +12 -13
  32. data/lib/karafka/web/pro/ui/views/health/lags.erb +12 -13
  33. data/lib/karafka/web/pro/ui/views/health/offsets.erb +12 -13
  34. data/lib/karafka/web/pro/ui/views/health/overview.erb +15 -16
  35. data/lib/karafka/web/processing/consumer.rb +8 -3
  36. data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +1 -1
  37. data/lib/karafka/web/processing/consumers/aggregators/state.rb +5 -5
  38. data/lib/karafka/web/processing/consumers/contracts/state.rb +1 -1
  39. data/lib/karafka/web/processing/consumers/reports_migrator.rb +49 -0
  40. data/lib/karafka/web/processing/time_series_tracker.rb +1 -1
  41. data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
  42. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +1 -0
  43. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +2 -1
  44. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +46 -0
  45. data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +1 -0
  46. data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb +20 -0
  47. data/lib/karafka/web/tracking/consumers/sampler/enrichers/consumer_groups.rb +116 -0
  48. data/lib/karafka/web/tracking/consumers/sampler/metrics/base.rb +20 -0
  49. data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb +113 -0
  50. data/lib/karafka/web/tracking/consumers/sampler/metrics/jobs.rb +60 -0
  51. data/lib/karafka/web/tracking/consumers/sampler/metrics/network.rb +48 -0
  52. data/lib/karafka/web/tracking/consumers/sampler/metrics/os.rb +206 -0
  53. data/lib/karafka/web/tracking/consumers/sampler/metrics/server.rb +33 -0
  54. data/lib/karafka/web/tracking/consumers/sampler.rb +34 -215
  55. data/lib/karafka/web/tracking/contracts/error.rb +1 -0
  56. data/lib/karafka/web/tracking/helpers/ttls/hash.rb +2 -3
  57. data/lib/karafka/web/tracking/helpers/ttls/stats.rb +1 -2
  58. data/lib/karafka/web/tracking/producers/listeners/errors.rb +2 -1
  59. data/lib/karafka/web/tracking/ui/errors.rb +76 -0
  60. data/lib/karafka/web/ui/base.rb +19 -9
  61. data/lib/karafka/web/ui/controllers/requests/execution_wrapper.rb +2 -4
  62. data/lib/karafka/web/ui/controllers/requests/params.rb +1 -1
  63. data/lib/karafka/web/ui/helpers/application_helper.rb +1 -1
  64. data/lib/karafka/web/ui/helpers/paths_helper.rb +6 -9
  65. data/lib/karafka/web/ui/lib/sorter.rb +1 -1
  66. data/lib/karafka/web/ui/models/health.rb +14 -9
  67. data/lib/karafka/web/ui/models/jobs.rb +4 -6
  68. data/lib/karafka/web/ui/models/message.rb +7 -8
  69. data/lib/karafka/web/ui/models/metrics/aggregated.rb +4 -4
  70. data/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +1 -2
  71. data/lib/karafka/web/ui/models/metrics/charts/topics.rb +2 -2
  72. data/lib/karafka/web/ui/models/metrics/topics.rb +3 -4
  73. data/lib/karafka/web/ui/models/recurring_tasks/schedule.rb +1 -1
  74. data/lib/karafka/web/ui/public/javascripts/application.min.js.gz +0 -0
  75. data/lib/karafka/web/ui/public/stylesheets/application.min.css +3 -0
  76. data/lib/karafka/web/ui/public/stylesheets/application.min.css.br +0 -0
  77. data/lib/karafka/web/ui/public/stylesheets/application.min.css.gz +0 -0
  78. data/lib/karafka/web/ui/routes/errors.rb +3 -3
  79. data/lib/karafka/web/ui/views/shared/exceptions/unhandled_error.erb +42 -0
  80. data/lib/karafka/web/version.rb +1 -1
  81. data/lib/karafka/web.rb +2 -3
  82. data/package-lock.json +180 -236
  83. data/package.json +3 -3
  84. data/renovate.json +13 -0
  85. metadata +18 -3
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 7b958131581b6fe2fd1a45a68b7366fb842ca15eccd946988516e78b7fcc3cce
-  data.tar.gz: 899898076b7fc09111d8cfd77f5e68fe6158b9bfee3128292f77248f2b5922c8
+  metadata.gz: c30d2f5113f89bf796fbfa024336ebcad6796826bcbd691fc73fab1fb3fd05dd
+  data.tar.gz: 3c4fe2b494603f7c7f73c73fc1b0f8a82a4c09dc06ab885dd334f7d96632c8da
 SHA512:
-  metadata.gz: b53b0e2dc696cf5af719ad49b90c130a7122261a8123e5ac8ca4deb31d0f397e49f09d71032df4e840ad3a5f9b5b3115709fbcd08568a0991ee24b0da1b7054e
-  data.tar.gz: b3eeac33c925b9527aef85231a74a255c3d59aedd36f20a174e72b282db744cb6a0779258c9136266f54041ab0e81ce8c95a6a042d4ecc908e0cbba299d651f3
+  metadata.gz: d4327b5af8e3a41e379e5db63b08269ec9a22d5912a4839e86d113a377f8003b0d597af7613b2d6243cbaa6520c649b1f1ec66d548bf780afda069f121fd4921
+  data.tar.gz: 14c3162caadaf02381148953758e5162278bc5f8415d28dd0476453942059aa0cd582957b9cd7f8e84faf8d9213302510594fc548ec3602c9171785eb25ba813
data/CHANGELOG.md CHANGED
@@ -1,5 +1,19 @@
 # Karafka Web Changelog
 
+## 0.11.4 (2025-11-01)
+- [Enhancement] Show placeholder rows for partitions with no data during rebalances in health view. The UI now displays all topic partitions (0 to N-1) with "No data available" indicators for partitions currently being rebalanced, preventing confusion from disappearing partitions. Consumer reports now include `partitions_cnt` field extracted from librdkafka statistics. Consumer schema version bumped to 1.5.0 (breaking change).
+- [Enhancement] Track and report UI errors originating from Roda/Puma web processes directly to Kafka errors topic for visibility and debugging. UI errors are dispatched asynchronously from web processes using a dedicated listener.
+- [Enhancement] Require Karafka 2.5.2 at minimum and migrate from string-based execution mode comparisons to the new ExecutionMode object API.
+- [Enhancement] Increase Web UI processing consumer backoff time to 30 seconds when encountering incompatible schema errors to prevent error spam during rolling upgrades.
+- [Enhancement] Add unique `id` field to error reports to track duplicate error occurrences. Error schema version bumped to 1.2.0 while maintaining backward compatibility with older error formats (1.0.0, 1.1.0) in the Web UI.
+- [Enhancement] Add container-aware metrics collection for Docker/Kubernetes environments. The Web UI now reports accurate container memory limits from cgroups (v1 and v2) instead of misleading host metrics, while maintaining full backward compatibility with non-containerized deployments.
+- [Enhancement] Add per-message report migration system to handle schema evolution for consumer reports. This allows transparent migration of old report formats (e.g., schema 1.2.x using `process[:name]`) to current expectations (schema 1.3.0+ using `process[:id]`), ensuring backward compatibility with reports from older karafka-web versions (≤ v0.8.2) that may still exist in Kafka topics.
+- [Change] Reduce `max_messages` for consumer reports processing from 1000 to 200 to prevent excessive memory usage in large-scale deployments. Processing 1000 messages at once can impact memory consumption significantly in big systems, while 200 messages provides better memory efficiency with negligible impact on throughput.
+- [Refactor] Extract metrics collection logic from monolithic Sampler into focused, single-responsibility classes (Metrics::Base, Metrics::Os, Metrics::Container, Metrics::Network, Metrics::Server, Metrics::Jobs) and consumer groups enrichment into dedicated enricher (Enrichers::Base, Enrichers::ConsumerGroups) for improved maintainability and testability.
+- [Testing] Add Docker-based integration tests for container metrics collection. Tests verify cgroup v1/v2 detection, memory limit reading, and fallback behavior across multiple containerized scenarios with different resource constraints.
+- [Fix] Fix "OS memory used" metric on Linux reporting same value as RSS instead of system-wide memory usage. The metric now correctly sums memory usage across all processes (or all container processes when running in Docker/Kubernetes) to match macOS behavior and original design intent.
+- [Fix] Fix crash when processing old consumer reports from schema versions < 1.3.0 (karafka-web ≤ v0.8.2) that used `process[:name]` field instead of `process[:id]`. The error `undefined method 'to_sym' for nil` would occur when these old reports were encountered during upgrades. Reports are now automatically migrated in-place to the current schema format.
+
 ## 0.11.3 (2025-09-29)
 - [Enhancement] Upgrade DaisyUI to 5.1.
 - [Change] Remove Ruby `3.1` support according to the EOL schedule.
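
Note on the container-aware metrics entry above: the memory limits come from the cgroup filesystem. Below is a minimal sketch of the general detection approach, assuming the conventional cgroup v2 and v1 mount points; the module and method names are illustrative, not the gem's actual implementation:

```ruby
# Illustrative cgroup v1/v2 memory-limit detection (not karafka-web's code).
module ContainerMemorySketch
  CGROUP_V2_MAX   = '/sys/fs/cgroup/memory.max'
  CGROUP_V1_LIMIT = '/sys/fs/cgroup/memory/memory.limit_in_bytes'

  # @return [Integer, nil] limit in bytes, or nil when unlimited / not containerized
  def self.limit_bytes
    if File.exist?(CGROUP_V2_MAX)
      raw = File.read(CGROUP_V2_MAX).strip
      # cgroup v2 reports the literal string "max" when no limit is set
      return raw == 'max' ? nil : raw.to_i
    end

    if File.exist?(CGROUP_V1_LIMIT)
      raw = File.read(CGROUP_V1_LIMIT).strip.to_i
      # cgroup v1 reports a huge sentinel value when unlimited
      return raw >= (1 << 60) ? nil : raw
    end

    nil
  end
end
```

When no limit is found, falling back to host metrics matches the backward-compatible behavior the changelog describes for non-containerized deployments.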
data/Gemfile CHANGED
@@ -2,8 +2,6 @@
 
 source 'https://rubygems.org'
 
-plugin 'diffend'
-
 gemspec
 
 group :test do
data/Gemfile.lock CHANGED
@@ -1,9 +1,9 @@
 PATH
   remote: .
   specs:
-    karafka-web (0.11.3)
+    karafka-web (0.11.4)
       erubi (~> 1.4)
-      karafka (>= 2.5.0, < 2.6.0)
+      karafka (>= 2.5.2, < 2.6.0)
       karafka-core (>= 2.5.0, < 2.6.0)
       roda (~> 3.68, >= 3.69)
       tilt (~> 2.0)
@@ -11,7 +11,7 @@ PATH
 GEM
   remote: https://rubygems.org/
   specs:
-    activesupport (7.2.2.2)
+    activesupport (8.0.3)
       base64
       benchmark (>= 0.3)
       bigdecimal
@@ -23,19 +23,20 @@ GEM
       minitest (>= 5.1)
       securerandom (>= 0.3)
       tzinfo (~> 2.0, >= 2.0.5)
-    base64 (0.2.0)
+      uri (>= 0.13.1)
+    base64 (0.3.0)
     benchmark (0.4.1)
-    bigdecimal (3.1.9)
+    bigdecimal (3.3.1)
     byebug (12.0.0)
     concurrent-ruby (1.3.5)
-    connection_pool (2.5.3)
+    connection_pool (2.5.4)
     diff-lcs (1.6.2)
     docile (1.4.1)
     drb (2.2.3)
     erubi (1.13.1)
-    et-orbi (1.2.11)
+    et-orbi (1.4.0)
       tzinfo
-    factory_bot (6.5.5)
+    factory_bot (6.5.6)
       activesupport (>= 6.1.0)
     ffi (1.17.2)
     ffi (1.17.2-aarch64-linux-gnu)
@@ -48,43 +49,54 @@ GEM
     ffi (1.17.2-x86_64-darwin)
     ffi (1.17.2-x86_64-linux-gnu)
     ffi (1.17.2-x86_64-linux-musl)
-    fugit (1.11.2)
-      et-orbi (~> 1, >= 1.2.11)
+    fugit (1.12.1)
+      et-orbi (~> 1.4)
       raabro (~> 1.4)
     i18n (1.14.7)
       concurrent-ruby (~> 1.0)
-    karafka (2.5.0)
+    json (2.15.1)
+    karafka (2.5.2)
       base64 (~> 0.2)
-      karafka-core (>= 2.5.2, < 2.6.0)
-      karafka-rdkafka (>= 0.19.5)
-      waterdrop (>= 2.8.3, < 3.0.0)
+      karafka-core (>= 2.5.6, < 2.6.0)
+      karafka-rdkafka (>= 0.22.0)
+      waterdrop (>= 2.8.9, < 3.0.0)
       zeitwerk (~> 2.3)
-    karafka-core (2.5.2)
-      karafka-rdkafka (>= 0.19.2, < 0.21.0)
+    karafka-core (2.5.7)
+      karafka-rdkafka (>= 0.20.0)
       logger (>= 1.6.0)
-    karafka-rdkafka (0.20.1)
+    karafka-rdkafka (0.22.2)
       ffi (~> 1.15)
+      json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.20.1-arm64-darwin)
+    karafka-rdkafka (0.22.2-aarch64-linux-gnu)
       ffi (~> 1.15)
+      json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.20.1-x86_64-linux-gnu)
+    karafka-rdkafka (0.22.2-arm64-darwin)
       ffi (~> 1.15)
+      json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
-    karafka-rdkafka (0.20.1-x86_64-linux-musl)
+    karafka-rdkafka (0.22.2-x86_64-linux-gnu)
       ffi (~> 1.15)
+      json (> 2.0)
+      logger
+      mini_portile2 (~> 2.6)
+      rake (> 12)
+    karafka-rdkafka (0.22.2-x86_64-linux-musl)
+      ffi (~> 1.15)
+      json (> 2.0)
       logger
       mini_portile2 (~> 2.6)
       rake (> 12)
     logger (1.7.0)
     mini_portile2 (2.8.9)
-    minitest (5.25.5)
+    minitest (5.26.0)
     nokogiri (1.18.10)
       mini_portile2 (~> 2.8.2)
       racc (~> 1.4)
@@ -107,44 +119,45 @@ GEM
     ostruct (0.6.3)
     raabro (1.4.0)
     racc (1.8.1)
-    rack (3.1.16)
+    rack (3.2.3)
     rack-test (2.2.0)
       rack (>= 1.3)
     rackup (0.2.3)
       rack (>= 3.0.0.beta1)
       webrick
     rake (13.3.0)
-    roda (3.92.0)
+    roda (3.97.0)
       rack
-    rspec (3.13.1)
+    rspec (3.13.2)
       rspec-core (~> 3.13.0)
       rspec-expectations (~> 3.13.0)
       rspec-mocks (~> 3.13.0)
-    rspec-core (3.13.4)
+    rspec-core (3.13.6)
       rspec-support (~> 3.13.0)
     rspec-expectations (3.13.5)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-mocks (3.13.5)
+    rspec-mocks (3.13.6)
       diff-lcs (>= 1.2.0, < 2.0)
       rspec-support (~> 3.13.0)
-    rspec-support (3.13.4)
+    rspec-support (3.13.6)
     securerandom (0.4.1)
     simplecov (0.22.0)
       docile (~> 1.1)
       simplecov-html (~> 0.11)
       simplecov_json_formatter (~> 0.1)
-    simplecov-html (0.13.1)
+    simplecov-html (0.13.2)
     simplecov_json_formatter (0.1.4)
-    tilt (2.6.0)
+    tilt (2.6.1)
     tzinfo (2.0.6)
       concurrent-ruby (~> 1.0)
-    waterdrop (2.8.4)
+    uri (1.0.4)
+    waterdrop (2.8.12)
       karafka-core (>= 2.4.9, < 3.0.0)
-      karafka-rdkafka (>= 0.19.2)
+      karafka-rdkafka (>= 0.20.0)
       zeitwerk (~> 2.3)
     webrick (1.9.1)
-    zeitwerk (2.6.18)
+    zeitwerk (2.7.3)
 
 PLATFORMS
   aarch64-linux-gnu
data/bin/integrations ADDED
@@ -0,0 +1,44 @@
+#!/usr/bin/env bash
+
+# Integration test runner for container metrics
+# Runs tests inside Docker containers with various memory/CPU configurations
+
+set -e
+
+echo "================================================"
+echo "Container Metrics Integration Tests"
+echo "================================================"
+echo ""
+
+# Build the test image first
+echo "Building integration test Docker image..."
+docker compose -f spec/integrations/docker-compose.yml build
+
+echo ""
+echo "Running integration tests in containers..."
+echo ""
+
+# Run all test scenarios
+# Note: Each container runs independently and exits after tests complete
+docker compose -f spec/integrations/docker-compose.yml up \
+  --abort-on-container-exit \
+  --exit-code-from test-container-limited \
+  test-container-limited
+
+docker compose -f spec/integrations/docker-compose.yml up \
+  --abort-on-container-exit \
+  --exit-code-from test-container-unlimited \
+  test-container-unlimited
+
+docker compose -f spec/integrations/docker-compose.yml up \
+  --abort-on-container-exit \
+  --exit-code-from test-container-strict \
+  test-container-strict
+
+echo ""
+echo "================================================"
+echo "All integration tests completed successfully!"
+echo "================================================"
+
+# Clean up
+docker compose -f spec/integrations/docker-compose.yml down
data/bin/rspecs CHANGED
@@ -2,5 +2,9 @@
 
 set -e
 
-SPECS_TYPE=regular bundle exec rspec --exclude-pattern "**/pro/**/*_spec.rb"
-SPECS_TYPE=pro bundle exec rspec spec/lib/karafka/web/pro
+SPECS_TYPE=regular bundle exec rspec \
+  spec/lib \
+  --exclude-pattern "**/pro/**/*_spec.rb"
+
+SPECS_TYPE=pro bundle exec rspec \
+  spec/lib/karafka/web/pro
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
 services:
   kafka:
     container_name: kafka
-    image: confluentinc/cp-kafka:8.0.1
+    image: confluentinc/cp-kafka:8.1.0
 
     ports:
       - 9092:9092
data/karafka-web.gemspec CHANGED
@@ -17,7 +17,7 @@ Gem::Specification.new do |spec|
   spec.licenses = %w[LGPL-3.0-only Commercial]
 
   spec.add_dependency 'erubi', '~> 1.4'
-  spec.add_dependency 'karafka', '>= 2.5.0', '< 2.6.0'
+  spec.add_dependency 'karafka', '>= 2.5.2', '< 2.6.0'
   spec.add_dependency 'karafka-core', '>= 2.5.0', '< 2.6.0'
   spec.add_dependency 'roda', '~> 3.68', '>= 3.69'
   spec.add_dependency 'tilt', '~> 2.0'
data/lib/karafka/web/app.rb CHANGED
@@ -6,9 +6,8 @@ module Karafka
     class App
       class << self
         # @param env [Hash] Rack env
-        # @param block [Proc] Rack block
-        def call(env, &block)
-          engine.call(env, &block)
+        def call(env, &)
+          engine.call(env, &)
         end
 
         # @return [Class] regular or pro Web engine
data/lib/karafka/web/cli/help.rb CHANGED
@@ -10,7 +10,7 @@ module Karafka
       # Print available commands
       def call
         # Find the longest command for alignment purposes
-        max_command_length = self.class.commands.map(&:name).map(&:size).max
+        max_command_length = self.class.commands.map { |command| command.name.size }.max
 
         puts 'Karafka Web UI commands:'
 
data/lib/karafka/web/config.rb CHANGED
@@ -171,6 +171,14 @@ module Karafka
            Tracking::Producers::Listeners::Errors.new
          ]
        end
+
+        setting :ui do
+          # Listeners needed for the Web UI to track errors from Roda/Puma
+          # These dispatch directly to Kafka since there's no background reporter in web processes
+          setting :listeners, default: [
+            Tracking::Ui::Errors.new
+          ]
+        end
      end
 
      # States processing related settings
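
The new `tracking.ui.listeners` setting reuses karafka's monitor/listener convention: a subscribed object receives an event via an `on_<event name>` method. A hedged sketch of a compatible listener shape follows; the internals of the real `Tracking::Ui::Errors` class (file 59 above) are not shown in this diff, so the handling below is illustrative only:

```ruby
# Sketch of a monitor-compatible listener; the real Tracking::Ui::Errors
# listener additionally serializes and dispatches the error to the Kafka errors topic.
class UiErrorsListenerSketch
  # Called for `error.occurred` events published on ::Karafka.monitor
  # @param event [Karafka::Core::Monitoring::Event] event carrying the error payload
  def on_error_occurred(event)
    error = event[:error]
    warn "UI error captured: #{error.class}: #{error.message}"
  end
end

::Karafka.monitor.subscribe(UiErrorsListenerSketch.new)
```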
data/lib/karafka/web/contracts/base.rb CHANGED
@@ -13,10 +13,8 @@ module Karafka
        return super if block_given?
 
        super do |config|
-          config.error_messages = YAML.safe_load(
-            File.read(
-              File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
-            )
+          config.error_messages = YAML.safe_load_file(
+            File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
          ).fetch('en').fetch('validations').fetch('web')
        end
      end
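
The `YAML.safe_load_file` refactor repeated across the contract files in this release is behavior-preserving: since Psych 4, `safe_load_file` is a shorthand for reading the file and passing its contents through `safe_load`:

```ruby
require 'yaml'

path = 'config/locales/errors.yml'

# Equivalent results for a regular on-disk YAML file:
YAML.safe_load(File.read(path)) == YAML.safe_load_file(path) # => true
```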
data/lib/karafka/web/contracts/config.rb CHANGED
@@ -19,7 +19,7 @@ module Karafka
          required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
 
          required(:config) do |val|
-            val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+            val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
          end
        end
 
@@ -28,7 +28,7 @@
          required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
 
          required(:config) do |val|
-            val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+            val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
          end
        end
 
@@ -36,7 +36,7 @@
          required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
 
          required(:config) do |val|
-            val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+            val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
          end
        end
 
@@ -44,7 +44,7 @@
          required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
 
          required(:config) do |val|
-            val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+            val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
          end
        end
 
@@ -52,7 +52,7 @@
          required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
 
          required(:config) do |val|
-            val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+            val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
          end
        end
      end
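
The `all?(Symbol)` form used above relies on `Enumerable#all?` accepting a pattern argument (Ruby 2.5+): each element is tested with `pattern === element`, and for a class argument that is an `is_a?` check, so it is equivalent to the block it replaces:

```ruby
{ a: 1, b: 2 }.keys.all? { |key| key.is_a?(Symbol) } # => true
{ a: 1, b: 2 }.keys.all?(Symbol)                     # => true (Symbol === :a)
{ 'a' => 1 }.keys.all?(Symbol)                       # => false
```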
data/lib/karafka/web/management/actions/enable.rb CHANGED
@@ -61,7 +61,7 @@ module Karafka
            # Since we materialize state in intervals, we can poll for half of this time
            # without impacting the reporting responsiveness
            max_wait_time ::Karafka::Web.config.processing.interval / 2
-            max_messages 1_000
+            max_messages 200
            consumer ::Karafka::Web::Processing::Consumer
            # This needs to be true in order not to reload the consumer in dev. This consumer
            # should not be affected by the end user development process
@@ -73,6 +73,13 @@ module Karafka
            # consumer group name would be renamed and we would start consuming all
            # historical
            initial_offset 'latest'
+            # Increase backoff time on errors. Incompatible schema errors are not recoverable
+            # until rolling upgrade completes, so we use a longer max timeout to prevent
+            # spamming errors in logs.
+            # We set this ourselves so user settings do not impact frequency of retrying
+            pause_timeout 5_000
+            pause_max_timeout 60_000
+            pause_with_exponential_backoff true
            # We use the defaults + our config alterations that may not align with what
            # user wants for his topics.
            kafka kafka_config
@@ -124,6 +131,12 @@ module Karafka
 
          ::Karafka::Web.producer.monitor.subscribe(listener)
        end
+
+        # Installs all the UI related listeners for tracking errors from web processes
+        # These listen on Karafka monitor to catch instrumented UI errors
+        ::Karafka::Web.config.tracking.ui.listeners.each do |listener|
+          ::Karafka.monitor.subscribe(listener)
+        end
      end
 
      # In most cases we want to close the producer if possible.
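
With `pause_with_exponential_backoff`, karafka grows the pause after consecutive failures (roughly doubling it) until `pause_max_timeout` caps it, so the settings above yield a retry cadence along these lines. This is an approximation of the progression, not karafka's internal code:

```ruby
pause_timeout     = 5_000  # ms, first pause
pause_max_timeout = 60_000 # ms, cap

# Consecutive failures: 5s, 10s, 20s, 40s, then capped at 60s
(0..5).map { |attempt| [pause_timeout * (2**attempt), pause_max_timeout].min }
# => [5000, 10000, 20000, 40000, 60000, 60000]
```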
data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb ADDED
@@ -0,0 +1,38 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Web
+    module Management
+      module Migrations
+        # Consumer reports migrations
+        module ConsumersReports
+          # Migrates consumer reports from schema < 1.3.0 that used process[:name] to process[:id]
+          #
+          # In schema versions 1.2.x and earlier (karafka-web <= v0.8.2), the process identifier
+          # was stored in the :name field. Starting with schema 1.3.0 (karafka-web v0.9.0+),
+          # this was renamed to :id for consistency.
+          #
+          # This migration ensures old reports can be processed by current aggregators that
+          # expect the :id field.
+          class RenameProcessNameToId < Base
+            # Apply to all schema versions before 1.3.0
+            self.versions_until = '1.3.0'
+            self.type = :consumers_reports
+
+            # @param report [Hash] consumer report to migrate
+            def migrate(report)
+              # If :id already exists, nothing to do (already migrated or newer schema)
+              return if report[:process][:id]
+
+              # Rename :name to :id
+              # Both :name (in schema < 1.3.0) and :id (in schema >= 1.3.0) were always
+              # required fields, so we don't need nil checks for valid reports
+              report[:process][:id] = report[:process][:name]
+              report[:process].delete(:name)
+            end
+          end
+        end
+      end
+    end
+  end
+end
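
Applied to a pre-1.3.0 report, the migration mutates the hash in place. A worked example (the report below is a trimmed, hypothetical payload; in production the migration is invoked by the reports migrator rather than directly):

```ruby
# Trimmed consumer report as emitted by karafka-web <= 0.8.2 (schema < 1.3.0)
report = {
  schema_version: '1.2.2',
  process: { name: 'host:1234:abc', memory_usage: 123 }
}

Karafka::Web::Management::Migrations::ConsumersReports::RenameProcessNameToId
  .new
  .migrate(report)

report[:process] # => { memory_usage: 123, id: 'host:1234:abc' }
```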
data/lib/karafka/web/management/migrator.rb CHANGED
@@ -65,8 +65,6 @@ module Karafka
            'consumers metrics newer than supported'
          )
        end
-
-        true
      end
 
      # Applies migrations if needed and mutates the in-memory data
@@ -76,6 +74,9 @@ module Karafka
        any_migrations = false
 
        Migrations::Base.sorted_descendants.each do |migration_class|
+          # Skip report migrations - they are applied per-message by ReportsMigrator
+          next if migration_class.type == :consumers_reports
+
          data = send(migration_class.type)
 
          next unless migration_class.applicable?(data[:schema_version])
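
The skip above works because `:consumers_reports` migrations are now applied per message by the new `ReportsMigrator` (file 39 in this diff, not shown here). A plausible sketch of that per-message flow, assuming the same `sorted_descendants`/`applicable?` API visible in this hunk:

```ruby
# Hedged sketch: run all consumers_reports migrations against one report hash
def migrate_report(report)
  Migrations::Base.sorted_descendants.each do |migration_class|
    next unless migration_class.type == :consumers_reports
    next unless migration_class.applicable?(report[:schema_version])

    migration_class.new.migrate(report)
  end

  report
end
```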
data/lib/karafka/web/pro/commanding/commands/base.rb CHANGED
@@ -50,7 +50,7 @@ module Karafka
          # being executed in the embedded or swarm processes since there the signaling is
          # handled differently (either via the main process or supervisor).
          def standalone?
-            Karafka::Server.execution_mode == :standalone
+            Karafka::Server.execution_mode.standalone?
          end
 
          # @return [String] id of the current consumer process
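
This is the ExecutionMode migration called out in the changelog: as of Karafka 2.5.2, `Karafka::Server.execution_mode` returns an object with predicate methods instead of a value compared against symbols:

```ruby
# Before (symbol comparison, karafka-web 0.11.3)
Karafka::Server.execution_mode == :standalone

# After (ExecutionMode object API, Karafka >= 2.5.2)
Karafka::Server.execution_mode.standalone?
```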
data/lib/karafka/web/pro/commanding/contracts/config.rb CHANGED
@@ -12,10 +12,8 @@ module Karafka
        # Makes sure, all the expected commanding config is defined as it should be
        class Config < ::Karafka::Contracts::Base
          configure do |config|
-            config.error_messages = YAML.safe_load(
-              File.read(
-                File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
-              )
+            config.error_messages = YAML.safe_load_file(
+              File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
            ).fetch('en').fetch('validations').fetch('config')
          end
data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb CHANGED
@@ -41,17 +41,16 @@ module Karafka
            # @param subscription_group_id [String] id of the subscription group for which we
            #   want to get all the requests. Subscription groups ids (not names) are unique
            #   within the application, so it is unique "enough".
-            # @param block [Proc]
            #
            # @yieldparam [Request] given command request for the requested subscription group
-            def each_for(subscription_group_id, &block)
+            def each_for(subscription_group_id, &)
              requests = nil
 
              @mutex.synchronize do
                requests = @requests.delete(subscription_group_id)
              end
 
-              (requests || EMPTY_ARRAY).each(&block)
+              (requests || EMPTY_ARRAY).each(&)
            end
          end
        end
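
Here (and in `app.rb` above) a named `&block` parameter becomes Ruby 3.1+ anonymous block forwarding: the block is passed through without being bound to a local name, with identical behavior:

```ruby
# Named block parameter
def each_for(id, &block)
  items(id).each(&block) # `items` is a hypothetical helper for illustration
end

# Anonymous block forwarding (Ruby 3.1+), equivalent
def each_for(id, &)
  items(id).each(&)
end
```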
data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb CHANGED
@@ -21,8 +21,7 @@ module Karafka
            # cluster
            candidates = Karafka::App
                         .routes
-                         .map(&:topics)
-                         .map(&:to_a)
+                         .map { |route| route.topics.to_a }
                         .flatten
                         .select(&:scheduled_messages?)
                         .reject { |topic| topic.name.end_with?(states_postfix) }
data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb CHANGED
@@ -65,9 +65,7 @@ module Karafka
              topic_name,
              partition_count
            )
-          rescue Rdkafka::RdkafkaError => e
-            @form_error = e
-          rescue Rdkafka::Config::ConfigError => e
+          rescue Rdkafka::RdkafkaError, Rdkafka::Config::ConfigError => e
            @form_error = e
          end
 
data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb CHANGED
@@ -14,10 +14,8 @@ module Karafka
            # Makes sure, all the expected UI branding config is defined as it should be
            class Config < ::Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('config')
              end
data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb CHANGED
@@ -15,10 +15,8 @@ module Karafka
            # Makes sure, all the expected UI policies config is defined as it should be
            class Config < ::Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('config')
              end
data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb CHANGED
@@ -15,10 +15,8 @@ module Karafka
            # Makes sure, all the expected UI search config is defined as it should be
            class Config < ::Karafka::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('config')
              end
 
@@ -33,7 +31,7 @@ module Karafka
              required(:limits) do |val|
                next false unless val.is_a?(Array)
                next false if val.empty?
-                next false unless val.all? { |count| count.is_a?(Integer) }
+                next false unless val.all?(Integer)
                next false unless val.all?(&:positive?)
 
                true
data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb CHANGED
@@ -20,10 +20,8 @@ module Karafka
            # normalized to simplify the flow.
            class Form < Web::Contracts::Base
              configure do |config|
-                config.error_messages = YAML.safe_load(
-                  File.read(
-                    File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
-                  )
+                config.error_messages = YAML.safe_load_file(
+                  File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
                ).fetch('en').fetch('validations').fetch('search_form')
              end
 
@@ -74,7 +72,7 @@ module Karafka
                next false unless val.is_a?(Array)
                next false if val.empty?
 
-                val.all? { |ar_val| ar_val.is_a?(String) }
+                val.all?(String)
              end
 
              # Special validation for timestamp to make sure it is not older than 2010