karafka-web 0.11.2 → 0.11.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (92)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +24 -0
  3. data/Gemfile +0 -2
  4. data/Gemfile.lock +78 -39
  5. data/bin/integrations +44 -0
  6. data/bin/rspecs +6 -2
  7. data/bin/verify_kafka_warnings +1 -1
  8. data/config/locales/errors.yml +1 -0
  9. data/docker-compose.yml +1 -3
  10. data/karafka-web.gemspec +2 -2
  11. data/lib/karafka/web/app.rb +2 -3
  12. data/lib/karafka/web/cli/help.rb +1 -1
  13. data/lib/karafka/web/config.rb +8 -0
  14. data/lib/karafka/web/contracts/base.rb +2 -4
  15. data/lib/karafka/web/contracts/config.rb +5 -5
  16. data/lib/karafka/web/deserializer.rb +6 -1
  17. data/lib/karafka/web/errors.rb +8 -5
  18. data/lib/karafka/web/management/actions/enable.rb +14 -1
  19. data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb +38 -0
  20. data/lib/karafka/web/management/migrator.rb +3 -2
  21. data/lib/karafka/web/pro/commanding/commands/base.rb +1 -1
  22. data/lib/karafka/web/pro/commanding/contracts/config.rb +2 -4
  23. data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb +2 -3
  24. data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb +1 -2
  25. data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb +1 -3
  26. data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb +2 -4
  27. data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb +2 -4
  28. data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb +3 -5
  29. data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb +3 -5
  30. data/lib/karafka/web/pro/ui/lib/search/runner.rb +14 -1
  31. data/lib/karafka/web/pro/ui/routes/errors.rb +3 -3
  32. data/lib/karafka/web/pro/ui/routes/explorer.rb +3 -3
  33. data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb +9 -0
  34. data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb +41 -0
  35. data/lib/karafka/web/pro/ui/views/health/changes.erb +12 -13
  36. data/lib/karafka/web/pro/ui/views/health/lags.erb +12 -13
  37. data/lib/karafka/web/pro/ui/views/health/offsets.erb +12 -13
  38. data/lib/karafka/web/pro/ui/views/health/overview.erb +15 -16
  39. data/lib/karafka/web/processing/consumer.rb +8 -3
  40. data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +1 -1
  41. data/lib/karafka/web/processing/consumers/aggregators/state.rb +10 -6
  42. data/lib/karafka/web/processing/consumers/contracts/state.rb +6 -1
  43. data/lib/karafka/web/processing/consumers/reports_migrator.rb +49 -0
  44. data/lib/karafka/web/processing/time_series_tracker.rb +1 -1
  45. data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
  46. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +1 -0
  47. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +2 -1
  48. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +46 -0
  49. data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +1 -0
  50. data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb +20 -0
  51. data/lib/karafka/web/tracking/consumers/sampler/enrichers/consumer_groups.rb +116 -0
  52. data/lib/karafka/web/tracking/consumers/sampler/metrics/base.rb +20 -0
  53. data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb +113 -0
  54. data/lib/karafka/web/tracking/consumers/sampler/metrics/jobs.rb +60 -0
  55. data/lib/karafka/web/tracking/consumers/sampler/metrics/network.rb +48 -0
  56. data/lib/karafka/web/tracking/consumers/sampler/metrics/os.rb +206 -0
  57. data/lib/karafka/web/tracking/consumers/sampler/metrics/server.rb +33 -0
  58. data/lib/karafka/web/tracking/consumers/sampler.rb +34 -215
  59. data/lib/karafka/web/tracking/contracts/error.rb +1 -0
  60. data/lib/karafka/web/tracking/helpers/ttls/hash.rb +2 -3
  61. data/lib/karafka/web/tracking/helpers/ttls/stats.rb +1 -2
  62. data/lib/karafka/web/tracking/producers/listeners/base.rb +1 -1
  63. data/lib/karafka/web/tracking/producers/listeners/errors.rb +2 -1
  64. data/lib/karafka/web/tracking/ui/errors.rb +76 -0
  65. data/lib/karafka/web/ui/base.rb +19 -9
  66. data/lib/karafka/web/ui/controllers/requests/execution_wrapper.rb +2 -4
  67. data/lib/karafka/web/ui/controllers/requests/params.rb +1 -1
  68. data/lib/karafka/web/ui/helpers/application_helper.rb +1 -1
  69. data/lib/karafka/web/ui/helpers/paths_helper.rb +6 -9
  70. data/lib/karafka/web/ui/lib/sorter.rb +1 -1
  71. data/lib/karafka/web/ui/models/health.rb +14 -9
  72. data/lib/karafka/web/ui/models/jobs.rb +4 -6
  73. data/lib/karafka/web/ui/models/message.rb +7 -8
  74. data/lib/karafka/web/ui/models/metrics/aggregated.rb +4 -4
  75. data/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +1 -2
  76. data/lib/karafka/web/ui/models/metrics/charts/topics.rb +2 -2
  77. data/lib/karafka/web/ui/models/metrics/topics.rb +3 -4
  78. data/lib/karafka/web/ui/models/recurring_tasks/schedule.rb +1 -1
  79. data/lib/karafka/web/ui/public/javascripts/application.min.js.gz +0 -0
  80. data/lib/karafka/web/ui/public/stylesheets/application.min.css +199 -105
  81. data/lib/karafka/web/ui/public/stylesheets/application.min.css.br +0 -0
  82. data/lib/karafka/web/ui/public/stylesheets/application.min.css.gz +0 -0
  83. data/lib/karafka/web/ui/public/stylesheets/libs/highlight_dark.min.css.gz +0 -0
  84. data/lib/karafka/web/ui/public/stylesheets/libs/highlight_light.min.css.gz +0 -0
  85. data/lib/karafka/web/ui/routes/errors.rb +3 -3
  86. data/lib/karafka/web/ui/views/shared/exceptions/unhandled_error.erb +42 -0
  87. data/lib/karafka/web/version.rb +1 -1
  88. data/lib/karafka/web.rb +10 -13
  89. data/package-lock.json +184 -240
  90. data/package.json +3 -3
  91. data/renovate.json +13 -0
  92. metadata +19 -4
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 4320bd74426900e3a77f1748242c757cb44a1b0b578300a413d5a1cb706ec3de
- data.tar.gz: 75be8a6b974829adde0b266cb2df6e1f62752a35dc271110a04a00eef93865df
+ metadata.gz: c30d2f5113f89bf796fbfa024336ebcad6796826bcbd691fc73fab1fb3fd05dd
+ data.tar.gz: 3c4fe2b494603f7c7f73c73fc1b0f8a82a4c09dc06ab885dd334f7d96632c8da
  SHA512:
- metadata.gz: 58a4c8ed2d9d400070aa4d43bb29a73006c435c00fd4d9143dd520db01368b5984729bd947bd88a8f2c1865fff25935449150b329f9396405ec53ce82c850299
- data.tar.gz: d1c96b677662d862d9d05a70945f3ca7fbc55359033fb7f3350600eb4e0388673bc8482f1f165f0775b5d02bf8075801e1450286b59a0fdc4bd2b2d9e5405ef7
+ metadata.gz: d4327b5af8e3a41e379e5db63b08269ec9a22d5912a4839e86d113a377f8003b0d597af7613b2d6243cbaa6520c649b1f1ec66d548bf780afda069f121fd4921
+ data.tar.gz: 14c3162caadaf02381148953758e5162278bc5f8415d28dd0476453942059aa0cd582957b9cd7f8e84faf8d9213302510594fc548ec3602c9171785eb25ba813
data/CHANGELOG.md CHANGED
@@ -1,5 +1,26 @@
  # Karafka Web Changelog

+ ## 0.11.4 (2025-11-01)
+ - [Enhancement] Show placeholder rows for partitions with no data during rebalances in the health view. The UI now displays all topic partitions (0 to N-1) with "No data available" indicators for partitions currently being rebalanced, preventing confusion from disappearing partitions. Consumer reports now include a `partitions_cnt` field extracted from librdkafka statistics. Consumer schema version bumped to 1.5.0 (breaking change).
+ - [Enhancement] Track and report UI errors originating from Roda/Puma web processes directly to the Kafka errors topic for visibility and debugging. UI errors are dispatched asynchronously from web processes using a dedicated listener.
+ - [Enhancement] Require Karafka 2.5.2 at minimum and migrate from string-based execution mode comparisons to the new ExecutionMode object API.
+ - [Enhancement] Increase the Web UI processing consumer backoff time to 30 seconds when encountering incompatible schema errors to prevent error spam during rolling upgrades.
+ - [Enhancement] Add a unique `id` field to error reports to track duplicate error occurrences. Error schema version bumped to 1.2.0 while maintaining backward compatibility with older error formats (1.0.0, 1.1.0) in the Web UI.
+ - [Enhancement] Add container-aware metrics collection for Docker/Kubernetes environments. The Web UI now reports accurate container memory limits from cgroups (v1 and v2) instead of misleading host metrics, while maintaining full backward compatibility with non-containerized deployments.
+ - [Enhancement] Add a per-message report migration system to handle schema evolution for consumer reports. This allows transparent migration of old report formats (e.g., schema 1.2.x using `process[:name]`) to current expectations (schema 1.3.0+ using `process[:id]`), ensuring backward compatibility with reports from older karafka-web versions (≤ v0.8.2) that may still exist in Kafka topics.
+ - [Change] Reduce `max_messages` for consumer reports processing from 1000 to 200 to prevent excessive memory usage in large-scale deployments. Processing 1000 messages at once can significantly impact memory consumption in big systems, while 200 messages provides better memory efficiency with negligible impact on throughput.
+ - [Refactor] Extract metrics collection logic from the monolithic Sampler into focused, single-responsibility classes (Metrics::Base, Metrics::Os, Metrics::Container, Metrics::Network, Metrics::Server, Metrics::Jobs) and consumer groups enrichment into a dedicated enricher (Enrichers::Base, Enrichers::ConsumerGroups) for improved maintainability and testability.
+ - [Testing] Add Docker-based integration tests for container metrics collection. Tests verify cgroup v1/v2 detection, memory limit reading, and fallback behavior across multiple containerized scenarios with different resource constraints.
+ - [Fix] Fix the "OS memory used" metric on Linux reporting the same value as RSS instead of system-wide memory usage. The metric now correctly sums memory usage across all processes (or all container processes when running in Docker/Kubernetes) to match macOS behavior and the original design intent.
+ - [Fix] Fix a crash when processing old consumer reports from schema versions < 1.3.0 (karafka-web ≤ v0.8.2) that used the `process[:name]` field instead of `process[:id]`. The error `undefined method 'to_sym' for nil` would occur when these old reports were encountered during upgrades. Reports are now automatically migrated in-place to the current schema format.
+
+ ## 0.11.3 (2025-09-29)
+ - [Enhancement] Upgrade DaisyUI to 5.1.
+ - [Change] Remove Ruby `3.1` support according to the EOL schedule.
+ - [Change] Normalize how libs and dependencies are required (no functional change for the end user).
+ - [Fix] Fix a case where the states JSON would contain multiple entries for the same processes, causing `JSON.parse` with `allow_duplicate_key: false` to fail.
+ - [Fix] Fix an incorrect reference to `IncompatibleSchemaError`.
+
  ## 0.11.2 (2025-08-18)
  - [Enhancement] Make sure that TTL counters related `#inspect` are thread-safe.
  - [Change] Add new CI action to trigger auto-doc refresh.
@@ -158,6 +179,9 @@
  - [Fix] License identifier `LGPL-3.0` is deprecated for SPDX (#2177).
  - [Fix] Do not include prettifying the payload for visibility in the resource computation cost.

+ ## 0.9.2 (2025-09-19)
+ - [Fix] Fix BaseController caller action name extraction.
+
  ## 0.9.1 (2024-05-03)
  - [Fix] OSS `lag_stored` for not-subscribed consumers causes Web UI to crash.

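The container-aware metrics entry above is the headline change of 0.11.4. As an illustration of the kind of lookup involved, here is a minimal sketch of cgroup-based memory-limit detection (cgroups v2 with a v1 fallback). The release's actual logic lives in data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb, which this diff lists but does not display, so the method name and exact handling below are assumptions:

    # Sketch only: cgroup v2 exposes the limit in memory.max ("max" = unlimited),
    # cgroup v1 in memory/memory.limit_in_bytes. Returns nil when no limit applies.
    def container_memory_limit_bytes
      v2 = '/sys/fs/cgroup/memory.max'
      v1 = '/sys/fs/cgroup/memory/memory.limit_in_bytes'

      raw = File.read(v2).strip if File.exist?(v2)
      raw ||= File.read(v1).strip if File.exist?(v1)

      # "max" (v2) means no limit; v1 reports a huge sentinel value instead
      return nil if raw.nil? || raw == 'max'

      raw.to_i
    end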
data/Gemfile CHANGED
@@ -2,8 +2,6 @@

  source 'https://rubygems.org'

- plugin 'diffend'
-
  gemspec

  group :test do
data/Gemfile.lock CHANGED
@@ -1,9 +1,9 @@
  PATH
  remote: .
  specs:
- karafka-web (0.11.2)
+ karafka-web (0.11.4)
  erubi (~> 1.4)
- karafka (>= 2.5.0.rc2, < 2.6.0)
+ karafka (>= 2.5.2, < 2.6.0)
  karafka-core (>= 2.5.0, < 2.6.0)
  roda (~> 3.68, >= 3.69)
  tilt (~> 2.0)
@@ -11,7 +11,7 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- activesupport (7.2.2.2)
+ activesupport (8.0.3)
  base64
  benchmark (>= 0.3)
  bigdecimal
@@ -23,102 +23,141 @@ GEM
  minitest (>= 5.1)
  securerandom (>= 0.3)
  tzinfo (~> 2.0, >= 2.0.5)
- base64 (0.2.0)
+ uri (>= 0.13.1)
+ base64 (0.3.0)
  benchmark (0.4.1)
- bigdecimal (3.1.9)
+ bigdecimal (3.3.1)
  byebug (12.0.0)
  concurrent-ruby (1.3.5)
- connection_pool (2.5.3)
+ connection_pool (2.5.4)
  diff-lcs (1.6.2)
  docile (1.4.1)
  drb (2.2.3)
  erubi (1.13.1)
- et-orbi (1.2.11)
+ et-orbi (1.4.0)
  tzinfo
- factory_bot (6.5.5)
+ factory_bot (6.5.6)
  activesupport (>= 6.1.0)
  ffi (1.17.2)
- fugit (1.11.1)
- et-orbi (~> 1, >= 1.2.11)
+ ffi (1.17.2-aarch64-linux-gnu)
+ ffi (1.17.2-aarch64-linux-musl)
+ ffi (1.17.2-arm-linux-gnu)
+ ffi (1.17.2-arm-linux-musl)
+ ffi (1.17.2-arm64-darwin)
+ ffi (1.17.2-x86-linux-gnu)
+ ffi (1.17.2-x86-linux-musl)
+ ffi (1.17.2-x86_64-darwin)
+ ffi (1.17.2-x86_64-linux-gnu)
+ ffi (1.17.2-x86_64-linux-musl)
+ fugit (1.12.1)
+ et-orbi (~> 1.4)
  raabro (~> 1.4)
  i18n (1.14.7)
  concurrent-ruby (~> 1.0)
- karafka (2.5.0)
+ json (2.15.1)
+ karafka (2.5.2)
  base64 (~> 0.2)
- karafka-core (>= 2.5.2, < 2.6.0)
- karafka-rdkafka (>= 0.19.5)
- waterdrop (>= 2.8.3, < 3.0.0)
+ karafka-core (>= 2.5.6, < 2.6.0)
+ karafka-rdkafka (>= 0.22.0)
+ waterdrop (>= 2.8.9, < 3.0.0)
  zeitwerk (~> 2.3)
- karafka-core (2.5.2)
- karafka-rdkafka (>= 0.19.2, < 0.21.0)
+ karafka-core (2.5.7)
+ karafka-rdkafka (>= 0.20.0)
  logger (>= 1.6.0)
- karafka-rdkafka (0.19.5)
+ karafka-rdkafka (0.22.2)
  ffi (~> 1.15)
+ json (> 2.0)
+ logger
+ mini_portile2 (~> 2.6)
+ rake (> 12)
+ karafka-rdkafka (0.22.2-aarch64-linux-gnu)
+ ffi (~> 1.15)
+ json (> 2.0)
+ logger
+ mini_portile2 (~> 2.6)
+ rake (> 12)
+ karafka-rdkafka (0.22.2-arm64-darwin)
+ ffi (~> 1.15)
+ json (> 2.0)
+ logger
+ mini_portile2 (~> 2.6)
+ rake (> 12)
+ karafka-rdkafka (0.22.2-x86_64-linux-gnu)
+ ffi (~> 1.15)
+ json (> 2.0)
+ logger
+ mini_portile2 (~> 2.6)
+ rake (> 12)
+ karafka-rdkafka (0.22.2-x86_64-linux-musl)
+ ffi (~> 1.15)
+ json (> 2.0)
+ logger
  mini_portile2 (~> 2.6)
  rake (> 12)
  logger (1.7.0)
  mini_portile2 (2.8.9)
- minitest (5.25.5)
- nokogiri (1.18.9)
+ minitest (5.26.0)
+ nokogiri (1.18.10)
  mini_portile2 (~> 2.8.2)
  racc (~> 1.4)
- nokogiri (1.18.9-aarch64-linux-gnu)
+ nokogiri (1.18.10-aarch64-linux-gnu)
  racc (~> 1.4)
- nokogiri (1.18.9-aarch64-linux-musl)
+ nokogiri (1.18.10-aarch64-linux-musl)
  racc (~> 1.4)
- nokogiri (1.18.9-arm-linux-gnu)
+ nokogiri (1.18.10-arm-linux-gnu)
  racc (~> 1.4)
- nokogiri (1.18.9-arm-linux-musl)
+ nokogiri (1.18.10-arm-linux-musl)
  racc (~> 1.4)
- nokogiri (1.18.9-arm64-darwin)
+ nokogiri (1.18.10-arm64-darwin)
  racc (~> 1.4)
- nokogiri (1.18.9-x86_64-darwin)
+ nokogiri (1.18.10-x86_64-darwin)
  racc (~> 1.4)
- nokogiri (1.18.9-x86_64-linux-gnu)
+ nokogiri (1.18.10-x86_64-linux-gnu)
  racc (~> 1.4)
- nokogiri (1.18.9-x86_64-linux-musl)
+ nokogiri (1.18.10-x86_64-linux-musl)
  racc (~> 1.4)
  ostruct (0.6.3)
  raabro (1.4.0)
  racc (1.8.1)
- rack (3.1.16)
+ rack (3.2.3)
  rack-test (2.2.0)
  rack (>= 1.3)
  rackup (0.2.3)
  rack (>= 3.0.0.beta1)
  webrick
- rake (13.2.1)
- roda (3.92.0)
+ rake (13.3.0)
+ roda (3.97.0)
  rack
- rspec (3.13.1)
+ rspec (3.13.2)
  rspec-core (~> 3.13.0)
  rspec-expectations (~> 3.13.0)
  rspec-mocks (~> 3.13.0)
- rspec-core (3.13.4)
+ rspec-core (3.13.6)
  rspec-support (~> 3.13.0)
  rspec-expectations (3.13.5)
  diff-lcs (>= 1.2.0, < 2.0)
  rspec-support (~> 3.13.0)
- rspec-mocks (3.13.5)
+ rspec-mocks (3.13.6)
  diff-lcs (>= 1.2.0, < 2.0)
  rspec-support (~> 3.13.0)
- rspec-support (3.13.4)
+ rspec-support (3.13.6)
  securerandom (0.4.1)
  simplecov (0.22.0)
  docile (~> 1.1)
  simplecov-html (~> 0.11)
  simplecov_json_formatter (~> 0.1)
- simplecov-html (0.13.1)
+ simplecov-html (0.13.2)
  simplecov_json_formatter (0.1.4)
- tilt (2.6.0)
+ tilt (2.6.1)
  tzinfo (2.0.6)
  concurrent-ruby (~> 1.0)
- waterdrop (2.8.4)
+ uri (1.0.4)
+ waterdrop (2.8.12)
  karafka-core (>= 2.4.9, < 3.0.0)
- karafka-rdkafka (>= 0.19.2)
+ karafka-rdkafka (>= 0.20.0)
  zeitwerk (~> 2.3)
  webrick (1.9.1)
- zeitwerk (2.6.18)
+ zeitwerk (2.7.3)

  PLATFORMS
  aarch64-linux-gnu
data/bin/integrations ADDED
@@ -0,0 +1,44 @@
+ #!/usr/bin/env bash
+
+ # Integration test runner for container metrics
+ # Runs tests inside Docker containers with various memory/CPU configurations
+
+ set -e
+
+ echo "================================================"
+ echo "Container Metrics Integration Tests"
+ echo "================================================"
+ echo ""
+
+ # Build the test image first
+ echo "Building integration test Docker image..."
+ docker compose -f spec/integrations/docker-compose.yml build
+
+ echo ""
+ echo "Running integration tests in containers..."
+ echo ""
+
+ # Run all test scenarios
+ # Note: Each container runs independently and exits after tests complete
+ docker compose -f spec/integrations/docker-compose.yml up \
+ --abort-on-container-exit \
+ --exit-code-from test-container-limited \
+ test-container-limited
+
+ docker compose -f spec/integrations/docker-compose.yml up \
+ --abort-on-container-exit \
+ --exit-code-from test-container-unlimited \
+ test-container-unlimited
+
+ docker compose -f spec/integrations/docker-compose.yml up \
+ --abort-on-container-exit \
+ --exit-code-from test-container-strict \
+ test-container-strict
+
+ echo ""
+ echo "================================================"
+ echo "All integration tests completed successfully!"
+ echo "================================================"
+
+ # Clean up
+ docker compose -f spec/integrations/docker-compose.yml down
data/bin/rspecs CHANGED
@@ -2,5 +2,9 @@

  set -e

- SPECS_TYPE=regular bundle exec rspec --exclude-pattern "**/pro/**/*_spec.rb"
- SPECS_TYPE=pro bundle exec rspec spec/lib/karafka/web/pro
+ SPECS_TYPE=regular bundle exec rspec \
+ spec/lib \
+ --exclude-pattern "**/pro/**/*_spec.rb"
+
+ SPECS_TYPE=pro bundle exec rspec \
+ spec/lib/karafka/web/pro
data/bin/verify_kafka_warnings CHANGED
@@ -13,7 +13,7 @@ allowed_patterns=(
  )

  # Get all warnings
- warnings=$(docker logs --since=0 kafka | grep WARN)
+ warnings=$(docker logs --since=0 kafka | grep "] WARN ")
  exit_code=0

  while IFS= read -r line; do
data/config/locales/errors.yml CHANGED
@@ -7,3 +7,4 @@ en:
  missing: needs to be present
  id_format: needs to be a String
  format: is invalid
+ processes_format: must be a hash with symbol keys
data/docker-compose.yml CHANGED
@@ -1,9 +1,7 @@
- version: '2'
-
  services:
  kafka:
  container_name: kafka
- image: confluentinc/cp-kafka:8.0.0
+ image: confluentinc/cp-kafka:8.1.0

  ports:
  - 9092:9092
data/karafka-web.gemspec CHANGED
@@ -17,14 +17,14 @@ Gem::Specification.new do |spec|
  spec.licenses = %w[LGPL-3.0-only Commercial]

  spec.add_dependency 'erubi', '~> 1.4'
- spec.add_dependency 'karafka', '>= 2.5.0.rc2', '< 2.6.0'
+ spec.add_dependency 'karafka', '>= 2.5.2', '< 2.6.0'
  spec.add_dependency 'karafka-core', '>= 2.5.0', '< 2.6.0'
  spec.add_dependency 'roda', '~> 3.68', '>= 3.69'
  spec.add_dependency 'tilt', '~> 2.0'

  spec.add_development_dependency 'rackup', '~> 0.2'

- spec.required_ruby_version = '>= 3.1.0'
+ spec.required_ruby_version = '>= 3.2.0'

  spec.executables = %w[karafka-web]
  spec.require_paths = %w[lib]
data/lib/karafka/web/app.rb CHANGED
@@ -6,9 +6,8 @@ module Karafka
  class App
  class << self
  # @param env [Hash] Rack env
- # @param block [Proc] Rack block
- def call(env, &block)
- engine.call(env, &block)
+ def call(env, &)
+ engine.call(env, &)
  end

  # @return [Class] regular or pro Web engine
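The `&block` → `&` changes in this and later hunks (see also the partitions tracker below) use Ruby's anonymous block forwarding, available since Ruby 3.1 and safe here given the gemspec's new `required_ruby_version >= 3.2.0`. The bare `&` forwards the caller's block without binding it to a name:

    # Before and after this change - both forward the caller's block:
    def call_old(env, &block)   # named block parameter
      engine.call(env, &block)
    end

    def call_new(env, &)        # anonymous block forwarding (Ruby 3.1+)
      engine.call(env, &)
    end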
data/lib/karafka/web/cli/help.rb CHANGED
@@ -10,7 +10,7 @@
  # Print available commands
  def call
  # Find the longest command for alignment purposes
- max_command_length = self.class.commands.map(&:name).map(&:size).max
+ max_command_length = self.class.commands.map { |command| command.name.size }.max

  puts 'Karafka Web UI commands:'

data/lib/karafka/web/config.rb CHANGED
@@ -171,6 +171,14 @@ module Karafka
  Tracking::Producers::Listeners::Errors.new
  ]
  end
+
+ setting :ui do
+ # Listeners needed for the Web UI to track errors from Roda/Puma
+ # These dispatch directly to Kafka since there's no background reporter in web processes
+ setting :listeners, default: [
+ Tracking::Ui::Errors.new
+ ]
+ end
  end

  # States processing related settings
data/lib/karafka/web/contracts/base.rb CHANGED
@@ -13,10 +13,8 @@ module Karafka
  return super if block_given?

  super do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
  ).fetch('en').fetch('validations').fetch('web')
  end
  end
data/lib/karafka/web/contracts/config.rb CHANGED
@@ -19,7 +19,7 @@ module Karafka
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }

  required(:config) do |val|
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
  end
  end

@@ -28,7 +28,7 @@
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }

  required(:config) do |val|
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
  end
  end

@@ -36,7 +36,7 @@
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }

  required(:config) do |val|
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
  end
  end

@@ -44,7 +44,7 @@
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }

  required(:config) do |val|
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
  end
  end

@@ -52,7 +52,7 @@
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }

  required(:config) do |val|
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
  end
  end
  end
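The repeated block-to-pattern change above works because `Enumerable#all?` with an argument tests each element via case equality (`===`), so `all?(Symbol)` checks `Symbol === key` exactly as the removed block did:

    # Both forms return true only when every key is a Symbol:
    { a: 1, b: 2 }.keys.all? { |key| key.is_a?(Symbol) } #=> true
    { a: 1, b: 2 }.keys.all?(Symbol)                     #=> true
    { 'a' => 1 }.keys.all?(Symbol)                       #=> false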
data/lib/karafka/web/deserializer.rb CHANGED
@@ -21,7 +21,12 @@ module Karafka

  ::JSON.parse(
  raw_payload,
- symbolize_names: true
+ symbolize_names: true,
+ # We allow duplicate keys because of a fixed bug that was causing duplicated process
+ # ids to leak into the consumers states data. Once a proper migration is written, this
+ # can be retired
+ # @see https://github.com/karafka/karafka-web/issues/741
+ allow_duplicate_key: true
  )
  end
  end
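For context on `allow_duplicate_key`: recent versions of the json gem treat duplicate object keys as an error condition, which is what broke parsing of the corrupted states documents referenced in issue #741. With the flag enabled, the parser keeps the last occurrence. A minimal demonstration (exact warn-vs-raise behavior depends on the installed json gem version):

    require 'json'

    raw = '{"process-a": {"lag": 5}, "process-a": {"lag": 7}}'

    # The last duplicate wins when duplicates are allowed
    JSON.parse(raw, symbolize_names: true, allow_duplicate_key: true)
    #=> { :"process-a" => { lag: 7 } }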
data/lib/karafka/web/errors.rb CHANGED
@@ -54,6 +54,14 @@ module Karafka

  # Similar to the one related to consumers states
  MissingConsumersMetricsTopicError = Class.new(BaseError)
+
+ # Similar to management and ui errors with the same name, it is raised when a critical
+ # incompatibility is detected during processing.
+ #
+ # This error is raised when there was an attempt to process reports that are in a newer
+ # version than the one in the current process. We prevent this from happening so as not
+ # to corrupt the data. Please upgrade all the Web UI consumers to the same version
+ IncompatibleSchemaError = Class.new(BaseError)
  end

  # Ui related errors
@@ -67,11 +75,6 @@

  # Raised when we want to stop the flow and render 403
  ForbiddenError = Class.new(BaseError)
-
- # Raised when trying to get info about a consumer that has incompatible schema in its
- # report. It usually means you are running different version of the Web UI in the consumer
- # and in the Web server
- IncompatibleSchemaError = Class.new(BaseError)
  end
  end
  end
data/lib/karafka/web/management/actions/enable.rb CHANGED
@@ -61,7 +61,7 @@ module Karafka
  # Since we materialize state in intervals, we can poll for half of this time
  # without impacting the reporting responsiveness
  max_wait_time ::Karafka::Web.config.processing.interval / 2
- max_messages 1_000
+ max_messages 200
  consumer ::Karafka::Web::Processing::Consumer
  # This needs to be true in order not to reload the consumer in dev. This consumer
  # should not be affected by the end user development process
@@ -73,6 +73,13 @@ module Karafka
  # consumer group name would be renamed and we would start consuming all
  # historical
  initial_offset 'latest'
+ # Increase backoff time on errors. Incompatible schema errors are not recoverable
+ # until the rolling upgrade completes, so we use a longer max timeout to prevent
+ # spamming errors in logs.
+ # We set this ourselves so that user settings do not impact the retry frequency
+ pause_timeout 5_000
+ pause_max_timeout 60_000
+ pause_with_exponential_backoff true
  # We use the defaults + our config alterations that may not align with what
  # user wants for his topics.
  kafka kafka_config
@@ -124,6 +131,12 @@ module Karafka

  ::Karafka::Web.producer.monitor.subscribe(listener)
  end
+
+ # Installs all the UI related listeners for tracking errors from web processes
+ # These listen on the Karafka monitor to catch instrumented UI errors
+ ::Karafka::Web.config.tracking.ui.listeners.each do |listener|
+ ::Karafka.monitor.subscribe(listener)
+ end
  end

  # In most cases we want to close the producer if possible.
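Given these settings, and assuming Karafka's exponential backoff doubles the pause after each consecutive error (capped at `pause_max_timeout`), the retry pauses would progress roughly as follows:

    # Sketch of the pause progression in milliseconds
    pause = 5_000
    5.times.map { pause.tap { pause = [pause * 2, 60_000].min } }
    #=> [5000, 10000, 20000, 40000, 60000]  # then it stays at 60s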
data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb ADDED
@@ -0,0 +1,38 @@
+ # frozen_string_literal: true
+
+ module Karafka
+ module Web
+ module Management
+ module Migrations
+ # Consumer reports migrations
+ module ConsumersReports
+ # Migrates consumer reports from schema < 1.3.0 that used process[:name] to process[:id]
+ #
+ # In schema versions 1.2.x and earlier (karafka-web <= v0.8.2), the process identifier
+ # was stored in the :name field. Starting with schema 1.3.0 (karafka-web v0.9.0+),
+ # this was renamed to :id for consistency.
+ #
+ # This migration ensures old reports can be processed by current aggregators that
+ # expect the :id field.
+ class RenameProcessNameToId < Base
+ # Apply to all schema versions before 1.3.0
+ self.versions_until = '1.3.0'
+ self.type = :consumers_reports
+
+ # @param report [Hash] consumer report to migrate
+ def migrate(report)
+ # If :id already exists, nothing to do (already migrated or newer schema)
+ return if report[:process][:id]
+
+ # Rename :name to :id
+ # Both :name (in schema < 1.3.0) and :id (in schema >= 1.3.0) were always
+ # required fields, so we don't need nil checks for valid reports
+ report[:process][:id] = report[:process][:name]
+ report[:process].delete(:name)
+ end
+ end
+ end
+ end
+ end
+ end
+ end
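The new ReportsMigrator (data/lib/karafka/web/processing/consumers/reports_migrator.rb, listed above but not shown in this diff) applies such migrations per message rather than per materialized state. A hypothetical sketch of that flow, reusing the `sorted_descendants` / `applicable?` API visible in the migrator hunk below:

    # Illustrative only - the real ReportsMigrator implementation is not part
    # of this diff excerpt
    def call(report)
      Migrations::Base.sorted_descendants.each do |migration_class|
        next unless migration_class.type == :consumers_reports
        next unless migration_class.applicable?(report[:schema_version])

        migration_class.new.migrate(report)
      end

      report
    end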
data/lib/karafka/web/management/migrator.rb CHANGED
@@ -65,8 +65,6 @@ module Karafka
  'consumers metrics newer than supported'
  )
  end
-
- true
  end

  # Applies migrations if needed and mutates the in-memory data
@@ -76,6 +74,9 @@ module Karafka
  any_migrations = false

  Migrations::Base.sorted_descendants.each do |migration_class|
+ # Skip report migrations - they are applied per-message by ReportsMigrator
+ next if migration_class.type == :consumers_reports
+
  data = send(migration_class.type)

  next unless migration_class.applicable?(data[:schema_version])
data/lib/karafka/web/pro/commanding/commands/base.rb CHANGED
@@ -50,7 +50,7 @@ module Karafka
  # being executed in the embedded or swarm processes since there the signaling is
  # handled differently (either via the main process or supervisor).
  def standalone?
- Karafka::Server.execution_mode == :standalone
+ Karafka::Server.execution_mode.standalone?
  end

  # @return [String] id of the current consumer process
data/lib/karafka/web/pro/commanding/contracts/config.rb CHANGED
@@ -12,10 +12,8 @@ module Karafka
  # Makes sure, all the expected commanding config is defined as it should be
  class Config < ::Karafka::Contracts::Base
  configure do |config|
- config.error_messages = YAML.safe_load(
- File.read(
- File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
- )
+ config.error_messages = YAML.safe_load_file(
+ File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
  ).fetch('en').fetch('validations').fetch('config')
  end
data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb CHANGED
@@ -41,17 +41,16 @@ module Karafka
  # @param subscription_group_id [String] id of the subscription group for which we
  # want to get all the requests. Subscription groups ids (not names) are unique
  # within the application, so it is unique "enough".
- # @param block [Proc]
  #
  # @yieldparam [Request] given command request for the requested subscription group
- def each_for(subscription_group_id, &block)
+ def each_for(subscription_group_id, &)
  requests = nil

  @mutex.synchronize do
  requests = @requests.delete(subscription_group_id)
  end

- (requests || EMPTY_ARRAY).each(&block)
+ (requests || EMPTY_ARRAY).each(&)
  end
  end
  end
data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb CHANGED
@@ -21,8 +21,7 @@ module Karafka
  # cluster
  candidates = Karafka::App
  .routes
- .map(&:topics)
- .map(&:to_a)
+ .map { |route| route.topics.to_a }
  .flatten
  .select(&:scheduled_messages?)
  .reject { |topic| topic.name.end_with?(states_postfix) }