karafka-web 0.11.3 → 0.11.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (91)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +17 -0
  3. data/Gemfile +1 -2
  4. data/Gemfile.lock +58 -41
  5. data/bin/integrations +44 -0
  6. data/bin/rspecs +6 -2
  7. data/docker-compose.yml +1 -1
  8. data/karafka-web.gemspec +2 -2
  9. data/lib/karafka/web/app.rb +2 -3
  10. data/lib/karafka/web/cli/help.rb +1 -1
  11. data/lib/karafka/web/config.rb +14 -0
  12. data/lib/karafka/web/contracts/base.rb +2 -4
  13. data/lib/karafka/web/contracts/config.rb +6 -5
  14. data/lib/karafka/web/inflector.rb +1 -1
  15. data/lib/karafka/web/management/actions/enable.rb +14 -1
  16. data/lib/karafka/web/management/migrations/consumers_reports/1761645571_rename_process_name_to_id.rb +38 -0
  17. data/lib/karafka/web/management/migrator.rb +3 -2
  18. data/lib/karafka/web/pro/commanding/commands/base.rb +1 -1
  19. data/lib/karafka/web/pro/commanding/contracts/config.rb +2 -4
  20. data/lib/karafka/web/pro/commanding/handlers/partitions/tracker.rb +2 -3
  21. data/lib/karafka/web/pro/ui/controllers/cluster_controller.rb +1 -1
  22. data/lib/karafka/web/pro/ui/controllers/scheduled_messages/schedules_controller.rb +1 -2
  23. data/lib/karafka/web/pro/ui/controllers/topics/distributions_controller.rb +1 -3
  24. data/lib/karafka/web/pro/ui/lib/branding/contracts/config.rb +2 -4
  25. data/lib/karafka/web/pro/ui/lib/policies/contracts/config.rb +2 -4
  26. data/lib/karafka/web/pro/ui/lib/search/contracts/config.rb +3 -5
  27. data/lib/karafka/web/pro/ui/lib/search/contracts/form.rb +3 -5
  28. data/lib/karafka/web/pro/ui/lib/search/runner.rb +14 -1
  29. data/lib/karafka/web/pro/ui/routes/errors.rb +3 -3
  30. data/lib/karafka/web/pro/ui/routes/explorer.rb +3 -3
  31. data/lib/karafka/web/pro/ui/views/health/_no_partition_data.erb +9 -0
  32. data/lib/karafka/web/pro/ui/views/health/_partitions_with_fallback.erb +41 -0
  33. data/lib/karafka/web/pro/ui/views/health/changes.erb +12 -13
  34. data/lib/karafka/web/pro/ui/views/health/lags.erb +12 -13
  35. data/lib/karafka/web/pro/ui/views/health/offsets.erb +12 -13
  36. data/lib/karafka/web/pro/ui/views/health/overview.erb +15 -16
  37. data/lib/karafka/web/processing/consumer.rb +8 -3
  38. data/lib/karafka/web/processing/consumers/aggregators/metrics.rb +1 -1
  39. data/lib/karafka/web/processing/consumers/aggregators/state.rb +5 -5
  40. data/lib/karafka/web/processing/consumers/contracts/state.rb +1 -1
  41. data/lib/karafka/web/processing/consumers/reports_migrator.rb +49 -0
  42. data/lib/karafka/web/processing/time_series_tracker.rb +1 -1
  43. data/lib/karafka/web/tracking/consumers/contracts/report.rb +1 -1
  44. data/lib/karafka/web/tracking/consumers/contracts/topic.rb +1 -0
  45. data/lib/karafka/web/tracking/consumers/listeners/errors.rb +2 -1
  46. data/lib/karafka/web/tracking/consumers/listeners/processing.rb +46 -0
  47. data/lib/karafka/web/tracking/consumers/listeners/statistics.rb +1 -0
  48. data/lib/karafka/web/tracking/consumers/sampler/enrichers/base.rb +20 -0
  49. data/lib/karafka/web/tracking/consumers/sampler/enrichers/consumer_groups.rb +116 -0
  50. data/lib/karafka/web/tracking/consumers/sampler/metrics/base.rb +20 -0
  51. data/lib/karafka/web/tracking/consumers/sampler/metrics/container.rb +113 -0
  52. data/lib/karafka/web/tracking/consumers/sampler/metrics/jobs.rb +60 -0
  53. data/lib/karafka/web/tracking/consumers/sampler/metrics/network.rb +48 -0
  54. data/lib/karafka/web/tracking/consumers/sampler/metrics/os.rb +206 -0
  55. data/lib/karafka/web/tracking/consumers/sampler/metrics/server.rb +33 -0
  56. data/lib/karafka/web/tracking/consumers/sampler.rb +34 -215
  57. data/lib/karafka/web/tracking/contracts/error.rb +1 -0
  58. data/lib/karafka/web/tracking/helpers/ttls/hash.rb +2 -3
  59. data/lib/karafka/web/tracking/helpers/ttls/stats.rb +1 -2
  60. data/lib/karafka/web/tracking/producers/listeners/errors.rb +2 -1
  61. data/lib/karafka/web/tracking/ui/errors.rb +76 -0
  62. data/lib/karafka/web/ui/base.rb +26 -11
  63. data/lib/karafka/web/ui/controllers/requests/execution_wrapper.rb +2 -4
  64. data/lib/karafka/web/ui/controllers/requests/params.rb +1 -1
  65. data/lib/karafka/web/ui/helpers/application_helper.rb +1 -1
  66. data/lib/karafka/web/ui/helpers/paths_helper.rb +7 -10
  67. data/lib/karafka/web/ui/lib/cache.rb +1 -1
  68. data/lib/karafka/web/ui/lib/hash_proxy.rb +1 -1
  69. data/lib/karafka/web/ui/lib/paginations/paginators/sets.rb +1 -1
  70. data/lib/karafka/web/ui/lib/sorter.rb +1 -1
  71. data/lib/karafka/web/ui/models/broker.rb +1 -1
  72. data/lib/karafka/web/ui/models/health.rb +14 -9
  73. data/lib/karafka/web/ui/models/jobs.rb +4 -6
  74. data/lib/karafka/web/ui/models/message.rb +7 -8
  75. data/lib/karafka/web/ui/models/metrics/aggregated.rb +4 -4
  76. data/lib/karafka/web/ui/models/metrics/charts/aggregated.rb +1 -2
  77. data/lib/karafka/web/ui/models/metrics/charts/topics.rb +2 -2
  78. data/lib/karafka/web/ui/models/metrics/topics.rb +3 -4
  79. data/lib/karafka/web/ui/models/recurring_tasks/schedule.rb +1 -1
  80. data/lib/karafka/web/ui/public/javascripts/application.min.js.gz +0 -0
  81. data/lib/karafka/web/ui/public/stylesheets/application.min.css +6 -0
  82. data/lib/karafka/web/ui/public/stylesheets/application.min.css.br +0 -0
  83. data/lib/karafka/web/ui/public/stylesheets/application.min.css.gz +0 -0
  84. data/lib/karafka/web/ui/routes/errors.rb +3 -3
  85. data/lib/karafka/web/ui/views/shared/exceptions/unhandled_error.erb +42 -0
  86. data/lib/karafka/web/version.rb +1 -1
  87. data/lib/karafka/web.rb +2 -3
  88. data/package-lock.json +180 -236
  89. data/package.json +3 -3
  90. data/renovate.json +13 -0
  91. metadata +20 -5
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 7b958131581b6fe2fd1a45a68b7366fb842ca15eccd946988516e78b7fcc3cce
4
- data.tar.gz: 899898076b7fc09111d8cfd77f5e68fe6158b9bfee3128292f77248f2b5922c8
3
+ metadata.gz: 24ceb5279e48629e0a1c3a609b1c041323bdedc9faf6cf03dec926c7c0278b4f
4
+ data.tar.gz: 4ea8f9aef69bb0ee9ac51cc20e132e64e955df2984f6f2359b1c878421793972
5
5
  SHA512:
6
- metadata.gz: b53b0e2dc696cf5af719ad49b90c130a7122261a8123e5ac8ca4deb31d0f397e49f09d71032df4e840ad3a5f9b5b3115709fbcd08568a0991ee24b0da1b7054e
7
- data.tar.gz: b3eeac33c925b9527aef85231a74a255c3d59aedd36f20a174e72b282db744cb6a0779258c9136266f54041ab0e81ce8c95a6a042d4ecc908e0cbba299d651f3
6
+ metadata.gz: 7861ba705d420ddd875053bb9f687302ad8d2719d332e925cab53075edf267245660e2467934c746f3707327f198538321585b77abe60fcb1bcb49c3c8a1771a
7
+ data.tar.gz: dd30a59da0ed8590eb471965d1f6e917b15f2a390eb099e4b1527ca1a490dda46695fb3a40cc1e96729f15377a2b74d3a361d593a8b6cf5747fb03ecd4efb461
data/CHANGELOG.md CHANGED
@@ -1,5 +1,22 @@
1
1
  # Karafka Web Changelog
2
2
 
3
+ ## 0.11.5 (2025-11-14)
4
+ - [Enhancement] Utilize newly released Roda session management `:env_key` to isolate Karafka Web session from the main application session.
5
+
6
+ ## 0.11.4 (2025-11-01)
7
+ - [Enhancement] Show placeholder rows for partitions with no data during rebalances in health view. The UI now displays all topic partitions (0 to N-1) with "No data available" indicators for partitions currently being rebalanced, preventing confusion from disappearing partitions. Consumer reports now include `partitions_cnt` field extracted from librdkafka statistics. Consumer schema version bumped to 1.5.0 (breaking change).
8
+ - [Enhancement] Track and report UI errors originating from Roda/Puma web processes directly to Kafka errors topic for visibility and debugging. UI errors are dispatched asynchronously from web processes using a dedicated listener.
9
+ - [Enhancement] Require Karafka 2.5.2 at minimum and migrate from string-based execution mode comparisons to the new ExecutionMode object API.
10
+ - [Enhancement] Increase Web UI processing consumer backoff time to 30 seconds when encountering incompatible schema errors to prevent error spam during rolling upgrades.
11
+ - [Enhancement] Add unique `id` field to error reports to track duplicate error occurrences. Error schema version bumped to 1.2.0 while maintaining backward compatibility with older error formats (1.0.0, 1.1.0) in the Web UI.
12
+ - [Enhancement] Add container-aware metrics collection for Docker/Kubernetes environments. The Web UI now reports accurate container memory limits from cgroups (v1 and v2) instead of misleading host metrics, while maintaining full backward compatibility with non-containerized deployments.
13
+ - [Enhancement] Add per-message report migration system to handle schema evolution for consumer reports. This allows transparent migration of old report formats (e.g., schema 1.2.x using `process[:name]`) to current expectations (schema 1.3.0+ using `process[:id]`), ensuring backward compatibility with reports from older karafka-web versions (≤ v0.8.2) that may still exist in Kafka topics.
14
+ - [Change] Reduce `max_messages` for consumer reports processing from 1000 to 200 to prevent excessive memory usage in large-scale deployments. Processing 1000 messages at once can impact memory consumption significantly in big systems, while 200 messages provides better memory efficiency with negligible impact on throughput.
15
+ - [Refactor] Extract metrics collection logic from monolithic Sampler into focused, single-responsibility classes (Metrics::Base, Metrics::Os, Metrics::Container, Metrics::Network, Metrics::Server, Metrics::Jobs) and consumer groups enrichment into dedicated enricher (Enrichers::Base, Enrichers::ConsumerGroups) for improved maintainability and testability.
16
+ - [Testing] Add Docker-based integration tests for container metrics collection. Tests verify cgroup v1/v2 detection, memory limit reading, and fallback behavior across multiple containerized scenarios with different resource constraints.
17
+ - [Fix] Fix "OS memory used" metric on Linux reporting same value as RSS instead of system-wide memory usage. The metric now correctly sums memory usage across all processes (or all container processes when running in Docker/Kubernetes) to match macOS behavior and original design intent.
18
+ - [Fix] Fix crash when processing old consumer reports from schema versions < 1.3.0 (karafka-web ≤ v0.8.2) that used `process[:name]` field instead of `process[:id]`. The error `undefined method 'to_sym' for nil` would occur when these old reports were encountered during upgrades. Reports are now automatically migrated in-place to the current schema format.
19
+
3
20
  ## 0.11.3 (2025-09-29)
4
21
  - [Enhancement] Upgrade DaisyUI to 5.1.
5
22
  - [Change] Remove Ruby `3.1` support according to the EOL schedule.
data/Gemfile CHANGED
@@ -2,8 +2,6 @@
2
2
 
3
3
  source 'https://rubygems.org'
4
4
 
5
- plugin 'diffend'
6
-
7
5
  gemspec
8
6
 
9
7
  group :test do
@@ -16,4 +14,5 @@ group :test do
16
14
  gem 'rack-test'
17
15
  gem 'rspec'
18
16
  gem 'simplecov'
17
+ gem 'yard-lint'
19
18
  end
data/Gemfile.lock CHANGED
@@ -1,41 +1,41 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka-web (0.11.3)
4
+ karafka-web (0.11.5)
5
5
  erubi (~> 1.4)
6
- karafka (>= 2.5.0, < 2.6.0)
6
+ karafka (>= 2.5.2, < 2.6.0)
7
7
  karafka-core (>= 2.5.0, < 2.6.0)
8
- roda (~> 3.68, >= 3.69)
8
+ roda (~> 3.69, >= 3.69)
9
9
  tilt (~> 2.0)
10
10
 
11
11
  GEM
12
12
  remote: https://rubygems.org/
13
13
  specs:
14
- activesupport (7.2.2.2)
14
+ activesupport (8.1.1)
15
15
  base64
16
- benchmark (>= 0.3)
17
16
  bigdecimal
18
17
  concurrent-ruby (~> 1.0, >= 1.3.1)
19
18
  connection_pool (>= 2.2.5)
20
19
  drb
21
20
  i18n (>= 1.6, < 2)
21
+ json
22
22
  logger (>= 1.4.2)
23
23
  minitest (>= 5.1)
24
24
  securerandom (>= 0.3)
25
25
  tzinfo (~> 2.0, >= 2.0.5)
26
- base64 (0.2.0)
27
- benchmark (0.4.1)
28
- bigdecimal (3.1.9)
26
+ uri (>= 0.13.1)
27
+ base64 (0.3.0)
28
+ bigdecimal (3.3.1)
29
29
  byebug (12.0.0)
30
30
  concurrent-ruby (1.3.5)
31
- connection_pool (2.5.3)
31
+ connection_pool (2.5.4)
32
32
  diff-lcs (1.6.2)
33
33
  docile (1.4.1)
34
34
  drb (2.2.3)
35
35
  erubi (1.13.1)
36
- et-orbi (1.2.11)
36
+ et-orbi (1.4.0)
37
37
  tzinfo
38
- factory_bot (6.5.5)
38
+ factory_bot (6.5.6)
39
39
  activesupport (>= 6.1.0)
40
40
  ffi (1.17.2)
41
41
  ffi (1.17.2-aarch64-linux-gnu)
@@ -48,43 +48,54 @@ GEM
48
48
  ffi (1.17.2-x86_64-darwin)
49
49
  ffi (1.17.2-x86_64-linux-gnu)
50
50
  ffi (1.17.2-x86_64-linux-musl)
51
- fugit (1.11.2)
52
- et-orbi (~> 1, >= 1.2.11)
51
+ fugit (1.12.1)
52
+ et-orbi (~> 1.4)
53
53
  raabro (~> 1.4)
54
54
  i18n (1.14.7)
55
55
  concurrent-ruby (~> 1.0)
56
- karafka (2.5.0)
56
+ json (2.16.0)
57
+ karafka (2.5.2)
57
58
  base64 (~> 0.2)
58
- karafka-core (>= 2.5.2, < 2.6.0)
59
- karafka-rdkafka (>= 0.19.5)
60
- waterdrop (>= 2.8.3, < 3.0.0)
59
+ karafka-core (>= 2.5.6, < 2.6.0)
60
+ karafka-rdkafka (>= 0.22.0)
61
+ waterdrop (>= 2.8.9, < 3.0.0)
61
62
  zeitwerk (~> 2.3)
62
- karafka-core (2.5.2)
63
- karafka-rdkafka (>= 0.19.2, < 0.21.0)
63
+ karafka-core (2.5.7)
64
+ karafka-rdkafka (>= 0.20.0)
64
65
  logger (>= 1.6.0)
65
- karafka-rdkafka (0.20.1)
66
- ffi (~> 1.15)
66
+ karafka-rdkafka (0.23.0)
67
+ ffi (~> 1.17.1)
68
+ json (> 2.0)
67
69
  logger
68
70
  mini_portile2 (~> 2.6)
69
71
  rake (> 12)
70
- karafka-rdkafka (0.20.1-arm64-darwin)
71
- ffi (~> 1.15)
72
+ karafka-rdkafka (0.23.0-aarch64-linux-gnu)
73
+ ffi (~> 1.17.1)
74
+ json (> 2.0)
72
75
  logger
73
76
  mini_portile2 (~> 2.6)
74
77
  rake (> 12)
75
- karafka-rdkafka (0.20.1-x86_64-linux-gnu)
76
- ffi (~> 1.15)
78
+ karafka-rdkafka (0.23.0-arm64-darwin)
79
+ ffi (~> 1.17.1)
80
+ json (> 2.0)
77
81
  logger
78
82
  mini_portile2 (~> 2.6)
79
83
  rake (> 12)
80
- karafka-rdkafka (0.20.1-x86_64-linux-musl)
81
- ffi (~> 1.15)
84
+ karafka-rdkafka (0.23.0-x86_64-linux-gnu)
85
+ ffi (~> 1.17.1)
86
+ json (> 2.0)
87
+ logger
88
+ mini_portile2 (~> 2.6)
89
+ rake (> 12)
90
+ karafka-rdkafka (0.23.0-x86_64-linux-musl)
91
+ ffi (~> 1.17.1)
92
+ json (> 2.0)
82
93
  logger
83
94
  mini_portile2 (~> 2.6)
84
95
  rake (> 12)
85
96
  logger (1.7.0)
86
97
  mini_portile2 (2.8.9)
87
- minitest (5.25.5)
98
+ minitest (5.26.1)
88
99
  nokogiri (1.18.10)
89
100
  mini_portile2 (~> 2.8.2)
90
101
  racc (~> 1.4)
@@ -107,44 +118,49 @@ GEM
107
118
  ostruct (0.6.3)
108
119
  raabro (1.4.0)
109
120
  racc (1.8.1)
110
- rack (3.1.16)
121
+ rack (3.2.4)
111
122
  rack-test (2.2.0)
112
123
  rack (>= 1.3)
113
124
  rackup (0.2.3)
114
125
  rack (>= 3.0.0.beta1)
115
126
  webrick
116
- rake (13.3.0)
117
- roda (3.92.0)
127
+ rake (13.3.1)
128
+ roda (3.98.0)
118
129
  rack
119
- rspec (3.13.1)
130
+ rspec (3.13.2)
120
131
  rspec-core (~> 3.13.0)
121
132
  rspec-expectations (~> 3.13.0)
122
133
  rspec-mocks (~> 3.13.0)
123
- rspec-core (3.13.4)
134
+ rspec-core (3.13.6)
124
135
  rspec-support (~> 3.13.0)
125
136
  rspec-expectations (3.13.5)
126
137
  diff-lcs (>= 1.2.0, < 2.0)
127
138
  rspec-support (~> 3.13.0)
128
- rspec-mocks (3.13.5)
139
+ rspec-mocks (3.13.7)
129
140
  diff-lcs (>= 1.2.0, < 2.0)
130
141
  rspec-support (~> 3.13.0)
131
- rspec-support (3.13.4)
142
+ rspec-support (3.13.6)
132
143
  securerandom (0.4.1)
133
144
  simplecov (0.22.0)
134
145
  docile (~> 1.1)
135
146
  simplecov-html (~> 0.11)
136
147
  simplecov_json_formatter (~> 0.1)
137
- simplecov-html (0.13.1)
148
+ simplecov-html (0.13.2)
138
149
  simplecov_json_formatter (0.1.4)
139
- tilt (2.6.0)
150
+ tilt (2.6.1)
140
151
  tzinfo (2.0.6)
141
152
  concurrent-ruby (~> 1.0)
142
- waterdrop (2.8.4)
153
+ uri (1.1.1)
154
+ waterdrop (2.8.13)
143
155
  karafka-core (>= 2.4.9, < 3.0.0)
144
- karafka-rdkafka (>= 0.19.2)
156
+ karafka-rdkafka (>= 0.20.0)
145
157
  zeitwerk (~> 2.3)
146
158
  webrick (1.9.1)
147
- zeitwerk (2.6.18)
159
+ yard (0.9.37)
160
+ yard-lint (1.2.3)
161
+ yard (~> 0.9)
162
+ zeitwerk (~> 2.6)
163
+ zeitwerk (2.7.3)
148
164
 
149
165
  PLATFORMS
150
166
  aarch64-linux-gnu
@@ -170,6 +186,7 @@ DEPENDENCIES
170
186
  rackup (~> 0.2)
171
187
  rspec
172
188
  simplecov
189
+ yard-lint
173
190
 
174
191
  BUNDLED WITH
175
- 2.6.9
192
+ 2.7.1
data/bin/integrations ADDED
@@ -0,0 +1,44 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # Integration test runner for container metrics
4
+ # Runs tests inside Docker containers with various memory/CPU configurations
5
+
6
+ set -e
7
+
8
+ echo "================================================"
9
+ echo "Container Metrics Integration Tests"
10
+ echo "================================================"
11
+ echo ""
12
+
13
+ # Build the test image first
14
+ echo "Building integration test Docker image..."
15
+ docker compose -f spec/integrations/docker-compose.yml build
16
+
17
+ echo ""
18
+ echo "Running integration tests in containers..."
19
+ echo ""
20
+
21
+ # Run all test scenarios
22
+ # Note: Each container runs independently and exits after tests complete
23
+ docker compose -f spec/integrations/docker-compose.yml up \
24
+ --abort-on-container-exit \
25
+ --exit-code-from test-container-limited \
26
+ test-container-limited
27
+
28
+ docker compose -f spec/integrations/docker-compose.yml up \
29
+ --abort-on-container-exit \
30
+ --exit-code-from test-container-unlimited \
31
+ test-container-unlimited
32
+
33
+ docker compose -f spec/integrations/docker-compose.yml up \
34
+ --abort-on-container-exit \
35
+ --exit-code-from test-container-strict \
36
+ test-container-strict
37
+
38
+ echo ""
39
+ echo "================================================"
40
+ echo "All integration tests completed successfully!"
41
+ echo "================================================"
42
+
43
+ # Clean up
44
+ docker compose -f spec/integrations/docker-compose.yml down
data/bin/rspecs CHANGED
@@ -2,5 +2,9 @@
2
2
 
3
3
  set -e
4
4
 
5
- SPECS_TYPE=regular bundle exec rspec --exclude-pattern "**/pro/**/*_spec.rb"
6
- SPECS_TYPE=pro bundle exec rspec spec/lib/karafka/web/pro
5
+ SPECS_TYPE=regular bundle exec rspec \
6
+ spec/lib \
7
+ --exclude-pattern "**/pro/**/*_spec.rb"
8
+
9
+ SPECS_TYPE=pro bundle exec rspec \
10
+ spec/lib/karafka/web/pro
data/docker-compose.yml CHANGED
@@ -1,7 +1,7 @@
1
1
  services:
2
2
  kafka:
3
3
  container_name: kafka
4
- image: confluentinc/cp-kafka:8.0.1
4
+ image: confluentinc/cp-kafka:8.1.0
5
5
 
6
6
  ports:
7
7
  - 9092:9092
data/karafka-web.gemspec CHANGED
@@ -17,9 +17,9 @@ Gem::Specification.new do |spec|
17
17
  spec.licenses = %w[LGPL-3.0-only Commercial]
18
18
 
19
19
  spec.add_dependency 'erubi', '~> 1.4'
20
- spec.add_dependency 'karafka', '>= 2.5.0', '< 2.6.0'
20
+ spec.add_dependency 'karafka', '>= 2.5.2', '< 2.6.0'
21
21
  spec.add_dependency 'karafka-core', '>= 2.5.0', '< 2.6.0'
22
- spec.add_dependency 'roda', '~> 3.68', '>= 3.69'
22
+ spec.add_dependency 'roda', '~> 3.69', '>= 3.69'
23
23
  spec.add_dependency 'tilt', '~> 2.0'
24
24
 
25
25
  spec.add_development_dependency 'rackup', '~> 0.2'
@@ -6,9 +6,8 @@ module Karafka
6
6
  class App
7
7
  class << self
8
8
  # @param env [Hash] Rack env
9
- # @param block [Proc] Rack block
10
- def call(env, &block)
11
- engine.call(env, &block)
9
+ def call(env, &)
10
+ engine.call(env, &)
12
11
  end
13
12
 
14
13
  # @return [Class] regular or pro Web engine
@@ -10,7 +10,7 @@ module Karafka
10
10
  # Print available commands
11
11
  def call
12
12
  # Find the longest command for alignment purposes
13
- max_command_length = self.class.commands.map(&:name).map(&:size).max
13
+ max_command_length = self.class.commands.map { |command| command.name.size }.max
14
14
 
15
15
  puts 'Karafka Web UI commands:'
16
16
 
@@ -171,6 +171,14 @@ module Karafka
171
171
  Tracking::Producers::Listeners::Errors.new
172
172
  ]
173
173
  end
174
+
175
+ setting :ui do
176
+ # Listeners needed for the Web UI to track errors from Roda/Puma
177
+ # These dispatch directly to Kafka since there's no background reporter in web processes
178
+ setting :listeners, default: [
179
+ Tracking::Ui::Errors.new
180
+ ]
181
+ end
174
182
  end
175
183
 
176
184
  # States processing related settings
@@ -202,6 +210,12 @@ module Karafka
202
210
  # Cookie key name
203
211
  setting :key, default: '_karafka_session'
204
212
 
213
+ # Rack middleware session env key. We use independent key from "rack.session" here to
214
+ # prevent our data from leaking to the main app (when mounted) and the other way around.
215
+ # This also prevents us from overloading the session object with extra data that could
216
+ # cause it to go beyond a cookie limit.
217
+ setting :env_key, default: 'karafka.session'
218
+
205
219
  # Secret for the session cookie
206
220
  setting :secret, default: SecureRandom.hex(32)
207
221
  end
@@ -13,10 +13,8 @@ module Karafka
13
13
  return super if block_given?
14
14
 
15
15
  super do |config|
16
- config.error_messages = YAML.safe_load(
17
- File.read(
18
- File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
19
- )
16
+ config.error_messages = YAML.safe_load_file(
17
+ File.join(Karafka::Web.gem_root, 'config', 'locales', 'errors.yml')
20
18
  ).fetch('en').fetch('validations').fetch('web')
21
19
  end
22
20
  end
@@ -19,7 +19,7 @@ module Karafka
19
19
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
20
20
 
21
21
  required(:config) do |val|
22
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
22
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
23
23
  end
24
24
  end
25
25
 
@@ -28,7 +28,7 @@ module Karafka
28
28
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
29
29
 
30
30
  required(:config) do |val|
31
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
31
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
32
32
  end
33
33
  end
34
34
 
@@ -36,7 +36,7 @@ module Karafka
36
36
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
37
37
 
38
38
  required(:config) do |val|
39
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
39
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
40
40
  end
41
41
  end
42
42
 
@@ -44,7 +44,7 @@ module Karafka
44
44
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
45
45
 
46
46
  required(:config) do |val|
47
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
47
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
48
48
  end
49
49
  end
50
50
 
@@ -52,7 +52,7 @@ module Karafka
52
52
  required(:name) { |val| val.is_a?(String) && TOPIC_REGEXP.match?(val) }
53
53
 
54
54
  required(:config) do |val|
55
- val.is_a?(Hash) && !val.empty? && val.keys.all? { |key| key.is_a?(Symbol) }
55
+ val.is_a?(Hash) && !val.empty? && val.keys.all?(Symbol)
56
56
  end
57
57
  end
58
58
  end
@@ -91,6 +91,7 @@ module Karafka
91
91
  nested(:ui) do
92
92
  nested(:sessions) do
93
93
  required(:key) { |val| val.is_a?(String) && !val.empty? }
94
+ required(:env_key) { |val| val.is_a?(String) && !val.empty? }
94
95
  required(:secret) { |val| val.is_a?(String) && val.length >= 64 }
95
96
  end
96
97
 
@@ -13,7 +13,7 @@ module Karafka
13
13
 
14
14
  private_constant :MIGRATION_ABSPATH_REGEXP, :MIGRATION_BASENAME_REGEXP
15
15
 
16
- # @param [String] basename of the file to be loaded
16
+ # @param basename [String] basename of the file to be loaded
17
17
  # @param abspath [String] absolute path of the file to be loaded
18
18
  # @return [String] Constant name to be used for given file
19
19
  def camelize(basename, abspath)
@@ -61,7 +61,7 @@ module Karafka
61
61
  # Since we materialize state in intervals, we can poll for half of this time
62
62
  # without impacting the reporting responsiveness
63
63
  max_wait_time ::Karafka::Web.config.processing.interval / 2
64
- max_messages 1_000
64
+ max_messages 200
65
65
  consumer ::Karafka::Web::Processing::Consumer
66
66
  # This needs to be true in order not to reload the consumer in dev. This consumer
67
67
  # should not be affected by the end user development process
@@ -73,6 +73,13 @@ module Karafka
73
73
  # consumer group name would be renamed and we would start consuming all
74
74
  # historical
75
75
  initial_offset 'latest'
76
+ # Increase backoff time on errors. Incompatible schema errors are not recoverable
77
+ # until rolling upgrade completes, so we use a longer max timeout to prevent
78
+ # spamming errors in logs.
79
+ # We set this ourselves so user settings do not impact frequency of retrying
80
+ pause_timeout 5_000
81
+ pause_max_timeout 60_000
82
+ pause_with_exponential_backoff true
76
83
  # We use the defaults + our config alterations that may not align with what
77
84
  # user wants for his topics.
78
85
  kafka kafka_config
@@ -124,6 +131,12 @@ module Karafka
124
131
 
125
132
  ::Karafka::Web.producer.monitor.subscribe(listener)
126
133
  end
134
+
135
+ # Installs all the UI related listeners for tracking errors from web processes
136
+ # These listen on Karafka monitor to catch instrumented UI errors
137
+ ::Karafka::Web.config.tracking.ui.listeners.each do |listener|
138
+ ::Karafka.monitor.subscribe(listener)
139
+ end
127
140
  end
128
141
 
129
142
  # In most cases we want to close the producer if possible.
@@ -0,0 +1,38 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Web
5
+ module Management
6
+ module Migrations
7
+ # Consumer reports migrations
8
+ module ConsumersReports
9
+ # Migrates consumer reports from schema < 1.3.0 that used process[:name] to process[:id]
10
+ #
11
+ # In schema versions 1.2.x and earlier (karafka-web <= v0.8.2), the process identifier
12
+ # was stored in the :name field. Starting with schema 1.3.0 (karafka-web v0.9.0+),
13
+ # this was renamed to :id for consistency.
14
+ #
15
+ # This migration ensures old reports can be processed by current aggregators that
16
+ # expect the :id field.
17
+ class RenameProcessNameToId < Base
18
+ # Apply to all schema versions before 1.3.0
19
+ self.versions_until = '1.3.0'
20
+ self.type = :consumers_reports
21
+
22
+ # @param report [Hash] consumer report to migrate
23
+ def migrate(report)
24
+ # If :id already exists, nothing to do (already migrated or newer schema)
25
+ return if report[:process][:id]
26
+
27
+ # Rename :name to :id
28
+ # Both :name (in schema < 1.3.0) and :id (in schema >= 1.3.0) were always
29
+ # required fields, so we don't need nil checks for valid reports
30
+ report[:process][:id] = report[:process][:name]
31
+ report[:process].delete(:name)
32
+ end
33
+ end
34
+ end
35
+ end
36
+ end
37
+ end
38
+ end
@@ -65,8 +65,6 @@ module Karafka
65
65
  'consumers metrics newer than supported'
66
66
  )
67
67
  end
68
-
69
- true
70
68
  end
71
69
 
72
70
  # Applies migrations if needed and mutates the in-memory data
@@ -76,6 +74,9 @@ module Karafka
76
74
  any_migrations = false
77
75
 
78
76
  Migrations::Base.sorted_descendants.each do |migration_class|
77
+ # Skip report migrations - they are applied per-message by ReportsMigrator
78
+ next if migration_class.type == :consumers_reports
79
+
79
80
  data = send(migration_class.type)
80
81
 
81
82
  next unless migration_class.applicable?(data[:schema_version])
@@ -50,7 +50,7 @@ module Karafka
50
50
  # being executed in the embedded or swarm processes since there the signaling is
51
51
  # handled differently (either via the main process or supervisor).
52
52
  def standalone?
53
- Karafka::Server.execution_mode == :standalone
53
+ Karafka::Server.execution_mode.standalone?
54
54
  end
55
55
 
56
56
  # @return [String] id of the current consumer process
@@ -12,10 +12,8 @@ module Karafka
12
12
  # Makes sure, all the expected commanding config is defined as it should be
13
13
  class Config < ::Karafka::Contracts::Base
14
14
  configure do |config|
15
- config.error_messages = YAML.safe_load(
16
- File.read(
17
- File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
18
- )
15
+ config.error_messages = YAML.safe_load_file(
16
+ File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
19
17
  ).fetch('en').fetch('validations').fetch('config')
20
18
  end
21
19
 
@@ -41,17 +41,16 @@ module Karafka
41
41
  # @param subscription_group_id [String] id of the subscription group for which we
42
42
  # want to get all the requests. Subscription groups ids (not names) are unique
43
43
  # within the application, so it is unique "enough".
44
- # @param block [Proc]
45
44
  #
46
45
  # @yieldparam [Request] given command request for the requested subscription group
47
- def each_for(subscription_group_id, &block)
46
+ def each_for(subscription_group_id, &)
48
47
  requests = nil
49
48
 
50
49
  @mutex.synchronize do
51
50
  requests = @requests.delete(subscription_group_id)
52
51
  end
53
52
 
54
- (requests || EMPTY_ARRAY).each(&block)
53
+ (requests || EMPTY_ARRAY).each(&)
55
54
  end
56
55
  end
57
56
  end
@@ -30,7 +30,7 @@ module Karafka
30
30
 
31
31
  # Displays selected broker configuration
32
32
  #
33
- # @param broker_id [String] id of the broker
33
+ # @param broker_id [String]
34
34
  def show(broker_id)
35
35
  @broker = Models::Broker.find(broker_id)
36
36
 
@@ -21,8 +21,7 @@ module Karafka
21
21
  # cluster
22
22
  candidates = Karafka::App
23
23
  .routes
24
- .map(&:topics)
25
- .map(&:to_a)
24
+ .map { |route| route.topics.to_a }
26
25
  .flatten
27
26
  .select(&:scheduled_messages?)
28
27
  .reject { |topic| topic.name.end_with?(states_postfix) }
@@ -65,9 +65,7 @@ module Karafka
65
65
  topic_name,
66
66
  partition_count
67
67
  )
68
- rescue Rdkafka::RdkafkaError => e
69
- @form_error = e
70
- rescue Rdkafka::Config::ConfigError => e
68
+ rescue Rdkafka::RdkafkaError, Rdkafka::Config::ConfigError => e
71
69
  @form_error = e
72
70
  end
73
71
 
@@ -14,10 +14,8 @@ module Karafka
14
14
  # Makes sure, all the expected UI branding config is defined as it should be
15
15
  class Config < ::Karafka::Contracts::Base
16
16
  configure do |config|
17
- config.error_messages = YAML.safe_load(
18
- File.read(
19
- File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
20
- )
17
+ config.error_messages = YAML.safe_load_file(
18
+ File.join(Karafka::Web.gem_root, 'config', 'locales', 'pro_errors.yml')
21
19
  ).fetch('en').fetch('validations').fetch('config')
22
20
  end
23
21