karafka-web 0.7.3 → 0.7.4

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: dc9572e3dfbb4565361fdbc9521fb7a9a7833664f42bd8e9f0e63e940a397034
- data.tar.gz: 07fb369bcd0d1aa6c80ef20ff96289989c38ff934a1eb795ab85f39b7adf1228
+ metadata.gz: 7a31ebcde158ca18cc353d3fefad976b7dfb8d9fe3b3114ea8f43b533229434a
+ data.tar.gz: f9870e5268ebebb38c838dc8a220030beaebfe30d4d53cd63537c5c2f442e2d4
  SHA512:
- metadata.gz: 51bc23c17be963c3b19143aa8034046c0bd85df4251dc9e2e911a8ab28ae194d9ae62e2ab1d3aa0b3e2f8dba33649ad51881d9e96fe669fffbdc971f406ce3eb
- data.tar.gz: 332247644ed57d25687af816ed850f7847b6eb8e63646bf492460424f0d5855c8dace2f1127d0d84631bffa8f6a9fb71e42a64ece7141a6acb96d5b5f987896c
+ metadata.gz: '02152094838c0be1606f49b4963e7d0791f1800bc7b7fbf71240866ed84e73bc264322415b48272faf46c715d70568fc054fc7183406d182d3c95eeb664019db'
+ data.tar.gz: 787a16d7f91f7ccb907a3cbea0599337bd7525b1f2982f023b639227df1b52511afcc763422c71d0e2bf34216d3f3a4fe6794ae8824abeea9302b84071de8ca5
checksums.yaml.gz.sig CHANGED
Binary file
data/CHANGELOG.md CHANGED
@@ -1,5 +1,9 @@
  # Karafka Web changelog

+ ## 0.7.4 (2023-09-19)
+ - [Improvement] Skip aggregations on older schemas during upgrades. This only skips process reports (which are going to be rolled anyway) within the 5s window during an upgrade that should not be a rolling one anyhow. It simplifies the operations and minimizes the risk of breaking upgrades.
+ - [Fix] Fix the `ps` invocation not working on macOS.
+
  ## 0.7.3 (2023-09-18)
  - [Improvement] Mitigate a case where a race-condition during upgrade would crash data.

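The first 0.7.4 entry above turns on how a report's schema version compares with the one the Web consumer was built against. A minimal, purely illustrative sketch of that ordering using Ruby's stdlib `Gem::Version` (the version numbers here are made up):

```ruby
require 'rubygems'

current = Gem::Version.new('1.2.0')

Gem::Version.new('1.1.0') < current  # => true, an "older" report, safe to skip mid-upgrade
Gem::Version.new('1.3.0') > current  # => true, a "newer" report, incompatible with this process
Gem::Version.new('1.2.0') == current # => true, a "current" report, aggregated as usual
```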
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- karafka-web (0.7.3)
+ karafka-web (0.7.4)
  erubi (~> 1.4)
  karafka (>= 2.2.3, < 3.0.0)
  karafka-core (>= 2.2.2, < 3.0.0)
@@ -26,6 +26,7 @@ module Karafka

  # We set this that way so we report with first batch and so we report as fast as possible
  @flushed_at = monotonic_now - @flush_interval
+ @established = false
  end

  # Aggregates consumers state into a single current state representation
@@ -34,10 +35,24 @@ module Karafka

  # If there is even one incompatible message, we need to stop
  consumers_messages.each do |message|
- unless @schema_manager.compatible?(message)
+ case @schema_manager.call(message)
+ when :current
+ true
+ when :newer
+ @schema_manager.invalidate!
+
  dispatch

  raise ::Karafka::Web::Errors::Processing::IncompatibleSchemaError
+ # Older reports mean someone is in the middle of an upgrade. Schema-change-related
+ # upgrades should always happen without a rolling upgrade, hence we can reject those
+ # reports without significant (or any) impact on data quality and without having to
+ # worry about backwards compatibility. Errors are tracked independently, so it should
+ # not be a problem.
+ when :older
+ next
+ else
+ raise ::Karafka::Errors::UnsupportedCaseError
  end

  # We need to run the aggregations on each message in order to compensate for
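The comments in the hunk above argue that during a (non-rolling) schema upgrade, reports still emitted in the previous schema can simply be dropped. A self-contained, hypothetical sketch of that filtering; the `Report` struct and the version numbers are illustrative, not the gem's API:

```ruby
require 'rubygems'

# Stand-in for Karafka consumer report payloads; all data below is made up
Report = Struct.new(:schema_version, :process)

current = Gem::Version.new('1.2.0')

reports = [
  Report.new('1.1.0', 'worker-old'), # mid-upgrade process, previous schema
  Report.new('1.2.0', 'worker-new')  # already on the current schema
]

aggregated = reports.select do |report|
  version = Gem::Version.new(report.schema_version)

  # A newer report means this process itself is outdated - stop processing
  raise 'incompatible schema' if version > current

  # Older reports are skipped; only current-schema reports are aggregated
  version == current
end

aggregated.map(&:process) # => ["worker-new"]
```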
@@ -45,6 +60,10 @@ module Karafka
  @state_aggregator.add(message.payload, message.offset)
  @metrics_aggregator.add_report(message.payload)
  @metrics_aggregator.add_stats(@state_aggregator.stats)
+ # Indicates that we had at least one report we used to enrich data
+ # If there were no state changes, there is no reason to flush data. This can occur
+ # when we had some messages but we skipped them for any reason on a first run
+ @established = true

  # Optimize memory usage in pro
  message.clean! if Karafka.pro?
@@ -59,17 +78,15 @@ module Karafka

  # Flush final state on shutdown
  def shutdown
- return unless @state_aggregator
-
- materialize
- validate!
- flush
+ dispatch
  end

  private

  # Flushes the state of the Web-UI to the DB
  def dispatch
+ return unless @established
+
  materialize
  validate!
  flush
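Taken together, the `@established` flag initialized earlier and the guard in `dispatch` above mean the consumer never flushes a state it has not actually enriched, for example when every message in the first batch was skipped. A minimal sketch of that guard pattern; the class and method names are hypothetical, not the gem's internals:

```ruby
# Hypothetical illustration of the "flush only once real data arrived" guard
class GuardedFlusher
  def initialize
    # Nothing aggregated yet, so flushing would publish an empty state
    @established = false
  end

  def add(report)
    aggregate(report)
    # At least one report enriched the state, flushing is now meaningful
    @established = true
  end

  def flush
    return unless @established

    persist
  end

  private

  def aggregate(report)
    # aggregation logic would live here
  end

  def persist
    puts 'state flushed'
  end
end

flusher = GuardedFlusher.new
flusher.flush             # no-op: nothing was aggregated yet
flusher.add({ lag: 0 })
flusher.flush             # prints "state flushed"
```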
@@ -102,7 +102,7 @@ module Karafka
  partitions_data = topic_details.fetch(:partitions).values

  lags = partitions_data
- .map { |p_details| p_details[:lag] || 0 }
+ .map { |p_details| p_details.fetch(:lag, -1) }
  .reject(&:negative?)

  lags_stored = partitions_data
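The change above is subtle: with `p_details[:lag] || 0`, a partition that reported no lag at all was counted as zero lag, which can skew any aggregate derived from `lags`; with `fetch(:lag, -1)` combined with the existing `reject(&:negative?)`, such partitions are excluded instead. A small, made-up illustration:

```ruby
partitions_data = [
  { lag: 10 },
  { lag: 3 },
  {} # a partition that did not report lag at all
]

# Previous behaviour: unknown lag counted as 0
old_lags = partitions_data.map { |p| p[:lag] || 0 }
# => [10, 3, 0]

# New behaviour: unknown lag marked as -1 and dropped
new_lags = partitions_data.map { |p| p.fetch(:lag, -1) }.reject(&:negative?)
# => [10, 3]

old_lags.sum / old_lags.size.to_f # => ~4.33, diluted by the missing value
new_lags.sum / new_lags.size.to_f # => 6.5
```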
@@ -27,12 +27,8 @@ module Karafka
  end

  # @param message [Karafka::Messages::Message] consumer report
- # @return [Boolean] true if all good or false if incompatible
- #
- # @note The state switch is one-direction only. If we encounter an incompatible message
- # we need to stop processing so further checks even with valid should not switch it
- # back to valid
- def compatible?(message)
+ # @return [Symbol] whether the given message uses an older, newer or the current schema
+ def call(message)
  schema_version = message.payload[:schema_version]

  # Save on memory allocation by reusing
@@ -40,8 +36,19 @@ module Karafka
  # an object with each message
  message_version = @cache[schema_version] ||= ::Gem::Version.new(schema_version)

- return true if message_version <= CURRENT_VERSION
+ return :older if message_version < CURRENT_VERSION
+ return :newer if message_version > CURRENT_VERSION
+
+ :current
+ end

+ # Moves the schema manager state to incompatible to indicate in the Web-UI that we
+ # cannot move forward because the schema is incompatible.
+ #
+ # @note The state switch is one-direction only. If we encounter an incompatible message
+ # we need to stop processing so further checks even with valid messages should not switch
+ # it back to valid
+ def invalidate!
  @valid = false
  end

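The `@note` above stresses that invalidation is one-directional: once `invalidate!` has been called, later reports with a valid schema must not flip the state back. A tiny, hypothetical sketch of that property (not the gem's full class):

```ruby
# Minimal stand-in showing only the one-directional validity switch
class TinySchemaState
  def initialize
    @valid = true
  end

  def valid?
    @valid
  end

  # Once flipped to invalid, nothing flips it back
  def invalidate!
    @valid = false
  end
end

state = TinySchemaState.new
state.valid?      # => true
state.invalidate!
state.valid?      # => false, and it stays false for all further reports
```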
@@ -247,11 +247,20 @@ module Karafka
  # Loads our ps results into memory so we can extract from them whatever we need
  def memory_threads_ps
  @memory_threads_ps = case RUBY_PLATFORM
- when /darwin|bsd|linux/
+ when /linux/
  @shell
- .call('ps -A -o rss=,thcount,pid')
+ .call('ps -A -o rss=,thcount=,pid=')
  .split("\n")
  .map { |row| row.strip.split(' ').map(&:to_i) }
+ # thcount is not available on macOS ps
+ # because of that we inject 0 as the threads count, similar to how
+ # we do on Windows
+ when /darwin|bsd/
+ @shell
+ .call('ps -A -o rss=,pid=')
+ .split("\n")
+ .map { |row| row.strip.split(' ').map(&:to_i) }
+ .map { |row| [row.first, 0, row.last] }
  else
  @memory_threads_ps = false
  end
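The fix above accounts for macOS (and BSD) `ps` not supporting a `thcount` column: only `rss` and `pid` are requested there, and `0` is injected as the thread count so the row shape stays `[rss, threads, pid]`. A rough sketch of that parsing on made-up `ps -A -o rss=,pid=` output:

```ruby
# Made-up sample of `ps -A -o rss=,pid=` output on macOS (rss first, then pid)
macos_ps_output = "  2048   123\n 40960   456\n"

rows = macos_ps_output
  .split("\n")
  .map { |row| row.strip.split(' ').map(&:to_i) } # => [[2048, 123], [40960, 456]]
  .map { |row| [row.first, 0, row.last] }         # inject 0 threads

rows # => [[2048, 0, 123], [40960, 0, 456]]
```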
@@ -3,6 +3,6 @@
  module Karafka
  module Web
  # Current gem version
- VERSION = '0.7.3'
+ VERSION = '0.7.4'
  end
  end
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka-web
  version: !ruby/object:Gem::Version
- version: 0.7.3
+ version: 0.7.4
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
  AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
  msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
  -----END CERTIFICATE-----
- date: 2023-09-18 00:00:00.000000000 Z
+ date: 2023-09-19 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: erubi
metadata.gz.sig CHANGED
Binary file