karafka 2.0.24 → 2.0.26

Sign up to get free protection for your applications and to get access to all the features.
Files changed (50) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +22 -3
  4. data/.ruby-version +1 -1
  5. data/CHANGELOG.md +20 -0
  6. data/Gemfile.lock +9 -8
  7. data/README.md +3 -1
  8. data/bin/integrations +8 -0
  9. data/bin/verify_license_integrity +35 -0
  10. data/config/locales/errors.yml +1 -0
  11. data/karafka.gemspec +2 -2
  12. data/lib/karafka/admin.rb +14 -10
  13. data/lib/karafka/app.rb +1 -0
  14. data/lib/karafka/base_consumer.rb +1 -7
  15. data/lib/karafka/connection/client.rb +21 -0
  16. data/lib/karafka/connection/consumer_group_coordinator.rb +7 -1
  17. data/lib/karafka/connection/listener.rb +4 -3
  18. data/lib/karafka/connection/listeners_batch.rb +6 -0
  19. data/lib/karafka/contracts/server_cli_options.rb +1 -0
  20. data/lib/karafka/contracts/topic.rb +12 -1
  21. data/lib/karafka/instrumentation/logger_listener.rb +24 -8
  22. data/lib/karafka/instrumentation/notifications.rb +14 -7
  23. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  24. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +2 -2
  25. data/lib/karafka/pro/processing/strategies/aj_lrj_mom_vp.rb +5 -0
  26. data/lib/karafka/pro/processing/strategies/default.rb +6 -0
  27. data/lib/karafka/pro/processing/strategies/lrj.rb +5 -0
  28. data/lib/karafka/pro/processing/strategies/lrj_mom.rb +5 -0
  29. data/lib/karafka/process.rb +3 -1
  30. data/lib/karafka/processing/jobs_queue.rb +2 -2
  31. data/lib/karafka/processing/strategies/base.rb +5 -0
  32. data/lib/karafka/processing/strategies/default.rb +14 -0
  33. data/lib/karafka/processing/worker.rb +2 -0
  34. data/lib/karafka/routing/builder.rb +1 -1
  35. data/lib/karafka/routing/consumer_group.rb +3 -3
  36. data/lib/karafka/routing/consumer_mapper.rb +0 -10
  37. data/lib/karafka/routing/router.rb +12 -2
  38. data/lib/karafka/routing/subscription_group.rb +18 -1
  39. data/lib/karafka/routing/topic.rb +11 -0
  40. data/lib/karafka/runner.rb +1 -0
  41. data/lib/karafka/server.rb +14 -5
  42. data/lib/karafka/status.rb +6 -0
  43. data/lib/karafka/time_trackers/base.rb +1 -6
  44. data/lib/karafka/time_trackers/pause.rb +2 -2
  45. data/lib/karafka/time_trackers/poll.rb +2 -2
  46. data/lib/karafka/version.rb +1 -1
  47. data/lib/karafka.rb +2 -0
  48. data.tar.gz.sig +0 -0
  49. metadata +6 -5
  50. metadata.gz.sig +0 -0
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 25eb837923f78f1bf35402bf24469228d8f5af12090b2d406718a0077ce42598
4
- data.tar.gz: 23520ecab430080061046e683c0dcb47316d1216d1ad9843894caefff76e24cc
3
+ metadata.gz: e1e0356b40b0812dc321a825731c6fcd7413f9d1d30a23288597d2f832c6b5d3
4
+ data.tar.gz: f887c3bc93f945329fa4bba4bcc81a8f95a9d5153fae64aa5b0936935502909d
5
5
  SHA512:
6
- metadata.gz: 3e84c4c4dedd7a160ebcbd6cff53e0dfcf601a31eb19469d1a86eabb3f33507f3b1871c8f4489b743651118ed47e52925f12dba4fc67e462bd8be04d71dfa3bb
7
- data.tar.gz: 8b9754f5566c5ef213c4803cfba2a4263553a5112e23fd12e42a963fd493f8f20c1409b12303628519ba7ef5bcf07aa09fabc6dd4ffc2a2114a6bad8f94cbc72
6
+ metadata.gz: 32b9c06212b90bb49232931a06e9ae445b76c2c8780c0774923945458dd425233c37167825c5d1f39118cdebb8352f6a014a8e547947367fa712e46fe07c96d5
7
+ data.tar.gz: 2047da1a400328dd13465f9c94b033a9841675b6e7274fc9d94a1677866d0feb9611858a02296a20e6e7b8ea589c0b903ae35e572c6c6ad6c8a3040300692703
checksums.yaml.gz.sig CHANGED
Binary file
@@ -25,7 +25,7 @@ jobs:
25
25
  - name: Set up Ruby
26
26
  uses: ruby/setup-ruby@v1
27
27
  with:
28
- ruby-version: 3.1
28
+ ruby-version: 3.2
29
29
  bundler-cache: true
30
30
 
31
31
  - name: Install Diffend plugin
@@ -34,6 +34,22 @@ jobs:
34
34
  - name: Bundle Secure
35
35
  run: bundle secure
36
36
 
37
+ karafka-checksum:
38
+ runs-on: ubuntu-latest
39
+ strategy:
40
+ fail-fast: false
41
+ steps:
42
+ - uses: actions/checkout@v3
43
+ with:
44
+ fetch-depth: 0
45
+ - name: Run Karafka license checksum verification
46
+ env:
47
+ KARAFKA_PRO_USERNAME: ${{ secrets.KARAFKA_PRO_USERNAME }}
48
+ KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
49
+ KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
50
+ KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
51
+ run: bin/verify_license_integrity
52
+
37
53
  coditsu:
38
54
  runs-on: ubuntu-latest
39
55
  strategy:
@@ -53,6 +69,7 @@ jobs:
53
69
  fail-fast: false
54
70
  matrix:
55
71
  ruby:
72
+ - '3.2'
56
73
  # We run it against the oldest and the newest of a given major to make sure, that there
57
74
  # are no syntax-sugars that we would use that were introduced down the road
58
75
  - '3.1'
@@ -62,7 +79,7 @@ jobs:
62
79
  - '2.7'
63
80
  - '2.7.0'
64
81
  include:
65
- - ruby: '3.1'
82
+ - ruby: '3.2'
66
83
  coverage: 'true'
67
84
  steps:
68
85
  - uses: actions/checkout@v3
@@ -92,11 +109,12 @@ jobs:
92
109
  fail-fast: false
93
110
  matrix:
94
111
  ruby:
112
+ - '3.2'
95
113
  - '3.1'
96
114
  - '3.0'
97
115
  - '2.7'
98
116
  include:
99
- - ruby: '3.1'
117
+ - ruby: '3.2'
100
118
  coverage: 'true'
101
119
  steps:
102
120
  - uses: actions/checkout@v3
@@ -132,5 +150,6 @@ jobs:
132
150
  KARAFKA_PRO_USERNAME: ${{ secrets.KARAFKA_PRO_USERNAME }}
133
151
  KARAFKA_PRO_PASSWORD: ${{ secrets.KARAFKA_PRO_PASSWORD }}
134
152
  KARAFKA_PRO_VERSION: ${{ secrets.KARAFKA_PRO_VERSION }}
153
+ KARAFKA_PRO_LICENSE_CHECKSUM: ${{ secrets.KARAFKA_PRO_LICENSE_CHECKSUM }}
135
154
  GITHUB_COVERAGE: ${{matrix.coverage}}
136
155
  run: bin/integrations
data/.ruby-version CHANGED
@@ -1 +1 @@
1
- 3.1.3
1
+ 3.2.0
data/CHANGELOG.md CHANGED
@@ -1,5 +1,25 @@
1
1
  # Karafka framework changelog
2
2
 
3
+ ## 2.0.26 (2023-01-10)
4
+ - **[Feature]** Allow for disabling given topics by setting `active` to false. It will exclude them from consumption but will allow keeping their definitions for use with admin APIs, etc.
5
+ - [Improvement] Early terminate on `read_topic` when reaching the last offset available at the request time.
6
+ - [Improvement] Introduce a `quiet` state that indicates that Karafka is not only moving to quiet mode but actually that it reached it and no work will happen anymore in any of the consumer groups.
7
+ - [Improvement] Use Karafka defined routes topics when possible for `read_topic` admin API.
8
+ - [Improvement] Introduce `client.pause` and `client.resume` instrumentation hooks for tracking client topic partition pausing and resuming. This is alongside of `consumer.consuming.pause` that can be used to track both manual and automatic pausing with more granular consumer related details. The `client.*` should be used for low level tracking.
9
+ - [Improvement] Replace `LoggerListener` pause notification with one based on `client.pause` instead of `consumer.consuming.pause`.
10
+ - [Improvement] Expand `LoggerListener` with `client.resume` notification.
11
+ - [Improvement] Replace random anonymous subscription group ids with stable ones.
12
+ - [Improvement] Add `consumer.consume`, `consumer.revoke` and `consumer.shutting_down` notification events and move the revocation logic calling to strategies.
13
+ - [Change] Rename job queue statistics `processing` key to `busy`. No changes needed because naming in the DataDog listener stays the same.
14
+ - [Fix] Fix proctitle listener state changes reporting on new states.
15
+ - [Fix] Make sure all file descriptors are closed in the integration specs.
16
+ - [Fix] Fix a case where empty subscription groups could leak into the execution flow.
17
+ - [Fix] Fix `LoggerListener` reporting so it does not end with `.`.
18
+ - [Fix] Run previously defined (if any) signal traps created prior to Karafka signals traps.
19
+
20
+ ## 2.0.25 (2023-01-10)
21
+ - Release yanked due to accidental release with local changes.
22
+
3
23
  ## 2.0.24 (2022-12-19)
4
24
  - **[Feature]** Provide out of the box encryption support for Pro.
5
25
  - [Improvement] Add instrumentation upon `#pause`.
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
1
1
  PATH
2
2
  remote: .
3
3
  specs:
4
- karafka (2.0.24)
5
- karafka-core (>= 2.0.7, < 3.0.0)
4
+ karafka (2.0.26)
5
+ karafka-core (>= 2.0.8, < 3.0.0)
6
6
  thor (>= 0.20)
7
7
  waterdrop (>= 2.4.7, < 3.0.0)
8
8
  zeitwerk (~> 2.3)
@@ -29,11 +29,11 @@ GEM
29
29
  activesupport (>= 5.0)
30
30
  i18n (1.12.0)
31
31
  concurrent-ruby (~> 1.0)
32
- karafka-core (2.0.7)
32
+ karafka-core (2.0.8)
33
33
  concurrent-ruby (>= 1.1)
34
34
  rdkafka (>= 0.12)
35
- mini_portile2 (2.8.0)
36
- minitest (5.16.3)
35
+ mini_portile2 (2.8.1)
36
+ minitest (5.17.0)
37
37
  rake (13.0.6)
38
38
  rdkafka (0.12.0)
39
39
  ffi (~> 1.15)
@@ -45,14 +45,14 @@ GEM
45
45
  rspec-mocks (~> 3.12.0)
46
46
  rspec-core (3.12.0)
47
47
  rspec-support (~> 3.12.0)
48
- rspec-expectations (3.12.0)
48
+ rspec-expectations (3.12.1)
49
49
  diff-lcs (>= 1.2.0, < 2.0)
50
50
  rspec-support (~> 3.12.0)
51
51
  rspec-mocks (3.12.1)
52
52
  diff-lcs (>= 1.2.0, < 2.0)
53
53
  rspec-support (~> 3.12.0)
54
54
  rspec-support (3.12.0)
55
- simplecov (0.21.2)
55
+ simplecov (0.22.0)
56
56
  docile (~> 1.1)
57
57
  simplecov-html (~> 0.11)
58
58
  simplecov_json_formatter (~> 0.1)
@@ -67,6 +67,7 @@ GEM
67
67
  zeitwerk (2.6.6)
68
68
 
69
69
  PLATFORMS
70
+ arm64-darwin-21
70
71
  x86_64-darwin-21
71
72
  x86_64-linux
72
73
 
@@ -79,4 +80,4 @@ DEPENDENCIES
79
80
  simplecov
80
81
 
81
82
  BUNDLED WITH
82
- 2.3.26
83
+ 2.4.2
data/README.md CHANGED
@@ -4,7 +4,7 @@
4
4
  [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
5
5
  [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
6
6
 
7
- **Note**: Upgrade notes for migration from Karafka `1.4` to Karafka `2.0` can be found [here](https://karafka.io/docs/Upgrades-2.0/).
7
+ **Note**: Upgrade instructions for migration from Karafka `1.4` to Karafka `2.0` can be found [here](https://karafka.io/docs/Upgrades-2.0/).
8
8
 
9
9
  ## About Karafka
10
10
 
@@ -40,6 +40,8 @@ Karafka **uses** threads to handle many messages simultaneously in the same proc
40
40
 
41
41
  ## Getting started
42
42
 
43
+ ![karafka web ui](https://raw.githubusercontent.com/karafka/misc/master/printscreens/web-ui.png)
44
+
43
45
  If you're entirely new to the subject, you can start with our "Kafka on Rails" articles series, which will get you up and running with the terminology and basic ideas behind using Kafka:
44
46
 
45
47
  - [Kafka on Rails: Using Kafka with Ruby on Rails – Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
data/bin/integrations CHANGED
@@ -152,6 +152,13 @@ class Scenario
152
152
  end
153
153
  end
154
154
 
155
+ # Close all the files that are open, so they do not pile up
156
+ def close
157
+ @stdin.close
158
+ @stdout.close
159
+ @stderr.close
160
+ end
161
+
155
162
  private
156
163
 
157
164
  # Sets up a proper environment for a given spec to run and returns the run command
@@ -248,6 +255,7 @@ while finished_scenarios.size < scenarios.size
248
255
  active_scenarios.select(&:finished?).each do |exited|
249
256
  scenario = active_scenarios.delete(exited)
250
257
  scenario.report
258
+ scenario.close
251
259
  finished_scenarios << scenario
252
260
  end
253
261
 
@@ -0,0 +1,35 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # This script verifies integrity of the Pro license
4
+ # Run it before bundle install to ensure that what you are fetching is what you expect
5
+ # Run it after bundle install to ensure that the local artifact was not compromised
6
+
7
+ #!/usr/bin/env bash
8
+
9
+ set -e
10
+
11
+ if [ "$MODE" != "after" ]; then
12
+ # Check the remote license prior to bundle installing
13
+ curl \
14
+ --fail \
15
+ -u $KARAFKA_PRO_USERNAME:$KARAFKA_PRO_PASSWORD \
16
+ https://gems.karafka.io/gems/karafka-license-$KARAFKA_PRO_VERSION.gem \
17
+ -o ./karafka-license.gem
18
+ else
19
+ # Check the local cached one after bundle install
20
+ cache_path=`ruby -e 'puts "#{Gem.dir}/cache/"'`
21
+ cp "$cache_path/karafka-license-$KARAFKA_PRO_VERSION.gem" ./karafka-license.gem
22
+ fi
23
+
24
+ detected=`sha256sum ./karafka-license.gem | awk '{ print $1 }'`
25
+
26
+ rm ./karafka-license.gem
27
+
28
+ echo -n "Karafka Pro license artifact checksum verification result: "
29
+
30
+ if [ "$detected" = "$KARAFKA_PRO_LICENSE_CHECKSUM" ]; then
31
+ echo "Success"
32
+ else
33
+ echo -e "\033[0;31mFailure!\033[0m"
34
+ exit 1
35
+ fi
@@ -44,6 +44,7 @@ en:
44
44
  dead_letter_queue.max_retries_format: needs to be equal or bigger than 0
45
45
  dead_letter_queue.topic_format: 'needs to be a string with a Kafka accepted format'
46
46
  dead_letter_queue.active_format: needs to be either true or false
47
+ active_format: needs to be either true or false
47
48
 
48
49
  consumer_group:
49
50
  missing: needs to be present
data/karafka.gemspec CHANGED
@@ -12,7 +12,7 @@ Gem::Specification.new do |spec|
12
12
  spec.authors = ['Maciej Mensfeld']
13
13
  spec.email = %w[contact@karafka.io]
14
14
  spec.homepage = 'https://karafka.io'
15
- spec.licenses = ['LGPL-3.0', 'Commercial']
15
+ spec.licenses = %w[LGPL-3.0 Commercial]
16
16
  spec.summary = 'Karafka is Ruby and Rails efficient Kafka processing framework.'
17
17
  spec.description = <<-DESC
18
18
  Karafka is Ruby and Rails efficient Kafka processing framework.
@@ -21,7 +21,7 @@ Gem::Specification.new do |spec|
21
21
  without having to focus on things that are not your business domain.
22
22
  DESC
23
23
 
24
- spec.add_dependency 'karafka-core', '>= 2.0.7', '< 3.0.0'
24
+ spec.add_dependency 'karafka-core', '>= 2.0.8', '< 3.0.0'
25
25
  spec.add_dependency 'thor', '>= 0.20'
26
26
  spec.add_dependency 'waterdrop', '>= 2.4.7', '< 3.0.0'
27
27
  spec.add_dependency 'zeitwerk', '~> 2.3'
data/lib/karafka/admin.rb CHANGED
@@ -30,29 +30,30 @@ module Karafka
30
30
  # @param name [String, Symbol] topic name
31
31
  # @param partition [Integer] partition
32
32
  # @param count [Integer] how many messages we want to get at most
33
- # @param offset [Integer] offset from which we should start. If -1 is provided (default) we
34
- # will start from the latest offset
33
+ # @param start_offset [Integer] offset from which we should start. If -1 is provided
34
+ # (default) we will start from the latest offset
35
35
  #
36
36
  # @return [Array<Karafka::Messages::Message>] array with messages
37
- def read_topic(name, partition, count, offset = -1)
37
+ def read_topic(name, partition, count, start_offset = -1)
38
38
  messages = []
39
39
  tpl = Rdkafka::Consumer::TopicPartitionList.new
40
40
 
41
41
  with_consumer do |consumer|
42
- if offset.negative?
43
- offsets = consumer.query_watermark_offsets(name, partition)
44
- offset = offsets.last - count
45
- end
42
+ offsets = consumer.query_watermark_offsets(name, partition)
43
+ end_offset = offsets.last
46
44
 
47
- offset = offset.negative? ? 0 : offset
45
+ start_offset = [0, offsets.last - count].max if start_offset.negative?
48
46
 
49
- tpl.add_topic_and_partitions_with_offsets(name, partition => offset)
47
+ tpl.add_topic_and_partitions_with_offsets(name, partition => start_offset)
50
48
  consumer.assign(tpl)
51
49
 
52
50
  # We should poll as long as we don't have all the messages that we need or as long as
53
51
  # we do not read all the messages from the topic
54
52
  loop do
53
+ # If we've got as many messages as we've wanted stop
55
54
  break if messages.size >= count
55
+ # If we've reached end of the topic messages, don't process more
56
+ break if !messages.empty? && end_offset <= messages.last.offset
56
57
 
57
58
  message = consumer.poll(200)
58
59
  messages << message if message
@@ -67,7 +68,10 @@ module Karafka
67
68
  messages.map do |message|
68
69
  Messages::Builders::Message.call(
69
70
  message,
70
- Topic.new(name, Karafka::App.config.deserializer),
71
+ # Use topic from routes if we can match it or create a dummy one
72
+ # Dummy one is used in case we cannot match the topic with routes. This can happen
73
+ # when admin API is used to read topics that are not part of the routing
74
+ Routing::Router.find_by(name: name) || Topic.new(name, App.config.deserializer),
71
75
  Time.now
72
76
  )
73
77
  end
data/lib/karafka/app.rb CHANGED
@@ -29,6 +29,7 @@ module Karafka
29
29
  .delete_if { |_, sgs| sgs.empty? }
30
30
  .each { |_, sgs| sgs.each { |sg| sg.topics.delete_if { |top| !top.active? } } }
31
31
  .each { |_, sgs| sgs.delete_if { |sg| sg.topics.empty? } }
32
+ .reject { |cg, _| cg.subscription_groups.empty? }
32
33
  .to_h
33
34
  end
34
35
 
@@ -99,10 +99,6 @@ module Karafka
99
99
  # @private
100
100
  def on_revoked
101
101
  handle_revoked
102
-
103
- Karafka.monitor.instrument('consumer.revoked', caller: self) do
104
- revoked
105
- end
106
102
  rescue StandardError => e
107
103
  Karafka.monitor.instrument(
108
104
  'error.occurred',
@@ -116,9 +112,7 @@ module Karafka
116
112
  #
117
113
  # @private
118
114
  def on_shutdown
119
- Karafka.monitor.instrument('consumer.shutdown', caller: self) do
120
- shutdown
121
- end
115
+ handle_shutdown
122
116
  rescue StandardError => e
123
117
  Karafka.monitor.instrument(
124
118
  'error.occurred',
@@ -14,6 +14,9 @@ module Karafka
14
14
  # @note Consumer name may change in case we regenerate it
15
15
  attr_reader :name
16
16
 
17
+ # @return [String] id of the client
18
+ attr_reader :id
19
+
17
20
  # How many times should we retry polling in case of a failure
18
21
  MAX_POLL_RETRIES = 20
19
22
 
@@ -29,6 +32,7 @@ module Karafka
29
32
  # with all the configuration details needed for us to create a client
30
33
  # @return [Karafka::Connection::Rdk::Consumer]
31
34
  def initialize(subscription_group)
35
+ @id = SecureRandom.hex(6)
32
36
  # Name is set when we build consumer
33
37
  @name = ''
34
38
  @mutex = Mutex.new
@@ -165,6 +169,15 @@ module Karafka
165
169
 
166
170
  return unless tpl
167
171
 
172
+ Karafka.monitor.instrument(
173
+ 'client.pause',
174
+ caller: self,
175
+ subscription_group: @subscription_group,
176
+ topic: topic,
177
+ partition: partition,
178
+ offset: offset
179
+ )
180
+
168
181
  @paused_tpls[topic][partition] = tpl
169
182
 
170
183
  @kafka.pause(tpl)
@@ -195,6 +208,14 @@ module Karafka
195
208
  # happen in the first place
196
209
  return unless @paused_tpls[topic].delete(partition)
197
210
 
211
+ Karafka.monitor.instrument(
212
+ 'client.resume',
213
+ caller: self,
214
+ subscription_group: @subscription_group,
215
+ topic: topic,
216
+ partition: partition
217
+ )
218
+
198
219
  @kafka.resume(tpl)
199
220
  ensure
200
221
  @mutex.unlock
@@ -21,10 +21,16 @@ module Karafka
21
21
  @finished = Set.new
22
22
  end
23
23
 
24
+ # @return [Boolean] true if all the subscription groups from a given consumer group are
25
+ # finished
26
+ def finished?
27
+ @finished.size == @group_size
28
+ end
29
+
24
30
  # @return [Boolean] can we start shutdown on a given listener
25
31
  # @note If true, will also obtain a lock so no-one else will be closing the same time we do
26
32
  def shutdown?
27
- @finished.size == @group_size && @shutdown_lock.try_lock
33
+ finished? && @shutdown_lock.try_lock
28
34
  end
29
35
 
30
36
  # Unlocks the shutdown lock
@@ -85,7 +85,7 @@ module Karafka
85
85
  # propagate this far.
86
86
  def fetch_loop
87
87
  # Run the main loop as long as we are not stopping or moving into quiet mode
88
- until Karafka::App.stopping? || Karafka::App.quieting?
88
+ until Karafka::App.stopping? || Karafka::App.quieting? || Karafka::App.quiet?
89
89
  Karafka.monitor.instrument(
90
90
  'connection.listener.fetch_loop',
91
91
  caller: self,
@@ -156,8 +156,9 @@ module Karafka
156
156
  # within this consumer group
157
157
  @consumer_group_coordinator.finish_work(id)
158
158
 
159
- # Wait if we're in the quiet mode
160
- wait_pinging(wait_until: -> { !Karafka::App.quieting? })
159
+ # Wait if we're in the process of finishing started work or finished all the work and
160
+ # just sitting and being quiet
161
+ wait_pinging(wait_until: -> { !(Karafka::App.quieting? || Karafka::App.quiet?) })
161
162
 
162
163
  # We need to wait until all the work in the whole consumer group (local to the process)
163
164
  # is done. Otherwise we may end up with locks and `Timed out LeaveGroupRequest in flight`
@@ -6,14 +6,20 @@ module Karafka
6
6
  class ListenersBatch
7
7
  include Enumerable
8
8
 
9
+ attr_reader :coordinators
10
+
9
11
  # @param jobs_queue [JobsQueue]
10
12
  # @return [ListenersBatch]
11
13
  def initialize(jobs_queue)
14
+ @coordinators = []
15
+
12
16
  @batch = App.subscription_groups.flat_map do |_consumer_group, subscription_groups|
13
17
  consumer_group_coordinator = Connection::ConsumerGroupCoordinator.new(
14
18
  subscription_groups.size
15
19
  )
16
20
 
21
+ @coordinators << consumer_group_coordinator
22
+
17
23
  subscription_groups.map do |subscription_group|
18
24
  Connection::Listener.new(
19
25
  consumer_group_coordinator,
@@ -80,6 +80,7 @@ module Karafka
80
80
  # Makes sure we have anything to subscribe to when we start the server
81
81
  virtual do |_, errors|
82
82
  next unless errors.empty?
83
+
83
84
  next unless Karafka::App.subscription_groups.empty?
84
85
 
85
86
  [[%i[topics], :topics_missing]]
@@ -12,7 +12,6 @@ module Karafka
12
12
  ).fetch('en').fetch('validations').fetch('topic')
13
13
  end
14
14
 
15
- required(:consumer) { |val| !val.nil? }
16
15
  required(:deserializer) { |val| !val.nil? }
17
16
  required(:id) { |val| val.is_a?(String) && Contracts::TOPIC_REGEXP.match?(val) }
18
17
  required(:kafka) { |val| val.is_a?(Hash) && !val.empty? }
@@ -20,8 +19,20 @@ module Karafka
20
19
  required(:initial_offset) { |val| %w[earliest latest].include?(val) }
21
20
  required(:max_wait_time) { |val| val.is_a?(Integer) && val >= 10 }
22
21
  required(:name) { |val| val.is_a?(String) && Contracts::TOPIC_REGEXP.match?(val) }
22
+ required(:active) { |val| [true, false].include?(val) }
23
23
  required(:subscription_group) { |val| val.is_a?(String) && !val.empty? }
24
24
 
25
+ # Consumer needs to be present only if topic is active
26
+ # We allow not to define consumer for non-active because they may be only used via admin
27
+ # api or other ways and not consumed with consumer
28
+ virtual do |data, errors|
29
+ next unless errors.empty?
30
+ next if data.fetch(:consumer)
31
+ next unless data.fetch(:active)
32
+
33
+ [[%w[consumer], :missing]]
34
+ end
35
+
25
36
  virtual do |data, errors|
26
37
  next unless errors.empty?
27
38
 
@@ -63,19 +63,30 @@ module Karafka
63
63
  info "[#{job.id}] #{job_type} job for #{consumer} on #{topic} finished in #{time}ms"
64
64
  end
65
65
 
66
- # Prints info about a pause occurrence. Irrelevant if user or system initiated.
66
+ # Prints info about a consumer pause occurrence. Irrelevant if user or system initiated.
67
67
  #
68
68
  # @param event [Karafka::Core::Monitoring::Event] event details including payload
69
- def on_consumer_consuming_pause(event)
69
+ def on_client_pause(event)
70
70
  topic = event[:topic]
71
71
  partition = event[:partition]
72
72
  offset = event[:offset]
73
- consumer = event[:caller]
74
- timeout = event[:timeout]
73
+ client = event[:caller]
74
+
75
+ info <<~MSG.tr("\n", ' ').strip!
76
+ [#{client.id}] Pausing partition #{partition} of topic #{topic} on offset #{offset}
77
+ MSG
78
+ end
79
+
80
+ # Prints information about resuming of processing of a given topic partition
81
+ #
82
+ # @param event [Karafka::Core::Monitoring::Event] event details including payload
83
+ def on_client_resume(event)
84
+ topic = event[:topic]
85
+ partition = event[:partition]
86
+ client = event[:caller]
75
87
 
76
88
  info <<~MSG.tr("\n", ' ').strip!
77
- [#{consumer.id}] Pausing partition #{partition} of topic #{topic}
78
- on offset #{offset} for #{timeout} ms.
89
+ [#{client.id}] Resuming partition #{partition} of topic #{topic}
79
90
  MSG
80
91
  end
81
92
 
@@ -128,12 +139,17 @@ module Karafka
128
139
 
129
140
  return if Karafka.pro?
130
141
 
131
- info 'See LICENSE and the LGPL-3.0 for licensing details.'
142
+ info 'See LICENSE and the LGPL-3.0 for licensing details'
132
143
  end
133
144
 
134
145
  # @param _event [Karafka::Core::Monitoring::Event] event details including payload
135
146
  def on_app_quieting(_event)
136
- info 'Switching to quiet mode. New messages will not be processed.'
147
+ info 'Switching to quiet mode. New messages will not be processed'
148
+ end
149
+
150
+ # @param _event [Karafka::Core::Monitoring::Event] event details including payload
151
+ def on_app_quiet(_event)
152
+ info 'Reached quiet mode. No messages will be processed anymore'
137
153
  end
138
154
 
139
155
  # Logs info that we're going to stop the Karafka server.
@@ -20,30 +20,37 @@ module Karafka
20
20
  app.initialized
21
21
  app.running
22
22
  app.quieting
23
+ app.quiet
23
24
  app.stopping
24
25
  app.stopped
25
26
  app.terminated
26
27
 
28
+ client.pause
29
+ client.resume
30
+
31
+ connection.listener.before_fetch_loop
32
+ connection.listener.fetch_loop
33
+ connection.listener.fetch_loop.received
34
+
35
+ consumer.consume
27
36
  consumer.consumed
28
37
  consumer.consuming.pause
29
38
  consumer.consuming.retry
39
+ consumer.revoke
30
40
  consumer.revoked
41
+ consumer.shutting_down
31
42
  consumer.shutdown
32
43
 
33
- process.notice_signal
44
+ dead_letter_queue.dispatched
34
45
 
35
- connection.listener.before_fetch_loop
36
- connection.listener.fetch_loop
37
- connection.listener.fetch_loop.received
46
+ process.notice_signal
38
47
 
39
- dead_letter_queue.dispatched
48
+ statistics.emitted
40
49
 
41
50
  worker.process
42
51
  worker.processed
43
52
  worker.completed
44
53
 
45
- statistics.emitted
46
-
47
54
  error.occurred
48
55
  ].freeze
49
56
 
@@ -4,22 +4,13 @@ module Karafka
4
4
  module Instrumentation
5
5
  # Listener that sets a proc title with a nice descriptive value
6
6
  class ProctitleListener
7
- # Updates proc title to an initializing one
8
- # @param _event [Karafka::Core::Monitoring::Event] event details including payload
9
- def on_app_initializing(_event)
10
- setproctitle('initializing')
11
- end
12
-
13
- # Updates proc title to a running one
14
- # @param _event [Karafka::Core::Monitoring::Event] event details including payload
15
- def on_app_running(_event)
16
- setproctitle('running')
17
- end
18
-
19
- # Updates proc title to a stopping one
20
- # @param _event [Karafka::Core::Monitoring::Event] event details including payload
21
- def on_app_stopping(_event)
22
- setproctitle('stopping')
7
+ Status::STATES.each_key do |state|
8
+ class_eval <<~RUBY, __FILE__, __LINE__ + 1
9
+ # Updates proc title to an appropriate state
10
+ def on_app_#{state}(_event)
11
+ setproctitle('#{state}')
12
+ end
13
+ RUBY
23
14
  end
24
15
 
25
16
  private
@@ -148,7 +148,7 @@ module Karafka
148
148
  jq_stats = event[:jobs_queue].statistics
149
149
 
150
150
  gauge('worker.total_threads', Karafka::App.config.concurrency, tags: default_tags)
151
- histogram('worker.processing', jq_stats[:processing], tags: default_tags)
151
+ histogram('worker.processing', jq_stats[:busy], tags: default_tags)
152
152
  histogram('worker.enqueued_jobs', jq_stats[:enqueued], tags: default_tags)
153
153
  end
154
154
 
@@ -158,7 +158,7 @@ module Karafka
158
158
  def on_worker_processed(event)
159
159
  jq_stats = event[:jobs_queue].statistics
160
160
 
161
- histogram('worker.processing', jq_stats[:processing], tags: default_tags)
161
+ histogram('worker.processing', jq_stats[:busy], tags: default_tags)
162
162
  end
163
163
 
164
164
  private
@@ -61,6 +61,11 @@ module Karafka
61
61
  coordinator.on_revoked do
62
62
  coordinator.revoke
63
63
  end
64
+
65
+ Karafka.monitor.instrument('consumer.revoke', caller: self)
66
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
67
+ revoked
68
+ end
64
69
  end
65
70
  end
66
71
  end
@@ -46,6 +46,7 @@ module Karafka
46
46
  # This can happen primarily when an LRJ job gets to the internal worker queue and
47
47
  # this partition is revoked prior processing.
48
48
  unless revoked?
49
+ Karafka.monitor.instrument('consumer.consume', caller: self)
49
50
  Karafka.monitor.instrument('consumer.consumed', caller: self) do
50
51
  consume
51
52
  end
@@ -91,6 +92,11 @@ module Karafka
91
92
 
92
93
  coordinator.revoke
93
94
  end
95
+
96
+ Karafka.monitor.instrument('consumer.revoke', caller: self)
97
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
98
+ revoked
99
+ end
94
100
  end
95
101
  end
96
102
  end
@@ -70,6 +70,11 @@ module Karafka
70
70
  # a failure. Double non-blocking resume could cause problems in coordination.
71
71
  coordinator.revoke
72
72
  end
73
+
74
+ Karafka.monitor.instrument('consumer.revoke', caller: self)
75
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
76
+ revoked
77
+ end
73
78
  end
74
79
  end
75
80
  end
@@ -60,6 +60,11 @@ module Karafka
60
60
  coordinator.on_revoked do
61
61
  coordinator.revoke
62
62
  end
63
+
64
+ Karafka.monitor.instrument('consumer.revoke', caller: self)
65
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
66
+ revoked
67
+ end
63
68
  end
64
69
  end
65
70
  end
@@ -53,12 +53,14 @@ module Karafka
53
53
  # trap context s some things may not work there as expected, that is why we spawn a separate
54
54
  # thread to handle the signals process
55
55
  def trap_signal(signal)
56
- trap(signal) do
56
+ previous_handler = ::Signal.trap(signal) do
57
57
  Thread.new do
58
58
  notice_signal(signal)
59
59
 
60
60
  (@callbacks[signal] || []).each(&:call)
61
61
  end
62
+
63
+ previous_handler.call if previous_handler.respond_to?(:call)
62
64
  end
63
65
  end
64
66
 
@@ -125,13 +125,13 @@ module Karafka
125
125
  @semaphores[group_id].pop while wait?(group_id)
126
126
  end
127
127
 
128
- # - `processing` - number of jobs that are currently being processed (active work)
128
+ # - `busy` - number of jobs that are currently being processed (active work)
129
129
  # - `enqueued` - number of jobs in the queue that are waiting to be picked up by a worker
130
130
  #
131
131
  # @return [Hash] hash with basic usage statistics of this queue.
132
132
  def statistics
133
133
  {
134
- processing: size - @queue.size,
134
+ busy: size - @queue.size,
135
135
  enqueued: @queue.size
136
136
  }.freeze
137
137
  end
@@ -36,6 +36,11 @@ module Karafka
36
36
  def handle_revoked
37
37
  raise NotImplementedError, 'Implement in a subclass'
38
38
  end
39
+
40
+ # Shutdown handling
41
+ def handle_shutdown
42
+ raise NotImplementedError, 'Implement in a subclass'
43
+ end
39
44
  end
40
45
  end
41
46
  end
@@ -25,6 +25,7 @@ module Karafka
25
25
 
26
26
  # Run the user consumption code
27
27
  def handle_consume
28
+ Karafka.monitor.instrument('consumer.consume', caller: self)
28
29
  Karafka.monitor.instrument('consumer.consumed', caller: self) do
29
30
  consume
30
31
  end
@@ -70,6 +71,19 @@ module Karafka
70
71
  resume
71
72
 
72
73
  coordinator.revoke
74
+
75
+ Karafka.monitor.instrument('consumer.revoke', caller: self)
76
+ Karafka.monitor.instrument('consumer.revoked', caller: self) do
77
+ revoked
78
+ end
79
+ end
80
+
81
+ # Runs the shutdown code
82
+ def handle_shutdown
83
+ Karafka.monitor.instrument('consumer.shutting_down', caller: self)
84
+ Karafka.monitor.instrument('consumer.shutdown', caller: self) do
85
+ shutdown
86
+ end
73
87
  end
74
88
  end
75
89
  end
@@ -76,6 +76,8 @@ module Karafka
76
76
  Karafka.monitor.instrument(
77
77
  'error.occurred',
78
78
  caller: self,
79
+ job: job,
80
+ jobs_queue: @jobs_queue,
79
81
  error: e,
80
82
  type: 'worker.process.error'
81
83
  )
@@ -80,7 +80,7 @@ module Karafka
80
80
  # @param subscription_group_name [String, Symbol] subscription group id. When not provided,
81
81
  # a random uuid will be used
82
82
  # @param block [Proc] further topics definitions
83
- def subscription_group(subscription_group_name = SecureRandom.hex(6), &block)
83
+ def subscription_group(subscription_group_name = SubscriptionGroup.id, &block)
84
84
  consumer_group('app') do
85
85
  target.public_send(:subscription_group=, subscription_group_name.to_s, &block)
86
86
  end
@@ -26,7 +26,7 @@ module Karafka
26
26
  @topics = Topics.new([])
27
27
  # Initialize the subscription group so there's always a value for it, since even if not
28
28
  # defined directly, a subscription group will be created
29
- @current_subscription_group_id = SecureRandom.hex(6)
29
+ @current_subscription_group_id = SubscriptionGroup.id
30
30
  end
31
31
 
32
32
  # @return [Boolean] true if this consumer group should be active in our current process
@@ -55,7 +55,7 @@ module Karafka
55
55
  # topic definition
56
56
  # @param name [String, Symbol] name of the current subscription group
57
57
  # @param block [Proc] block that may include topics definitions
58
- def subscription_group=(name = SecureRandom.hex(6), &block)
58
+ def subscription_group=(name = SubscriptionGroup.id, &block)
59
59
  # We cast it here, so the routing supports symbol based but that's anyhow later on
60
60
  # validated as a string
61
61
  @current_subscription_group_id = name
@@ -64,7 +64,7 @@ module Karafka
64
64
 
65
65
  # We need to reset the current subscription group after it is used, so it won't leak
66
66
  # outside to other topics that would be defined without a defined subscription group
67
- @current_subscription_group_id = SecureRandom.hex(6)
67
+ @current_subscription_group_id = SubscriptionGroup.id
68
68
  end
69
69
 
70
70
  # @return [Array<Routing::SubscriptionGroup>] all the subscription groups build based on
@@ -12,16 +12,6 @@ module Karafka
12
12
  # raw_consumer_group_name
13
13
  # end
14
14
  # end
15
- #
16
- # @example Mapper for replacing "_" with "." in topic names
17
- # class MyMapper
18
- # def call(raw_consumer_group_name)
19
- # [
20
- # Karafka::Helpers::Inflector.map(Karafka::App.config.client_id.to_s),
21
- # raw_consumer_group_name
22
- # ].join('_').gsub('_', '.')
23
- # end
24
- # end
25
15
  class ConsumerMapper
26
16
  # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
27
17
  # @return [String] remapped final consumer group name
@@ -14,16 +14,26 @@ module Karafka
14
14
  # @raise [Karafka::Topic::NonMatchingTopicError] raised if topic name does not match
15
15
  # any route defined by user using routes.draw
16
16
  def find(topic_id)
17
+ find_by(id: topic_id) || raise(Errors::NonMatchingRouteError, topic_id)
18
+ end
19
+
20
+ # Finds first reference of a given topic based on provided lookup attribute
21
+ # @param lookup [Hash<Symbol, String>] hash with attribute - value key pairs
22
+ # @return [Karafka::Routing::Topic, nil] proper route details or nil if not found
23
+ def find_by(lookup)
17
24
  App.consumer_groups.each do |consumer_group|
18
25
  consumer_group.topics.each do |topic|
19
- return topic if topic.id == topic_id
26
+ return topic if lookup.all? do |attribute, value|
27
+ topic.public_send(attribute) == value
28
+ end
20
29
  end
21
30
  end
22
31
 
23
- raise(Errors::NonMatchingRouteError, topic_id)
32
+ nil
24
33
  end
25
34
 
26
35
  module_function :find
36
+ module_function :find_by
27
37
  end
28
38
  end
29
39
  end
@@ -8,7 +8,23 @@ module Karafka
8
8
  # @note One subscription group will always belong to one consumer group, but one consumer
9
9
  # group can have multiple subscription groups.
10
10
  class SubscriptionGroup
11
- attr_reader :id, :name, :topics, :kafka
11
+ attr_reader :id, :name, :topics, :kafka, :consumer_group
12
+
13
+ # Numeric for counting groups
14
+ GROUP_COUNT = Concurrent::AtomicFixnum.new
15
+
16
+ private_constant :GROUP_COUNT
17
+
18
+ class << self
19
+ # Generates new subscription group id that will be used in case of anonymous subscription
20
+ # groups
21
+ # @return [String] hex(6) compatible reproducible id
22
+ def id
23
+ ::Digest::MD5.hexdigest(
24
+ GROUP_COUNT.increment.to_s
25
+ )[0..11]
26
+ end
27
+ end
12
28
 
13
29
  # @param position [Integer] position of this subscription group in all the subscriptions
14
30
  # groups array. We need to have this value for sake of static group memberships, where
@@ -17,6 +33,7 @@ module Karafka
17
33
  # @return [SubscriptionGroup] built subscription group
18
34
  def initialize(position, topics)
19
35
  @name = topics.first.subscription_group
36
+ @consumer_group = topics.first.consumer_group
20
37
  @id = "#{@name}_#{position}"
21
38
  @position = position
22
39
  @topics = topics
@@ -27,6 +27,7 @@ module Karafka
27
27
  @name = name.to_s
28
28
  @consumer_group = consumer_group
29
29
  @attributes = {}
30
+ @active = true
30
31
  # @note We use identifier related to the consumer group that owns a topic, because from
31
32
  # Karafka 0.6 we can handle multiple Kafka instances with the same process and we can
32
33
  # have same topic name across multiple consumer groups
@@ -66,6 +67,12 @@ module Karafka
66
67
  end
67
68
  end
68
69
 
70
+ # Allows to disable topic by invoking this method and setting it to `false`.
71
+ # @param active [Boolean] should this topic be consumed or not
72
+ def active(active)
73
+ @active = active
74
+ end
75
+
69
76
  # @return [Class] consumer class that we should use
70
77
  # @note This is just an alias to the `#consumer` method. We however want to use it internally
71
78
  # instead of referencing the `#consumer`. We use this to indicate that this method returns
@@ -77,6 +84,9 @@ module Karafka
77
84
 
78
85
  # @return [Boolean] should this topic be in use
79
86
  def active?
87
+ # Never active if disabled via routing
88
+ return false unless @active
89
+
80
90
  topics = Karafka::App.config.internal.routing.active.topics
81
91
 
82
92
  # When empty it means no topics were specified, hence all should be used
@@ -93,6 +103,7 @@ module Karafka
93
103
  Hash[map].merge!(
94
104
  id: id,
95
105
  name: name,
106
+ active: active?,
96
107
  consumer: consumer,
97
108
  consumer_group_id: consumer_group.id,
98
109
  subscription_group: subscription_group
@@ -19,6 +19,7 @@ module Karafka
19
19
  # We aggregate threads here for a supervised shutdown process
20
20
  Karafka::Server.workers = workers
21
21
  Karafka::Server.listeners = listeners
22
+ Karafka::Server.jobs_queue = jobs_queue
22
23
 
23
24
  # All the listener threads need to finish
24
25
  listeners.each(&:join)
@@ -20,6 +20,9 @@ module Karafka
20
20
  # Set of workers
21
21
  attr_accessor :workers
22
22
 
23
+ # Jobs queue
24
+ attr_accessor :jobs_queue
25
+
23
26
  # Method which runs app
24
27
  def run
25
28
  self.listeners = []
@@ -45,9 +48,9 @@ module Karafka
45
48
 
46
49
  # We always need to wait for Karafka to stop here since we should wait for the stop running
47
50
  # in a separate thread (or trap context) to indicate everything is closed
48
- # Since `#start` is blocking, we were get here only after the runner is done. This will
51
+ # Since `#start` is blocking, we will get here only after the runner is done. This will
49
52
  # not add any performance degradation because of that.
50
- Thread.pass until Karafka::App.terminated?
53
+ sleep(0.1) until Karafka::App.terminated?
51
54
  # Try its best to shutdown underlying components before re-raising
52
55
  # rubocop:disable Lint/RescueException
53
56
  rescue Exception => e
@@ -106,8 +109,6 @@ module Karafka
106
109
  # This can cause memory leaks and crashes.
107
110
  listeners.each(&:shutdown)
108
111
 
109
- Karafka::App.producer.close
110
-
111
112
  # We also do not forcefully terminate everything when running in the embedded mode,
112
113
  # otherwise we would overwrite the shutdown process of the process that started Karafka
113
114
  return unless process.supervised?
@@ -129,12 +130,20 @@ module Karafka
129
130
  end
130
131
 
131
132
  # Quiets the Karafka server.
132
- # Karafka will stop processing but won't quiet to consumer group, so no rebalance will be
133
+ #
134
+ # Karafka will stop processing but won't quit the consumer group, so no rebalance will be
133
135
  # triggered until final shutdown.
134
136
  def quiet
135
137
  # We don't have to safe-guard it with check states as the state transitions work only
136
138
  # in one direction
137
139
  Karafka::App.quiet!
140
+
141
+ # We need one more thread to monitor the process and move to quieted once everything
142
+ # is quiet and no processing is happening anymore
143
+ Thread.new do
144
+ sleep(0.1) until listeners.coordinators.all?(&:finished?)
145
+ Karafka::App.quieted!
146
+ end
138
147
  end
139
148
 
140
149
  private
@@ -8,9 +8,15 @@ module Karafka
8
8
  initializing: :initialize!,
9
9
  initialized: :initialized!,
10
10
  running: :run!,
11
+ # will no longer pickup any work, but current work will be finished
11
12
  quieting: :quiet!,
13
+ # no work is happening but we keep process with the assignments running
14
+ quiet: :quieted!,
15
+ # shutdown started
12
16
  stopping: :stop!,
17
+ # all things are done and most of the things except critical are closed
13
18
  stopped: :stopped!,
19
+ # immediately after this process exists
14
20
  terminated: :terminate!
15
21
  }.freeze
16
22
 
@@ -8,12 +8,7 @@ module Karafka
8
8
  module TimeTrackers
9
9
  # Base class for all the time-trackers.
10
10
  class Base
11
- private
12
-
13
- # @return [Float] current time in milliseconds
14
- def now
15
- ::Process.clock_gettime(::Process::CLOCK_MONOTONIC) * 1000
16
- end
11
+ include ::Karafka::Core::Helpers::Time
17
12
  end
18
13
  end
19
14
  end
@@ -58,7 +58,7 @@ module Karafka
58
58
  def pause(timeout = backoff_interval)
59
59
  @mutex.synchronize do
60
60
  @current_timeout = timeout
61
- @started_at = now
61
+ @started_at = monotonic_now
62
62
  @ends_at = @started_at + timeout
63
63
  end
64
64
  end
@@ -95,7 +95,7 @@ module Karafka
95
95
  # @return [Boolean] did the pause expire
96
96
  def expired?
97
97
  @mutex.synchronize do
98
- @ends_at ? now >= @ends_at : true
98
+ @ends_at ? monotonic_now >= @ends_at : true
99
99
  end
100
100
  end
101
101
 
@@ -34,12 +34,12 @@ module Karafka
34
34
  # Starts time tracking.
35
35
  def start
36
36
  @attempts += 1
37
- @started_at = now
37
+ @started_at = monotonic_now
38
38
  end
39
39
 
40
40
  # Stops time tracking of a given piece of code and updates the remaining time.
41
41
  def checkpoint
42
- @remaining -= (now - @started_at)
42
+ @remaining -= (monotonic_now - @started_at)
43
43
  end
44
44
 
45
45
  # @return [Boolean] If anything went wrong, can we retry after a backoff period or not
@@ -3,5 +3,5 @@
3
3
  # Main module namespace
4
4
  module Karafka
5
5
  # Current Karafka version
6
- VERSION = '2.0.24'
6
+ VERSION = '2.0.26'
7
7
  end
data/lib/karafka.rb CHANGED
@@ -14,7 +14,9 @@
14
14
  base64
15
15
  date
16
16
  singleton
17
+ digest
17
18
  zeitwerk
19
+ concurrent/atomic/atomic_fixnum
18
20
  ].each(&method(:require))
19
21
 
20
22
  # Karafka framework main namespace
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: karafka
3
3
  version: !ruby/object:Gem::Version
4
- version: 2.0.24
4
+ version: 2.0.26
5
5
  platform: ruby
6
6
  authors:
7
7
  - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
35
35
  Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
36
36
  MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
37
37
  -----END CERTIFICATE-----
38
- date: 2022-12-19 00:00:00.000000000 Z
38
+ date: 2023-01-10 00:00:00.000000000 Z
39
39
  dependencies:
40
40
  - !ruby/object:Gem::Dependency
41
41
  name: karafka-core
@@ -43,7 +43,7 @@ dependencies:
43
43
  requirements:
44
44
  - - ">="
45
45
  - !ruby/object:Gem::Version
46
- version: 2.0.7
46
+ version: 2.0.8
47
47
  - - "<"
48
48
  - !ruby/object:Gem::Version
49
49
  version: 3.0.0
@@ -53,7 +53,7 @@ dependencies:
53
53
  requirements:
54
54
  - - ">="
55
55
  - !ruby/object:Gem::Version
56
- version: 2.0.7
56
+ version: 2.0.8
57
57
  - - "<"
58
58
  - !ruby/object:Gem::Version
59
59
  version: 3.0.0
@@ -145,6 +145,7 @@ files:
145
145
  - bin/scenario
146
146
  - bin/stress_many
147
147
  - bin/stress_one
148
+ - bin/verify_license_integrity
148
149
  - certs/cert_chain.pem
149
150
  - certs/karafka-pro.pem
150
151
  - config/locales/errors.yml
@@ -349,7 +350,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
349
350
  - !ruby/object:Gem::Version
350
351
  version: '0'
351
352
  requirements: []
352
- rubygems_version: 3.3.26
353
+ rubygems_version: 3.4.1
353
354
  signing_key:
354
355
  specification_version: 4
355
356
  summary: Karafka is Ruby and Rails efficient Kafka processing framework.
metadata.gz.sig CHANGED
Binary file