karafka 1.1.0.alpha1 → 1.1.0.alpha2

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz: 3a4a0b649f34461fbeef206fdc2c92bd3e6d6ff2
-  data.tar.gz: 87c937be4ee2bdfdc9b1957e458754e88cb15231
+  metadata.gz: f36768874a7001b58704a74348f346bafe0a397e
+  data.tar.gz: a2f2aa883aefc839982304935e0953e564c51dd4
 SHA512:
-  metadata.gz: bee787850078c780417e90a87ea5945b12ef84b70e49a888696fd37454de8301fa25130a0f2b736ac8ea9df2a72a58837fe37aced3a3d2f7261bf11546cf23d6
-  data.tar.gz: 3c42797b1d29ca0d107b570c42d9f7b28e45c1f6c3664d8acd055379594569eb2fd801c8c9e44bc9120ee001fe2c4ade401577cb82024689549c9ff67b6177cb
+  metadata.gz: c888e558e5599518a9a5bc9f34857f3db39a02cf1501475b06d91810481e00f214bcdc609bdbae7298bc3e3c0ba643d98b5aac2af3bc6aa3a60a57335cc3b4bb
+  data.tar.gz: 874e74e40f9e6797154f34fa054c929e0c208350305b1dd7b2aec160c7ac04395b03b2683ef7e8e663cf7072dbf0ba8c8e77a05b7e4801de47eb5c577c7122a9
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    karafka (1.1.0.alpha1)
+    karafka (1.1.0.alpha2)
       activesupport (>= 5.0)
       dry-configurable (~> 0.7)
       dry-validation (~> 0.11)
@@ -21,12 +21,12 @@ module Karafka
     offset_retention_time heartbeat_interval
   ],
   subscription: %i[start_from_beginning max_bytes_per_partition],
-  consuming: %i[min_bytes max_wait_time automatically_mark_as_processed],
+  consuming: %i[min_bytes max_wait_time],
   pausing: %i[pause_timeout],
   # All the options that are under kafka config namespace, but are not used
   # directly with kafka api, but from the Karafka user perspective, they are
   # still related to kafka. They should not be proxied anywhere
-  ignored: %i[reconnect_timeout]
+  ignored: %i[reconnect_timeout automatically_mark_as_consumed]
 }
 end

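For orientation (not part of the diff itself), here is a minimal, self-contained stand-in for the mapping in the hunk above. Each key names a ruby-kafka settings scope and lists which Karafka options are proxied to it; entries under :ignored, which now include automatically_mark_as_consumed, stay Karafka-internal and are never forwarded. The constant name and the final lookup are illustrative assumptions, not the gem's own code.

# Illustrative stand-in for the mapping above; the constant name is an assumption.
CONFIG_ADAPTER_MAP = {
  consumer: %i[
    session_timeout offset_commit_interval offset_commit_threshold
    offset_retention_time heartbeat_interval
  ],
  subscription: %i[start_from_beginning max_bytes_per_partition],
  consuming: %i[min_bytes max_wait_time],
  pausing: %i[pause_timeout],
  # Karafka-only options that must never be proxied straight to ruby-kafka
  ignored: %i[reconnect_timeout automatically_mark_as_consumed]
}.freeze

p CONFIG_ADAPTER_MAP.fetch(:consuming) # => [:min_bytes, :max_wait_time]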
@@ -51,7 +51,10 @@ module Karafka
   # @return [Hash] hash with all the settings required by
   #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
   def consuming(consumer_group)
-    sanitize(fetch_for(:consuming, consumer_group))
+    settings = {
+      automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
+    }
+    sanitize(fetch_for(:consuming, consumer_group, settings))
   end

   # Builds all the configuration settings for kafka consumer#subscribe method
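A hedged sketch of what the changed consuming adapter above does: the Karafka-level automatically_mark_as_consumed flag, read from the consumer group, is translated into ruby-kafka's automatically_mark_as_processed keyword and merged with the remaining :consuming attributes. The Struct, method name and explicit merge below are simplifications, not the gem's actual fetch_for/sanitize implementation.

# Hedged sketch; ConsumerGroupStub and build_consuming_settings are made-up names.
ConsumerGroupStub = Struct.new(:automatically_mark_as_consumed, :min_bytes, :max_wait_time)

def build_consuming_settings(consumer_group)
  # Karafka option name (right) translated to the ruby-kafka keyword (left)
  settings = {
    automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
  }
  # The remaining :consuming attributes are still copied 1:1 from the consumer group
  settings.merge(
    min_bytes: consumer_group.min_bytes,
    max_wait_time: consumer_group.max_wait_time
  )
end

group = ConsumerGroupStub.new(false, 1, 5)
p build_consuming_settings(group)
# => {:automatically_mark_as_processed=>false, :min_bytes=>1, :max_wait_time=>5}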
@@ -87,9 +87,9 @@ module Karafka
 #   seconds of latency, set `max_wait_time` to 5. You should make sure
 #   max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
 setting :max_wait_time, 1
-# option automatically_mark_as_processed [Boolean] should we automatically mark received
-#   messages as processed after non-error consumption
-setting :automatically_mark_as_processed, true
+# option automatically_mark_as_consumed [Boolean] should we automatically mark received
+#   messages as consumed (processed) after non-error consumption
+setting :automatically_mark_as_consumed, true
 # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
 #   Kafka cluster that went down (in seconds)
 setting :reconnect_timeout, 5
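On the application side, the renamed setting would be configured roughly as below. This is a hedged example: the ignored-list comment earlier indicates the option lives under the kafka config namespace, but the class name, client_id and seed_brokers value here are placeholders rather than anything taken from the gem.

# karafka.rb - hedged application boot sketch, not taken from the gem
require 'karafka'

class ExampleApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = ['127.0.0.1:9092']
    # Renamed in 1.1.0.alpha2; previously automatically_mark_as_processed
    config.kafka.automatically_mark_as_consumed = false
  end
end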
@@ -3,5 +3,5 @@
 # Main module namespace
 module Karafka
   # Current Karafka version
-  VERSION = '1.1.0.alpha1'
+  VERSION = '1.1.0.alpha2'
 end
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 1.1.0.alpha1
+  version: 1.1.0.alpha2
 platform: ruby
 authors:
 - Maciej Mensfeld