karafka 1.1.0.alpha1 → 1.1.0.alpha2
- checksums.yaml +4 -4
- data/Gemfile.lock +1 -1
- data/lib/karafka/attributes_map.rb +2 -2
- data/lib/karafka/connection/config_adapter.rb +4 -1
- data/lib/karafka/setup/config.rb +3 -3
- data/lib/karafka/version.rb +1 -1
- metadata +1 -1
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA1:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: f36768874a7001b58704a74348f346bafe0a397e
+  data.tar.gz: a2f2aa883aefc839982304935e0953e564c51dd4
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: c888e558e5599518a9a5bc9f34857f3db39a02cf1501475b06d91810481e00f214bcdc609bdbae7298bc3e3c0ba643d98b5aac2af3bc6aa3a60a57335cc3b4bb
+  data.tar.gz: 874e74e40f9e6797154f34fa054c929e0c208350305b1dd7b2aec160c7ac04395b03b2683ef7e8e663cf7072dbf0ba8c8e77a05b7e4801de47eb5c577c7122a9
data/Gemfile.lock
CHANGED

data/lib/karafka/attributes_map.rb
CHANGED
@@ -21,12 +21,12 @@ module Karafka
         offset_retention_time heartbeat_interval
       ],
       subscription: %i[start_from_beginning max_bytes_per_partition],
-      consuming: %i[min_bytes max_wait_time
+      consuming: %i[min_bytes max_wait_time],
       pausing: %i[pause_timeout],
       # All the options that are under kafka config namespace, but are not used
       # directly with kafka api, but from the Karafka user perspective, they are
       # still related to kafka. They should not be proxied anywhere
-      ignored: %i[reconnect_timeout]
+      ignored: %i[reconnect_timeout automatically_mark_as_consumed]
     }
   end

data/lib/karafka/connection/config_adapter.rb
CHANGED
@@ -51,7 +51,10 @@ module Karafka
       # @return [Hash] hash with all the settings required by
       # Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
       def consuming(consumer_group)
-        sanitize(fetch_for(:consuming, consumer_group))
+        settings = {
+          automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
+        }
+        sanitize(fetch_for(:consuming, consumer_group, settings))
       end

       # Builds all the configuration settings for kafka consumer#subscribe method
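In practice the hash returned for the :consuming scope now always carries ruby-kafka's automatically_mark_as_processed flag, taken from the consumer group's new automatically_mark_as_consumed setting. A minimal inspection sketch, assuming an already configured Karafka 1.1 app and that the adapter exposes these as module-level methods; the consumer-group lookup is illustrative, only the flag itself comes from this diff:

require 'karafka'

# Assumes Karafka::App has been set up and at least one consumer group is routed.
consumer_group = Karafka::App.consumer_groups.first
settings = Karafka::Connection::ConfigAdapter.consuming(consumer_group)
# settings now includes automatically_mark_as_processed (true by default in alpha2),
# alongside the min_bytes / max_wait_time values mapped for the :consuming scope.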
data/lib/karafka/setup/config.rb
CHANGED
@@ -87,9 +87,9 @@ module Karafka
         # seconds of latency, set `max_wait_time` to 5. You should make sure
         # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
         setting :max_wait_time, 1
-        # option
-        # messages as processed after non-error consumption
-        setting :
+        # option automatically_mark_as_consumed [Boolean] should we automatically mark received
+        # messages as consumed (processed) after non-error consumption
+        setting :automatically_mark_as_consumed, true
         # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
         # Kafka cluster that went down (in seconds)
         setting :reconnect_timeout, 5
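As a usage sketch, the new option can be flipped in an application's setup block. The class name, client_id and broker address below are placeholders, and placing the option under the kafka namespace follows the attributes_map comment above rather than documented API, so treat it as an assumption:

# Hypothetical Karafka 1.1 app; only automatically_mark_as_consumed (default true)
# comes from this release, everything else is a placeholder.
class ExampleApp < Karafka::App
  setup do |config|
    config.client_id = 'example_app'
    config.kafka.seed_brokers = %w[127.0.0.1:9092]
    # Disable auto-marking so messages/batches are only marked as consumed explicitly
    config.kafka.automatically_mark_as_consumed = false
  end
end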
data/lib/karafka/version.rb
CHANGED