karafka 2.0.39 → 2.0.40
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/CHANGELOG.md +6 -0
- data/Gemfile.lock +1 -1
- data/lib/karafka/messages/builders/batch_metadata.rb +6 -5
- data/lib/karafka/messages/builders/messages.rb +3 -1
- data/lib/karafka/messages/messages.rb +5 -0
- data/lib/karafka/pro/processing/strategies/dlq/vp.rb +2 -1
- data/lib/karafka/processing/executor.rb +15 -6
- data/lib/karafka/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +2 -2
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
---
|
2
2
|
SHA256:
|
3
|
-
metadata.gz:
|
4
|
-
data.tar.gz:
|
3
|
+
metadata.gz: 9a4dd6f2d6a1354ac085a5f7ff7de13ca8589d9e83f7fffe2f83c2e4501895a6
|
4
|
+
data.tar.gz: 55426141e6974b329f8e207e20d3acd3935ca025a58b3f1fc287be3c363dee8f
|
5
5
|
SHA512:
|
6
|
-
metadata.gz:
|
7
|
-
data.tar.gz:
|
6
|
+
metadata.gz: d5bdbee5398cc6230d31d6915aef359071f68536b89bb7661ce3323a61b926966fe045fd15d3b19b648478a82ce631692302da63e2ccadc8c7c3b476649ae10e
|
7
|
+
data.tar.gz: 924af69aee02bb6a726e09d7f277c8a79dc18eccef6dccf51e62090dc956584ac242bb30b90d0edaec2c626d02960b445ed52e2142b34c20b8b59ce1339b0469
|
checksums.yaml.gz.sig
CHANGED
Binary file
|
data/CHANGELOG.md
CHANGED
@@ -1,5 +1,11 @@
|
|
1
1
|
# Karafka framework changelog
|
2
2
|
|
3
|
+
## 2.0.40 (2023-04-13)
|
4
|
+
- [Improvement] Introduce `Karafka::Messages::Messages#empty?` method to handle Idle related cases where shutdown or revocation would be called on an empty messages set. This method allows for checking if there are any messages in the messages batch.
|
5
|
+
- [Refactor] Require messages builder to accept partition and do not fetch it from messages.
|
6
|
+
- [Refactor] Use empty messages set for internal APIs (Idle) (so there always is `Karafka::Messages::Messages`)
|
7
|
+
- [Refactor] Allow for empty messages set initialization with -1001 and -1 on metadata (similar to `librdkafka`)
|
8
|
+
|
3
9
|
## 2.0.39 (2023-04-11)
|
4
10
|
- **[Feature]** Provide ability to throttle/limit number of messages processed in a time unit (#1203)
|
5
11
|
- **[Feature]** Provide Delayed Topics (#1000)
|
data/lib/karafka/messages/builders/batch_metadata.rb
CHANGED
@@ -10,22 +10,23 @@ module Karafka
|
|
10
10
|
#
|
11
11
|
# @param messages [Array<Karafka::Messages::Message>] messages array
|
12
12
|
# @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
|
13
|
+
# @param partition [Integer] partition of this metadata
|
13
14
|
# @param scheduled_at [Time] moment when the batch was scheduled for processing
|
14
15
|
# @return [Karafka::Messages::BatchMetadata] batch metadata object
|
15
16
|
#
|
16
17
|
# @note We do not set `processed_at` as this needs to be assigned when the batch is
|
17
18
|
# picked up for processing.
|
18
|
-
def call(messages, topic, scheduled_at)
|
19
|
+
def call(messages, topic, partition, scheduled_at)
|
19
20
|
Karafka::Messages::BatchMetadata.new(
|
20
21
|
size: messages.count,
|
21
|
-
first_offset: messages.first
|
22
|
-
last_offset: messages.last
|
22
|
+
first_offset: messages.first&.offset || -1001,
|
23
|
+
last_offset: messages.last&.offset || -1001,
|
23
24
|
deserializer: topic.deserializer,
|
24
|
-
partition:
|
25
|
+
partition: partition,
|
25
26
|
topic: topic.name,
|
26
27
|
# We go with the assumption that the creation of the whole batch is the last message
|
27
28
|
# creation time
|
28
|
-
created_at: messages.last
|
29
|
+
created_at: messages.last&.timestamp || nil,
|
29
30
|
# When this batch was built and scheduled for execution
|
30
31
|
scheduled_at: scheduled_at,
|
31
32
|
# This needs to be set to a correct value prior to processing starting
|
data/lib/karafka/messages/builders/messages.rb
CHANGED
@@ -11,14 +11,16 @@ module Karafka
|
|
11
11
|
#
|
12
12
|
# @param messages [Array<Karafka::Messages::Message>] karafka messages array
|
13
13
|
# @param topic [Karafka::Routing::Topic] topic for which we're received messages
|
14
|
+
# @param partition [Integer] partition of those messages
|
14
15
|
# @param received_at [Time] moment in time when the messages were received
|
15
16
|
# @return [Karafka::Messages::Messages] messages batch object
|
16
|
-
def call(messages, topic, received_at)
|
17
|
+
def call(messages, topic, partition, received_at)
|
17
18
|
# We cannot freeze the batch metadata because it is altered with the processed_at time
|
18
19
|
# prior to the consumption. It is being frozen there
|
19
20
|
metadata = BatchMetadata.call(
|
20
21
|
messages,
|
21
22
|
topic,
|
23
|
+
partition,
|
22
24
|
received_at
|
23
25
|
)
|
24
26
|
|
data/lib/karafka/pro/processing/strategies/dlq/vp.rb
CHANGED
@@ -21,7 +21,8 @@ module Karafka
|
|
21
21
|
#
|
22
22
|
# In general because we collapse processing in virtual partitions to one on errors, there
|
23
23
|
# is no special action that needs to be taken because we warranty that even with VPs
|
24
|
-
# on errors a retry collapses into a single state
|
24
|
+
# on errors a retry collapses into a single state and from this single state we can
|
25
|
+
# mark as consumed the message that we are moving to the DLQ.
|
25
26
|
module Vp
|
26
27
|
# Features for this strategy
|
27
28
|
FEATURES = %i[
|
data/lib/karafka/processing/executor.rb
CHANGED
@@ -44,10 +44,6 @@ module Karafka
|
|
44
44
|
#
|
45
45
|
# @param messages [Array<Karafka::Messages::Message>]
|
46
46
|
def before_enqueue(messages)
|
47
|
-
# the moment we've received the batch or actually the moment we've enqueued it,
|
48
|
-
# but good enough
|
49
|
-
@enqueued_at = Time.now
|
50
|
-
|
51
47
|
# Recreate consumer with each batch if persistence is not enabled
|
52
48
|
# We reload the consumers with each batch instead of relying on some external signals
|
53
49
|
# when needed for consistency. That way devs may have it on or off and not in this
|
@@ -57,8 +53,11 @@ module Karafka
|
|
57
53
|
# First we build messages batch...
|
58
54
|
consumer.messages = Messages::Builders::Messages.call(
|
59
55
|
messages,
|
60
|
-
|
61
|
-
|
56
|
+
topic,
|
57
|
+
partition,
|
58
|
+
# the moment we've received the batch or actually the moment we've enqueued it,
|
59
|
+
# but good enough
|
60
|
+
Time.now
|
62
61
|
)
|
63
62
|
|
64
63
|
consumer.on_before_enqueue
|
@@ -84,6 +83,16 @@ module Karafka
|
|
84
83
|
# This may include house-keeping or other state management changes that can occur but that
|
85
84
|
# not mean there are any new messages available for the end user to process
|
86
85
|
def idle
|
86
|
+
# Initializes the messages set in case idle operation would happen before any processing
|
87
|
+
# This prevents us from having no messages object at all as the messages object and
|
88
|
+
# its metadata may be used for statistics
|
89
|
+
consumer.messages ||= Messages::Builders::Messages.call(
|
90
|
+
[],
|
91
|
+
topic,
|
92
|
+
partition,
|
93
|
+
Time.now
|
94
|
+
)
|
95
|
+
|
87
96
|
consumer.on_idle
|
88
97
|
end
|
89
98
|
|
data/lib/karafka/version.rb
CHANGED
data.tar.gz.sig
CHANGED
Binary file
|
metadata
CHANGED
@@ -1,7 +1,7 @@
|
|
1
1
|
--- !ruby/object:Gem::Specification
|
2
2
|
name: karafka
|
3
3
|
version: !ruby/object:Gem::Version
|
4
|
-
version: 2.0.
|
4
|
+
version: 2.0.40
|
5
5
|
platform: ruby
|
6
6
|
authors:
|
7
7
|
- Maciej Mensfeld
|
@@ -35,7 +35,7 @@ cert_chain:
|
|
35
35
|
Qf04B9ceLUaC4fPVEz10FyobjaFoY4i32xRto3XnrzeAgfEe4swLq8bQsR3w/EF3
|
36
36
|
MGU0FeSV2Yj7Xc2x/7BzLK8xQn5l7Yy75iPF+KP3vVmDHnNl
|
37
37
|
-----END CERTIFICATE-----
|
38
|
-
date: 2023-04-
|
38
|
+
date: 2023-04-13 00:00:00.000000000 Z
|
39
39
|
dependencies:
|
40
40
|
- !ruby/object:Gem::Dependency
|
41
41
|
name: karafka-core
|
metadata.gz.sig
CHANGED
Binary file
|