karafka-testing 2.4.2 → 2.4.4
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/workflows/ci.yml +1 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +10 -2
- data/Gemfile.lock +15 -11
- data/lib/karafka/testing/minitest/helpers.rb +39 -12
- data/lib/karafka/testing/rspec/helpers.rb +38 -12
- data/lib/karafka/testing/version.rb +1 -1
- data.tar.gz.sig +0 -0
- metadata +3 -3
- metadata.gz.sig +0 -0
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7dbd023d8dfb5cb1f11dce4a3ddf893c005228b9f8bc9f0bf199d37eaad6a5be
+  data.tar.gz: 17754c2c15f2b975241294fd6eff2e0ff40f64188b0bffcf110f019e52fc3e84
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 2ee758d81cc2ddf98fedafeac9105cf3c50aa0ad6845bca729aa3dde5c461f82328c91a6dd449faa460ee28c8e27e5b26ea87f1bbdc541f4be8c11aae6388020
+  data.tar.gz: 80d66738edd43d2e8a373f29b3658f12fce556505c5185af5929bc4e721c80eec54af6479661129cb1b57f67c7bc506fca0ddd410aea5418f85be8b71e6b6608
checksums.yaml.gz.sig
CHANGED
Binary file
data/.github/workflows/ci.yml
CHANGED
data/.ruby-version
CHANGED
@@ -1 +1 @@
-3.3.
+3.3.3
data/CHANGELOG.md
CHANGED
@@ -1,9 +1,17 @@
 # Karafka Test gem changelog

-## 2.4.
+## 2.4.4 (2024-07-02)
+- [Enhancement] Memoize `consumer_for` so consumers can be set up for multiple topics and `let(:consumer)` is no longer a requirement. (dorner)
+
+## 2.4.3 (2024-05-06)
+- [Fix] Fix: raw_key is not being assigned for rspec (CaioPenhalver)
+- [Fix] Fix: raw_key is not being assigned for minitest
+- [Fix] Fix: headers is not being assigned for minitest and rspec
+
+## 2.4.2 (2024-04-30)
 - [Fix] Fix FrozenError when accessing key and headers in `Karafka::Messages::Metadata` (tldn0718)

-## 2.4.1 (2024-
+## 2.4.1 (2024-04-29)
 - [Fix] Fix instance variable in minitest helper (tldn0718)

 ## 2.4.0 (2024-04-26)
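To make the 2.4.4 enhancement above concrete, here is a minimal, hypothetical RSpec sketch. It assumes `orders` and `payments` topics exist in the application's routing and uses the gem's `karafka.consumer_for` / `karafka.produce` helpers; the topic names, payloads and expectations are illustrative only, not part of this release.

# Hypothetical spec: with 2.4.4, consumer_for is memoized per topic, so several
# consumers can be built in one example and let(:consumer) is optional.
require 'karafka/testing/rspec/helpers'

RSpec.describe 'multi-topic consumption' do
  include Karafka::Testing::RSpec::Helpers

  it 'routes produced messages to the consumer built for their topic' do
    orders   = karafka.consumer_for(:orders)   # assumed topic names
    payments = karafka.consumer_for(:payments)

    # An explicit topic: in the metadata decides which consumer receives the message
    karafka.produce({ 'id' => 1 }.to_json, topic: 'orders')
    karafka.produce({ 'id' => 2 }.to_json, topic: 'payments')

    expect(orders.messages.first.topic).to eq('orders')
    expect(payments.messages.last.topic).to eq('payments')
  end
end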
data/Gemfile.lock
CHANGED
@@ -1,30 +1,34 @@
 PATH
   remote: .
   specs:
-    karafka-testing (2.4.
+    karafka-testing (2.4.4)
       karafka (>= 2.4.0, < 2.5.0)
       waterdrop (>= 2.7.0)

 GEM
   remote: https://rubygems.org/
   specs:
-
-
+    base64 (0.2.0)
+    ffi (1.17.0)
+    ffi (1.17.0-x86_64-linux-gnu)
+    karafka (2.4.3)
+      base64 (~> 0.2)
       karafka-core (>= 2.4.0, < 2.5.0)
-      waterdrop (>= 2.7.
+      waterdrop (>= 2.7.3, < 3.0.0)
       zeitwerk (~> 2.3)
-    karafka-core (2.4.
-      karafka-rdkafka (>= 0.15.0, < 0.
-    karafka-rdkafka (0.
+    karafka-core (2.4.3)
+      karafka-rdkafka (>= 0.15.0, < 0.17.0)
+    karafka-rdkafka (0.16.0)
       ffi (~> 1.15)
       mini_portile2 (~> 2.6)
       rake (> 12)
-    mini_portile2 (2.8.
+    mini_portile2 (2.8.7)
     rake (13.2.1)
-    waterdrop (2.7.
+    waterdrop (2.7.3)
       karafka-core (>= 2.4.0, < 3.0.0)
+      karafka-rdkafka (>= 0.15.1)
       zeitwerk (~> 2.3)
-    zeitwerk (2.6.
+    zeitwerk (2.6.16)

 PLATFORMS
   ruby
@@ -34,4 +38,4 @@ DEPENDENCIES
   karafka-testing!

 BUNDLED WITH
-   2.5.
+   2.5.14
data/lib/karafka/testing/minitest/helpers.rb
CHANGED
@@ -14,6 +14,14 @@ module Karafka
     module Minitest
       # Minitest helpers module that needs to be included
       module Helpers
+        # Map to convert dispatch attributes into their "delivery" format, since we bypass Kafka
+        METADATA_DISPATCH_MAPPINGS = {
+          raw_key: :key,
+          raw_headers: :headers
+        }.freeze
+
+        private_constant :METADATA_DISPATCH_MAPPINGS
+
         class << self
           # Adds all the needed extra functionalities to the minitest group
           # @param base [Class] Minitest example group we want to extend
@@ -29,6 +37,8 @@ module Karafka
             @_karafka_consumer_messages.clear
             @_karafka_producer_client.reset

+            @_karafka_consumer_mappings = {}
+
             Karafka.producer.stubs(:client).returns(@_karafka_producer_client)
           end

@@ -79,24 +89,32 @@ module Karafka
         # @example Send a json message to consumer and simulate, that it is partition 6
         #   @karafka.produce({ 'hello' => 'world' }.to_json, 'partition' => 6)
         def _karafka_add_message_to_consumer_if_needed(message)
+          consumer_obj = if defined?(@consumer)
+                           @consumer
+                         else
+                           @_karafka_consumer_mappings&.dig(message[:topic])
+                         end
           # Consumer needs to be defined in order to pass messages to it
-          return unless defined?(@consumer)
+          return unless defined?(consumer_obj)
           # We're interested in adding message to consumer only when it is a Karafka consumer
           # Users may want to test other things (models producing messages for example) and in
           # their case consumer will not be a consumer
-          return unless @consumer.is_a?(Karafka::BaseConsumer)
+          return unless consumer_obj.is_a?(Karafka::BaseConsumer)
           # We target to the consumer only messages that were produced to it, since specs may also
           # produce other messages targeting other topics
-          return unless message[:topic] == @consumer.topic.name
+          return unless message[:topic] == consumer_obj.topic.name

           # Build message metadata and copy any metadata that would come from the message
-          metadata = _karafka_message_metadata_defaults
+          metadata = _karafka_message_metadata_defaults(consumer_obj)

           metadata.keys.each do |key|
-            next unless message.key?(key)
+            message_key = METADATA_DISPATCH_MAPPINGS.fetch(key, key)

-            metadata[key] = message.fetch(key)
+            next unless message.key?(message_key)
+
+            metadata[key] = message.fetch(message_key)
           end
+
           # Add this message to previously produced messages
           @_karafka_consumer_messages << Karafka::Messages::Message.new(
             message[:payload],
@@ -105,13 +123,13 @@ module Karafka
           # Update batch metadata
           batch_metadata = Karafka::Messages::Builders::BatchMetadata.call(
             @_karafka_consumer_messages,
-            @consumer.topic,
+            consumer_obj.topic,
             0,
             Time.now
           )

           # Update consumer messages batch
-          @consumer.messages = Karafka::Messages::Messages.new(
+          consumer_obj.messages = Karafka::Messages::Messages.new(
             @_karafka_consumer_messages,
             batch_metadata
           )
@@ -121,9 +139,16 @@ module Karafka
         # @param payload [String] payload we want to dispatch
         # @param metadata [Hash] any metadata we want to dispatch alongside the payload
         def _karafka_produce(payload, metadata = {})
+          topic = if metadata[:topic]
+                    metadata[:topic]
+                  elsif defined?(@consumer)
+                    @consumer.topic.name
+                  else
+                    @_karafka_consumer_mappings&.keys&.last
+                  end
           Karafka.producer.produce_sync(
             {
-              topic: @consumer.topic.name,
+              topic: topic,
               payload: payload
             }.merge(metadata)
           )
@@ -136,17 +161,18 @@ module Karafka

         private

+        # @param consumer_obj [Karafka::BaseConsumer] consumer reference
         # @return [Hash] message default options
-        def _karafka_message_metadata_defaults
+        def _karafka_message_metadata_defaults(consumer_obj)
           {
-            deserializers: @consumer.topic.deserializers,
+            deserializers: consumer_obj.topic.deserializers,
             timestamp: Time.now,
             raw_headers: {},
             raw_key: nil,
             offset: @_karafka_consumer_messages.size,
             partition: 0,
             received_at: Time.now,
-            topic: @consumer.topic.name
+            topic: consumer_obj.topic.name
           }
         end

@@ -168,6 +194,7 @@ module Karafka
           @consumer.coordinator.seek_offset = 0
           # Indicate usage as for tests no direct enqueuing happens
           @consumer.instance_variable_set('@used', true)
+          @_karafka_consumer_mappings[topic.name] = @consumer
           @consumer
         end
       end
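A short, hypothetical Minitest sketch of the METADATA_DISPATCH_MAPPINGS change above: a produced `key:` / `headers:` pair should now surface as the consumed message's key and headers. The test class, topic name and the `@karafka.consumer_for` call are assumptions (only `@karafka.produce` is shown in the diff's own doc comments); adjust to your app's routing and test setup.

# Hypothetical test: key: and headers: passed to produce are mapped onto
# raw_key / raw_headers of the message handed to the consumer.
require 'minitest/autorun'
require 'mocha/minitest'
require 'karafka/testing/minitest/helpers'

class OrdersConsumerTest < Minitest::Test
  include Karafka::Testing::Minitest::Helpers

  def test_key_and_headers_reach_the_consumed_message
    @consumer = @karafka.consumer_for(:orders) # assumed topic

    @karafka.produce(
      { 'id' => 1 }.to_json,
      key: 'order-1',                  # copied into raw_key
      headers: { 'source' => 'test' }  # copied into raw_headers
    )

    message = @consumer.messages.first

    assert_equal 'order-1', message.key
    assert_equal 'test', message.headers['source']
  end
end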
data/lib/karafka/testing/rspec/helpers.rb
CHANGED
@@ -15,6 +15,14 @@ module Karafka
     module RSpec
       # RSpec helpers module that needs to be included
       module Helpers
+        # Map to convert dispatch attributes into their "delivery" format, since we bypass Kafka
+        METADATA_DISPATCH_MAPPINGS = {
+          raw_key: :key,
+          raw_headers: :headers
+        }.freeze
+
+        private_constant :METADATA_DISPATCH_MAPPINGS
+
         class << self
           # Adds all the needed extra functionalities to the rspec group
           # @param base [Class] RSpec example group we want to extend
@@ -36,6 +44,7 @@ module Karafka

             _karafka_consumer_messages.clear
             _karafka_producer_client.reset
+            @_karafka_consumer_mappings = {}

             if Object.const_defined?('Mocha', false)
               Karafka.producer.stubs(:client).returns(_karafka_producer_client)
@@ -86,23 +95,30 @@ module Karafka
         #     karafka.produce({ 'hello' => 'world' }.to_json, 'partition' => 6)
         #   end
         def _karafka_add_message_to_consumer_if_needed(message)
+          consumer_obj = if defined?(consumer)
+                           consumer
+                         else
+                           @_karafka_consumer_mappings&.dig(message[:topic])
+                         end
           # Consumer needs to be defined in order to pass messages to it
-          return unless defined?(consumer)
+          return unless consumer_obj
           # We're interested in adding message to consumer only when it is a Karafka consumer
           # Users may want to test other things (models producing messages for example) and in
           # their case consumer will not be a consumer
-          return unless consumer.is_a?(Karafka::BaseConsumer)
+          return unless consumer_obj.is_a?(Karafka::BaseConsumer)
           # We target to the consumer only messages that were produced to it, since specs may also
           # produce other messages targeting other topics
-          return unless message[:topic] == consumer.topic.name
+          return unless message[:topic] == consumer_obj.topic.name

           # Build message metadata and copy any metadata that would come from the message
-          metadata = _karafka_message_metadata_defaults
+          metadata = _karafka_message_metadata_defaults(consumer_obj)

           metadata.keys.each do |key|
-            next unless message.key?(key)
+            message_key = METADATA_DISPATCH_MAPPINGS.fetch(key, key)

-            metadata[key] = message.fetch(key)
+            next unless message.key?(message_key)
+
+            metadata[key] = message.fetch(message_key)
           end

           # Add this message to previously produced messages
@@ -114,13 +130,13 @@ module Karafka
           # Update batch metadata
           batch_metadata = Karafka::Messages::Builders::BatchMetadata.call(
             _karafka_consumer_messages,
-            consumer.topic,
+            consumer_obj.topic,
             0,
             Time.now
           )

           # Update consumer messages batch
-          consumer.messages = Karafka::Messages::Messages.new(
+          consumer_obj.messages = Karafka::Messages::Messages.new(
             _karafka_consumer_messages,
             batch_metadata
           )
@@ -130,9 +146,16 @@ module Karafka
         # @param payload [String] payload we want to dispatch
         # @param metadata [Hash] any metadata we want to dispatch alongside the payload
         def _karafka_produce(payload, metadata = {})
+          topic = if metadata[:topic]
+                    metadata[:topic]
+                  elsif defined?(consumer)
+                    consumer.topic.name
+                  else
+                    @_karafka_consumer_mappings&.keys&.last
+                  end
           Karafka.producer.produce_sync(
             {
-              topic: consumer.topic.name,
+              topic: topic,
               payload: payload
             }.merge(metadata)
           )
@@ -145,17 +168,18 @@ module Karafka

         private

+        # @param consumer_obj [Karafka::BaseConsumer] consumer reference
         # @return [Hash] message default options
-        def _karafka_message_metadata_defaults
+        def _karafka_message_metadata_defaults(consumer_obj)
           {
-            deserializers: consumer.topic.deserializers,
+            deserializers: consumer_obj.topic.deserializers,
             timestamp: Time.now,
             raw_headers: {},
             raw_key: nil,
             offset: _karafka_consumer_messages.size,
             partition: 0,
             received_at: Time.now,
-            topic: consumer.topic.name
+            topic: consumer_obj.topic.name
           }
         end

@@ -178,6 +202,8 @@ module Karafka
           consumer.coordinator.seek_offset = 0
           # Indicate usage as for tests no direct enqueuing happens
           consumer.instance_variable_set('@used', true)
+
+          @_karafka_consumer_mappings[topic.name] = consumer
           consumer
         end
       end
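The `_karafka_produce` change above picks the dispatch topic in a fixed order: explicit `topic:` metadata first, then the `consumer` subject if one is defined, then the most recently registered `consumer_for` topic. A hypothetical sketch of that last fallback, with assumed topic names:

require 'karafka/testing/rspec/helpers'

RSpec.describe 'produce topic fallback' do
  include Karafka::Testing::RSpec::Helpers

  it 'targets the most recently built consumer when no topic: is given' do
    karafka.consumer_for(:orders)
    payments = karafka.consumer_for(:payments) # registered last

    # No topic: metadata and no let(:consumer), so the last mapping wins
    karafka.produce({ 'id' => 1 }.to_json)

    expect(payments.messages.first.topic).to eq('payments')
  end
end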
data.tar.gz.sig
CHANGED
Binary file
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka-testing
 version: !ruby/object:Gem::Version
-  version: 2.4.
+  version: 2.4.4
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -35,7 +35,7 @@ cert_chain:
   AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
   msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
   -----END CERTIFICATE-----
-date: 2024-
+date: 2024-07-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: karafka
@@ -132,7 +132,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.5.
+rubygems_version: 3.5.11
 signing_key:
 specification_version: 4
 summary: Library which provides helpers for easier Karafka consumers tests

metadata.gz.sig
CHANGED
Binary file