rimless 0.1.0

Sign up to get free protection for your applications and to get access to all the features.
@@ -0,0 +1,57 @@
1
# frozen_string_literal: true

module Rimless
  # The configuration for the rimless gem. All settings are declared via
  # +ActiveSupport::Configurable+ accessors with lazy-evaluated defaults,
  # most of which can be overridden through environment variables.
  class Configuration
    include ActiveSupport::Configurable

    # Used to identify this client on the user agent header
    config_accessor(:app_name) { Rimless.local_app_name }

    # The environment to use (+KAFKA_ENV+ wins, then the Rails environment,
    # then +development+ as the last resort)
    config_accessor(:env) do
      if defined? Rails
        ENV.fetch('KAFKA_ENV', Rails.env).to_sym
      else
        ENV.fetch('KAFKA_ENV', 'development').to_sym
      end
    end

    # The Apache Kafka client id (consumer group name)
    config_accessor(:client_id) do
      ENV.fetch('KAFKA_CLIENT_ID', Rimless.local_app_name)
    end

    # The logger instance to use (when available we use the +Rails.logger+)
    config_accessor(:logger) do
      if defined? Rails
        Rails.logger
      else
        Logger.new($stdout)
      end
    end

    # At least one broker of the Apache Kafka cluster
    config_accessor(:kafka_brokers) do
      ENV.fetch('KAFKA_BROKERS', 'kafka://message-bus.local:9092').split(',')
    end

    # The source Apache Avro schema files location (templates)
    config_accessor(:avro_schema_path) do
      parts = %w[config avro_schemas]
      if defined? Rails
        Rails.root.join(*parts)
      else
        Pathname.new(Dir.pwd).join(*parts)
      end
    end

    # The compiled Apache Avro schema files location (usable with Avro gem)
    config_accessor(:compiled_avro_schema_path) do
      parts = %w[config avro_schemas compiled]
      if defined? Rails
        Rails.root.join(*parts)
      else
        Pathname.new(Dir.pwd).join(*parts)
      end
    end

    # The Confluent Schema Registry API URL to use
    config_accessor(:schema_registry_url) do
      ENV.fetch('KAFKA_SCHEMA_REGISTRY_URL',
                'http://schema-registry.message-bus.local')
    end
  end
end
@@ -0,0 +1,75 @@
1
# frozen_string_literal: true

module Rimless
  # The top-level configuration handling.
  #
  # rubocop:disable Style/ClassVars because we split module code
  module ConfigurationHandling
    extend ActiveSupport::Concern

    class_methods do
      # Retrieve the current configuration object.
      #
      # @return [Configuration] the current configuration object
      def configuration
        @@configuration ||= Configuration.new
      end

      # Configure the concern by providing a block which takes
      # care of this task. Example:
      #
      #   Rimless.configure do |conf|
      #     # conf.xyz = [..]
      #   end
      def configure
        yield(configuration)
        # New settings may affect WaterDrop/AvroTurf, so reconfigure them
        configure_dependencies
      end

      # Reset the current configuration with the default one.
      def reset_configuration!
        @@configuration = Configuration.new
      end

      # Retrieve the current configured environment. You can use it like
      # +Rails.env+ to query it. E.g. +Rimless.env.production?+.
      #
      # @return [ActiveSupport::StringInquirer] the environment
      def env
        # Rebuild the memoized inquirer only when the configured environment
        # changed. (Fixed: the condition previously read +@env+ while the
        # memo was stored in +@@env+, so the inquirer was rebuilt on every
        # single call.)
        @@env = ActiveSupport::StringInquirer.new(configuration.env.to_s) \
          if !defined?(@@env) || @@env.to_s != configuration.env.to_s
        @@env
      end

      # A simple convention helper to setup Apache Kafka topic names.
      #
      # @param app [String] the application namespace
      # @return [String] the Apache Kafka topic name prefix
      def topic_prefix(app = Rimless.configuration.app_name)
        "#{Rimless.env}.#{app}."
      end

      # Pass back the local application name. When we are loaded together with
      # a Rails application we use the application class name. This
      # application name is URI/GID compatible. When no local application is
      # available, we just pass back +nil+.
      #
      # @return [String, nil] the Rails application name, or +nil+
      def local_app_name
        # Check for non-Rails integration
        return unless defined? Rails
        # Check if an application is defined
        return if Rails.application.nil?

        # Prefer +module_parent_name+ when available, because +parent_name+
        # is deprecated since Rails 6 and removed in Rails 7
        app_class = Rails.application.class
        namespace = if app_class.respond_to?(:module_parent_name)
                      app_class.module_parent_name
                    else
                      app_class.parent_name
                    end

        # Pass back the URI compatible application name
        namespace.underscore.dasherize
      end

      # Retrieve the current configured logger instance.
      #
      # @return [Logger] the logger instance
      delegate :logger, to: :configuration
    end
  end
  # rubocop:enable Style/ClassVars
end
@@ -0,0 +1,55 @@
1
# frozen_string_literal: true

module Rimless
  # The top-level dependencies helpers.
  module Dependencies
    extend ActiveSupport::Concern

    class_methods do
      # (Re)configure our gem dependencies. We take care of setting up
      # +WaterDrop+, our Apache Kafka driver and +AvroTurf+, our Confluent
      # Schema Registry driver.
      def configure_dependencies
        configure_waterdrop
        configure_avro_turf
      end

      # Set sensible defaults for the +WaterDrop+ gem.
      def configure_waterdrop
        conf = Rimless.configuration

        WaterDrop.setup do |waterdrop|
          # Activate message delivery and use the default logger
          waterdrop.deliver = true
          waterdrop.logger = Rimless.logger
          # An optional identifier of a Kafka consumer (in a consumer group)
          # that is passed to a Kafka broker with every request. A logical
          # application name to be included in Kafka logs and monitoring
          # aggregates.
          waterdrop.client_id = conf.client_id
          # All the known brokers, at least one. The ruby-kafka driver will
          # discover the whole cluster structure once and when issues occur to
          # dynamically adjust scaling operations.
          waterdrop.kafka.seed_brokers = conf.kafka_brokers
          # All brokers MUST acknowledge a new message
          waterdrop.kafka.required_acks = -1
        end
      end

      # Set sensible defaults for the +AvroTurf+ gem and (re)compile the Apache
      # Avro schema templates (ERB), so the gem can handle them properly.
      def configure_avro_turf
        # First create a helper instance which knows all the schema locations
        utils = Rimless::AvroUtils.new
        # Compile our Avro schema templates to ready-to-consume Avro schemas
        utils.recompile_schemas
        # Register a globally available Apache Avro decoder/encoder with
        # support for the Confluent Schema Registry
        Rimless.avro = AvroTurf::Messaging.new(
          logger: Rimless.logger,
          namespace: utils.namespace,
          schemas_path: utils.output_path,
          registry_url: Rimless.configuration.schema_registry_url
        )
      end
    end
  end
end
@@ -0,0 +1,106 @@
1
# frozen_string_literal: true

module Rimless
  # The top-level Apache Kafka helpers.
  module KafkaHelpers
    extend ActiveSupport::Concern

    # rubocop:disable Metrics/BlockLength because its an Active Support concern
    class_methods do
      # Generate a common topic name for Apache Kafka while taking care of
      # configured prefixes.
      #
      # @param name [String, Symbol] the topic name
      # @param app [String, Symbol] a different application name, by default
      #   the local app
      # @return [String] the complete topic name
      #
      # @example Name only
      #   Rimless.topic(:users)
      # @example Name with app
      #   Rimless.topic(:users, app: 'test-api')
      # @example Mix and match
      #   Rimless.topic(name: 'test', app: :fancy_app)
      #
      # rubocop:disable Metrics/AbcSize because of the usage flexibility
      def topic(*args)
        # A leading plain String/Symbol argument serves as the topic name
        first = args.first
        name = first if [String, Symbol].include?(first.class)

        # A trailing options hash may set/override the name and the app
        opts = args.last
        if opts.is_a?(Hash)
          name = opts[:name] if opts.key?(:name)
          app = opts[:app] if opts.key?(:app)
        end

        app ||= Rimless.configuration.app_name

        raise ArgumentError, 'No name given' if name.nil?

        "#{Rimless.topic_prefix(app)}#{name}"
      end
      # rubocop:enable Metrics/AbcSize

      # Send a single message to Apache Kafka. The data is encoded according
      # to the given Apache Avro schema. The destination Kafka topic may be a
      # relative name, or a hash which is passed to the +.topic+ method to
      # manipulate the application details. The message is sent in a
      # synchronous, blocking way.
      #
      # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
      # @param schema [String, Symbol] the Apache Avro schema to use
      # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
      #   Apache Kafka topic
      def sync_message(data:, schema:, topic:, **args)
        blob = Rimless.avro.encode(data, schema_name: schema.to_s)
        sync_raw_message(data: blob, topic: topic, **args)
      end
      alias_method :message, :sync_message

      # Send a single message to Apache Kafka. The data is encoded according
      # to the given Apache Avro schema. The destination Kafka topic may be a
      # relative name, or a hash which is passed to the +.topic+ method to
      # manipulate the application details. The message is sent in an
      # asynchronous, non-blocking way.
      #
      # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
      # @param schema [String, Symbol] the Apache Avro schema to use
      # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
      #   Apache Kafka topic
      def async_message(data:, schema:, topic:, **args)
        blob = Rimless.avro.encode(data, schema_name: schema.to_s)
        async_raw_message(data: blob, topic: topic, **args)
      end

      # Send a single message to Apache Kafka. The data is not touched, so you
      # need to encode it yourself before you pass it in. The destination
      # Kafka topic may be a relative name, or a hash which is passed to the
      # +.topic+ method to manipulate the application details. The message is
      # sent in a synchronous, blocking way.
      #
      # @param data [String] the encoded message payload
      # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
      #   Apache Kafka topic
      def sync_raw_message(data:, topic:, **args)
        WaterDrop::SyncProducer.call(data, **args.merge(topic: topic(topic)))
      end
      alias_method :raw_message, :sync_raw_message

      # Send a single message to Apache Kafka. The data is not touched, so you
      # need to encode it yourself before you pass it in. The destination
      # Kafka topic may be a relative name, or a hash which is passed to the
      # +.topic+ method to manipulate the application details. The message is
      # sent in an asynchronous, non-blocking way.
      #
      # @param data [String] the encoded message payload
      # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
      #   Apache Kafka topic
      def async_raw_message(data:, topic:, **args)
        WaterDrop::AsyncProducer.call(data, **args.merge(topic: topic(topic)))
      end
    end
    # rubocop:enable Metrics/BlockLength
  end
end
@@ -0,0 +1,25 @@
1
# frozen_string_literal: true

module Rimless
  # Rails-specific initializations.
  class Railtie < Rails::Railtie
    # Run before all Rails initializers, but after the application is defined
    config.before_initialize do
      rimless_conf = Rimless.configuration
      name = Rimless.local_app_name

      # Reset the default application name (which is +nil+), because the
      # Rails application was not defined when the rimless gem was loaded
      rimless_conf.app_name = name

      # Set the app name as default client id, when not already set
      rimless_conf.client_id ||= name
    end

    # Run after all configuration is set via Rails initializers
    config.after_initialize do
      # Reconfigure our dependencies with the final settings
      Rimless.configure_dependencies
    end
  end
end
@@ -0,0 +1,40 @@
1
# frozen_string_literal: true

# RSpec support file for rimless users: wires the gem's helpers/matchers into
# RSpec and fakes the Confluent Schema Registry via WebMock.
require 'webmock'
require 'webmock/rspec'
require 'avro_turf/test/fake_confluent_schema_registry_server'
require 'rimless'
require 'rimless/rspec/helpers'
require 'rimless/rspec/matchers'

# RSpec 1.x and 2.x compatibility
#
# +RSPEC_CONFIGURER+ is provided by webmock/rspec, so its absence means the
# webmock requires above did not take effect.
#
# @see http://bit.ly/2GbAYsU
raise 'No RSPEC_CONFIGURER is defined, webmock is missing?' \
  unless defined?(RSPEC_CONFIGURER)

RSPEC_CONFIGURER.configure do |config|
  config.include Rimless::RSpec::Helpers
  config.include Rimless::RSpec::Matchers

  # Stub all Confluent Schema Registry requests and handle them gracefully with
  # the help of the faked (inlined) Schema Registry server. This allows us to
  # perform the actual Apache Avro message encoding/decoding without the need
  # to have a Schema Registry up and running.
  config.before do
    # Get the Excon connection from the AvroTurf instance
    #
    # NOTE(review): this digs through AvroTurf/Excon private internals
    # (+@registry+ -> +@upstream+ -> +@connection+ -> +@data+) and will break
    # silently if those instance variables change between gem versions —
    # re-verify on every AvroTurf/Excon upgrade
    connection = Rimless.avro.instance_variable_get(:@registry)
                        .instance_variable_get(:@upstream)
                        .instance_variable_get(:@connection)
                        .instance_variable_get(:@data)
    # Enable WebMock on the already instantiated
    # Confluent Schema Registry Excon connection
    connection[:mock] = true
    # Grab all Confluent Schema Registry requests and send
    # them to the faked (inlined) Schema Registry
    stub_request(:any, %r{^http://#{connection[:hostname]}})
      .to_rack(FakeConfluentSchemaRegistryServer)
    # Clear any cached data (schemas registered by previous examples)
    FakeConfluentSchemaRegistryServer.clear
  end
end
@@ -0,0 +1,17 @@
1
# frozen_string_literal: true

module Rimless
  # Some general RSpec testing stuff.
  module RSpec
    # A collection of Rimless/RSpec helpers.
    module Helpers
      # A simple helper to parse a blob of Apache Avro data.
      #
      # @param data [String] the Apache Avro blob
      # @return [Hash{String => Mixed}] the parsed payload
      def avro_parse(data)
        # Decode with the globally configured AvroTurf messaging instance
        messaging = Rimless.avro
        messaging.decode(data)
      end
    end
  end
end
@@ -0,0 +1,286 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ # Some general RSpec testing stuff.
5
+ module RSpec
6
+ # A set of Rimless/RSpec matchers.
7
+ module Matchers
8
+ # The Apache Kafka message expectation.
9
+ #
10
+ # rubocop:disable Metrics/ClassLength because its almost RSpec API code
11
      class HaveSentKafkaMessage < ::RSpec::Matchers::BuiltIn::BaseMatcher
        include ::RSpec::Mocks::ExampleMethods

        # Instantiate a new expectation object.
        #
        # NOTE(review): +super+ is not called, so +BaseMatcher#initialize+ is
        # skipped and the inherited +@expected+ stays unset — presumably fine
        # because all failure messages below are custom; confirm against the
        # BaseMatcher contract of the pinned RSpec version.
        #
        # @param schema [String, Symbol, nil] the expected message schema
        # @return [HaveSentKafkaMessage] the expectation instance
        def initialize(schema)
          @schema = schema
          @args = {}
          @data = {}
          @messages = []
          # By default we expect exactly one matching message
          set_expected_number(:exactly, 1)
        end

        # Collect the expectation arguments for the Kafka message passing. (eg.
        # topic) Returns +self+ so the calls can be chained fluently.
        #
        # @param args [Hash{Symbol => Mixed}] the expected arguments
        # @return [HaveSentKafkaMessage] the expectation instance
        def with(**args)
          @args = args
          self
        end

        # Collect the expectations for the encoded message. The passed message
        # will be decoded accordingly for the check. Returns +self+ for
        # chaining.
        #
        # @param args [Hash{Symbol => Mixed}] the expected arguments
        # @return [HaveSentKafkaMessage] the expectation instance
        def with_data(**args)
          @data = args
          self
        end

        # Set the expected amount of message (exactly).
        #
        # @param count [Integer, Symbol] the expected amount
        # @return [HaveSentKafkaMessage] the expectation instance
        def exactly(count)
          set_expected_number(:exactly, count)
          self
        end

        # Set the expected amount of message (at least).
        #
        # @param count [Integer, Symbol] the expected amount
        # @return [HaveSentKafkaMessage] the expectation instance
        def at_least(count)
          set_expected_number(:at_least, count)
          self
        end

        # Set the expected amount of message (at most).
        #
        # @param count [Integer, Symbol] the expected amount
        # @return [HaveSentKafkaMessage] the expectation instance
        def at_most(count)
          set_expected_number(:at_most, count)
          self
        end

        # Just syntactic sugar (eg. +exactly(2).times+) — a no-op.
        #
        # @return [HaveSentKafkaMessage] the expectation instance
        def times
          self
        end

        # Just syntactic sugar for a regular +exactly(:once)+ call.
        #
        # @return [HaveSentKafkaMessage] the expectation instance
        def once
          exactly(:once)
        end

        # Just syntactic sugar for a regular +exactly(:twice)+ call.
        #
        # @return [HaveSentKafkaMessage] the expectation instance
        def twice
          exactly(:twice)
        end

        # Just syntactic sugar for a regular +exactly(:thrice)+ call.
        #
        # @return [HaveSentKafkaMessage] the expectation instance
        def thrice
          exactly(:thrice)
        end

        # Serve the RSpec matcher API and signalize we support block evaluation.
        #
        # @return [Boolean] the answer
        def supports_block_expectations?
          true
        end

        # The actual RSpec API check for the expectation. Installs the
        # producer spies, runs the block and then evaluates the recorded
        # messages.
        #
        # @param proc [Proc] the block to evaluate
        # @return [Boolean] whenever the check was successful or not
        # @raise [ArgumentError] when not used as a block expectation
        def matches?(proc)
          unless proc.is_a? Proc
            raise ArgumentError, 'have_sent_kafka_message and ' \
                                 'sent_kafka_message only support block ' \
                                 'expectations'
          end

          listen_to_messages
          proc.call
          check
        end

        # The actual RSpec API check for the expectation (negative).
        #
        # NOTE(review): this overrides any user-configured count with
        # +at_least(1)+, so the negation means "no matching message at all" —
        # the same convention the ActiveJob matchers use.
        #
        # @param proc [Proc] the block to evaluate
        # @return [Boolean] whenever the check was unsuccessful or not
        def does_not_match?(proc)
          set_expected_number(:at_least, 1)

          !matches?(proc)
        end

        private

        # Set the expectation type and count for the checking. Accepts the
        # symbolic counts +:once+, +:twice+ and +:thrice+ as well as anything
        # +Integer()+ can convert.
        #
        # @param relativity [Symbol] the amount expectation type
        # @param count [Integer, Symbol] the expected amount
        def set_expected_number(relativity, count)
          @expectation_type = relativity
          @expected_number = case count
                             when :once then 1
                             when :twice then 2
                             when :thrice then 3
                             else Integer(count)
                             end
        end

        # Perform the result set checking of recorded message which were sent.
        # Partitions the recorded messages into matching/unmatching (also used
        # by +#failure_message+) and compares the matching count against the
        # configured expectation.
        #
        # @return [Boolean] the answer
        def check
          @matching, @unmatching = @messages.partition do |message|
            schema_match?(message) && arguments_match?(message) &&
              data_match?(message)
          end

          @matching_count = @matching.size

          case @expectation_type
          when :exactly then @expected_number == @matching_count
          when :at_most then @expected_number >= @matching_count
          when :at_least then @expected_number <= @matching_count
          end
        end

        # Check for the expected schema on the given message. A message
        # matches when it can be decoded with the expected schema; no
        # configured schema matches everything.
        #
        # @param message [Hash{Symbol => Mixed}] the message under inspection
        # @return [Boolean] the check result
        def schema_match?(message)
          return true unless @schema

          begin
            Rimless.avro.decode(message[:data], schema_name: @schema.to_s)
            return true
          rescue Avro::IO::SchemaMatchException
            false
          end
        end

        # Check for the expected arguments on the Kafka message producer call.
        #
        # NOTE(review): +*@args+ splats the hash into an array of key/value
        # pairs before it reaches the ArgumentListMatcher — verify this is
        # the intended calling convention for keyword-style expectations.
        #
        # @param message [Hash{Symbol => Mixed}] the message under inspection
        # @return [Boolean] the check result
        def arguments_match?(message)
          return true unless @args.any?

          ::RSpec::Mocks::ArgumentListMatcher.new(*@args)
                                             .args_match?(*message[:args])
        end

        # Check for the expected data on the encoded Apache Avro message.
        # (deep include — the expected keys/values must be a subset of the
        # decoded payload)
        #
        # @param message [Hash{Symbol => Mixed}] the message under inspection
        # @return [Boolean] the check result
        def data_match?(message)
          return true unless @data.any?

          actual_data = Rimless.avro.decode(message[:data])
          expected_data = @data.deep_stringify_keys

          actual_data.merge(expected_data) == actual_data
        end

        # Setup the +WaterDrop+ spies and record each sent message. The spies
        # swallow the calls (return +nil+), so no real delivery happens.
        def listen_to_messages
          allow(WaterDrop::SyncProducer).to receive(:call) do |data, **args|
            @messages << { data: data, args: args, type: :sync }
            nil
          end

          allow(WaterDrop::AsyncProducer).to receive(:call) do |data, **args|
            @messages << { data: data, args: args, type: :async }
            nil
          end
        end

        # Serve the RSpec API and return the positive failure message,
        # including a dump of all unmatching sent messages when present.
        #
        # @return [String] the message to display
        def failure_message
          result = ["expected to send #{base_message}"]

          if @unmatching.any?
            result << "\nSent messages:"
            @unmatching.each do |message|
              result << "\n #{base_message_detail(message)}"
            end
          end

          result.join
        end

        # Serve the RSpec API and return the negative failure message.
        #
        # @return [String] the message to display
        def failure_message_when_negated
          "expected not to send #{base_message}"
        end

        # The base error message with all the expectation details included.
        #
        # @return [String] the expectation details message
        def base_message
          expectation_mod = @expectation_type.to_s.humanize.downcase
          result = ["#{expectation_mod} #{@expected_number} messages,"]

          result << " with schema #{@schema}," if @schema
          result << " with #{@args}," if @args.any?
          result << " with data #{@data}," if @data.any?
          result << " but sent #{@matching_count}"

          result.join
        end

        # The expectation details of a single message when unmatching messages
        # were found.
        #
        # @param message [Hash{Symbol => Mixed}] the recorded message
        # @return [String] the expectation details of a single message
        def base_message_detail(message)
          result = ['message']

          result << " with #{message[:args]}" if message[:args].any?
          result << " with data: #{Rimless.avro.decode(message[:data])}"

          result.join
        end
      end
272
+ # rubocop:enable Metrics/ClassLength
273
+
274
+ # Check for messages which were sent to Apache Kafka by the given block.
275
+ #
276
+ # @param schema [String, Symbol, nil] the Apache Avro schema to check
277
+ #
278
+ # rubocop:disable Naming/PredicateName because its a RSpec matcher
279
      # @param schema [String, Symbol, nil] the Apache Avro schema to check
      # @return [HaveSentKafkaMessage] a fresh matcher instance
      def have_sent_kafka_message(schema = nil)
        HaveSentKafkaMessage.new(schema)
      end
      # +sent_kafka_message+ reads better with the +expect { }.not_to+ form
      alias sent_kafka_message have_sent_kafka_message
283
+ # rubocop:enable Naming/PredicateName
284
+ end
285
+ end
286
+ end