rimless 2.9.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64) hide show
  1. checksums.yaml +4 -4
  2. data/Appraisals +2 -2
  3. data/CHANGELOG.md +70 -0
  4. data/Gemfile +0 -1
  5. data/README.md +64 -62
  6. data/Rakefile +13 -4
  7. data/UPGRADING.md +491 -0
  8. data/doc/kafka-playground/Dockerfile +5 -5
  9. data/doc/kafka-playground/Gemfile +1 -1
  10. data/doc/kafka-playground/Gemfile.lock +178 -140
  11. data/doc/kafka-playground/README.md +1 -1
  12. data/doc/kafka-playground/bin/consume-topic +1 -1
  13. data/doc/kafka-playground/bin/create-topic +28 -17
  14. data/doc/kafka-playground/bin/delete-topic +8 -3
  15. data/doc/kafka-playground/bin/list-topics +1 -1
  16. data/doc/kafka-playground/bin/produce-event +31 -18
  17. data/doc/kafka-playground/config/environment.rb +6 -38
  18. data/doc/kafka-playground/config/initializers/resolv.rb +59 -0
  19. data/doc/kafka-playground/config/initializers/rimless.rb +39 -0
  20. data/doc/kafka-playground/examples/rimless-produce +19 -20
  21. data/doc/upgrade-guide-sources/README.md +221 -0
  22. data/doc/upgrade-guide-sources/dep-avro_turf-1.20.md +23 -0
  23. data/doc/upgrade-guide-sources/dep-karafka-2.0.md +117 -0
  24. data/doc/upgrade-guide-sources/dep-waterdrop-2.8.md +30 -0
  25. data/gemfiles/rails_8.0.gemfile +1 -1
  26. data/gemfiles/rails_8.1.gemfile +1 -1
  27. data/lib/rimless/compatibility/.gitkeep +0 -0
  28. data/lib/rimless/configuration.rb +80 -6
  29. data/lib/rimless/consumer/app.rb +182 -0
  30. data/lib/rimless/{karafka → consumer}/avro_deserializer.rb +8 -6
  31. data/lib/rimless/consumer/base.rb +118 -0
  32. data/lib/rimless/consumer/job.rb +35 -0
  33. data/lib/rimless/consumer/job_bridge.rb +113 -0
  34. data/lib/rimless/extensions/avro_helpers.rb +83 -0
  35. data/lib/rimless/extensions/configuration_handling.rb +77 -0
  36. data/lib/rimless/extensions/consumer.rb +20 -0
  37. data/lib/rimless/extensions/dependencies.rb +84 -0
  38. data/lib/rimless/extensions/kafka_helpers.rb +46 -0
  39. data/lib/rimless/extensions/producer.rb +103 -0
  40. data/lib/rimless/initializers/compatibility.rb +3 -4
  41. data/lib/rimless/railtie.rb +7 -7
  42. data/lib/rimless/rspec/helpers.rb +53 -13
  43. data/lib/rimless/rspec/matchers.rb +14 -11
  44. data/lib/rimless/rspec.rb +13 -29
  45. data/lib/rimless/tasks/consumer.rake +18 -6
  46. data/lib/rimless/tasks/templates/application_consumer.rb +1 -1
  47. data/lib/rimless/tasks/templates/custom_consumer.rb +1 -1
  48. data/lib/rimless/tasks/templates/custom_consumer_spec.rb +5 -4
  49. data/lib/rimless/tasks/templates/karafka.rb +5 -4
  50. data/lib/rimless/version.rb +3 -1
  51. data/lib/rimless.rb +12 -14
  52. data/rimless.gemspec +7 -9
  53. metadata +40 -67
  54. data/lib/rimless/avro_helpers.rb +0 -81
  55. data/lib/rimless/base_consumer.rb +0 -30
  56. data/lib/rimless/compatibility/karafka_1_4.rb +0 -52
  57. data/lib/rimless/configuration_handling.rb +0 -82
  58. data/lib/rimless/consumer.rb +0 -209
  59. data/lib/rimless/consumer_job.rb +0 -10
  60. data/lib/rimless/dependencies.rb +0 -69
  61. data/lib/rimless/kafka_helpers.rb +0 -104
  62. data/lib/rimless/karafka/base64_interchanger.rb +0 -32
  63. data/lib/rimless/karafka/passthrough_mapper.rb +0 -29
  64. data/lib/rimless/tasks/stats.rake +0 -22
@@ -0,0 +1,77 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level configuration handling.
6
+ #
7
+ # rubocop:disable Style/ClassVars -- because we split module code
8
+ module ConfigurationHandling
9
+ extend ActiveSupport::Concern
10
+
11
+ class_methods do
12
+ # Retrieve the current configuration object.
13
+ #
14
+ # @return [Configuration]
15
+ def configuration
16
+ @@configuration ||= Configuration.new
17
+ end
18
+
19
+ # Configure the concern by providing a block which takes
20
+ # care of this task. Example:
21
+ #
22
+ # Rimless.configure do |conf|
23
+ # # conf.xyz = [..]
24
+ # end
25
+ def configure
26
+ yield(configuration)
27
+ configure_dependencies
28
+ end
29
+
30
+ # Reset the current configuration with the default one.
31
+ def reset_configuration!
32
+ @@configuration = Configuration.new
33
+ end
34
+
35
+ # Retrieve the current configured environment. You can use it like
36
+ # +Rails.env+ to query it. E.g. +Rimless.env.production?+.
37
+ #
38
+ # @return [ActiveSupport::StringInquirer] the environment
39
+ def env
40
+ @@env = ActiveSupport::StringInquirer.new(configuration.env.to_s) \
41
+ if @@env.to_s != configuration.env.to_s
42
+ @@env
43
+ end
44
+
45
+ # A simple convention helper to setup Apache Kafka topic names.
46
+ #
47
+ # @param app [String] the application namespace
48
+ # @return [String] the Apache Kafka topic name prefix
49
+ def topic_prefix(app = Rimless.configuration.app_name)
50
+ "#{Rimless.env}.#{app}."
51
+ end
52
+
53
+ # Pass back the local application name. When we are loaded together
54
+ # with a Rails application we use the application class name. This
55
+ # application name is URI/GID compatible. When no local application is
56
+ # available, we just pass back +nil+.
57
+ #
58
+ # @return [String, nil] the Rails application name, or +nil+
59
+ def local_app_name
60
+ # Check for non-Rails integration
61
+ return unless defined? Rails
62
+ # Check if an application is defined
63
+ return if Rails.application.nil?
64
+
65
+ # Pass back the URI compatible application name
66
+ Rails.application.class.module_parent_name.underscore.dasherize
67
+ end
68
+
69
+ # Retrieve the current configured logger instance.
70
+ #
71
+ # @return [Logger] the logger instance
72
+ delegate :logger, to: :configuration
73
+ end
74
+ end
75
+ # rubocop:enable Style/ClassVars
76
+ end
77
+ end
@@ -0,0 +1,20 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level Apache Kafka message consumer integration.
6
+ module Consumer
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # A simple shortcut to fetch the Karafka-wrapping consumer application.
11
+ #
12
+ # @return [Rimless::Consumer::App] the internal consumer
13
+ # application class
14
+ def consumer
15
+ @consumer ||= Rimless::Consumer::App.new
16
+ end
17
+ end
18
+ end
19
+ end
20
+ end
@@ -0,0 +1,84 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level dependencies helpers.
6
+ module Dependencies
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # (Re)configure our gem dependencies. We take care of setting up
11
+ # +WaterDrop+, our Apache Kafka driver and +AvroTurf+, our Confluent
12
+ # Schema Registry driver.
13
+ def configure_dependencies
14
+ configure_avro
15
+ configure_producer
16
+ end
17
+
18
+ # Set sensible defaults for the +AvroTurf+ gem and (re)compile the
19
+ # Apache Avro schema templates (ERB), so the gem can handle them
20
+ # properly.
21
+ def configure_avro
22
+ # No need to configure AvroTurf when no schema registry URL is
23
+ # available. It's fine to skip this for scenarios where the full
24
+ # application configuration is not available (eg. on Rails asset
25
+ # precompilations, etc)
26
+ return if Rimless.configuration.schema_registry_url.blank?
27
+
28
+ # Setup a global available Apache Avro decoder/encoder with support
29
+ # for the Confluent Schema Registry, but first create a helper
30
+ # instance
31
+ Rimless.avro_utils = Rimless::AvroUtils.new
32
+ # Compile our Avro schema templates to ready-to-consume Avro schemas
33
+ Rimless.avro_utils.recompile_schemas
34
+ # Register a global Avro messaging instance
35
+ Rimless.avro = AvroTurf::Messaging.new(
36
+ **Rimless.configuration.avro_configure.call(
37
+ logger: Rimless.logger,
38
+ namespace: Rimless.avro_utils.namespace,
39
+ schemas_path: Rimless.avro_utils.output_path,
40
+ registry_url: Rimless.configuration.schema_registry_url
41
+ )
42
+ )
43
+ end
44
+
45
+ # Set sensible defaults for the +WaterDrop+ gem.
46
+ def configure_producer
47
+ # Skip WaterDrop configuration when no brokers/client id is
48
+ # available, because it will raise. It's fine to have none available
49
+ # for situations like Rails asset precompilations, etc. - on runtime
50
+ # the settings should be available, otherwise the message producing
51
+ # just fails/raise.
52
+ return if Rimless.configuration.kafka_brokers.empty? \
53
+ || Rimless.configuration.client_id.blank?
54
+
55
+ # Register a global waterdrop producer instance
56
+ Rimless.producer = WaterDrop::Producer.new do |config|
57
+ # Activate message delivery and use the default logger
58
+ config.deliver = true
59
+ config.logger = Rimless.logger
60
+
61
+ # See: https://bit.ly/3OtIfeu (+config.kafka+ settings)
62
+
63
+ # An optional identifier of a Kafka consumer (in a consumer group)
64
+ # that is passed to a Kafka broker with every request. A logical
65
+ # application name to be included in Kafka logs and monitoring
66
+ # aggregates.
67
+ config.kafka[:'client.id'] = Rimless.configuration.client_id
68
+ # All the known brokers, at least one. The ruby-kafka driver will
69
+ # discover the whole cluster structure once and when issues occur
70
+ # to dynamically adjust scaling operations.
71
+ config.kafka[:'bootstrap.servers'] =
72
+ Rimless.configuration.kafka_brokers
73
+ # All brokers MUST acknowledge a new message by default
74
+ config.kafka[:'request.required.acks'] = -1
75
+
76
+ # Call the user-configurable block with our configuration
77
+ # for customizations
78
+ Rimless.configuration.producer_configure.call(config)
79
+ end
80
+ end
81
+ end
82
+ end
83
+ end
84
+ end
@@ -0,0 +1,46 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level Apache Kafka helpers.
6
+ module KafkaHelpers
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # Generate a common topic name for Apache Kafka while taking care of
11
+ # configured prefixes.
12
+ #
13
+ # @param args [Array<Mixed>] the relative topic name
14
+ # @return [String] the complete topic name
15
+ #
16
+ # @example Name only
17
+ # Rimless.topic(:users)
18
+ # @example Name with app
19
+ # Rimless.topic(:users, app: 'test-api')
20
+ # @example Mix and match
21
+ # Rimless.topic(name: 'test', app: :fancy_app)
22
+ # @example Full name - use as is
23
+ # Rimless.topic(full_name: 'my.custom.topic.name')
24
+ def topic(*args)
25
+ opts = args.last
26
+ name = args.first if [String, Symbol].member?(args.first.class)
27
+
28
+ if opts.is_a?(Hash)
29
+ # When we got a full name, we use it as is
30
+ return opts[:full_name] if opts.key? :full_name
31
+
32
+ name = opts[:name] if opts.key?(:name)
33
+ app = opts[:app] if opts.key?(:app)
34
+ end
35
+
36
+ name ||= nil
37
+ app ||= Rimless.configuration.app_name
38
+
39
+ raise ArgumentError, 'No name given' if name.nil?
40
+
41
+ "#{Rimless.topic_prefix(app)}#{name}".tr('_', '-')
42
+ end
43
+ end
44
+ end
45
+ end
46
+ end
@@ -0,0 +1,103 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level Apache Kafka message producer integration.
6
+ module Producer
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # A shared +WaterDrop::Producer+ instance
11
+ mattr_accessor :producer
12
+
13
+ # Send a single message to Apache Kafka. The data is encoded according
14
+ # to the given Apache Avro schema. The destination Kafka topic may be a
15
+ # relative name, or a hash which is passed to the +.topic+ method to
16
+ # manipulate the application details. The message is sent in a
17
+ # synchronous, blocking way.
18
+ #
19
+ # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
20
+ # @param schema [String, Symbol] the Apache Avro schema to use
21
+ # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
22
+ # Apache Kafka topic
23
+ # @param args [Hash{Symbol => Mixed}] additional parameters,
24
+ # see: https://bit.ly/4tHjcVg
25
+ def sync_message(data:, schema:, topic:, **args)
26
+ encoded = Rimless.encode(data, schema: schema)
27
+ sync_raw_message(data: encoded, topic: topic, **args)
28
+ end
29
+ alias_method :message, :sync_message
30
+
31
+ # Send a single message to Apache Kafka. The data is encoded according
32
+ # to the given Apache Avro schema. The destination Kafka topic may be a
33
+ # relative name, or a hash which is passed to the +.topic+ method to
34
+ # manipulate the application details. The message is sent in an
35
+ # asynchronous, non-blocking way.
36
+ #
37
+ # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
38
+ # @param schema [String, Symbol] the Apache Avro schema to use
39
+ # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
40
+ # Apache Kafka topic
41
+ # @param args [Hash{Symbol => Mixed}] additional parameters,
42
+ # see: https://bit.ly/4tHjcVg
43
+ def async_message(data:, schema:, topic:, **args)
44
+ encoded = Rimless.encode(data, schema: schema)
45
+ async_raw_message(data: encoded, topic: topic, **args)
46
+ end
47
+
48
+ # Send a single message to Apache Kafka. The data is not transformed, so
49
+ # you need to encode it yourself before you pass it in. The destination
50
+ # Kafka topic may be a relative name, or a hash which is passed to the
51
+ # +.topic+ method to manipulate the application details. The message is
52
+ # sent in a synchronous, blocking way.
53
+ #
54
+ # @param data [String] the pre-encoded message data
55
+ # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
56
+ # Apache Kafka topic
57
+ # @param headers [Hash{String => String, Array<String>}, nil] the
58
+ # message headers to send
59
+ # @param args [Hash{Symbol => Mixed}] additional parameters,
60
+ # see: https://bit.ly/4tHjcVg
61
+ def sync_raw_message(data:, topic:, headers: nil, **args)
62
+ args = args.merge(topic: topic(topic), payload: data)
63
+
64
+ # A compatibility helper for headers, as WaterDrop is now more strict
65
+ if headers.present?
66
+ args[:headers] = headers
67
+ args[:headers].deep_stringify_keys!.deep_transform_values!(&:to_s) \
68
+ if headers.is_a? Hash
69
+ end
70
+
71
+ producer.produce_sync(**args)
72
+ end
73
+ alias_method :raw_message, :sync_raw_message
74
+
75
+ # Send a single message to Apache Kafka. The data is not touched, so
76
+ # you need to encode it yourself before you pass it in. The destination
77
+ # Kafka topic may be a relative name, or a hash which is passed to the
78
+ # +.topic+ method to manipulate the application details. The message is
79
+ # sent in an asynchronous, non-blocking way.
80
+ #
81
+ # @param data [String] the pre-encoded message data
82
+ # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
83
+ # Apache Kafka topic
84
+ # @param headers [Hash{String => String, Array<String>}, nil] the
85
+ # message headers to send
86
+ # @param args [Hash{Symbol => Mixed}] additional parameters,
87
+ # see: https://bit.ly/4tHjcVg
88
+ def async_raw_message(data:, topic:, headers: nil, **args)
89
+ args = args.merge(topic: topic(topic), payload: data)
90
+
91
+ # A compatibility helper for headers, as WaterDrop is now more strict
92
+ if headers.present?
93
+ args[:headers] = headers
94
+ args[:headers].deep_stringify_keys!.deep_transform_values!(&:to_s) \
95
+ if headers.is_a? Hash
96
+ end
97
+
98
+ producer.produce_async(**args)
99
+ end
100
+ end
101
+ end
102
+ end
103
+ end
@@ -11,7 +11,6 @@ def rimless_gem_version?(gem_name, expected)
11
11
  Gem::Dependency.new('', expected.to_s).match?('', actual)
12
12
  end
13
13
 
14
- # Load some polyfills for ActiveSupport lower than 6.0
15
- require 'rimless/compatibility/karafka_1_4' \
16
- if rimless_gem_version?('karafka', '~> 1.4') \
17
- && rimless_gem_version?('thor', '>= 1.3')
14
+ # Load some patches for GEM lower than VERSION
15
+ # require 'rimless/compatibility/GEM__VERSION' \
16
+ # if rimless_gem_version?('GEM', '~> VERSION')
@@ -19,19 +19,19 @@ module Rimless
19
19
  # register application consumers
20
20
  Rails::CodeStatistics.register_directory('Consumers', 'app/consumers') \
21
21
  if defined?(Rails::CodeStatistics)
22
+
23
+ # Karafka tries forcefully to load its bootfile, when it detects a
24
+ # Rails application (via railtie). This may fail on applications
25
+ # which use Rimless just for message producing. So we configure a
26
+ # special setting for Karafka to skip its loading.
27
+ # See: https://bit.ly/4uuqFaI
28
+ ENV['KARAFKA_BOOT_FILE'] = 'false' unless Karafka.boot_file.exist?
22
29
  end
23
30
 
24
31
  # Run after all configuration is set via Rails initializers
25
32
  config.after_initialize do
26
33
  # Reconfigure our dependencies
27
34
  Rimless.configure_dependencies
28
-
29
- # Load the Karafka application inside the Sidekiq server application
30
- if defined? Sidekiq
31
- Sidekiq.configure_server do
32
- Rimless.consumer.initialize!
33
- end
34
- end
35
35
  end
36
36
 
37
37
  # Load all our Rake tasks if we're supposed to do
@@ -14,27 +14,41 @@ module Rimless
14
14
  Rimless.avro_decode(data, **)
15
15
  end
16
16
 
17
- # A simple helper to fake a deserialized Apache Kafka message for
18
- # consuming.
17
+ # A simple helper to generate Apache Kafka message doubles for consuming.
19
18
  #
20
19
  # @param payload [Hash{Symbol => Mixed}] the message payload
21
20
  # @param topic [String, Hash{Symbol => Mixed}] the actual message
22
21
  # topic (full as string, or parts via hash)
23
- # @return [OpenStruct] the fake deserialized Kafka message
24
- def kafka_message(topic: nil, headers: {}, **payload)
25
- OpenStruct.new(
26
- topic: Rimless.topic(topic),
22
+ # @param metadata [Hash{Symbol => Mixed}] the message metadata
23
+ # @return [RSpec::Mocks::InstanceVerifyingDouble] the Kafka message double
24
+ #
25
+ # rubocop:disable Metrics/MethodLength -- because of the metadata handling
26
+ def kafka_message(topic: nil, headers: {}, metadata: {}, **payload)
27
+ metadata = {
28
+ topic: topic ? Rimless.topic(topic) : nil,
29
+ partition: 0,
30
+ offset: 206,
31
+ key: nil,
27
32
  headers: headers,
33
+ timestamp: Time.current,
34
+ received_at: Time.current,
35
+ **metadata
36
+ }
37
+
38
+ instance_double(
39
+ Karafka::Messages::Message,
40
+ deserialized?: true,
41
+ tombstone?: false,
28
42
  payload: payload,
29
- is_control_record: false,
30
- key: nil,
31
- offset: 206,
32
- partition: 0,
33
- create_time: Time.current,
34
- receive_time: Time.current,
35
- deserialized: true
43
+ metadata: instance_double(
44
+ Karafka::Messages::Metadata,
45
+ **metadata,
46
+ to_h: metadata
47
+ ),
48
+ **metadata
36
49
  )
37
50
  end
51
+ # rubocop:enable Metrics/MethodLength
38
52
 
39
53
  # Capture all Apache Kafka messages of the given block.
40
54
  #
@@ -43,6 +57,32 @@ module Rimless
43
57
  def capture_kafka_messages(&)
44
58
  Rimless::RSpec::Matchers::HaveSentKafkaMessage.new(nil).capture(&)
45
59
  end
60
+
61
+ # An augmented helper for +karafka.consumer_for+, provided by the
62
+ # +karafka-testing+ gem to locate and instantiate a consumer. When the
63
+ # found consumer features the Rimless job bridge consumer logic, the
64
+ # +enqueue_job+ is replaced to not enqueue the job, but perform it
65
+ # inline. Otherwise the end-user consumer logic is not executed, which is
66
+ # clearly the user expectation.
67
+ #
68
+ # @param topic [String] the full topic name, use +Rimless.topic+ if
69
+ # needed
70
+ # @return [Karafka::BaseConsumer] the found consumer
71
+ def kafka_consumer_for(topic)
72
+ # The +karafka+ helper is provided by the +karafka-testing+ gem
73
+ karafka.consumer_for(topic).tap do |consumer|
74
+ # When we're not dealing with a regular Rimless job bridge consumer,
75
+ # we skip further processing
76
+ next unless consumer.respond_to? :enqueue_job
77
+
78
+ # Otherwise rig the job bridging and run the wrapped consumer instead
79
+ allow(consumer).to receive(:enqueue_job) do |message|
80
+ Rimless.configuration.consumer_job_class.perform_now(
81
+ **consumer.message_to_job_args(message)
82
+ )
83
+ end
84
+ end
85
+ end
46
86
  end
47
87
  end
48
88
  end
@@ -218,19 +218,22 @@ module Rimless
218
218
  # Setup the +WaterDrop+ spies and record each sent message.
219
219
  # because of the message decoding
220
220
  def listen_to_messages
221
- decode = proc do |encoded|
222
- { encoded_data: encoded, data: Rimless.avro.decode(encoded) }
223
- end
224
-
225
- allow(WaterDrop::SyncProducer).to receive(:call) do |data, **args|
226
- @messages << { args: args, type: :sync }.merge(decode[data])
227
- nil
221
+ handle_message = proc do |type|
222
+ proc do |payload:, **args|
223
+ @messages << {
224
+ type:,
225
+ args:,
226
+ encoded_data: payload,
227
+ data: Rimless.avro.decode(payload)
228
+ }
229
+ nil
230
+ end
228
231
  end
229
232
 
230
- allow(WaterDrop::AsyncProducer).to receive(:call) do |data, **args|
231
- @messages << { args: args, type: :async }.merge(decode[data])
232
- nil
233
- end
233
+ allow(Rimless.producer).to \
234
+ receive(:produce_sync, &handle_message[:sync])
235
+ allow(Rimless.producer).to \
236
+ receive(:produce_async, &handle_message[:async])
234
237
  end
235
238
 
236
239
  # Serve the RSpec API and return the positive failure message.
data/lib/rimless/rspec.rb CHANGED
@@ -5,17 +5,8 @@ require 'webmock/rspec'
5
5
  require 'rimless'
6
6
  require 'rimless/rspec/helpers'
7
7
  require 'rimless/rspec/matchers'
8
- require 'karafka/testing/rspec/helpers'
9
-
10
- # This fake schema registry server uses Sinatra but the gem does not include
11
- # this dependency as runtime, just as development. Therefore we added it.
12
8
  require 'avro_turf/test/fake_confluent_schema_registry_server'
13
-
14
- # Add a monkey patch to add proper Sinatra 4.x support
15
- class FakeConfluentSchemaRegistryServer
16
- # Allow any host name on tests
17
- set :host_authorization, { permitted_hosts: [] }
18
- end
9
+ require 'karafka/testing/rspec/helpers'
19
10
 
20
11
  # RSpec 1.x and 2.x compatibility
21
12
  #
@@ -26,7 +17,12 @@ raise 'No RSPEC_CONFIGURER is defined, webmock is missing?' \
26
17
  RSPEC_CONFIGURER.configure do |config|
27
18
  config.include Rimless::RSpec::Helpers
28
19
  config.include Rimless::RSpec::Matchers
29
- config.include Karafka::Testing::RSpec::Helpers
20
+
21
+ # Load the Karafka testing helpers when we're running in an actual end-user
22
+ # application, not within our own test suite as we do not provide a
23
+ # `karafka.rb` boot entry
24
+ config.include Karafka::Testing::RSpec::Helpers \
25
+ if Karafka.boot_file.exist?
30
26
 
31
27
  # Set the custom +consumer+ type for consumer spec files
32
28
  config.define_derived_metadata(file_path: %r{/spec/consumers/}) do |meta|
@@ -53,31 +49,19 @@ RSPEC_CONFIGURER.configure do |config|
53
49
  # the help of the faked (inlined) Schema Registry server. This allows us to
54
50
  # perform the actual Apache Avro message encoding/decoding without the need
55
51
  # to have a Schema Registry up and running.
56
- config.before(:each) do |example|
57
- # Get the Excon connection from the AvroTurf instance
58
- connection = Rimless.avro.instance_variable_get(:@registry)
59
- .instance_variable_get(:@upstream)
60
- .instance_variable_get(:@connection)
61
- .instance_variable_get(:@data)
62
- # Enable WebMock on the already instantiated
63
- # Confluent Schema Registry Excon connection
64
- connection[:mock] = true
65
- # Grab all Confluent Schema Registry requests and send
52
+ config.before(:each) do
53
+ # Intercept all Confluent Schema Registry requests and send
66
54
  # them to the faked (inlined) Schema Registry
67
- stub_request(:any, %r{^http://#{connection[:hostname]}})
55
+ stub_request(:any, /^#{Rimless.configuration.schema_registry_url}/)
68
56
  .to_rack(FakeConfluentSchemaRegistryServer)
69
57
  # Clear any cached data
70
58
  FakeConfluentSchemaRegistryServer.clear
71
59
 
72
60
  # Do not interact with Apache Kafka itself on tests
73
- allow(WaterDrop::AsyncProducer).to receive(:call)
74
- allow(WaterDrop::SyncProducer).to receive(:call)
61
+ allow(Rimless.producer).to receive(:produce_sync)
62
+ allow(Rimless.producer).to receive(:produce_async)
75
63
 
76
64
  # Reconfigure the Rimless AvroTurf instance
77
- Rimless.configure_avro_turf
78
-
79
- # When the example type is a Kafka consumer, we must initialize
80
- # the Karafka framework first.
81
- Rimless.consumer.initialize! if example.metadata[:type] == :consumer
65
+ Rimless.configure_avro
82
66
  end
83
67
  end
@@ -13,17 +13,29 @@ namespace :rimless do
13
13
  task routes: :environment do
14
14
  require 'rimless'
15
15
 
16
- Rimless.consumer.consumer_groups.each do |consumer_group|
16
+ Rimless.consumer.routes.each do |consumer_group|
17
17
  consumer_group.topics.each do |topic|
18
18
  name = topic.name.split('.')[1..].join('.')
19
19
 
20
- puts "# Topic: #{name}"
21
- puts "# Consumer: #{topic.consumer}"
20
+ consumer = topic.consumer
21
+ consumer = consumer.consumer.constantize \
22
+ if consumer.new.is_a? Rimless::Consumer::JobBridge
22
23
 
23
- base = topic.consumer.superclass.new(topic).methods
24
- events = topic.consumer.new(topic).methods - base
24
+ base_methods = consumer.superclass.instance_methods(false)
25
+ event_methods = (consumer.instance_methods(false) - base_methods).sort
25
26
 
26
- puts "# Events: #{events.join(', ')}"
27
+ event_methods = if event_methods.count > 3
28
+ event_methods.join("\n##{' ' * 20}")
29
+ else
30
+ event_methods.join(', ')
31
+ end
32
+
33
+ puts <<~INFO
34
+ # Topic (canonical): #{name}
35
+ # Topic (full name): #{topic.name}
36
+ # Consumer: #{consumer}
37
+ # Events: #{event_methods}
38
+ INFO
27
39
  puts
28
40
  end
29
41
  end
@@ -2,5 +2,5 @@
2
2
 
3
3
  # The overall shared base consumer for Apache Kafka messages. Just write your
4
4
  # own specific consumer and inherit this one to share logic.
5
- class ApplicationConsumer < Rimless::BaseConsumer
5
+ class ApplicationConsumer < Rimless::Consumer::Base
6
6
  end
@@ -7,6 +7,6 @@ class CustomConsumer < ApplicationConsumer
7
7
  # Handle +custom_event+ event messages.
8
8
  def custom_event(property1:, property2: nil)
9
9
  # Do whatever you need to do
10
- [property1, property2]
10
+ Rails.logger.debug([property1, property2])
11
11
  end
12
12
  end
@@ -4,11 +4,11 @@ require 'rails_helper'
4
4
 
5
5
  RSpec.describe CustomConsumer do
6
6
  let(:topic) { Rimless.topic(app: :your_app, name: :your_topic) }
7
- let(:instance) { karafka_consumer_for(topic) }
7
+ let(:instance) { kafka_consumer_for(topic) }
8
8
  let(:action) { instance.consume }
9
- let(:params) { kafka_message(topic: topic, **payload) }
9
+ let(:message) { kafka_message(topic: topic, **payload) }
10
10
 
11
- before { allow(instance).to receive(:params).and_return(params) }
11
+ before { allow(instance).to receive(:messages).and_return([message]) }
12
12
 
13
13
  context 'with custom_event message' do
14
14
  let(:payload) do
@@ -16,7 +16,8 @@ RSpec.describe CustomConsumer do
16
16
  end
17
17
 
18
18
  it 'returns the payload properties' do
19
- expect(action).to eql(['test', nil])
19
+ expect(Rails.logger).to receive(:debug).with(['test', nil]).once
20
+ action
20
21
  end
21
22
  end
22
23
  end
@@ -5,16 +5,17 @@ require 'rimless'
5
5
  # Setup the topic-consumer routing table and boot the consumer application
6
6
  Rimless.consumer.topics(
7
7
  { app: :your_app, name: :your_topic } => CustomConsumer
8
- ).boot!
8
+ )
9
9
 
10
10
  # Configure Karafka/ruby-kafka settings
11
11
  # Rimless.consumer.configure do |config|
12
- # # See https://github.com/karafka/karafka/wiki/Configuration
13
- # # config.kafka.start_from_beginning = false
12
+ # # See: https://bit.ly/3MAF6Jk (+config.*+ root level Karafka settings)
13
+ # # See: https://bit.ly/3OtIfeu (+config.kafka+ settings)
14
+ # # config.kafka[:'initial_offset'] = 'latest'
14
15
  # end
15
16
 
16
17
  # We want a less verbose logging on development
17
18
  # Rimless.logger.level = Logger::INFO if Rails.env.development?
18
19
 
19
- # Use a different Sidekiq queue for the consumer jobs
20
+ # Use a different ActiveJob queue for the consumer jobs
20
21
  # Rimless.configuration.consumer_job_queue = :messages