rimless 2.9.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. checksums.yaml +4 -4
  2. data/Appraisals +2 -2
  3. data/CHANGELOG.md +66 -0
  4. data/Gemfile +0 -1
  5. data/README.md +64 -62
  6. data/Rakefile +13 -4
  7. data/UPGRADING.md +491 -0
  8. data/doc/upgrade-guide-sources/README.md +221 -0
  9. data/doc/upgrade-guide-sources/dep-avro_turf-1.20.md +23 -0
  10. data/doc/upgrade-guide-sources/dep-karafka-2.0.md +117 -0
  11. data/doc/upgrade-guide-sources/dep-waterdrop-2.8.md +30 -0
  12. data/gemfiles/rails_8.0.gemfile +1 -1
  13. data/gemfiles/rails_8.1.gemfile +1 -1
  14. data/lib/rimless/compatibility/.gitkeep +0 -0
  15. data/lib/rimless/configuration.rb +80 -6
  16. data/lib/rimless/consumer/app.rb +182 -0
  17. data/lib/rimless/{karafka → consumer}/avro_deserializer.rb +8 -6
  18. data/lib/rimless/consumer/base.rb +118 -0
  19. data/lib/rimless/consumer/job.rb +35 -0
  20. data/lib/rimless/consumer/job_bridge.rb +113 -0
  21. data/lib/rimless/extensions/avro_helpers.rb +83 -0
  22. data/lib/rimless/extensions/configuration_handling.rb +77 -0
  23. data/lib/rimless/extensions/consumer.rb +20 -0
  24. data/lib/rimless/extensions/dependencies.rb +84 -0
  25. data/lib/rimless/extensions/kafka_helpers.rb +46 -0
  26. data/lib/rimless/extensions/producer.rb +103 -0
  27. data/lib/rimless/initializers/compatibility.rb +3 -4
  28. data/lib/rimless/railtie.rb +7 -7
  29. data/lib/rimless/rspec/helpers.rb +53 -13
  30. data/lib/rimless/rspec/matchers.rb +14 -11
  31. data/lib/rimless/rspec.rb +13 -29
  32. data/lib/rimless/tasks/consumer.rake +18 -6
  33. data/lib/rimless/tasks/templates/application_consumer.rb +1 -1
  34. data/lib/rimless/tasks/templates/custom_consumer.rb +1 -1
  35. data/lib/rimless/tasks/templates/custom_consumer_spec.rb +5 -4
  36. data/lib/rimless/tasks/templates/karafka.rb +5 -4
  37. data/lib/rimless/version.rb +3 -1
  38. data/lib/rimless.rb +12 -14
  39. data/rimless.gemspec +7 -9
  40. metadata +38 -67
  41. data/lib/rimless/avro_helpers.rb +0 -81
  42. data/lib/rimless/base_consumer.rb +0 -30
  43. data/lib/rimless/compatibility/karafka_1_4.rb +0 -52
  44. data/lib/rimless/configuration_handling.rb +0 -82
  45. data/lib/rimless/consumer.rb +0 -209
  46. data/lib/rimless/consumer_job.rb +0 -10
  47. data/lib/rimless/dependencies.rb +0 -69
  48. data/lib/rimless/kafka_helpers.rb +0 -104
  49. data/lib/rimless/karafka/base64_interchanger.rb +0 -32
  50. data/lib/rimless/karafka/passthrough_mapper.rb +0 -29
  51. data/lib/rimless/tasks/stats.rake +0 -22
@@ -1,10 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Rimless
4
- # The base consumer job where each message is processed asynchronous via
5
- # Sidekiq. We need to inherit the Karafka base worker class into a custom
6
- # one, otherwise it fails.
7
- class ConsumerJob < ::Karafka::BaseWorker
8
- sidekiq_options queue: Rimless.configuration.consumer_job_queue
9
- end
10
- end
@@ -1,69 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Rimless
4
- # The top-level dependencies helpers.
5
- module Dependencies
6
- extend ActiveSupport::Concern
7
-
8
- class_methods do
9
- # (Re)configure our gem dependencies. We take care of setting up
10
- # +WaterDrop+, our Apache Kafka driver and +AvroTurf+, our Confluent
11
- # Schema Registry driver.
12
- def configure_dependencies
13
- configure_waterdrop
14
- configure_avro_turf
15
- end
16
-
17
- # Set sensible defaults for the +WaterDrop+ gem.
18
- def configure_waterdrop
19
- # Skip WaterDrop configuration when no brokers/client id is available,
20
- # because it will raise. It's fine to have none available for situations
21
- # like Rails asset precompilations, etc. - on runtime the settings
22
- # should be available, otherwise the message producing just
23
- # fails/raise.
24
- return if Rimless.configuration.kafka_brokers.empty? \
25
- || Rimless.configuration.client_id.blank?
26
-
27
- WaterDrop.setup do |config|
28
- # Activate message delivery and use the default logger
29
- config.deliver = true
30
- config.logger = Rimless.logger
31
- # An optional identifier of a Kafka consumer (in a consumer group)
32
- # that is passed to a Kafka broker with every request. A logical
33
- # application name to be included in Kafka logs and monitoring
34
- # aggregates.
35
- config.client_id = Rimless.configuration.client_id
36
- # All the known brokers, at least one. The ruby-kafka driver will
37
- # discover the whole cluster structure once and when issues occur to
38
- # dynamically adjust scaling operations.
39
- config.kafka.seed_brokers = Rimless.configuration.kafka_brokers
40
- # All brokers MUST acknowledge a new message
41
- config.kafka.required_acks = -1
42
- end
43
- end
44
-
45
- # Set sensible defaults for the +AvroTurf+ gem and (re)compile the Apache
46
- # Avro schema templates (ERB), so the gem can handle them properly.
47
- def configure_avro_turf
48
- # No need to configure AvroTurf when no schema registry URL is
49
- # available. It's fine to skip this for scenarios where not the full
50
- # application configuration is available (eg. on Rails asset
51
- # precompilations, etc)
52
- return if Rimless.configuration.schema_registry_url.blank?
53
-
54
- # Setup a global available Apache Avro decoder/encoder with support for
55
- # the Confluent Schema Registry, but first create a helper instance
56
- Rimless.avro_utils = Rimless::AvroUtils.new
57
- # Compile our Avro schema templates to ready-to-consume Avro schemas
58
- Rimless.avro_utils.recompile_schemas
59
- # Register a global Avro messaging instance
60
- Rimless.avro = AvroTurf::Messaging.new(
61
- logger: Rimless.logger,
62
- namespace: Rimless.avro_utils.namespace,
63
- schemas_path: Rimless.avro_utils.output_path,
64
- registry_url: Rimless.configuration.schema_registry_url
65
- )
66
- end
67
- end
68
- end
69
- end
@@ -1,104 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Rimless
4
- # The top-level Apache Kafka helpers.
5
- module KafkaHelpers
6
- extend ActiveSupport::Concern
7
-
8
- class_methods do
9
- # Generate a common topic name for Apache Kafka while taking care of
10
- # configured prefixes.
11
- #
12
- # @param args [Array<Mixed>] the relative topic name
13
- # @return [String] the complete topic name
14
- #
15
- # @example Name only
16
- # Rimless.topic(:users)
17
- # @example Name with app
18
- # Rimless.topic(:users, app: 'test-api')
19
- # @example Mix and match
20
- # Rimless.topic(name: 'test', app: :fancy_app)
21
- # @example Full name - use as is
22
- # Rimless.topic(full_name: 'my.custom.topic.name')
23
- def topic(*args)
24
- opts = args.last
25
- name = args.first if [String, Symbol].member?(args.first.class)
26
-
27
- if opts.is_a?(Hash)
28
- # When we got a full name, we use it as is
29
- return opts[:full_name] if opts.key? :full_name
30
-
31
- name = opts[:name] if opts.key?(:name)
32
- app = opts[:app] if opts.key?(:app)
33
- end
34
-
35
- name ||= nil
36
- app ||= Rimless.configuration.app_name
37
-
38
- raise ArgumentError, 'No name given' if name.nil?
39
-
40
- "#{Rimless.topic_prefix(app)}#{name}".tr('_', '-')
41
- end
42
-
43
- # Send a single message to Apache Kafka. The data is encoded according to
44
- # the given Apache Avro schema. The destination Kafka topic may be a
45
- # relative name, or a hash which is passed to the +.topic+ method to
46
- # manipulate the application details. The message is sent in a
47
- # synchronous, blocking way.
48
- #
49
- # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
50
- # @param schema [String, Symbol] the Apache Avro schema to use
51
- # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
52
- # Apache Kafka topic
53
- def sync_message(data:, schema:, topic:, **args)
54
- encoded = Rimless.encode(data, schema: schema)
55
- sync_raw_message(data: encoded, topic: topic, **args)
56
- end
57
- alias_method :message, :sync_message
58
-
59
- # Send a single message to Apache Kafka. The data is encoded according to
60
- # the given Apache Avro schema. The destination Kafka topic may be a
61
- # relative name, or a hash which is passed to the +.topic+ method to
62
- # manipulate the application details. The message is sent in an
63
- # asynchronous, non-blocking way.
64
- #
65
- # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
66
- # @param schema [String, Symbol] the Apache Avro schema to use
67
- # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
68
- # Apache Kafka topic
69
- def async_message(data:, schema:, topic:, **args)
70
- encoded = Rimless.encode(data, schema: schema)
71
- async_raw_message(data: encoded, topic: topic, **args)
72
- end
73
-
74
- # Send a single message to Apache Kafka. The data is not touched, so you
75
- # need to encode it yourself before you pass it in. The destination Kafka
76
- # topic may be a relative name, or a hash which is passed to the +.topic+
77
- # method to manipulate the application details. The message is sent in a
78
- # synchronous, blocking way.
79
- #
80
- # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
81
- # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
82
- # Apache Kafka topic
83
- def sync_raw_message(data:, topic:, **args)
84
- args = args.merge(topic: topic(topic))
85
- WaterDrop::SyncProducer.call(data, **args)
86
- end
87
- alias_method :raw_message, :sync_raw_message
88
-
89
- # Send a single message to Apache Kafka. The data is not touched, so you
90
- # need to encode it yourself before you pass it in. The destination Kafka
91
- # topic may be a relative name, or a hash which is passed to the +.topic+
92
- # method to manipulate the application details. The message is sent in an
93
- # asynchronous, non-blocking way.
94
- #
95
- # @param data [Hash{Symbol => Mixed}] the raw data, unencoded
96
- # @param topic [String, Symbol, Hash{Symbol => Mixed}] the destination
97
- # Apache Kafka topic
98
- def async_raw_message(data:, topic:, **args)
99
- args = args.merge(topic: topic(topic))
100
- WaterDrop::AsyncProducer.call(data, **args)
101
- end
102
- end
103
- end
104
- end
@@ -1,32 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Rimless
4
- module Karafka
5
- # Allow the +karafka-sidekiq-backend+ gem to transfer binary Apache Kafka
6
- # messages to the actual Sidekiq job.
7
- #
8
- # rubocop:disable Security/MarshalLoad -- because we encode/decode the
9
- # messages in our own controlled context
10
- class Base64Interchanger < ::Karafka::Interchanger
11
- # Encode a binary Apache Kafka message(s) so they can be passed to the
12
- # Sidekiq +Rimless::ConsumerJob+.
13
- #
14
- # @param params_batch [Karafka::Params::ParamsBatch] the karafka params
15
- # batch object
16
- # @return [String] the marshaled+base64 encoded data
17
- def encode(params_batch)
18
- Base64.encode64(Marshal.dump(super))
19
- end
20
-
21
- # Decode the binary Apache Kafka message(s) so they can be processed by
22
- # the Sidekiq +Rimless::ConsumerJob+.
23
- #
24
- # @param params_string [String] the marshaled+base64 encoded data
25
- # @return [Array<Hash>] the unmarshaled+base64 decoded data
26
- def decode(params_batch)
27
- super(Marshal.load(Base64.decode64(params_batch))).map(&:stringify_keys)
28
- end
29
- end
30
- # rubocop:enable Security/MarshalLoad
31
- end
32
- end
@@ -1,29 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Rimless
4
- module Karafka
5
- # The Karafka framework makes some assumptions about the consumer group and
6
- # topic names. We have our own opinions/conventions, so we just pass them
7
- # through unmodified.
8
- class PassthroughMapper
9
- # We do not want to modify the given consumer group name, so we
10
- # pass it through.
11
- #
12
- # @param raw_consumer_group_name [String, Symbol] the original
13
- # consumer group name
14
- # @return [String, Symbol] the original consumer group name
15
- def call(raw_consumer_group_name)
16
- raw_consumer_group_name
17
- end
18
-
19
- # We do not want to modify the given topic name, so we pass it through.
20
- #
21
- # @param topic [String, Symbol] the original topic name
22
- # @return [String, Symbol] the original topic name
23
- def incoming(topic)
24
- topic
25
- end
26
- alias outgoing incoming
27
- end
28
- end
29
- end
@@ -1,22 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- # TODO: Remove this file, when Rails >= 8.0 is the minimum requirement
4
- if defined?(Rails) && Rails.env.development? && Rails::VERSION::STRING < '8.0.0'
5
- require 'rspec/core/rake_task'
6
-
7
- # rubocop:disable Rails/RakeEnvironment -- because this is just a helper
8
- # command, no need for an application bootstrap
9
- task :stats do
10
- require 'rails/code_statistics'
11
-
12
- [
13
- [:unshift, 'Consumer', 'app/consumers']
14
- ].each do |method, type, dir|
15
- next unless File.directory? dir
16
-
17
- STATS_DIRECTORIES.send(method, [type, dir])
18
- CodeStatistics::TEST_TYPES << type if type.include? 'specs'
19
- end
20
- end
21
- # rubocop:enable Rails/RakeEnvironment
22
- end