rimless 2.9.0 → 3.0.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (51) hide show
  1. checksums.yaml +4 -4
  2. data/Appraisals +2 -2
  3. data/CHANGELOG.md +66 -0
  4. data/Gemfile +0 -1
  5. data/README.md +64 -62
  6. data/Rakefile +13 -4
  7. data/UPGRADING.md +491 -0
  8. data/doc/upgrade-guide-sources/README.md +221 -0
  9. data/doc/upgrade-guide-sources/dep-avro_turf-1.20.md +23 -0
  10. data/doc/upgrade-guide-sources/dep-karafka-2.0.md +117 -0
  11. data/doc/upgrade-guide-sources/dep-waterdrop-2.8.md +30 -0
  12. data/gemfiles/rails_8.0.gemfile +1 -1
  13. data/gemfiles/rails_8.1.gemfile +1 -1
  14. data/lib/rimless/compatibility/.gitkeep +0 -0
  15. data/lib/rimless/configuration.rb +80 -6
  16. data/lib/rimless/consumer/app.rb +182 -0
  17. data/lib/rimless/{karafka → consumer}/avro_deserializer.rb +8 -6
  18. data/lib/rimless/consumer/base.rb +118 -0
  19. data/lib/rimless/consumer/job.rb +35 -0
  20. data/lib/rimless/consumer/job_bridge.rb +113 -0
  21. data/lib/rimless/extensions/avro_helpers.rb +83 -0
  22. data/lib/rimless/extensions/configuration_handling.rb +77 -0
  23. data/lib/rimless/extensions/consumer.rb +20 -0
  24. data/lib/rimless/extensions/dependencies.rb +84 -0
  25. data/lib/rimless/extensions/kafka_helpers.rb +46 -0
  26. data/lib/rimless/extensions/producer.rb +103 -0
  27. data/lib/rimless/initializers/compatibility.rb +3 -4
  28. data/lib/rimless/railtie.rb +7 -7
  29. data/lib/rimless/rspec/helpers.rb +53 -13
  30. data/lib/rimless/rspec/matchers.rb +14 -11
  31. data/lib/rimless/rspec.rb +13 -29
  32. data/lib/rimless/tasks/consumer.rake +18 -6
  33. data/lib/rimless/tasks/templates/application_consumer.rb +1 -1
  34. data/lib/rimless/tasks/templates/custom_consumer.rb +1 -1
  35. data/lib/rimless/tasks/templates/custom_consumer_spec.rb +5 -4
  36. data/lib/rimless/tasks/templates/karafka.rb +5 -4
  37. data/lib/rimless/version.rb +3 -1
  38. data/lib/rimless.rb +12 -14
  39. data/rimless.gemspec +7 -9
  40. metadata +38 -67
  41. data/lib/rimless/avro_helpers.rb +0 -81
  42. data/lib/rimless/base_consumer.rb +0 -30
  43. data/lib/rimless/compatibility/karafka_1_4.rb +0 -52
  44. data/lib/rimless/configuration_handling.rb +0 -82
  45. data/lib/rimless/consumer.rb +0 -209
  46. data/lib/rimless/consumer_job.rb +0 -10
  47. data/lib/rimless/dependencies.rb +0 -69
  48. data/lib/rimless/kafka_helpers.rb +0 -104
  49. data/lib/rimless/karafka/base64_interchanger.rb +0 -32
  50. data/lib/rimless/karafka/passthrough_mapper.rb +0 -29
  51. data/lib/rimless/tasks/stats.rake +0 -22
@@ -0,0 +1,182 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Consumer
5
+ # The consumer application which adds some convenience helpers and
6
+ # library-related configurations.
7
+ class App < Karafka::App
8
+ # Allow accessing the class-level configuration methods from our instance
9
+ delegate :setup, :routes, :config, to: self
10
+
11
+ # Creates a new Rimless/Karafka application instance while configuring
12
+ # our library defaults.
13
+ #
14
+ # @return [Rimless::Consumer::App] the configured consumer application
15
+ #
16
+ # rubocop:disable Metrics/MethodLength -- because of the Karafka
17
+ # configuration
18
+ def initialize
19
+ # Run the parent class initialization
20
+ super
21
+
22
+ setup do |config|
23
+ # See: https://bit.ly/3OtIfeu (+config.kafka+ settings)
24
+
25
+ # An optional identifier of a Kafka consumer (in a consumer group)
26
+ # that is passed to a Kafka broker with every request. A logical
27
+ # application name to be included in Kafka logs and monitoring
28
+ # aggregates.
29
+ config.kafka[:'client.id'] = Rimless.configuration.client_id
30
+ # All the known brokers, at least one. The ruby-kafka driver will
31
+ # discover the whole cluster structure once and when issues occur
32
+ # to dynamically adjust scaling operations.
33
+ config.kafka[:'bootstrap.servers'] =
34
+ Rimless.configuration.kafka_brokers
35
+ # All brokers MUST acknowledge a new message by default
36
+ config.kafka[:'request.required.acks'] = -1
37
+
38
+ # See: https://bit.ly/3MAF6Jk (+config.*+ settings)
39
+
40
+ # Used to uniquely identify given client instance - for logging only
41
+ config.client_id = [
42
+ Rimless.configuration.client_id,
43
+ Process.pid,
44
+ Socket.gethostname
45
+ ].join('-')
46
+
47
+ # Should be unique per application to properly track message
48
+ # consumption. See: Kafka Consumer Groups.
49
+ config.group_id = Rimless.configuration.client_id
50
+
51
+ # We use dots (parts separation) and underscores for topic names, by
52
+ # convention.
53
+ config.strict_topics_namespacing = false
54
+
55
+ # Number of milliseconds after which Karafka no longer waits for the
56
+ # consumers to stop gracefully but instead we force terminate
57
+ # everything.
58
+ config.shutdown_timeout = 10.seconds.in_milliseconds
59
+
60
+ # Recreate consumers with each batch. This will allow Rails code
61
+ # reload to work in the development mode. Otherwise Karafka process
62
+ # would not be aware of code changes.
63
+ config.consumer_persistence = Rimless.env.production?
64
+
65
+ # Use our logger instead
66
+ config.logger = Rimless.logger
67
+ end
68
+
69
+ # Add the logging listener to Karafka in order to facilitate our gem
70
+ # logger. When the user configuration results in a falsy value (eg.
71
+ # +nil+ or +false+), we skip it.
72
+ listener = Rimless.configuration.consumer_logger_listener
73
+ Karafka.monitor.subscribe(listener) if listener
74
+
75
+ # Configure some routing defaults
76
+ routes.draw do
77
+ defaults do
78
+ deserializers(
79
+ payload: Rimless.configuration.avro_deserializer_class.new
80
+ )
81
+ end
82
+ end
83
+
84
+ # Call the user-configurable block with our configuration
85
+ # for customizations
86
+ setup(&Rimless.configuration.consumer_configure)
87
+ end
88
+ # rubocop:enable Metrics/MethodLength
89
+
90
+ # Configure the topics-consumer routing table in a lean way.
91
+ #
92
+ # Examples:
93
+ #
94
+ # topics({ app: :test_app, name: :admins } => YourConsumer)
95
+ # topics({ app: :test_app, names: %i[users admins] } => YourConsumer)
96
+ #
97
+ # Examples:
98
+ #
99
+ # topics(
100
+ # { app: :test_app, name: :admins } => lambda { |topic|
101
+ # consumer Rimless::Consumer::JobBridge.build(dest_consumer)
102
+ # }
103
+ # )
104
+ #
105
+ # Examples:
106
+ #
107
+ # topics do
108
+ # topic('name') do
109
+ # consumer CustomConsumer
110
+ # end
111
+ # end
112
+ #
113
+ # @param topics [Hash{Hash => Class, Proc}] the topic to consumer mapping
114
+ # @yield the given block on the routing table
115
+ # @return [Rimless::Consumer::App] the application instance for chaining
116
+ def topics(topics = [], &block)
117
+ routes.draw do
118
+ consumer_group(Rimless.configuration.client_id) do
119
+ instance_exec(&block) if block_given?
120
+
121
+ topics.each do |topic_parts, dest_consumer|
122
+ Rimless.consumer.topic_names(topic_parts).each do |topic_name|
123
+ configure = proc do
124
+ consumer(
125
+ Rimless.configuration.job_bridge_class.build(dest_consumer)
126
+ )
127
+ deserializers(
128
+ payload: Rimless.configuration.avro_deserializer_class.new
129
+ )
130
+ end
131
+ configure = dest_consumer if dest_consumer.is_a? Proc
132
+ topic(topic_name, &configure)
133
+ end
134
+ end
135
+ end
136
+ end
137
+
138
+ self
139
+ end
140
+
141
+ # Build the conventional Apache Kafka topic names from the given parts.
142
+ # This allows various forms like single strings/symbols and a hash in the
143
+ # form of +{ app: [String, Symbol], name: [String, Symbol], names:
144
+ # [Array<String, Symbol>] }+. This allows for maximum flexibility.
145
+ #
146
+ # @param parts [String, Symbol, Hash{Symbol => Mixed}] the topic
147
+ # name parts
148
+ # @return [Array<String>] the full topic names
149
+ def topic_names(parts)
150
+ # We have a single app, but multiple names so we handle them
151
+ if parts.is_a?(Hash) && parts.key?(:names)
152
+ return parts[:names].map do |name|
153
+ Rimless.topic(parts.merge(name: name))
154
+ end
155
+ end
156
+
157
+ # Otherwise treat the input as a single topic name definition
158
+ [Rimless.topic(parts)]
159
+ end
160
+
161
+ # Allows the user to re-configure the Karafka application if this is
162
+ # needed. (eg. to set some kafka driver settings, etc)
163
+ #
164
+ # @yield [Karafka::Setup::ConfigProxy] the given block to allow
165
+ # configuration manipulation
166
+ # @return [Rimless::Consumer::App] our self for chaining
167
+ def configure(&)
168
+ setup(&)
169
+ self
170
+ end
171
+
172
+ # Check if we run as the Karafka server (consumer) process or not.
173
+ # Unfortunately Karafka still does not offer a solution for it like
174
+ # +Sidekiq.server?+. (Last tested with Karafka version +2.5.7+)
175
+ #
176
+ # @return [Boolean] whether we run as the Karafka server or not
177
+ def server?
178
+ $PROGRAM_NAME.end_with?('karafka') && ARGV.include?('server')
179
+ end
180
+ end
181
+ end
182
+ end
@@ -1,24 +1,26 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Rimless
4
- module Karafka
4
+ module Consumer
5
5
  # A custom Apache Avro compatible message deserializer.
6
6
  class AvroDeserializer
7
7
  # Deserialize an Apache Avro encoded Apache Kafka message.
8
8
  #
9
- # @param params [Karafka::Params::Params] the Karafka message parameters
10
- # @return [Hash{Symbol => Mixed}] the deserialized Apache Avro message
11
- def call(params)
9
+ # @param message [Karafka::Messages::Message] the Karafka message to
10
+ # deserialize
11
+ # @return [Hash{Symbol => Mixed}, nil] the deserialized Apache Avro
12
+ # message, or +nil+ when we received a tombstone message
13
+ def call(message)
12
14
  # When the Kafka message does not have a payload, we won't fail.
13
15
  # This is for Kafka users which use log compaction with a nil payload.
14
- return if params.raw_payload.nil?
16
+ return if message.raw_payload.nil?
15
17
 
16
18
  # We use sparsed hashes inside of Apache Avro messages for schema-less
17
19
  # blobs of data, such as loosely structured metadata blobs. That's a
18
20
  # somewhat bad idea on strictly typed and defined messages, but their
19
21
  # occurrence should be rare.
20
22
  Rimless
21
- .decode(params.raw_payload)
23
+ .decode(message.raw_payload)
22
24
  .then { |data| Sparsify(data, sparse_array: true) }
23
25
  .then { |data| data.transform_keys { |key| key.delete('\\') } }
24
26
  .then { |data| Unsparsify(data, sparse_array: true) }
@@ -0,0 +1,118 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Consumer
5
+ # The base consumer where all Apache Kafka messages will be processed,
6
+ # within an ActiveJob job. It comes with some simple conventions to keep
7
+ # the actual application code simple to use. Example usage on an
8
+ # application:
9
+ #
10
+ # app/consumers/my_consumer.rb
11
+ #
12
+ # class IdentityApiConsumer < ApplicationConsumer
13
+ # # Handle +identity-api.users/user_locked+ messages.
14
+ # #
15
+ # # @param user [Hash{Symbol => Mixed}] the event user data
16
+ # # @param args [Hash{Symbol => Mixed}] additional event data
17
+ # def user_locked(user:, **args)
18
+ # # ..
19
+ # end
20
+ # end
21
+ #
22
+ # Despite its default usage within an ActiveJob context, it is still directly
23
+ # usable by Karafka. Just be warned that, when running inside an ActiveJob
24
+ # context, it lacks support for various Karafka internals (eg. coordinator,
25
+ # client, etc).
26
+ class Base < Karafka::BaseConsumer
27
+ # Allows handling a single message, one at a time
28
+ attr_accessor :message
29
+
30
+ # Allow older clients to access the current message as +params+, and the
31
+ # current messages batch as +params_batch+ (Karafka 1.4 style)
32
+ alias params message
33
+ alias params_batch messages
34
+
35
+ # A structure for Karafka::BaseConsumer#coordinator, within job contexts.
36
+ #
37
+ # rubocop:disable Lint/StructNewOverride -- because +:partition+ is
38
+ # expected to be overwritten
39
+ JobContextCoordinator = Struct.new(:topic, :partition)
40
+ # rubocop:enable Lint/StructNewOverride
41
+
42
+ # Build a new disposable consumer instance for a single Apache Kafka
43
+ # message, which should be processed within the ActiveJob context.
44
+ #
45
+ # @param payload [Mixed] the (already) decoded Kafka message payload
46
+ # @param metadata [Hash] the Kafka message metadata (string/symbol
47
+ # keys are allowed)
48
+ # @return [Rimless::Consumer::Base] the job context-aware consumer
49
+ # instance
50
+ def self.build_for_job(payload:, metadata:)
51
+ new.tap do |consumer|
52
+ metadata = metadata.symbolize_keys
53
+
54
+ consumer.coordinator = JobContextCoordinator.new(
55
+ topic: metadata[:topic],
56
+ partition: metadata[:partition]
57
+ )
58
+ consumer.producer = Rimless.producer
59
+
60
+ metadata = Karafka::Messages::Metadata.new(
61
+ **metadata.except(:key, :headers),
62
+ raw_key: metadata[:key],
63
+ raw_headers: metadata[:headers],
64
+ deserializers: job_deserializers
65
+ )
66
+ consumer.messages =
67
+ [Karafka::Messages::Message.new(payload, metadata)]
68
+ end
69
+ end
70
+
71
+ # A custom set of Karafka deserializers, exclusive for the ActiveJob
72
+ # context. As we already get the deserialized details (payload, message
73
+ # key, message headers), we just want to pass the values through.
74
+ #
75
+ # @return [Karafka::Routing::Features::Deserializers::Config] the
76
+ # deserializers config object
77
+ def self.job_deserializers
78
+ @job_deserializers ||=
79
+ Karafka::Routing::Features::Deserializers::Config.new(
80
+ active: true,
81
+ payload: ->(message) { message.raw_payload },
82
+ key: Karafka::Deserializers::Key.new,
83
+ headers: Karafka::Deserializers::Headers.new
84
+ )
85
+ end
86
+
87
+ # A generic message consuming handler which resolves the message event
88
+ # name to an actual method. All message data (top-level keys) is passed
89
+ # down to the event method as symbol arguments.
90
+ def consume
91
+ messages.each do |message|
92
+ self.message = message
93
+
94
+ # We ignore events we do not handle by definition
95
+ send(event, **arguments) if !event.nil? && respond_to?(event)
96
+ end
97
+ end
98
+
99
+ # Prepare the message payload as event method arguments.
100
+ #
101
+ # @return [Hash{Symbol => Mixed}] the event method arguments
102
+ def arguments
103
+ event_name_key = :event
104
+ event_name_key = 'event' if message.payload.key? 'event'
105
+ message.payload.except(event_name_key)
106
+ end
107
+
108
+ # A shortcut to fetch the event name from the Kafka message.
109
+ #
110
+ # @return [Symbol] the event name of the current message
111
+ def event
112
+ event_name = message.payload[:event]
113
+ event_name ||= message.payload['event']
114
+ event_name&.to_sym
115
+ end
116
+ end
117
+ end
118
+ end
@@ -0,0 +1,35 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Consumer
5
+ # A simple consumer job, enqueued by the job bridge, after a message was
6
+ # consumed from an Apache Kafka topic.
7
+ class Job < ActiveJob::Base
8
+ # Configure the default job queue
9
+ queue_as Rimless.configuration.consumer_job_queue
10
+
11
+ # Receive a single message/event from the Karafka process, consuming it
12
+ # from an Apache Kafka topic. Within the context we "simulate" a Karafka
13
+ # consumer context and run the configured consumer class (a user
14
+ # application class, from +app/consumers/+) with the single message.
15
+ #
16
+ # The Karafka consumer context is just "simulated", as it does not
17
+ # feature all components accessible by a regular Karafka consumer
18
+ # context. This includes access to the real +coordinator+, or +client+.
19
+ # But access to a +producer+ is provided. Check the
20
+ # Rimless::Consumer::Base for more details.
21
+ #
22
+ # @param payload [Mixed] the (already) decoded Kafka message payload
23
+ # @param consumer [String] the consumer class name to use
24
+ # @param metadata [Hash] the Kafka message metadata (string/symbol
25
+ # keys are allowed)
26
+ def perform(payload:, consumer:, metadata:)
27
+ # Try to lookup the given consumer and create a new instance for it,
28
+ # which is configured for the job context we're running in
29
+ consumer = consumer.constantize.build_for_job(payload:, metadata:)
30
+ # Run the actual consumer logic
31
+ consumer.consume
32
+ end
33
+ end
34
+ end
35
+ end
@@ -0,0 +1,113 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Consumer
5
+ # This is our default consumer for Karafka, which wraps the actual user
6
+ # consumer classes, consumes all messages of a batch and enqueues them as
7
+ # ActiveJob jobs. It builds a bridge between the Karafka topic/message
8
+ # consumer process and a later running ActiveJob processor (eg. Sidekiq or
9
+ # Solid Queue).
10
+ class JobBridge < Karafka::BaseConsumer
11
+ # Persist the wrapped consumer, to pass it later while enqueuing jobs
12
+ class_attribute :consumer
13
+
14
+ class << self
15
+ # Build a new anonymous wrapper class, based on the given destination
16
+ # consumer class.
17
+ #
18
+ # @param consumer [Class] the consumer to pass down to the jobs
19
+ # @return [Class] the new and configured wrapper class
20
+ def build(consumer)
21
+ # We cannot serialize anonymous classes, as they need to cross
22
+ # process borders via ActiveJob here, and the resulting job needs to
23
+ # constantize the serialized class name again
24
+ raise ArgumentError, "Anonymous consumer class passed: #{consumer}" \
25
+ unless consumer.name
26
+
27
+ Class.new(self).tap do |wrapper|
28
+ wrapper.consumer = consumer.name
29
+ end
30
+ end
31
+
32
+ # A custom object/class inspection helper to allow pretty printing of
33
+ # the anonymous class.
34
+ #
35
+ # @return [String] the pretty-printed class/instance
36
+ def inspect
37
+ # When not an anonymous class
38
+ return super unless name.nil?
39
+
40
+ # Otherwise the anonymous wrapper class
41
+ "#{Rimless::Consumer::JobBridge.name}[consumer=#{consumer.inspect}]"
42
+ end
43
+ alias to_s inspect
44
+ end
45
+
46
+ # Consume all messages of the current batch, and mark each message
47
+ # afterwards as processed (asynchronous). You can simply overwrite this
48
+ # method if you need more precise control of the message processing,
49
+ # eg. just marking the whole batch as processed, or custom error
50
+ # handling.
51
+ #
52
+ # This method provides *at-least-once* delivery semantics. Each message
53
+ # is enqueued to ActiveJob and then marked as consumed via
54
+ # +mark_as_consumed+ (asynchronous commit). In the unlikely event that
55
+ # the Karafka process crashes after +perform_later+ succeeds but before
56
+ # the offset is committed to the broker (e.g. OOM kill, hardware
57
+ # failure), the message will be re-delivered and enqueued again on
58
+ # restart. Downstream consumers should therefore be idempotent. For
59
+ # stronger guarantees, use +mark_as_consumed!+ (synchronous commit) at
60
+ # the cost of throughput, by providing a custom +job_bridge_class+ via
61
+ # +Rimless.configuration+.
62
+ #
63
+ # See: https://bit.ly/4aPXaai - then configure your own
64
+ # `Rimless.configuration.job_bridge_class`.
65
+ def consume
66
+ messages.each do |message|
67
+ enqueue_job(message)
68
+ mark_as_consumed(message)
69
+ end
70
+ end
71
+
72
+ # Enqueue a new job for the given message.
73
+ #
74
+ # @param message [Karafka::Messages::Message] the message to enqueue
75
+ def enqueue_job(message)
76
+ Rimless.configuration.consumer_job_class.perform_later(
77
+ **message_to_job_args(message)
78
+ )
79
+ end
80
+
81
+ # Convert the given +Karafka::Messages::Message+ instance to a simple
82
+ # hash, which can be transported by ActiveJob.
83
+ #
84
+ # @param message [Karafka::Messages::Message] the message to enqueue
85
+ # @return [Hash{Symbol => Mixed}] the job argument
86
+ def message_to_job_args(message)
87
+ {
88
+ payload: message.payload,
89
+ consumer:,
90
+ metadata: message.metadata.to_h.slice(
91
+ :topic,
92
+ :partition,
93
+ :offset,
94
+ :timestamp,
95
+ :received_at
96
+ ).merge(
97
+ key: message.metadata.key,
98
+ headers: message.metadata.headers
99
+ )
100
+ }
101
+ end
102
+
103
+ # A custom object/class inspection helper to allow pretty printing of
104
+ # the anonymous class.
105
+ #
106
+ # @return [String] the pretty-printed class/instance
107
+ def inspect
108
+ "#<#{Rimless::Consumer::JobBridge.name} consumer=#{consumer.inspect}>"
109
+ end
110
+ alias to_s inspect
111
+ end
112
+ end
113
+ end
@@ -0,0 +1,83 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level Apache Avro helpers.
6
+ module AvroHelpers
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # A top-level +AvroTurf::Messaging+ instance
11
+ mattr_accessor :avro
12
+ # A shared +Rimless::AvroUtils+ instance
13
+ mattr_accessor :avro_utils
14
+
15
+ # A shortcut to encode data using the specified schema to the Apache
16
+ # Avro format. This also applies data sanitation to avoid issues with
17
+ # the low level Apache Avro library (symbolized keys, etc) and it
18
+ # allows deep-relative schema names. When you pass +.deep.deep+ for
19
+ # example (leading period) it will prefix the schema name with the
20
+ # local namespace (so it becomes absolute).
21
+ #
22
+ # @param data [Mixed] the data structure to encode
23
+ # @param schema [String, Symbol] name of the schema that should be used
24
+ # @param opts [Hash{Symbol => Mixed}] additional options
25
+ # @return [String] the Apache Avro blob
26
+ def avro_encode(data, schema:, **opts)
27
+ data = avro_sanitize(data)
28
+
29
+ # When the deep-relative form (+.deep.deep[..]+) is present, we add
30
+ # our local namespace, so Avro can resolve it
31
+ schema = avro_utils.namespace + schema.to_s \
32
+ if schema.to_s.start_with? '.'
33
+
34
+ avro.encode(data, schema_name: schema.to_s, **opts)
35
+ end
36
+ alias_method :encode, :avro_encode
37
+
38
+ # A shortcut to parse a blob of Apache Avro data.
39
+ #
40
+ # @param data [String] the Apache Avro blob
41
+ # @param opts [Hash{Symbol => Mixed}] additional options
42
+ # @return [Mixed] the decoded data structure
43
+ def avro_decode(data, **opts)
44
+ avro.decode(data, **opts).deep_symbolize_keys!
45
+ end
46
+ alias_method :decode, :avro_decode
47
+
48
+ # The Apache Avro Ruby gem requires simple typed hashes for encoding.
49
+ # This forces us to convert eg. Grape entity representations into
50
+ # simple string-keyed hashes. Use this method to prepare a hash for the
51
+ # Apache Avro serialization.
52
+ #
53
+ # Note about the implementation: JSON serialization and parsing is the
54
+ # simplest and fastest way to accomplish this.
55
+ #
56
+ # @param hash [Hash{Mixed => Mixed}] the hash to sanitize
57
+ # @return [Hash{String => Mixed}] the simple typed input hash
58
+ def avro_to_h(hash)
59
+ JSON.parse(hash.to_json)
60
+ end
61
+ alias_method :avro_sanitize, :avro_to_h
62
+
63
+ # Convert the given deep hash into a sparsified flat hash while
64
+ # transforming all values to strings. This allows converting a
65
+ # schema-less hash to an Apache Avro compatible map.
66
+ #
67
+ # @see http://avro.apache.org/docs/current/spec.html#Maps
68
+ # @example Convert schema-less hash
69
+ # avro_schemaless_map(a: { b: { c: true } })
70
+ # # => { "a.b.c" => "true" }
71
+ #
72
+ # @param hash [Hash{Mixed => Mixed}] the deep hash
73
+ # @return [Hash{String => String}] the flattened and sparsified hash
74
+ def avro_schemaless_h(hash)
75
+ Sparsify(hash, sparse_array: true)
76
+ .transform_values(&:to_s)
77
+ .transform_keys { |key| key.delete('\\') }
78
+ end
79
+ alias_method :avro_schemaless_map, :avro_schemaless_h
80
+ end
81
+ end
82
+ end
83
+ end
@@ -0,0 +1,77 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level configuration handling.
6
+ #
7
+ # rubocop:disable Style/ClassVars -- because we split module code
8
+ module ConfigurationHandling
9
+ extend ActiveSupport::Concern
10
+
11
+ class_methods do
12
+ # Retrieve the current configuration object.
13
+ #
14
+ # @return [Configuration]
15
+ def configuration
16
+ @@configuration ||= Configuration.new
17
+ end
18
+
19
+ # Configure the concern by providing a block which takes
20
+ # care of this task. Example:
21
+ #
22
+ # Rimless.configure do |conf|
23
+ # # conf.xyz = [..]
24
+ # end
25
+ def configure
26
+ yield(configuration)
27
+ configure_dependencies
28
+ end
29
+
30
+ # Reset the current configuration with the default one.
31
+ def reset_configuration!
32
+ @@configuration = Configuration.new
33
+ end
34
+
35
+ # Retrieve the current configured environment. You can use it like
36
+ # +Rails.env+ to query it. E.g. +Rimless.env.production?+.
37
+ #
38
+ # @return [ActiveSupport::StringInquirer] the environment
39
+ def env
40
+ @@env = ActiveSupport::StringInquirer.new(configuration.env.to_s) \
41
+ if @@env.to_s != configuration.env.to_s
42
+ @@env
43
+ end
44
+
45
+ # A simple convention helper to setup Apache Kafka topic names.
46
+ #
47
+ # @param app [String] the application namespace
48
+ # @return [String] the Apache Kafka topic name prefix
49
+ def topic_prefix(app = Rimless.configuration.app_name)
50
+ "#{Rimless.env}.#{app}."
51
+ end
52
+
53
+ # Pass back the local application name. When we are loaded together
54
+ # with a Rails application we use the application class name. This
55
+ # application name is URI/GID compatible. When no local application is
56
+ # available, we just pass back +nil+.
57
+ #
58
+ # @return [String, nil] the Rails application name, or +nil+
59
+ def local_app_name
60
+ # Check for non-Rails integration
61
+ return unless defined? Rails
62
+ # Check if an application is defined
63
+ return if Rails.application.nil?
64
+
65
+ # Pass back the URI compatible application name
66
+ Rails.application.class.module_parent_name.underscore.dasherize
67
+ end
68
+
69
+ # Retrieve the current configured logger instance.
70
+ #
71
+ # @return [Logger] the logger instance
72
+ delegate :logger, to: :configuration
73
+ end
74
+ end
75
+ # rubocop:enable Style/ClassVars
76
+ end
77
+ end
@@ -0,0 +1,20 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Rimless
4
+ module Extensions
5
+ # The top-level Apache Kafka message consumer integration.
6
+ module Consumer
7
+ extend ActiveSupport::Concern
8
+
9
+ class_methods do
10
+ # A simple shortcut to fetch the Karafka-wrapping consumer application.
11
+ #
12
+ # @return [Rimless::Consumer::App] the internal consumer
13
+ # application class
14
+ def consumer
15
+ @consumer ||= Rimless::Consumer::App.new
16
+ end
17
+ end
18
+ end
19
+ end
20
+ end