rimless 2.9.0 → 3.1.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (64)
  1. checksums.yaml +4 -4
  2. data/Appraisals +2 -2
  3. data/CHANGELOG.md +70 -0
  4. data/Gemfile +0 -1
  5. data/README.md +64 -62
  6. data/Rakefile +13 -4
  7. data/UPGRADING.md +491 -0
  8. data/doc/kafka-playground/Dockerfile +5 -5
  9. data/doc/kafka-playground/Gemfile +1 -1
  10. data/doc/kafka-playground/Gemfile.lock +178 -140
  11. data/doc/kafka-playground/README.md +1 -1
  12. data/doc/kafka-playground/bin/consume-topic +1 -1
  13. data/doc/kafka-playground/bin/create-topic +28 -17
  14. data/doc/kafka-playground/bin/delete-topic +8 -3
  15. data/doc/kafka-playground/bin/list-topics +1 -1
  16. data/doc/kafka-playground/bin/produce-event +31 -18
  17. data/doc/kafka-playground/config/environment.rb +6 -38
  18. data/doc/kafka-playground/config/initializers/resolv.rb +59 -0
  19. data/doc/kafka-playground/config/initializers/rimless.rb +39 -0
  20. data/doc/kafka-playground/examples/rimless-produce +19 -20
  21. data/doc/upgrade-guide-sources/README.md +221 -0
  22. data/doc/upgrade-guide-sources/dep-avro_turf-1.20.md +23 -0
  23. data/doc/upgrade-guide-sources/dep-karafka-2.0.md +117 -0
  24. data/doc/upgrade-guide-sources/dep-waterdrop-2.8.md +30 -0
  25. data/gemfiles/rails_8.0.gemfile +1 -1
  26. data/gemfiles/rails_8.1.gemfile +1 -1
  27. data/lib/rimless/compatibility/.gitkeep +0 -0
  28. data/lib/rimless/configuration.rb +80 -6
  29. data/lib/rimless/consumer/app.rb +182 -0
  30. data/lib/rimless/{karafka → consumer}/avro_deserializer.rb +8 -6
  31. data/lib/rimless/consumer/base.rb +118 -0
  32. data/lib/rimless/consumer/job.rb +35 -0
  33. data/lib/rimless/consumer/job_bridge.rb +113 -0
  34. data/lib/rimless/extensions/avro_helpers.rb +83 -0
  35. data/lib/rimless/extensions/configuration_handling.rb +77 -0
  36. data/lib/rimless/extensions/consumer.rb +20 -0
  37. data/lib/rimless/extensions/dependencies.rb +84 -0
  38. data/lib/rimless/extensions/kafka_helpers.rb +46 -0
  39. data/lib/rimless/extensions/producer.rb +103 -0
  40. data/lib/rimless/initializers/compatibility.rb +3 -4
  41. data/lib/rimless/railtie.rb +7 -7
  42. data/lib/rimless/rspec/helpers.rb +53 -13
  43. data/lib/rimless/rspec/matchers.rb +14 -11
  44. data/lib/rimless/rspec.rb +13 -29
  45. data/lib/rimless/tasks/consumer.rake +18 -6
  46. data/lib/rimless/tasks/templates/application_consumer.rb +1 -1
  47. data/lib/rimless/tasks/templates/custom_consumer.rb +1 -1
  48. data/lib/rimless/tasks/templates/custom_consumer_spec.rb +5 -4
  49. data/lib/rimless/tasks/templates/karafka.rb +5 -4
  50. data/lib/rimless/version.rb +3 -1
  51. data/lib/rimless.rb +12 -14
  52. data/rimless.gemspec +7 -9
  53. metadata +40 -67
  54. data/lib/rimless/avro_helpers.rb +0 -81
  55. data/lib/rimless/base_consumer.rb +0 -30
  56. data/lib/rimless/compatibility/karafka_1_4.rb +0 -52
  57. data/lib/rimless/configuration_handling.rb +0 -82
  58. data/lib/rimless/consumer.rb +0 -209
  59. data/lib/rimless/consumer_job.rb +0 -10
  60. data/lib/rimless/dependencies.rb +0 -69
  61. data/lib/rimless/kafka_helpers.rb +0 -104
  62. data/lib/rimless/karafka/base64_interchanger.rb +0 -32
  63. data/lib/rimless/karafka/passthrough_mapper.rb +0 -29
  64. data/lib/rimless/tasks/stats.rake +0 -22
@@ -0,0 +1,59 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'resolv'
4
+
5
+ # Check if we're running in an environment without IPv6 support; requesting
6
+ # IPv6 nameservers for mDNS will result in the following error:
7
+ #
8
+ # Errno::EADDRNOTAVAIL: Cannot assign requested address
9
+ # sendto(2) for "ff02::fb" port 5353
10
+ #
11
+ # This is confusing, as we expect an mDNS lookup failure to look like this:
12
+ #
13
+ # no address for schema-registry.message-bus.local (Resolv::ResolvError)
14
+ #
15
+ # Therefore, we drop the IPv6 mDNS nameserver address.
16
+ if File.empty?('/proc/net/if_inet6')
17
+ Resolv::MDNS::Addresses.delete_if do |(ip, _port)|
18
+ ip == Resolv::MDNS::AddressV6
19
+ end
20
+ end
21
+
22
+ # A custom resolver factory for our local environments.
23
+ # See: https://github.com/excon/excon/pull/897
24
+ class LocalResolverFactory
25
+ # Create a new +Resolv+ resolver instance, configured for our local
26
+ # environment.
27
+ #
28
+ # @return [Resolv] the new resolver instance
29
+ def self.create_resolver
30
+ Resolv.new(create_resolvers)
31
+ end
32
+
33
+ # Create new resolvers for our local environment (hosts, mDNS, DNS).
34
+ #
35
+ # @return [Array<Resolv::Hosts, Resolv::MDNS, Resolv::DNS>] the new
36
+ # resolvers to combine
37
+ def self.create_resolvers
38
+ # The misleading Errno::EADDRNOTAVAIL hides an awfully long timeout for
39
+ # the mDNS resolver (75 seconds). But when we're going to remove the IPv6
40
+ # mDNS nameserver address, we have to configure more meaningful timeouts
41
+ # for mDNS.
42
+ mdns_resolver = Resolv::MDNS.new
43
+ mdns_resolver.timeouts = 3
44
+
45
+ [
46
+ Resolv::Hosts.new,
47
+ mdns_resolver,
48
+ Resolv::DNS.new
49
+ ]
50
+ end
51
+ end
52
+
53
+ # Replace the default resolvers
54
+ Resolv::DefaultResolver.replace_resolvers(
55
+ LocalResolverFactory.create_resolvers
56
+ )
57
+
58
+ # Configure Excon to use our custom resolver factory
59
+ Excon.defaults[:resolver_factory] = LocalResolverFactory
@@ -0,0 +1,39 @@
1
+ # frozen_string_literal: true
2
+
3
+ Rimless.configure do |conf|
4
+ conf.env = 'production'
5
+ conf.app_name = 'playground_app'
6
+ conf.client_id = 'playground'
7
+ conf.logger = AppLogger
8
+ conf.kafka_brokers = 'kafka.playground.local:9092'
9
+ conf.schema_registry_url = 'http://schema-registry.playground.local'
10
+ end
11
+
12
+ $kafka_config = Rimless.producer.config.kafka
13
+ $rdkafka_config = Rdkafka::Config.new($kafka_config)
14
+
15
+ KafkaAdminClient = $rdkafka_config.admin
16
+
17
+ def describe_topic(name)
18
+ return unless topic?(name)
19
+
20
+ topics[name].merge(configs: topic_configs(name))
21
+ end
22
+
23
+ def topic_configs(name)
24
+ KafkaAdminClient.describe_configs(
25
+ [{ resource_type: 2, resource_name: name.to_s }]
26
+ ).wait.resources.first.configs.map do |conf|
27
+ [conf.name, conf.value]
28
+ end.sort_by(&:first).to_h
29
+ end
30
+
31
+ def topic?(name)
32
+ topics.key? name.to_s
33
+ end
34
+
35
+ def topics
36
+ $rdkafka_config.admin.metadata.topics.index_by do |cur|
37
+ cur[:topic_name].to_s
38
+ end
39
+ end
@@ -1,42 +1,41 @@
1
1
  #!/usr/bin/env ruby
2
+ # frozen_string_literal: true
2
3
 
3
4
  require_relative '../config/environment'
4
5
 
5
6
  # Setup classes which are Apache Avro schema compatible, can be anything which
6
7
  # be converted to a hash via +#to_h+ (eg. OpenStruct, RecursiveOpenStruct,
7
8
  # Class, Struct, etc)
8
- PaymentEvent = Struct.new(:event, :payment,
9
- keyword_init: true)
9
+ PaymentEvent = Struct.new(:event, :payment)
10
10
 
11
11
  Payment = Struct.new(:gid, :currency, :net_amount_sum, :items, :state,
12
- :created_at, :updated_at,
13
- keyword_init: true)
12
+ :created_at, :updated_at)
14
13
 
15
- PaymentItem = Struct.new(:gid, :net_amount, :tax_rate, :created_at, :updated_at,
16
- keyword_init: true)
14
+ PaymentItem = Struct.new(:gid, :net_amount, :tax_rate, :created_at,
15
+ :updated_at)
17
16
 
18
17
  # Setup the real data instances which we serialize with Apache Avro and push to
19
18
  # Apache Kafka
20
- item_1_id = SecureRandom.uuid
21
- item_1 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item_1_id}",
22
- net_amount: 499,
23
- tax_rate: 19,
24
- created_at: Time.current,
25
- updated_at: nil)
26
-
27
- item_2_id = SecureRandom.uuid
28
- item_2 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item_2_id}",
29
- net_amount: 1,
30
- tax_rate: 19,
31
- created_at: Time.current,
32
- updated_at: nil)
19
+ item1_id = SecureRandom.uuid
20
+ item1 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item1_id}",
21
+ net_amount: 499,
22
+ tax_rate: 19,
23
+ created_at: Time.current,
24
+ updated_at: nil)
25
+
26
+ item2_id = SecureRandom.uuid
27
+ item2 = PaymentItem.new(gid: "gid://playground-app/PaymentItem/#{item2_id}",
28
+ net_amount: 1,
29
+ tax_rate: 19,
30
+ created_at: Time.current,
31
+ updated_at: nil)
33
32
 
34
33
  payment_id = SecureRandom.uuid
35
34
  payment = Payment.new(gid: "gid://playground-app/Payment/#{payment_id}",
36
35
  currency: :eur,
37
36
  net_amount_sum: 500,
38
37
  state: :authorized,
39
- items: [item_1, item_2],
38
+ items: [item1, item2],
40
39
  created_at: Time.current,
41
40
  updated_at: Time.current)
42
41
 
@@ -0,0 +1,221 @@
1
+ # Top Level Overview
2
+
3
+ - [Dependency Changes](#dependency-changes)
4
+ - [Structural Changes](#structural-changes)
5
+ - [Removed](#removed)
6
+ - [Moved, API unchanged, constants changed](#moved-api-unchanged-constants-changed)
7
+ - [Moved, API mostly unchanged, constants changed](#moved-api-mostly-unchanged-constants-changed)
8
+ - [Moved, API unchanged](#moved-api-unchanged)
9
+ - [Logic changed](#logic-changed)
10
+ - [Configuration Changes](#configuration-changes)
11
+ - [Changed](#changed)
12
+ - [New](#new)
13
+ - [Producer (WaterDrop)](#producer-waterdrop)
14
+ - [Consumer Setup (Karafka)](#consumer-setup-karafka)
15
+ - [Karafka 2 (New)](#karafka-2-new)
16
+ - [Phase 1: Boot](#phase-1-boot)
17
+ - [Phase 2: Message Consumption (batch fetching, single consumption)](#phase-2-message-consumption-batch-fetching-single-consumption)
18
+ - [Karafka 1 (Old)](#karafka-1-old)
19
+ - [Phase 1: Boot](#phase-1-boot-1)
20
+ - [Phase 2: Message Consumption (batch fetching, single consumption)](#phase-2-message-consumption-batch-fetching-single-consumption-1)
21
+ - [Testing](#testing)
22
+
23
+ ## Dependency Changes
24
+
25
+ * Migrated the `avro_turf` gem from `~> 0.11.0` to `~> 1.20`
26
+ * Migrated the `waterdrop` gem from `~> 1.4` to `~> 2.8`
27
+ * Migrated the `karafka` gem from `~> 1.4` to `~> 2.5`
28
+ * Switched from Sidekiq to ActiveJob (and added the dependency)
29
+
30
+ ## Structural Changes
31
+
32
+ ### Removed
33
+
34
+ * lib/rimless/karafka/base64_interchanger.rb
35
+ `Rimless::Karafka::Base64Interchanger` (no interchanging needed, as we do not
36
+ move the avro/binary encoded kafka message payload from Karafka to Sidekiq
37
+ anymore — instead we decode the message payload within the Karafka process,
38
+ and move the decoded data to ActiveJob and use the regular ActiveJob
39
+ arguments serialization)
40
+ * lib/rimless/karafka/passthrough_mapper.rb
41
+ `Rimless::Karafka::PassthroughMapper` (Karafka removed consumer/topics
42
+ mapper concepts completely — which was effectively a no-op, as Rimless
43
+ implemented a passthrough mapper to just keep inputs equal to outputs for
44
+ these names)
45
+ * lib/rimless/compatibility/karafka_1_4.rb (dropped Karafka 1.x support)
46
+
47
+ ### Moved, API unchanged, constants changed
48
+
49
+ * lib/rimless/karafka/avro_deserializer.rb `Rimless::Karafka::AvroDeserializer`
50
+ -> lib/rimless/consumer/avro_deserializer.rb
51
+ `Rimless::Consumer::AvroDeserializer`
52
+
53
+ ### Moved, API mostly unchanged, constants changed
54
+
55
+ * lib/rimless/consumer.rb `Rimless::ConsumerApp` -> lib/rimless/consumer/app.rb
56
+ `Rimless::Consumer::App` (functionality is kept, but some methods were
57
+ removed)
58
+ * lib/rimless/consumer_job.rb `Rimless::ConsumerJob` ->
59
+ lib/rimless/consumer/job.rb `Rimless::Consumer::Job` (Sidekiq -> ActiveJob)
60
+ * lib/rimless/base_consumer.rb `Rimless::BaseConsumer` ->
61
+ lib/rimless/consumer/base.rb `Rimless::Consumer::Base` (all
62
+ functionality/methods kept, API extended, `#consume` now returns the
63
+ `#messages` array instead of the result of the dispatched event method)
64
+
65
+ ### Moved, API unchanged
66
+
67
+ * lib/rimless/avro_helpers.rb -> lib/rimless/extensions/avro_helpers.rb
68
+ * lib/rimless/configuration_handling.rb ->
69
+ lib/rimless/extensions/configuration_handling.rb
70
+ * lib/rimless/kafka_helpers.rb -> lib/rimless/extensions/kafka_helpers.rb
71
+ * lib/rimless/dependencies.rb -> lib/rimless/extensions/dependencies.rb
72
+
73
+ ### Logic changed
74
+
75
+ * lib/rimless/railtie.rb (Karafka is no longer initialized within a Sidekiq
76
+ server context; this was needed in the past for the encoded/binary Kafka
77
+ message payload interchanging, as the data was actually parsed within the
78
+ Sidekiq process)
79
+
80
+ ## Configuration Changes
81
+
82
+ ### Changed
83
+
84
+ * `KAFKA_BROKERS` (env var) / `config.kafka_brokers` (format change — no
85
+ protocol anymore, just host:port CSV, old format:
86
+ `kafka://your.domain:9092,kafka..`, new format: `your.domain:9092,host..`) —
87
+ the old format is still supported for compatibility
88
+
89
+ ### New
90
+
91
+ * `config.consumer_logger_listener` (allows configuring the Karafka logging,
92
+ or providing a custom solution)
93
+ * `config.job_bridge_class` (allows configuring a custom job bridge class
94
+ that takes care of receiving Kafka messages and producing/enqueuing ActiveJob
95
+ jobs)
96
+ * `config.consumer_job_class` (allows configuring a custom job class that
97
+ processes the enqueued Kafka messages produced by the job bridge)
98
+ * `config.avro_deserializer_class` (allows configuring a custom Apache Avro
99
+ deserializer class that may implement additional parsing/processing, for
100
+ example)
101
+ * `config.avro_configure` (allows users to fully customize the
102
+ `AvroTurf::Messaging` instance)
103
+ * `config.producer_configure` (allows users to fully customize the
104
+ `WaterDrop::Producer` instance)
105
+ * `config.consumer_configure` (allows users to fully customize the
106
+ `Karafka::App` instance)
107
+
108
+ ## Producer (WaterDrop)
109
+
110
+ * No breaking changes, as we wrap it with our Kafka helpers (e.g.
111
+ `Rimless.message` and the like — their API stayed stable)
112
+
113
+ ## Consumer Setup (Karafka)
114
+
115
+ <table>
116
+ <tr>
117
+ <td valign="top">
118
+
119
+ ### Karafka 2 (New)
120
+
121
+ #### Phase 1: Boot
122
+
123
+ * `bundle exec karafka server`
124
+ * Karafka: load some Karafka (server) defaults
125
+ * Karafka: require `rails` — if available
126
+ * Karafka: require `/app/karafka.rb`
127
+ * `/app/karafka.rb`: require `rimless`
128
+ * `/app/karafka.rb`: `Rimless.consumer.topics(..)`
129
+ * Rimless: `Rimless.consumer -> Rimless::Consumer::App.new` — this configures
130
+ Karafka (including logging, code reload)
131
+ * Karafka: Karafka server takes over
132
+ => (set up consumer groups, start listening for messages)
133
+
134
+ <br><br><br><br><br><br>
135
+
136
+ #### Phase 2: Message Consumption (batch fetching, single consumption)
137
+
138
+ * Karafka: receives message(s) on topic (synced by consumer group)
139
+ => just one Karafka server process handles a single message, per partition
140
+ (no double processing)
141
+ * Karafka: routes the message(s) of the topic to the Rimless "job bridge"
142
+ consumer (`Rimless::Consumer::JobBridge`), then all messages of the batch are
143
+ processed (lazily deserialized) and enqueued as an ActiveJob
144
+ (`Rimless::Consumer::Job`) — while the decoded message payload is passed as
145
+ job parameters and serialized/deserialized by ActiveJob (the job execution
146
+ may then be concurrent via Sidekiq or another ActiveJob adapter)
147
+ => the Kafka message now leaves the Karafka server process
148
+
149
+ * ActiveJob: `Rimless::Consumer::Job` is picked up and executed
150
+ * Rimless: a `Rimless::Consumer::Base` child class is searched by the
151
+ `consumer` parameter (class inside `/app/consumers`) and instantiated for
152
+ the job context (hydrating consumer metadata, the message batch containing
153
+ the single message, etc)
154
+ * Rimless: `Rimless::Consumer::Base` unpacks the message `event` (e.g.
155
+ `user_updated`) and dispatches it as a method on the child consumer with
156
+ the remaining event parameters as arguments
157
+ * App: `/app/consumers` class kicks in and runs business application logic
158
+ => e.g. `IdentityApiConsumer.user_updated(user:, **_)`
159
+
160
+ </td>
161
+ <td valign="top">
162
+
163
+ ### Karafka 1 (Old)
164
+
165
+ #### Phase 1: Boot
166
+
167
+ * `bundle exec karafka server`
168
+ * Karafka: load some Karafka (server) defaults
169
+ * Karafka: require `/app/karafka.rb`
170
+ * `/app/karafka.rb`: require `rimless`
171
+ * Rimless: require `railtie` — set up Sidekiq server part
172
+ * `/app/karafka.rb`: `Rimless.consumer.topics(..).boot!`
173
+ * Rimless: `Rimless.consumer -> ConsumerApp.initialize!`
174
+ * `initialize_rails!`
175
+ * `initialize_monitors!`
176
+ * `initialize_karafka!`
177
+ * `initialize_logger!`
178
+ * `initialize_code_reload!`
179
+ * Karafka: Karafka server takes over
180
+ => (set up consumer groups, start listening for messages)
181
+
182
+ #### Phase 2: Message Consumption (batch fetching, single consumption)
183
+
184
+ * Karafka: receives message on topic (synced by consumer group)
185
+ => just one Karafka server process handles a single message, per partition
186
+ (no double processing)
187
+ * Karafka: run `Rimless::Karafka::PassthroughMapper` for routing (no-op)
188
+ * Karafka: deserialize message payload with `Rimless::Karafka::AvroDeserializer`
189
+ * Karafka: decoded message is passed into `Karafka::Backends::Sidekiq`
190
+ => (karafka-sidekiq-backend gem)
191
+ * karafka-sidekiq-backend: wrap the message payload with
192
+ `Rimless::Karafka::Base64Interchanger`
193
+ => (Ruby object marshalling + base64 encoding for Valkey/Redis transport,
194
+ to overcome binary encoding issues)
195
+ => quite high size overhead on Valkey/Redis
196
+ * karafka-sidekiq-backend: enqueue `Rimless::ConsumerJob` with the wrapped
197
+ message payload
198
+ => the Kafka message now leaves the Karafka server process
199
+
200
+ * Sidekiq: `Rimless::ConsumerJob` is picked up and executed
201
+ * karafka-sidekiq-backend: a `Rimless::BaseConsumer` class is searched for
202
+ the message (child class inside `/app/consumers`)
203
+ * Rimless: `Rimless::BaseConsumer` unpacks the message `event` (e.g.
204
+ `user_updated`) and dispatches it as a method on the child consumer with
205
+ the remaining event parameters as arguments
206
+ * App: `/app/consumers` class kicks in and runs business application logic
207
+ => e.g. `IdentityApiConsumer.user_updated(user:, **_)`
208
+
209
+ </td>
210
+ </tr>
211
+ </table>
212
+
213
+ ## Testing
214
+
215
+ See: https://github.com/karafka/karafka-testing/blob/master/2.0-Upgrade.md
216
+
217
+ * Replace `#karafka_consumer_for` in your specs with `#kafka_consumer_for`
218
+ (provided and augmented by Rimless to skip job enqueuing and instead
219
+ perform the wrapped consumer job directly)
220
+ * Replace `#publish_for_karafka` in your specs with `karafka.produce` (in
221
+ case you did not use the Rimless message producing helpers)
@@ -0,0 +1,23 @@
1
+ # AvroTurf RubyGem (relevant changes)
2
+
3
+ * See: https://github.com/dasch/avro_turf/blob/master/CHANGELOG.md
4
+ * Migrated the `avro_turf` gem from `~> 0.11.0` to `~> 1.20`
5
+
6
+ ---
7
+
8
+ - [Important](#important)
9
+ - [Minor](#minor)
10
+
11
+ ## Important
12
+
13
+ * The `excon` dependency was upgraded to `>= 0.104, < 2`
14
+ * Removed `sinatra` as a development dependency (our Rimless gem dropped the
15
+ `sinatra` gem dependency, too)
16
+ * Stopped caching nested sub-schemas
17
+
18
+ ## Minor
19
+
20
+ * Added compatibility with Avro v1.12.x
21
+ * Added `resolv_resolver` parameter to `AvroTurf::Messaging` to make use of
22
+ custom domain name resolvers and their options, for example `nameserver` and
23
+ `timeouts`
@@ -0,0 +1,117 @@
1
+ # Karafka RubyGem (relevant changes)
2
+
3
+ * See: https://github.com/karafka/karafka/blob/master/CHANGELOG.md
4
+ * See: https://github.com/karafka/karafka/wiki/Upgrades-Karafka-2.0
5
+ * Migrated the `karafka` gem from `~> 1.4` to `~> 2.5`
6
+
7
+ ---
8
+
9
+ - [Important (Structural)](#important-structural)
10
+ - [Important (Logical)](#important-logical)
11
+ - [Important (Configurations)](#important-configurations)
12
+ - [Important (End-user code changes)](#important-end-user-code-changes)
13
+ - [Minor](#minor)
14
+
15
+ ## Important (Structural)
16
+
17
+ * Removed the topic mappers concept completely
18
+ * Removed support for using sidekiq-backend due to the introduction of
19
+ multi-threading
20
+ * Removed the now incompatible `karafka-sidekiq-backend` gem
21
+ * If you use sidekiq-backend, you have two options:
22
+ * Leverage Karafka's multi-threading capabilities
23
+ * Pipe the jobs to Sidekiq yourself (this is what we do with the Rimless
24
+ gem now)
25
+ * Removed the Responders concept in favor of WaterDrop producer usage
26
+ * Removed all callbacks completely in favor of the finalizer method `#shutdown`
27
+ * Removed single message consumption mode in favor of documentation on how to
28
+ do it easily yourself (see:
29
+ https://github.com/karafka/karafka/wiki/Consuming-messages#consuming-messages-one-at-a-time)
30
+ * In the past, Rimless configured `config.batch_fetching = true` and
31
+ `config.batch_consuming = false`, resulting in single message processing
32
+ within the Karafka process, but each Kafka message was enqueued as a
33
+ Sidekiq worker/job — so message consumption was always concurrent. Batch
34
+ fetching is now always done by Karafka; adjust `config.max_wait_time` or
35
+ `config.max_messages` to optimize for latency or throughput (also check
36
+ `config.kafka[:'enable.partition.eof'] = true`, see:
37
+ https://karafka.io/docs/Latency-and-Throughput/).
38
+ * Renamed `Karafka::Params::BatchMetadata` to
39
+ `Karafka::Messages::BatchMetadata`
40
+ * Renamed `Karafka::Params::Params` to `Karafka::Messages::Message`
41
+ * Renamed `#params_batch` in consumers to `#messages` (Rimless adds a
42
+ compatibility delegation for the old `#params_batch`)
43
+ * Renamed `Karafka::Params::Metadata` to `Karafka::Messages::Metadata`
44
+ * Renamed `Karafka::Fetcher` to `Karafka::Runner` and aligned notification key
45
+ names
46
+ * Renamed `Karafka::Instrumentation::StdoutListener` to
47
+ `Karafka::Instrumentation::LoggerListener`
48
+ * Renamed `Karafka::Serializers::JSON::Deserializer` to
49
+ `Karafka::Deserializers::Payload`
50
+
51
+ ## Important (Logical)
52
+
53
+ * Changed how the routing style (0.5) behaves. It now builds a single consumer
54
+ group instead of one per topic (consumer groups: 2.0 uses 1 for all topics,
55
+ 1.4 used 1 per topic)
56
+ * Karafka 2.0 introduces seamless Ruby on Rails integration via `Rails::Railtie`
57
+ without needing extra configuration (this is reflected in the Rimless gem,
58
+ as we no longer initialize the Rails application)
59
+
60
+ ## Important (Configurations)
61
+
62
+ * Karafka 2.0 is powered by librdkafka, Rimless allows configuration via
63
+ `Rimless.configuration.consumer_configure` and the configuration is split
64
+ into Karafka settings (root level, see:
65
+ https://github.com/karafka/karafka/blob/v2.5.5/lib/karafka/setup/config.rb)
66
+ and Kafka settings (`config.kafka`, see:
67
+ https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md)
68
+ * Below you can find some of the most significant naming changes in the
69
+ configuration options:
70
+ * Root options:
71
+ * `start_from_beginning` is now `initial_offset` and accepts either
72
+ 'earliest' or 'latest'
73
+ * `ssl_ca_certs_from_system` is no longer needed, but `kafka`
74
+ `security.protocol` needs to be set to `ssl`
75
+ * `batch_fetching` is no longer needed
76
+ * `batch_consuming` is no longer needed
77
+ * `serializer` is no longer needed because Responders have been removed
78
+ from Karafka
79
+ * `topic_mapper` is no longer needed, as the concept of mapping topic names
80
+ has been removed from Karafka
81
+ * `backend` is no longer needed because Karafka is now multi-threaded
82
+ * `manual_offset_management` now needs to be set on a per-topic basis
83
+ * Kafka options:
84
+ * `kafka.seed_brokers` is now `bootstrap.servers` without the protocol
85
+ definition
86
+ * `kafka.heartbeat_interval` is no longer needed.
87
+ * SASL and SSL options changes are described in their own section.
88
+
89
+ ## Important (End-user code changes)
90
+
91
+ * Remove WaterDrop setup code from your `karafka.rb`:
92
+ ```ruby
93
+ # This can be safely removed
94
+ monitor.subscribe('app.initialized') do
95
+ WaterDrop.setup { |config| config.deliver = !Karafka.env.test? }
96
+ end
97
+ ```
98
+ * Remove direct WaterDrop listener references from your `karafka.rb`:
99
+ ```ruby
100
+ # This can be safely removed
101
+ Karafka.monitor.subscribe(WaterDrop::Instrumentation::LoggerListener.new)
102
+ ```
103
+ * Remove the `KarafkaApp.boot!` from the end of `karafka.rb`:
104
+ ```ruby
105
+ # Remove this
106
+ KarafkaApp.boot!
107
+ # or in case of Rimless:
108
+ Rimless.consumer.topics(...).boot! # just the `.boot!` call
109
+ ```
110
+
111
+ ## Minor
112
+
113
+ * No `dry-*` gems are used as dependencies anymore
114
+ * Added `KARAFKA_REQUIRE_RAILS` to disable the default Rails require, to run
115
+ Karafka without Rails despite having Rails in the Gemfile
116
+ * Allow running boot-file-less Rails setup Karafka CLI commands where
117
+ configuration is done in initializers
@@ -0,0 +1,30 @@
1
+ # WaterDrop RubyGem (relevant changes)
2
+
3
+ * See: https://github.com/karafka/waterdrop/blob/master/CHANGELOG.md
4
+ * Migrated the `waterdrop` gem from `~> 1.4` to `~> 2.8`
5
+
6
+ ---
7
+
8
+ - [Important](#important)
9
+ - [Minor](#minor)
10
+
11
+ ## Important
12
+
13
+ * Replaced `ruby-kafka` with `rdkafka` (karafka-rdkafka, native gem/lib)
14
+ * The new underlying Kafka library has different/renamed options (see:
15
+ https://github.com/confluentinc/librdkafka/blob/master/CONFIGURATION.md)
16
+ * No `dry-*` gems are used as dependencies anymore
17
+ * Complete redesign of the API (this is wrapped by Rimless helpers, which
18
+ remained stable)
19
+ * All time-related values are now configured in milliseconds instead of some
20
+ being in seconds and some in milliseconds
21
+
22
+ ## Minor
23
+
24
+ * Added support for sending tombstone messages
25
+ * Changed auto-generated ID from `SecureRandom#uuid` to `SecureRandom#hex(6)`
26
+ * Introduced transactions support
27
+ * Added support for producing messages with arrays of strings in headers
28
+ (KIP-82)
29
+ * Added `WaterDrop::ConnectionPool` for efficient connection pooling using the
30
+ proven `connection_pool` gem
@@ -7,7 +7,6 @@ gem "bundler", ">= 2.6", "< 5"
7
7
  gem "countless", "~> 2.2"
8
8
  gem "factory_bot", "~> 6.2"
9
9
  gem "guard-rspec", "~> 4.7"
10
- gem "railties", "~> 8.0.0"
11
10
  gem "rake", "~> 13.0"
12
11
  gem "redcarpet", "~> 3.5"
13
12
  gem "rspec", "~> 3.12"
@@ -19,6 +18,7 @@ gem "timecop", ">= 0.9.6"
19
18
  gem "vcr", "~> 6.0"
20
19
  gem "yard", ">= 0.9.28"
21
20
  gem "yard-activesupport-concern", ">= 0.0.1"
21
+ gem "activejob", "~> 8.0.0"
22
22
  gem "activesupport", "~> 8.0.0"
23
23
 
24
24
  gemspec path: "../"
@@ -7,7 +7,6 @@ gem "bundler", ">= 2.6", "< 5"
7
7
  gem "countless", "~> 2.2"
8
8
  gem "factory_bot", "~> 6.2"
9
9
  gem "guard-rspec", "~> 4.7"
10
- gem "railties", "~> 8.1.0"
11
10
  gem "rake", "~> 13.0"
12
11
  gem "redcarpet", "~> 3.5"
13
12
  gem "rspec", "~> 3.12"
@@ -19,6 +18,7 @@ gem "timecop", ">= 0.9.6"
19
18
  gem "vcr", "~> 6.0"
20
19
  gem "yard", ">= 0.9.28"
21
20
  gem "yard-activesupport-concern", ">= 0.0.1"
21
+ gem "activejob", "~> 8.1.0"
22
22
  gem "activesupport", "~> 8.1.0"
23
23
 
24
24
  gemspec path: "../"
File without changes