deimos-ruby 2.2.3.pre.beta2 → 2.3.0.pre.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +1 -1
  4. data/CLAUDE.md +270 -0
  5. data/README.md +0 -2
  6. data/deimos-ruby.gemspec +2 -2
  7. data/lib/deimos/backends/kafka.rb +1 -1
  8. data/lib/deimos/backends/kafka_async.rb +1 -2
  9. data/lib/deimos/config/configuration.rb +1 -1
  10. data/lib/deimos/ext/producer_middleware.rb +2 -2
  11. data/lib/deimos/kafka_source.rb +1 -1
  12. data/lib/deimos/metrics/datadog.rb +1 -3
  13. data/lib/deimos/schema_backends/avro_base.rb +4 -6
  14. data/lib/deimos/schema_backends/avro_local.rb +13 -12
  15. data/lib/deimos/schema_backends/avro_schema_registry.rb +14 -15
  16. data/lib/deimos/schema_backends/avro_validation.rb +1 -1
  17. data/lib/deimos/schema_backends/base.rb +5 -4
  18. data/lib/deimos/schema_backends/mock.rb +1 -1
  19. data/lib/deimos/schema_backends/plain.rb +1 -1
  20. data/lib/deimos/schema_backends/proto_base.rb +34 -10
  21. data/lib/deimos/schema_backends/proto_local.rb +5 -5
  22. data/lib/deimos/schema_backends/proto_schema_registry.rb +27 -6
  23. data/lib/deimos/test_helpers.rb +0 -5
  24. data/lib/deimos/transcoder.rb +1 -1
  25. data/lib/deimos/utils/outbox_producer.rb +2 -2
  26. data/lib/deimos/version.rb +1 -1
  27. data/lib/deimos.rb +15 -35
  28. data/lib/generators/deimos/active_record_generator.rb +1 -1
  29. data/lib/generators/deimos/schema_class_generator.rb +3 -3
  30. data/lib/generators/deimos/v2_generator.rb +2 -2
  31. data/spec/deimos_spec.rb +0 -32
  32. data/spec/gen/sample/v1/sample_key_pb.rb +17 -0
  33. data/spec/generators/schema_class_generator_spec.rb +1 -1
  34. data/spec/protos/sample/v1/sample_key.proto +7 -0
  35. data/spec/schema_backends/avro_base_shared.rb +1 -1
  36. data/spec/schema_backends/avro_local_spec.rb +1 -8
  37. data/spec/schema_backends/avro_schema_registry_spec.rb +7 -7
  38. data/spec/schema_backends/base_spec.rb +2 -2
  39. data/spec/schema_backends/proto_schema_registry_spec.rb +222 -19
  40. data/spec/spec_helper.rb +1 -1
  41. metadata +32 -35
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'proto_turf'
4
+ require 'schema_registry_client'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -14,14 +14,14 @@ module Deimos
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, topic: nil)
17
+ def encode_payload(payload, schema: nil, subject: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
19
  proto_schema.msgclass.encode(msg)
20
20
  end
21
21
 
22
- # @return [ProtoTurf]
23
- def self.proto_turf
24
- @proto_turf ||= ProtoTurf.new(
22
+ # @return [SchemaRegistry::Client]
23
+ def self.schema_registry
24
+ @schema_registry ||= SchemaRegistry::Client.new(
25
25
  registry_url: Deimos.config.schema.registry_url,
26
26
  logger: Karafka.logger
27
27
  )
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'proto_turf'
4
+ require 'schema_registry_client'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -10,22 +10,43 @@ module Deimos
10
10
 
11
11
  # @override
12
12
  def decode_payload(payload, schema:)
13
- self.class.proto_turf.decode(payload)
13
+ self.class.schema_registry.decode(payload)
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, topic: nil)
17
+ def encode_payload(payload, schema: nil, subject: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
- self.class.proto_turf.encode(msg, subject: topic)
19
+ encoder = subject&.ends_with?('-key') ? self.class.key_schema_registry : self.class.schema_registry
20
+ encoder.encode(msg, subject: subject)
21
+ end
22
+
23
+ # @override
24
+ def encode_proto_key(key, topic: nil, field: nil)
25
+ schema_text = SchemaRegistry::Output::JsonSchema.output(proto_schema.to_proto, path: field)
26
+ self.class.key_schema_registry.encode(key, subject: "#{topic}-key", schema_text: schema_text)
27
+ end
28
+
29
+ # @override
30
+ def decode_proto_key(payload)
31
+ self.class.key_schema_registry.decode(payload)
20
32
  end
21
33
 
22
34
  # @return [ProtoTurf]
23
- def self.proto_turf
24
- @proto_turf ||= ProtoTurf.new(
35
+ def self.schema_registry
36
+ @schema_registry ||= ProtoTurf.new(
25
37
  registry_url: Deimos.config.schema.registry_url,
26
38
  logger: Karafka.logger
27
39
  )
28
40
  end
41
+
42
+ def self.key_schema_registry
43
+ @key_schema_registry ||= SchemaRegistry::Client.new(
44
+ registry_url: Deimos.config.schema.registry_url,
45
+ logger: Karafka.logger,
46
+ schema_type: SchemaRegistry::Schema::ProtoJsonSchema
47
+ )
48
+ end
49
+
29
50
  end
30
51
  end
31
52
  end
@@ -16,11 +16,6 @@ module Deimos
16
16
  def self.included(base)
17
17
  super
18
18
  base.include Karafka::Testing::RSpec::Helpers
19
- RSpec.configure do |config|
20
- config.before(:each) do
21
- allow(Deimos).to receive(:producer_for).and_return(Karafka.producer)
22
- end
23
- end
24
19
  end
25
20
 
26
21
  # @return [Array<Hash>]
@@ -36,7 +36,7 @@ module Deimos
36
36
  if self.key_field
37
37
  self.backend.encode_key(self.key_field, key, topic: @topic)
38
38
  else
39
- self.backend.encode(key, topic: @topic)
39
+ self.backend.encode(key, topic: @topic, is_key: true)
40
40
  end
41
41
  end
42
42
 
@@ -205,9 +205,9 @@ module Deimos
205
205
  batch_size = batch.size
206
206
  current_index = 0
207
207
 
208
- batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
208
+ batch[current_index..].in_groups_of(batch_size, false).each do |group|
209
209
  @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
210
- Deimos.producer_for(@current_topic).produce_many_sync(group)
210
+ Karafka.producer.produce_many_sync(group)
211
211
  current_index += group.size
212
212
  @logger.info("Sent #{group.size} messages to #{@current_topic}")
213
213
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Deimos
4
- VERSION = '2.2.3-beta2'
4
+ VERSION = '2.3.0-beta1'
5
5
  end
data/lib/deimos.rb CHANGED
@@ -142,34 +142,23 @@ module Deimos
142
142
  signal_handler.run!
143
143
  end
144
144
 
145
- def setup_producers
146
- @producers = {}
147
- producers_by_broker = {}
148
- Deimos.karafka_configs.each do |topic|
149
- broker = topic.kafka[:'bootstrap.servers']
150
- producers_by_broker[broker] ||= ::WaterDrop::Producer.new do |p_config|
151
- config_hash = Karafka::Setup::Config.config.kafka.merge(topic.kafka)
152
- p_config.kafka = Karafka::Setup::AttributesMap.producer(config_hash)
153
- end
154
- @producers[topic.name] = producers_by_broker[broker]
155
- end
156
- end
157
-
158
145
  def setup_karafka
159
- setup_producers
160
- waterdrop_producers.each do |producer|
161
- producer.middleware.append(Deimos::ProducerMiddleware)
162
- producer.monitor.subscribe(ProducerMetricsListener.new)
163
- producer.monitor.subscribe('error.occurred') do |event|
164
- if event.payload.key?(:messages)
165
- topic = event[:messages].first[:topic]
166
- config = Deimos.karafka_config_for(topic: topic)
167
- message = Deimos::Logging.messages_log_text(config&.payload_log, event[:messages])
168
- Karafka.logger.error("Error producing messages: #{event[:error].message} #{message.to_json}")
169
- end
146
+ Karafka.producer.middleware.append(Deimos::ProducerMiddleware)
147
+ # for multiple setup calls
148
+ Karafka.producer.config.kafka =
149
+ Karafka::Setup::AttributesMap.producer(Karafka::Setup::Config.config.kafka.dup)
150
+ EVENT_TYPES.each { |type| Karafka.monitor.notifications_bus.register_event(type) }
151
+
152
+ Karafka.producer.monitor.subscribe(ProducerMetricsListener.new)
153
+
154
+ Karafka.producer.monitor.subscribe('error.occurred') do |event|
155
+ if event.payload.key?(:messages)
156
+ topic = event[:messages].first[:topic]
157
+ config = Deimos.karafka_config_for(topic: topic)
158
+ message = Deimos::Logging.messages_log_text(config&.payload_log, event[:messages])
159
+ Karafka.logger.error("Error producing messages: #{event[:error].message} #{message.to_json}")
170
160
  end
171
161
  end
172
- EVENT_TYPES.each { |type| Karafka.monitor.notifications_bus.register_event(type) }
173
162
  end
174
163
 
175
164
  # @return [Array<Karafka::Routing::Topic]
@@ -187,16 +176,7 @@ module Deimos
187
176
  end
188
177
  end
189
178
 
190
- # @return [Array<::WaterDrop::Producer>]
191
- def waterdrop_producers
192
- (@producers.values + [Karafka.producer]).uniq
193
- end
194
-
195
- # @param topic [String]
196
- def producer_for(topic)
197
- @producers[topic] || Karafka.producer
198
- end
199
-
179
+ # @param handler_class [Class]
200
180
  # @return [String,nil]
201
181
  def topic_for_consumer(handler_class)
202
182
  Deimos.karafka_configs.each do |topic|
@@ -48,7 +48,7 @@ module Deimos
48
48
  # @return [String]
49
49
  def schema
50
50
  last_dot = self.full_schema.rindex('.')
51
- self.full_schema[(last_dot + 1)..-1]
51
+ self.full_schema[(last_dot + 1)..]
52
52
  end
53
53
 
54
54
  # @return [String]
@@ -13,7 +13,7 @@ module Deimos
13
13
  # @return [Array<Symbol>]
14
14
  SPECIAL_TYPES = %i(record enum).freeze
15
15
  # @return [String]
16
- INITIALIZE_WHITESPACE = "\n#{' ' * 19}"
16
+ INITIALIZE_WHITESPACE = "\n#{' ' * 19}".freeze
17
17
  # @return [Array<String>]
18
18
  IGNORE_DEFAULTS = %w(message_id timestamp).freeze
19
19
  # @return [String]
@@ -192,7 +192,7 @@ module Deimos
192
192
 
193
193
  def generate_from_schema_files(found_schemas)
194
194
  path = Deimos.config.schema.path || Deimos.config.schema.paths[:avro].first
195
- schema_store = AvroTurf::MutableSchemaStore.new(path: path)
195
+ schema_store = SchemaRegistry::AvroSchemaStore.new(path: path)
196
196
  schema_store.load_schemas!
197
197
  schema_store.schemas.values.sort_by { |s| "#{s.namespace}#{s.name}" }.each do |schema|
198
198
  name = "#{schema.namespace}.#{schema.name}"
@@ -265,7 +265,7 @@ module Deimos
265
265
  end
266
266
 
267
267
  result = "def initialize(_from_message: false, #{arguments.first}"
268
- arguments[1..-1].each_with_index do |arg, _i|
268
+ arguments[1..].each_with_index do |arg, _i|
269
269
  result += ",#{INITIALIZE_WHITESPACE}#{arg}"
270
270
  end
271
271
  "#{result})"
@@ -119,7 +119,7 @@ module Deimos
119
119
  end
120
120
 
121
121
  def consumer_configs
122
- deimos_config.consumer_objects.group_by(&:group_id).map { |group_id, consumers|
122
+ deimos_config.consumer_objects.group_by(&:group_id).to_h do |group_id, consumers|
123
123
  [group_id, consumers.map do |consumer|
124
124
  kafka_configs = {}
125
125
  kafka_configs['auto.offset.reset'] = consumer.start_from_beginning ? 'earliest' : 'latest'
@@ -159,7 +159,7 @@ module Deimos
159
159
  configs[:each_message] = true unless consumer.delivery.to_s == 'inline_batch'
160
160
  configs
161
161
  end]
162
- }.to_h
162
+ end
163
163
  end
164
164
 
165
165
  def producer_configs
data/spec/deimos_spec.rb CHANGED
@@ -50,36 +50,4 @@ describe Deimos do
50
50
  end
51
51
  end
52
52
 
53
- specify '#producer_for' do
54
- allow(described_class).to receive(:producer_for).and_call_original
55
- Karafka::App.routes.redraw do
56
- topic 'main-broker' do
57
- active false
58
- kafka({
59
- 'bootstrap.servers': 'broker1:9092'
60
- })
61
- end
62
- topic 'main-broker2' do
63
- active false
64
- kafka({
65
- 'bootstrap.servers': 'broker1:9092'
66
- })
67
- end
68
- topic 'other-broker' do
69
- active false
70
- kafka({
71
- 'bootstrap.servers': 'broker2:9092'
72
- })
73
- end
74
- end
75
- described_class.setup_producers
76
-
77
- producer1 = described_class.producer_for('main-broker')
78
- producer2 = described_class.producer_for('main-broker2')
79
- producer3 = described_class.producer_for('other-broker')
80
- expect(producer1).to eq(producer2)
81
- expect(producer1.config.kafka[:'bootstrap.servers']).to eq('broker1:9092')
82
- expect(producer3.config.kafka[:'bootstrap.servers']).to eq('broker2:9092')
83
- end
84
-
85
53
  end
@@ -0,0 +1,17 @@
1
+ # frozen_string_literal: true
2
+ # Generated by the protocol buffer compiler. DO NOT EDIT!
3
+ # source: sample/v1/sample_key.proto
4
+
5
+ require 'google/protobuf'
6
+
7
+
8
+ descriptor_data = "\n\x1asample/v1/sample_key.proto\x12\tsample.v1\"\x1f\n\x10SampleMessageKey\x12\x0b\n\x03str\x18\x01 \x01(\tb\x06proto3"
9
+
10
+ pool = ::Google::Protobuf::DescriptorPool.generated_pool
11
+ pool.add_serialized_file(descriptor_data)
12
+
13
+ module Sample
14
+ module V1
15
+ SampleMessageKey = ::Google::Protobuf::DescriptorPool.generated_pool.lookup("sample.v1.SampleMessageKey").msgclass
16
+ end
17
+ end
@@ -20,7 +20,7 @@ end
20
20
 
21
21
  RSpec.describe Deimos::Generators::SchemaClassGenerator do
22
22
  let(:schema_class_path) { 'spec/app/lib/schema_classes' }
23
- let(:files) { Dir["#{schema_class_path}/**/*.rb"].map { |f| [f, File.read(f)]}.to_h }
23
+ let(:files) { Dir["#{schema_class_path}/**/*.rb"].to_h { |f| [f, File.read(f)]} }
24
24
 
25
25
  before(:each) do
26
26
  Deimos.config.reset!
@@ -0,0 +1,7 @@
1
+ syntax = "proto3";
2
+
3
+ package sample.v1;
4
+
5
+ message SampleMessageKey {
6
+ string str = 1;
7
+ }
@@ -66,7 +66,7 @@ RSpec.shared_examples_for('an Avro backend') do
66
66
  expect(backend.encode_key('test_id', 1, topic: 'topic')).to eq('itsme')
67
67
  expect(backend).to have_received(:encode).
68
68
  with({ 'test_id' => 1 }, { schema: 'MySchema_key', topic: 'topic' })
69
- expect(backend.schema_store.find('MySchema_key', 'com.my-namespace').to_avro).
69
+ expect(backend.schema_store.find('com.my-namespace.MySchema_key').to_avro).
70
70
  to eq(
71
71
  'doc' => 'Key for com.my-namespace.MySchema - autogenerated by Deimos',
72
72
  'fields' => [
@@ -15,17 +15,10 @@ RSpec.describe Deimos::SchemaBackends::AvroLocal do
15
15
  it_should_behave_like 'an Avro backend'
16
16
 
17
17
  it 'should encode and decode correctly' do
18
- avro_turf = instance_double(AvroTurf)
19
- allow(avro_turf).to receive_messages(encode: 'encoded-payload', decode: payload)
20
- allow(backend).to receive(:avro_turf).and_return(avro_turf)
21
18
  results = backend.encode(payload)
22
- expect(results).to eq('encoded-payload')
19
+ expect(results).to start_with("Obj\u0001\u0004\u0014avro.codec\bnull\u0016avro.schema\x94")
23
20
  results = backend.decode(results)
24
21
  expect(results).to eq(payload)
25
- expect(avro_turf).to have_received(:encode).
26
- with(payload, schema_name: 'MySchema', namespace: 'com.my-namespace')
27
- expect(avro_turf).to have_received(:decode).
28
- with('encoded-payload', schema_name: 'MySchema', namespace: 'com.my-namespace')
29
22
  end
30
23
 
31
24
  end
@@ -15,17 +15,17 @@ RSpec.describe Deimos::SchemaBackends::AvroSchemaRegistry do
15
15
  it_should_behave_like 'an Avro backend'
16
16
 
17
17
  it 'should encode and decode correctly' do
18
- avro_turf = instance_double(AvroTurf::Messaging)
19
- allow(avro_turf).to receive_messages(encode: 'encoded-payload', decode: payload)
20
- allow(backend).to receive(:avro_turf_messaging).and_return(avro_turf)
18
+ schema_registry = instance_double(SchemaRegistry::Client)
19
+ allow(schema_registry).to receive_messages(encode: 'encoded-payload', decode: payload)
20
+ allow(backend).to receive(:schema_registry).and_return(schema_registry)
21
21
  results = backend.encode(payload, topic: 'topic')
22
22
  expect(results).to eq('encoded-payload')
23
23
  results = backend.decode(results)
24
24
  expect(results).to eq(payload)
25
- expect(avro_turf).to have_received(:encode).
26
- with(payload, schema_name: 'MySchema', subject: 'topic')
27
- expect(avro_turf).to have_received(:decode).
28
- with('encoded-payload', schema_name: 'MySchema')
25
+ expect(schema_registry).to have_received(:encode).
26
+ with(payload, schema_name: 'com.my-namespace.MySchema', subject: 'topic-value')
27
+ expect(schema_registry).to have_received(:decode).
28
+ with('encoded-payload')
29
29
  end
30
30
 
31
31
  end
@@ -6,13 +6,13 @@ describe Deimos::SchemaBackends::Base do
6
6
 
7
7
  it 'should validate on encode' do
8
8
  expect(backend).to receive(:validate).with(payload, schema: 'schema')
9
- expect(backend).to receive(:encode_payload).with(payload, schema: 'schema', topic: 'topic')
9
+ expect(backend).to receive(:encode_payload).with(payload, schema: 'schema', subject: 'topic-value')
10
10
  backend.encode(payload, topic: 'topic')
11
11
  end
12
12
 
13
13
  it 'should validate and encode a passed schema' do
14
14
  expect(backend).to receive(:validate).with(payload, schema: 'schema2')
15
- expect(backend).to receive(:encode_payload).with(payload, schema: 'schema2', topic: 'topic')
15
+ expect(backend).to receive(:encode_payload).with(payload, schema: 'schema2', subject: 'topic-value')
16
16
  backend.encode(payload, schema: 'schema2', topic: 'topic')
17
17
  end
18
18
 
@@ -2,6 +2,7 @@
2
2
 
3
3
  require 'deimos/schema_backends/proto_schema_registry'
4
4
  require_relative "#{__dir__}/../gen/sample/v1/sample_pb"
5
+ require_relative "#{__dir__}/../gen/sample/v1/sample_key_pb"
5
6
 
6
7
  RSpec.describe Deimos::SchemaBackends::ProtoSchemaRegistry do
7
8
  let(:payload) do
@@ -16,32 +17,234 @@ RSpec.describe Deimos::SchemaBackends::ProtoSchemaRegistry do
16
17
  str_map: { 'foo' => 'bar' }
17
18
  )
18
19
  end
20
+ let(:schema_registry) { instance_double(SchemaRegistry::Client) }
21
+ let(:key_schema_registry) { instance_double(SchemaRegistry::Client) }
19
22
 
20
23
  let(:backend) { described_class.new(schema: 'sample.v1.SampleMessage') }
21
24
 
22
- specify('#encode_key') do
23
- expect(backend.encode_key(nil, 789)).to eq('789')
24
- expect(backend.encode_key(nil, 'string')).to eq('string')
25
- expect(backend.encode_key(nil, { foo: 'bar' })).to eq('{"foo":"bar"}')
26
- expect(backend.encode_key(:foo, 'bar')).to eq('bar')
25
+ before(:each) do
26
+ allow(described_class).to receive_messages(schema_registry: schema_registry,
27
+ key_schema_registry: key_schema_registry)
27
28
  end
28
29
 
29
- specify('#decode_key') do
30
- expect(backend.decode_key('789', nil)).to eq(789)
31
- expect(backend.decode_key('{"foo":"bar"}', :foo)).to eq('bar')
32
- expect(backend.decode_key('{"foo":"bar"}', nil)).to eq({ 'foo' => 'bar' })
30
+ describe 'payload encoding and decoding' do
31
+ it 'should encode and decode payloads correctly' do
32
+ allow(schema_registry).to receive_messages(encode: 'encoded-payload', decode: payload)
33
+
34
+ # Encode
35
+ encoded = backend.encode(payload, topic: 'topic')
36
+ expect(encoded).to eq('encoded-payload')
37
+ expect(schema_registry).to have_received(:encode).with(payload, subject: 'topic-value')
38
+
39
+ # Decode
40
+ decoded = backend.decode(encoded)
41
+ expect(decoded).to eq(payload)
42
+ expect(schema_registry).to have_received(:decode).with('encoded-payload')
43
+ end
44
+
45
+ it 'should encode hash payloads by converting to protobuf message' do
46
+ hash_payload = {
47
+ str: 'string',
48
+ num: 123,
49
+ flag: true
50
+ }
51
+ allow(schema_registry).to receive(:encode).and_return('encoded-payload')
52
+
53
+ backend.encode(hash_payload, topic: 'topic')
54
+
55
+ # Should have converted hash to protobuf message before encoding
56
+ expect(schema_registry).to have_received(:encode) do |arg, **kwargs|
57
+ expect(arg).to be_a(Sample::V1::SampleMessage)
58
+ expect(arg.str).to eq('string')
59
+ expect(arg.num).to eq(123)
60
+ expect(arg.flag).to be(true)
61
+ expect(kwargs).to eq(subject: 'topic-value')
62
+ end
63
+ end
33
64
  end
34
65
 
35
- it 'should encode and decode correctly' do
36
- proto_turf = instance_double(ProtoTurf)
37
- allow(proto_turf).to receive_messages(encode: 'encoded-payload', decode: payload)
38
- allow(described_class).to receive(:proto_turf).and_return(proto_turf)
39
- results = backend.encode(payload, topic: 'topic')
40
- expect(results).to eq('encoded-payload')
41
- results = backend.decode(results)
42
- expect(results).to eq(payload)
43
- expect(proto_turf).to have_received(:encode).with(payload, subject: 'topic')
44
- expect(proto_turf).to have_received(:decode).with('encoded-payload')
66
+ describe 'key encoding with key_config field (auto-generated JSON schema)' do
67
+ it 'should encode a simple field key using JSON schema' do
68
+ key_hash = { 'str' => 'test-key' }
69
+
70
+ allow(key_schema_registry).to receive(:encode).and_return('encoded-key')
71
+
72
+ # encode_key is called from encode_proto_key with the hash
73
+ encoded = backend.encode_key('str', 'test-key', topic: 'my-topic')
74
+
75
+ expect(key_schema_registry).to have_received(:encode).with(
76
+ key_hash,
77
+ subject: 'my-topic-key',
78
+ schema_text: anything
79
+ )
80
+ expect(encoded).to eq('encoded-key')
81
+ end
82
+
83
+ it 'should encode a hash key with field extraction using JSON schema' do
84
+ full_payload = { 'str' => 'test-key', 'num' => 123 }
85
+ key_hash = { 'str' => 'test-key' }
86
+
87
+ allow(key_schema_registry).to receive(:encode).and_return('encoded-key')
88
+
89
+ encoded = backend.encode_key('str', full_payload, topic: 'my-topic')
90
+
91
+ expect(key_schema_registry).to have_received(:encode).with(
92
+ key_hash,
93
+ subject: 'my-topic-key',
94
+ schema_text: anything
95
+ )
96
+ expect(encoded).to eq('encoded-key')
97
+ end
98
+
99
+ it 'should decode a JSON schema key' do
100
+ decoded_json = { 'str' => 'test-key' }
101
+ allow(key_schema_registry).to receive(:decode).and_return(decoded_json)
102
+
103
+ result = backend.decode_key('encoded-key', 'str')
104
+
105
+ expect(key_schema_registry).to have_received(:decode).with('encoded-key')
106
+ expect(result).to eq('test-key')
107
+ end
108
+
109
+ it 'should decode a JSON schema key without field extraction' do
110
+ decoded_json = { 'str' => 'test-key', 'num' => 123 }
111
+ allow(key_schema_registry).to receive(:decode).and_return(decoded_json)
112
+
113
+ result = backend.decode_key('encoded-key', nil)
114
+
115
+ expect(key_schema_registry).to have_received(:decode).with('encoded-key')
116
+ expect(result).to eq(decoded_json)
117
+ end
45
118
  end
46
119
 
120
+ describe 'key encoding with key_config schema (separate Protobuf schema)' do
121
+ let(:key_backend) { described_class.new(schema: 'sample.v1.SampleMessageKey') }
122
+ let(:key_schema_registry_for_test) { instance_double(SchemaRegistry::Client) }
123
+ let(:schema_registry_for_test) { instance_double(SchemaRegistry::Client) }
124
+
125
+ before(:each) do
126
+ # For key_backend tests, mock both schema_registry instances
127
+ allow(described_class).to receive_messages(schema_registry: schema_registry_for_test,
128
+ key_schema_registry: key_schema_registry_for_test)
129
+ end
130
+
131
+ it 'should encode a protobuf key message using the key schema' do
132
+ key_msg = Sample::V1::SampleMessageKey.new(str: 'test-key')
133
+ # Since subject ends with '-key', encode_payload uses key_schema_registry
134
+ allow(key_schema_registry_for_test).to receive(:encode).and_return('encoded-key')
135
+
136
+ encoded = key_backend.encode(key_msg, topic: 'my-topic', is_key: true)
137
+
138
+ expect(key_schema_registry_for_test).to have_received(:encode).with(key_msg, subject: 'my-topic-key')
139
+ expect(encoded).to eq('encoded-key')
140
+ end
141
+
142
+ it 'should encode a hash key using the key schema' do
143
+ key_hash = { str: 'test-key' }
144
+ allow(key_schema_registry_for_test).to receive(:encode).and_return('encoded-key')
145
+
146
+ encoded = key_backend.encode(key_hash, topic: 'my-topic', is_key: true)
147
+
148
+ expect(key_schema_registry_for_test).to have_received(:encode) do |arg, **kwargs|
149
+ expect(arg).to be_a(Sample::V1::SampleMessageKey)
150
+ expect(arg.str).to eq('test-key')
151
+ expect(kwargs).to eq(subject: 'my-topic-key')
152
+ end
153
+ expect(encoded).to eq('encoded-key')
154
+ end
155
+
156
+ it 'should decode a protobuf key' do
157
+ key_msg = Sample::V1::SampleMessageKey.new(str: 'test-key')
158
+ allow(schema_registry_for_test).to receive(:decode).and_return(key_msg)
159
+
160
+ decoded = key_backend.decode('encoded-key')
161
+
162
+ expect(schema_registry_for_test).to have_received(:decode).with('encoded-key')
163
+ expect(decoded).to eq(key_msg)
164
+ end
165
+ end
166
+
167
+ describe 'backward compatibility with plain string/JSON keys' do
168
+ it 'should encode plain string keys as strings' do
169
+ expect(backend.encode_key(nil, 'simple-string', topic: 'my-topic')).to eq('simple-string')
170
+ end
171
+
172
+ it 'should encode integer keys as strings' do
173
+ expect(backend.encode_key(nil, 789, topic: 'my-topic')).to eq('789')
174
+ end
175
+
176
+ it 'should encode hash keys using JSON schema when no field specified' do
177
+ key_hash = { foo: 'bar', baz: 'qux' }
178
+ allow(key_schema_registry).to receive(:encode).and_return('encoded-json')
179
+
180
+ backend.encode_key(nil, key_hash, topic: 'my-topic')
181
+
182
+ # Should use encode_proto_key which calls key_schema_registry.encode
183
+ expect(key_schema_registry).to have_received(:encode)
184
+ end
185
+
186
+ it 'should decode JSON string keys to hashes' do
187
+ decoded_hash = { 'foo' => 'bar' }
188
+ allow(key_schema_registry).to receive(:decode).and_return(decoded_hash)
189
+
190
+ result = backend.decode_key('{"foo":"bar"}', nil)
191
+ expect(result).to eq(decoded_hash)
192
+ end
193
+
194
+ it 'should decode JSON keys with field extraction' do
195
+ decoded_hash = { 'foo' => 'bar', 'baz' => 'qux' }
196
+ allow(key_schema_registry).to receive(:decode).and_return(decoded_hash)
197
+
198
+ result = backend.decode_key('encoded-key', :foo)
199
+ expect(result).to eq('bar')
200
+ end
201
+ end
202
+
203
+ describe 'edge cases' do
204
+ it 'should handle nested field extraction for keys' do
205
+ # Use nested field from the actual schema: non_union_nested.nested_str
206
+ nested_payload = { 'non_union_nested' => { 'nested_str' => 'value', 'nested_num' => 123 }, 'str' => 'other' }
207
+ allow(key_schema_registry).to receive(:encode).and_return('encoded-key')
208
+
209
+ backend.encode_key('non_union_nested.nested_str', nested_payload, topic: 'my-topic')
210
+
211
+ # Should encode with just the nested_str field extracted from the nested message
212
+ expect(key_schema_registry).to have_received(:encode).with(
213
+ { 'nested_str' => 'value' },
214
+ subject: 'my-topic-key',
215
+ schema_text: anything
216
+ )
217
+ end
218
+
219
+ it 'should handle encoding payloads with subject suffix detection' do
220
+ allow(schema_registry).to receive(:encode).and_return('encoded-payload')
221
+ allow(key_schema_registry).to receive(:encode).and_return('encoded-payload')
222
+
223
+ # When subject ends with '-key', should use key_schema_registry
224
+ backend.encode_payload(payload, subject: 'my-topic-key')
225
+ expect(key_schema_registry).to have_received(:encode).with(payload, subject: 'my-topic-key')
226
+
227
+ # When subject doesn't end with '-key', should use schema_registry
228
+ backend.encode_payload(payload, subject: 'my-topic-value')
229
+ expect(schema_registry).to have_received(:encode).with(payload, subject: 'my-topic-value')
230
+ end
231
+
232
+ it 'should handle nil payloads gracefully' do
233
+ # encode with nil payload - schema_registry will handle the nil
234
+ allow(schema_registry).to receive(:encode).with(nil, subject: 'topic-value').and_return(nil)
235
+ expect(backend.encode(nil, topic: 'topic')).to be_nil
236
+
237
+ # decode returns nil for nil payload (base class checks this)
238
+ expect(backend.decode(nil)).to be_nil
239
+ end
240
+ end
241
+
242
+ describe 'schema field introspection' do
243
+ it 'should return schema fields from protobuf descriptor' do
244
+ fields = backend.schema_fields
245
+
246
+ expect(fields).to be_an(Array)
247
+ expect(fields.map(&:name)).to include('str', 'num', 'str_arr', 'flag', 'timestamp')
248
+ end
249
+ end
47
250
  end