deimos-ruby 2.3.0.pre.beta4 → 2.3.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (58):
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +7 -1
  4. data/README.md +12 -0
  5. data/deimos-ruby.gemspec +2 -2
  6. data/lib/deimos/active_record_consumer.rb +1 -0
  7. data/lib/deimos/active_record_producer.rb +11 -5
  8. data/lib/deimos/backends/kafka.rb +1 -1
  9. data/lib/deimos/backends/kafka_async.rb +2 -1
  10. data/lib/deimos/config/configuration.rb +1 -1
  11. data/lib/deimos/ext/producer_middleware.rb +2 -2
  12. data/lib/deimos/kafka_source.rb +1 -1
  13. data/lib/deimos/metrics/datadog.rb +3 -1
  14. data/lib/deimos/schema_backends/avro_base.rb +6 -4
  15. data/lib/deimos/schema_backends/avro_local.rb +12 -13
  16. data/lib/deimos/schema_backends/avro_schema_registry.rb +15 -14
  17. data/lib/deimos/schema_backends/avro_validation.rb +1 -1
  18. data/lib/deimos/schema_backends/base.rb +4 -5
  19. data/lib/deimos/schema_backends/mock.rb +1 -1
  20. data/lib/deimos/schema_backends/plain.rb +1 -1
  21. data/lib/deimos/schema_backends/proto_base.rb +11 -36
  22. data/lib/deimos/schema_backends/proto_local.rb +5 -5
  23. data/lib/deimos/schema_backends/proto_schema_registry.rb +7 -32
  24. data/lib/deimos/test_helpers.rb +8 -0
  25. data/lib/deimos/transcoder.rb +1 -1
  26. data/lib/deimos/utils/outbox_producer.rb +2 -2
  27. data/lib/deimos/version.rb +1 -1
  28. data/lib/deimos.rb +35 -15
  29. data/lib/generators/deimos/active_record_generator.rb +1 -1
  30. data/lib/generators/deimos/schema_class_generator.rb +3 -3
  31. data/lib/generators/deimos/v2_generator.rb +2 -2
  32. data/spec/active_record_batch_consumer_association_spec.rb +1 -1
  33. data/spec/deimos_spec.rb +32 -0
  34. data/spec/generators/schema_class_generator_spec.rb +4 -5
  35. data/spec/schema_backends/avro_base_shared.rb +1 -1
  36. data/spec/schema_backends/avro_local_spec.rb +8 -1
  37. data/spec/schema_backends/avro_schema_registry_spec.rb +7 -7
  38. data/spec/schema_backends/base_spec.rb +2 -2
  39. data/spec/schema_backends/proto_schema_registry_spec.rb +19 -222
  40. data/spec/snapshots/consumers-no-nest.snap +7 -7
  41. data/spec/snapshots/consumers.snap +7 -7
  42. data/spec/snapshots/consumers_and_producers-no-nest.snap +7 -7
  43. data/spec/snapshots/consumers_and_producers.snap +7 -7
  44. data/spec/snapshots/consumers_circular-no-nest.snap +7 -7
  45. data/spec/snapshots/consumers_circular.snap +7 -7
  46. data/spec/snapshots/consumers_complex_types-no-nest.snap +7 -7
  47. data/spec/snapshots/consumers_complex_types.snap +7 -7
  48. data/spec/snapshots/consumers_nested-no-nest.snap +7 -7
  49. data/spec/snapshots/consumers_nested.snap +7 -7
  50. data/spec/snapshots/namespace_folders.snap +7 -7
  51. data/spec/snapshots/namespace_map.snap +7 -7
  52. data/spec/snapshots/producers_with_key-no-nest.snap +7 -7
  53. data/spec/snapshots/producers_with_key.snap +7 -7
  54. data/spec/spec_helper.rb +1 -1
  55. metadata +35 -32
  56. data/CLAUDE.md +0 -270
  57. data/spec/gen/sample/v1/sample_key_pb.rb +0 -17
  58. data/spec/protos/sample/v1/sample_key.proto +0 -7
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: f8e48e6252ea21b5af3a7f2ec921f7a82db95e6286eaf52ac15f94ad8e1043d6
4
- data.tar.gz: 99bf9b5ecb064e644897e1ef2177704d460b9531a9fc51e551b8462f22fe1943
3
+ metadata.gz: ce1117d7a1d29304fcb0cafca935db90c4378ceaf9aa089dfcf9fa56fe5ea25d
4
+ data.tar.gz: 964f517ef411af2dcb3dbbc9f6e507d34fefcac6ef9321b8937d686bd3e307b7
5
5
  SHA512:
6
- metadata.gz: 7cf3edfdf000781943adb58eddc2e73e2fa2c8b7cede12cfbebc1407235af33459b44402602a9e26534187f81df9ed67a38029ffc917d0c94e4e0a8924645929
7
- data.tar.gz: da90d82a5919b02893b7877e668a287f3deb64c766f6b5a5543d6217b2782c14ab4b0c87f87f0dcad8e537905b5b480d53f756530eb1b4c6e187c0569f13b0f0
6
+ metadata.gz: 5684bbf8cc1e2a0831369b47f28ffb84795a3d21a47b1b11d159b2909e9af36fd83b1756a47a9902d04c792a0a389cb8d675e0451f50e75c7d8c3ccf50a389ac
7
+ data.tar.gz: 9946b5a3ceeae405778731c82e1bfcc24dfae89a19bb0b2553620446815b712abc81a2bb9303e31ecb1d8d7aa68c3856df6cb2c46c1264f3305e6dbe0e3b530b
data/.rubocop.yml CHANGED
@@ -4,7 +4,7 @@ plugins:
4
4
  - rubocop-rspec
5
5
 
6
6
  AllCops:
7
- TargetRubyVersion: 3.0
7
+ TargetRubyVersion: 2.5
8
8
  Exclude:
9
9
  - lib/deimos/monkey_patches/*.rb
10
10
  - spec/gen/**/*.rb
data/CHANGELOG.md CHANGED
@@ -7,7 +7,13 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## UNRELEASED
9
9
 
10
- - Major change: Switch from using `avro_turf` and `proto_turf` to use `schema_registry_client`, which handles both Avro and Protobuf.
10
+ # 2.3.1 - 2026-01-22
11
+
12
+ - Feature: Allow strings to be used in the `record_class` declaration.
13
+
14
+ # 2.3.0 - 2026-01-13
15
+
16
+ - Feature: Support broker setting per topic in producer configs.
11
17
 
12
18
  # 2.2.2 - 2025-11-7
13
19
 
data/README.md CHANGED
@@ -94,6 +94,8 @@ Currently we have the following possible schema backends:
94
94
  * Protobuf Schema Registry (use Protobuf with the Confluent Schema Registry)
95
95
  * Mock (no actual encoding/decoding).
96
96
 
97
+ Note that to use Protobuf, you must include the [proto_turf](https://github.com/flipp-oss/proto_turf) gem in your Gemfile.
98
+
97
99
  Other possible schemas could [JSONSchema](https://json-schema.org/), etc. Feel free to
98
100
  contribute!
99
101
 
@@ -139,6 +141,12 @@ end
139
141
 
140
142
  Note that if you are using Protobuf, you need to pass a Protobuf message object as the payload - you can't use a bare hash.
141
143
 
144
+ ## Multiple clusters
145
+
146
+ If you have topics that are being routed to different clusters via Karafka configs, you can continue to make use of Deimos producers without having to instantiate the producer itself. Instead of calling `MyProducer.produce(message)`, you can call `Deimos.producer_for('MyTopic').produce(message)`.
147
+
148
+ Deimos will keep around one producer per broker server (i.e. `bootstrap.servers` config) that it sees on startup.
149
+
142
150
  ## Auto-added Fields
143
151
 
144
152
  If your schema has a field called `message_id`, and the payload you give
@@ -390,6 +398,8 @@ class MyProducer < Deimos::ActiveRecordProducer
390
398
  # using the default functionality and don't need to override it)
391
399
  # by setting `refetch` to false. This will avoid extra database fetches.
392
400
  record_class Widget, refetch: false
401
+
402
+ # You can use a string here instead to avoid eager loading: record_class 'Widget'
393
403
 
394
404
  # Optionally override this if you want the message to be
395
405
  # sent even if fields that aren't in the schema are changed.
@@ -477,6 +487,7 @@ A sample consumer would look as follows:
477
487
  ```ruby
478
488
  class MyConsumer < Deimos::ActiveRecordConsumer
479
489
  record_class Widget
490
+ # or use a string: record_class 'Widget'
480
491
 
481
492
  # Optional override of the way to fetch records based on payload and
482
493
  # key. Default is to use the key to search the primary key of the table.
@@ -554,6 +565,7 @@ A sample batch consumer would look as follows:
554
565
  ```ruby
555
566
  class MyConsumer < Deimos::ActiveRecordConsumer
556
567
  record_class Widget
568
+ # or use a string: record_class 'Widget'
557
569
 
558
570
  # Controls whether the batch is compacted before consuming.
559
571
  # If true, only the last message for each unique key in a batch will be
data/deimos-ruby.gemspec CHANGED
@@ -17,10 +17,10 @@ Gem::Specification.new do |spec|
17
17
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
18
18
  spec.require_paths = ['lib']
19
19
 
20
+ spec.add_dependency('avro_turf', '>= 1.4', '< 2')
20
21
  spec.add_dependency('benchmark', '~> 0.5')
21
22
  spec.add_dependency('fig_tree', '~> 0.2.0')
22
23
  spec.add_dependency('karafka', '~> 2.0')
23
- spec.add_dependency('schema_registry_client')
24
24
  spec.add_dependency('sigurd', '>= 0.1.0', '< 1.0')
25
25
 
26
26
  spec.add_development_dependency('activerecord-import')
@@ -33,6 +33,7 @@ Gem::Specification.new do |spec|
33
33
  spec.add_development_dependency('guard-rubocop', '~> 1')
34
34
  spec.add_development_dependency('karafka-testing', '~> 2.0')
35
35
  spec.add_development_dependency('pg', '~> 1.1')
36
+ spec.add_development_dependency('proto_turf')
36
37
  spec.add_development_dependency('rails', '~> 8.0')
37
38
  spec.add_development_dependency('rake', '~> 13')
38
39
  spec.add_development_dependency('rspec', '~> 3')
@@ -41,7 +42,6 @@ Gem::Specification.new do |spec|
41
42
  spec.add_development_dependency('rspec-snapshot', '~> 2.0')
42
43
  spec.add_development_dependency('rubocop', '~> 1.0')
43
44
  spec.add_development_dependency('rubocop-rspec', '3.8')
44
- spec.add_development_dependency('schema_registry_client')
45
45
  spec.add_development_dependency('sord', '>= 5.0')
46
46
  spec.add_development_dependency('sqlite3', '~> 2.7')
47
47
  spec.add_development_dependency('steep', '~> 1.0')
@@ -72,6 +72,7 @@ module Deimos
72
72
  # Setup
73
73
  def initialize
74
74
  @klass = self.class.config[:record_class]
75
+ @klass = @klass.constantize if @klass.is_a?(String)
75
76
  @compacted = self.class.config[:compacted] != false
76
77
  end
77
78
 
@@ -13,18 +13,24 @@ module Deimos
13
13
  class ActiveRecordProducer < Producer
14
14
  class << self
15
15
  # Indicate the class this producer is working on.
16
- # @param klass [Class]
16
+ # @param klass [Class,String]
17
17
  # @param refetch [Boolean] if true, and we are given a hash instead of
18
18
  # a record object, refetch the record to pass into the `generate_payload`
19
19
  # method.
20
20
  # @return [void]
21
21
  def record_class(klass=nil, refetch: true)
22
- return @record_class if klass.nil?
22
+ return record_klass if klass.nil?
23
23
 
24
24
  @record_class = klass
25
25
  @refetch_record = refetch
26
26
  end
27
27
 
28
+ # @return [Class,nil]
29
+ def record_klass
30
+ @record_class = @record_class.constantize if @record_class.is_a?(String)
31
+ @record_class
32
+ end
33
+
28
34
  # @param record [ActiveRecord::Base]
29
35
  # @param force_send [Boolean]
30
36
  # @return [void]
@@ -38,14 +44,14 @@ module Deimos
38
44
  def send_events(records, force_send: false)
39
45
  return if Deimos.producers_disabled?(self)
40
46
 
41
- primary_key = @record_class&.primary_key
47
+ primary_key = record_klass&.primary_key
42
48
  messages = records.map do |record|
43
49
  if record.respond_to?(:attributes)
44
50
  attrs = record.attributes.with_indifferent_access
45
51
  else
46
52
  attrs = record.with_indifferent_access
47
53
  if @refetch_record && attrs[primary_key]
48
- record = @record_class.find(attrs[primary_key])
54
+ record = record_klass.find(attrs[primary_key])
49
55
  end
50
56
  end
51
57
  generate_payload(attrs, record).with_indifferent_access
@@ -96,7 +102,7 @@ module Deimos
96
102
  # than this value).
97
103
  # @return [ActiveRecord::Relation]
98
104
  def poll_query(time_from:, time_to:, min_id:, column_name: :updated_at)
99
- klass = @record_class
105
+ klass = record_klass
100
106
  table = ActiveRecord::Base.connection.quote_table_name(klass.table_name)
101
107
  column = ActiveRecord::Base.connection.quote_column_name(column_name)
102
108
  primary = ActiveRecord::Base.connection.quote_column_name(klass.primary_key)
@@ -6,7 +6,7 @@ module Deimos
6
6
  class Kafka < Base
7
7
  # :nodoc:
8
8
  def self.execute(producer_class:, messages:)
9
- Karafka.producer.produce_many_sync(messages)
9
+ Deimos.producer_for(producer_class.topic).produce_many_sync(messages)
10
10
  end
11
11
  end
12
12
  end
@@ -4,9 +4,10 @@ module Deimos
4
4
  module Backends
5
5
  # Backend which produces to Kafka via an async producer.
6
6
  class KafkaAsync < Base
7
+
7
8
  # :nodoc:
8
9
  def self.execute(producer_class:, messages:)
9
- Karafka.producer.produce_many_async(messages)
10
+ Deimos.producer_for(producer_class.topic).produce_many_async(messages)
10
11
  end
11
12
  end
12
13
  end
@@ -43,7 +43,7 @@ module Deimos
43
43
  'Please provide a directory.'
44
44
  end
45
45
 
46
- Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].
46
+ Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.
47
47
  each { |f| require f }
48
48
  rescue LoadError
49
49
  raise 'Cannot load schema classes. Please regenerate classes with' \
@@ -83,7 +83,7 @@ module Deimos
83
83
  nil
84
84
  else
85
85
  encoder.encode(message.payload,
86
- topic: "#{Deimos.config.producers.topic_prefix}#{config.name}")
86
+ topic: "#{Deimos.config.producers.topic_prefix}#{config.name}-value")
87
87
  end
88
88
  end
89
89
 
@@ -96,7 +96,7 @@ module Deimos
96
96
  if config.deserializers[:key].respond_to?(:encode_key)
97
97
  config.deserializers[:key].encode_key(key)
98
98
  elsif key
99
- config.deserializers[:payload].encode(key, is_key: true)
99
+ config.deserializers[:payload].encode(key)
100
100
  else
101
101
  key
102
102
  end
@@ -118,7 +118,7 @@ end
118
118
  unique_columns = column_names.map(&:to_s) -
119
119
  options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
120
120
  records = hashes_without_id.map do |hash|
121
- self.where(unique_columns.to_h { |c| [c, hash[c]] }).first
121
+ self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
122
122
  end
123
123
  self.kafka_producers.each { |p| p.send_events(records) }
124
124
  else
@@ -58,7 +58,9 @@ module Deimos
58
58
  end
59
59
  end
60
60
  Karafka::Setup::Config.setup if Karafka.producer.nil?
61
- Karafka.producer.monitor.subscribe(waterdrop_listener)
61
+ Deimos.waterdrop_producers.each do |producer|
62
+ producer.monitor.subscribe(waterdrop_listener)
63
+ end
62
64
  end
63
65
 
64
66
  # :nodoc:
@@ -1,7 +1,9 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'base'
4
- require 'schema_registry_client'
4
+ require 'avro'
5
+ require 'avro_turf'
6
+ require 'avro_turf/mutable_schema_store'
5
7
  require_relative 'avro_schema_coercer'
6
8
 
7
9
  module Deimos
@@ -13,7 +15,7 @@ module Deimos
13
15
  # @override
14
16
  def initialize(schema:, namespace:)
15
17
  super
16
- @schema_store = SchemaRegistry::AvroSchemaStore.new(path: Deimos.config.schema.path)
18
+ @schema_store = AvroTurf::MutableSchemaStore.new(path: Deimos.config.schema.path)
17
19
  end
18
20
 
19
21
  def supports_key_schemas?
@@ -29,7 +31,7 @@ module Deimos
29
31
  def encode_key(key_id, key, topic: nil)
30
32
  begin
31
33
  @key_schema ||= @schema_store.find("#{@schema}_key")
32
- rescue SchemaRegistry::SchemaNotFoundError
34
+ rescue AvroTurf::SchemaNotFoundError
33
35
  @key_schema = generate_key_schema(key_id)
34
36
  end
35
37
  field_name = _field_name_from_schema(@key_schema)
@@ -182,7 +184,7 @@ module Deimos
182
184
  # @return [Avro::Schema]
183
185
  def avro_schema(schema=nil)
184
186
  schema ||= @schema
185
- @schema_store.find("#{@namespace}.#{schema}")
187
+ @schema_store.find(schema, @namespace)
186
188
  end
187
189
 
188
190
  # @param value_schema [Hash]
@@ -8,24 +8,23 @@ module Deimos
8
8
  class AvroLocal < AvroBase
9
9
  # @override
10
10
  def decode_payload(payload, schema:)
11
- stream = StringIO.new(payload)
12
- schema = @schema_store.find("#{@namespace}.#{schema}")
13
- reader = Avro::IO::DatumReader.new(nil, schema)
14
- Avro::DataFile::Reader.new(stream, reader).first
11
+ avro_turf.decode(payload, schema_name: schema, namespace: @namespace)
15
12
  end
16
13
 
17
14
  # @override
18
- def encode_payload(payload, schema: nil, subject: nil)
19
- stream = StringIO.new
20
- schema = schema_store.find("#{@namespace}.#{schema}")
21
- writer = Avro::IO::DatumWriter.new(schema)
22
-
23
- dw = Avro::DataFile::Writer.new(stream, writer, schema)
24
- dw << payload.to_h
25
- dw.close
26
- stream.string
15
+ def encode_payload(payload, schema: nil, topic: nil)
16
+ avro_turf.encode(payload, schema_name: schema, namespace: @namespace)
27
17
  end
28
18
 
19
+ private
20
+
21
+ # @return [AvroTurf]
22
+ def avro_turf
23
+ @avro_turf ||= AvroTurf.new(
24
+ schemas_path: Deimos.config.schema.path,
25
+ schema_store: @schema_store
26
+ )
27
+ end
29
28
  end
30
29
  end
31
30
  end
@@ -1,6 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'avro_base'
4
+ require 'avro_turf/messaging'
4
5
 
5
6
  module Deimos
6
7
  module SchemaBackends
@@ -8,27 +9,27 @@ module Deimos
8
9
  class AvroSchemaRegistry < AvroBase
9
10
  # @override
10
11
  def decode_payload(payload, schema:)
11
- schema_registry.decode(payload.to_s)
12
+ avro_turf_messaging.decode(payload.to_s, schema_name: schema)
12
13
  end
13
14
 
14
15
  # @override
15
- def encode_payload(payload, schema: nil, subject: nil)
16
- schema_registry.encode(payload, subject: subject || schema, schema_name: "#{@namespace}.#{schema}")
16
+ def encode_payload(payload, schema: nil, topic: nil)
17
+ avro_turf_messaging.encode(payload, schema_name: schema, subject: topic || schema)
17
18
  end
18
19
 
19
20
  private
20
21
 
21
- # @return [SchemaRegistry::Client]
22
- def schema_registry
23
- @schema_registry ||= SchemaRegistry::Client.new(
24
- registry_url: Deimos.config.schema.registry_url,
25
- logger: Karafka.logger,
26
- user: Deimos.config.schema.user,
27
- password: Deimos.config.schema.password,
28
- schema_type: SchemaRegistry::Schema::Avro
29
- )
30
- SchemaRegistry.avro_schema_path = Deimos.config.schema.path
31
- @schema_registry
22
+ # @return [AvroTurf::Messaging]
23
+ def avro_turf_messaging
24
+ @avro_turf_messaging ||= AvroTurf::Messaging.new(
25
+ schema_store: @schema_store,
26
+ registry_url: Deimos.config.schema.registry_url,
27
+ schemas_path: Deimos.config.schema.path,
28
+ user: Deimos.config.schema.user,
29
+ password: Deimos.config.schema.password,
30
+ namespace: @namespace,
31
+ logger: Karafka.logger
32
+ )
32
33
  end
33
34
  end
34
35
  end
@@ -13,7 +13,7 @@ module Deimos
13
13
  end
14
14
 
15
15
  # @override
16
- def encode_payload(payload, schema: nil, subject: nil)
16
+ def encode_payload(payload, schema: nil, topic: nil)
17
17
  payload.to_h.with_indifferent_access.to_json
18
18
  end
19
19
  end
@@ -56,10 +56,9 @@ module Deimos
56
56
  # @param schema [String,Symbol]
57
57
  # @param topic [String]
58
58
  # @return [String]
59
- def encode(payload, schema: nil, topic: nil, is_key: false)
59
+ def encode(payload, schema: nil, topic: nil)
60
60
  validate(payload, schema: schema || @schema)
61
- subject = is_key ? "#{topic}-key" : "#{topic}-value"
62
- encode_payload(payload, schema: schema || @schema, subject: subject)
61
+ encode_payload(payload, schema: schema || @schema, topic: topic)
63
62
  end
64
63
 
65
64
  # Decode a payload with a schema. Public method.
@@ -115,9 +114,9 @@ module Deimos
115
114
  # Encode a payload. To be defined by subclass.
116
115
  # @param payload [Hash]
117
116
  # @param schema [String,Symbol]
118
- # @param subject [String]
117
+ # @param topic [String]
119
118
  # @return [String]
120
- def encode_payload(_payload, schema:, subject: nil)
119
+ def encode_payload(_payload, schema:, topic: nil)
121
120
  raise MissingImplementationError
122
121
  end
123
122
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, subject: nil)
18
+ def encode_payload(payload, schema:, topic: nil)
19
19
  payload.is_a?(String) ? 'payload-encoded' : payload.map { |k, v| [k, "encoded-#{v}"] }.to_json
20
20
  end
21
21
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, subject: nil)
18
+ def encode_payload(payload, schema:, topic: nil)
19
19
  payload.to_s
20
20
  end
21
21
 
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -24,51 +24,26 @@ module Deimos
24
24
  float: :float,
25
25
  message: :record
26
26
  }.freeze
27
-
28
27
  def proto_schema(schema=@schema)
29
- proto = Google::Protobuf::DescriptorPool.generated_pool.lookup(schema)
30
- if proto.nil?
31
- raise "Could not find Protobuf schema '#{schema}'."
32
- end
33
-
34
- proto
28
+ Google::Protobuf::DescriptorPool.generated_pool.lookup(schema)
35
29
  end
36
30
 
37
31
  # @override
38
32
  def encode_key(key_id, key, topic: nil)
39
- if key.respond_to?(:to_h)
40
- hash = if key_id
41
- key_id.to_s.split('.')[...-1].each do |k|
42
- key = key.with_indifferent_access[k]
43
- end
44
- key.to_h.with_indifferent_access.slice(key_id.split('.').last)
45
- else
46
- key.to_h.sort.to_h
47
- end
48
- self.encode_proto_key(hash, topic: topic, field: key_id)
49
- elsif key_id
50
- hash = { key_id.to_s.split('.').last => key }
51
- self.encode_proto_key(hash, topic: topic, field: key_id)
33
+ if key.is_a?(Hash)
34
+ key_id ? key.with_indifferent_access[key_id].to_s : key.sort.to_h.to_json
52
35
  else
53
36
  key.to_s
54
37
  end
55
38
  end
56
39
 
57
- # @param hash [Hash]
58
- # @return [String]
59
- def encode_proto_key(hash, topic: nil)
60
- hash.sort.to_h.to_json
61
- end
62
-
63
- def decode_proto_key(payload)
64
- JSON.parse(payload)
65
- rescue StandardError
66
- payload
67
- end
68
-
69
40
  # @override
70
41
  def decode_key(payload, key_id)
71
- val = decode_proto_key(payload)
42
+ val = begin
43
+ JSON.parse(payload)
44
+ rescue StandardError
45
+ payload
46
+ end
72
47
  key_id ? val[key_id.to_s] : val
73
48
  end
74
49
 
@@ -110,8 +85,8 @@ module Deimos
110
85
  :mock
111
86
  end
112
87
 
113
- def supports_key_schemas?
114
- false
88
+ def generate_key_schema(_field_name)
89
+ raise 'Protobuf cannot generate key schemas! Please use field_config :plain'
115
90
  end
116
91
 
117
92
  end
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -14,14 +14,14 @@ module Deimos
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, subject: nil)
17
+ def encode_payload(payload, schema: nil, topic: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
19
  proto_schema.msgclass.encode(msg)
20
20
  end
21
21
 
22
- # @return [SchemaRegistry::Client]
23
- def self.schema_registry
24
- @schema_registry ||= SchemaRegistry::Client.new(
22
+ # @return [ProtoTurf]
23
+ def self.proto_turf
24
+ @proto_turf ||= ProtoTurf.new(
25
25
  registry_url: Deimos.config.schema.registry_url,
26
26
  logger: Karafka.logger
27
27
  )
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -10,47 +10,22 @@ module Deimos
10
10
 
11
11
  # @override
12
12
  def decode_payload(payload, schema:)
13
- self.class.schema_registry.decode(payload)
13
+ self.class.proto_turf.decode(payload)
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, subject: nil)
17
+ def encode_payload(payload, schema: nil, topic: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
- encoder = subject&.ends_with?('-key') ? self.class.key_schema_registry : self.class.schema_registry
20
- encoder.encode(msg, subject: subject)
19
+ self.class.proto_turf.encode(msg, subject: topic)
21
20
  end
22
21
 
23
- # @override
24
- def encode_proto_key(key, topic: nil, field: nil)
25
- schema_text = SchemaRegistry::Output::JsonSchema.output(proto_schema.to_proto, path: field)
26
- self.class.key_schema_registry.encode(key, subject: "#{topic}-key", schema_text: schema_text)
27
- end
28
-
29
- # @override
30
- def decode_proto_key(payload)
31
- self.class.key_schema_registry.decode(payload)
32
- end
33
-
34
- # @return [SchemaRegistry::Client]
35
- def self.schema_registry
36
- @schema_registry ||= SchemaRegistry::Client.new(
22
+ # @return [ProtoTurf]
23
+ def self.proto_turf
24
+ @proto_turf ||= ProtoTurf.new(
37
25
  registry_url: Deimos.config.schema.registry_url,
38
- user: Deimos.config.schema.user,
39
- password: Deimos.config.schema.password,
40
26
  logger: Karafka.logger
41
27
  )
42
28
  end
43
-
44
- def self.key_schema_registry
45
- @key_schema_registry ||= SchemaRegistry::Client.new(
46
- registry_url: Deimos.config.schema.registry_url,
47
- user: Deimos.config.schema.user,
48
- password: Deimos.config.schema.password,
49
- logger: Karafka.logger,
50
- schema_type: SchemaRegistry::Schema::ProtoJsonSchema
51
- )
52
- end
53
-
54
29
  end
55
30
  end
56
31
  end
@@ -16,6 +16,14 @@ module Deimos
16
16
  def self.included(base)
17
17
  super
18
18
  base.include Karafka::Testing::RSpec::Helpers
19
+
20
+ # Ensure that we only use Karafka.producer, not the producers we set up for multi-broker
21
+ # configs. Only Karafka.producer works with Karafka test helpers.
22
+ RSpec.configure do |config|
23
+ config.before(:each) do
24
+ allow(Deimos).to receive(:producer_for).and_return(Karafka.producer)
25
+ end
26
+ end
19
27
  end
20
28
 
21
29
  # @return [Array<Hash>]
@@ -36,7 +36,7 @@ module Deimos
36
36
  if self.key_field
37
37
  self.backend.encode_key(self.key_field, key, topic: @topic)
38
38
  else
39
- self.backend.encode(key, topic: @topic, is_key: true)
39
+ self.backend.encode(key, topic: @topic)
40
40
  end
41
41
  end
42
42
 
@@ -205,9 +205,9 @@ module Deimos
205
205
  batch_size = batch.size
206
206
  current_index = 0
207
207
 
208
- batch[current_index..].in_groups_of(batch_size, false).each do |group|
208
+ batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
209
209
  @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
210
- Karafka.producer.produce_many_sync(group)
210
+ Deimos.producer_for(@current_topic).produce_many_sync(group)
211
211
  current_index += group.size
212
212
  @logger.info("Sent #{group.size} messages to #{@current_topic}")
213
213
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Deimos
4
- VERSION = '2.3.0-beta4'
4
+ VERSION = '2.3.1'
5
5
  end