deimos-ruby 2.3.0.pre.beta4 → 2.3.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (55)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +3 -1
  4. data/README.md +8 -0
  5. data/deimos-ruby.gemspec +2 -2
  6. data/lib/deimos/backends/kafka.rb +1 -1
  7. data/lib/deimos/backends/kafka_async.rb +2 -1
  8. data/lib/deimos/config/configuration.rb +1 -1
  9. data/lib/deimos/ext/producer_middleware.rb +2 -2
  10. data/lib/deimos/kafka_source.rb +1 -1
  11. data/lib/deimos/metrics/datadog.rb +3 -1
  12. data/lib/deimos/schema_backends/avro_base.rb +6 -4
  13. data/lib/deimos/schema_backends/avro_local.rb +12 -13
  14. data/lib/deimos/schema_backends/avro_schema_registry.rb +15 -14
  15. data/lib/deimos/schema_backends/avro_validation.rb +1 -1
  16. data/lib/deimos/schema_backends/base.rb +4 -5
  17. data/lib/deimos/schema_backends/mock.rb +1 -1
  18. data/lib/deimos/schema_backends/plain.rb +1 -1
  19. data/lib/deimos/schema_backends/proto_base.rb +11 -36
  20. data/lib/deimos/schema_backends/proto_local.rb +5 -5
  21. data/lib/deimos/schema_backends/proto_schema_registry.rb +7 -32
  22. data/lib/deimos/test_helpers.rb +8 -0
  23. data/lib/deimos/transcoder.rb +1 -1
  24. data/lib/deimos/utils/outbox_producer.rb +2 -2
  25. data/lib/deimos/version.rb +1 -1
  26. data/lib/deimos.rb +35 -15
  27. data/lib/generators/deimos/active_record_generator.rb +1 -1
  28. data/lib/generators/deimos/schema_class_generator.rb +3 -3
  29. data/lib/generators/deimos/v2_generator.rb +2 -2
  30. data/spec/deimos_spec.rb +32 -0
  31. data/spec/generators/schema_class_generator_spec.rb +4 -5
  32. data/spec/schema_backends/avro_base_shared.rb +1 -1
  33. data/spec/schema_backends/avro_local_spec.rb +8 -1
  34. data/spec/schema_backends/avro_schema_registry_spec.rb +7 -7
  35. data/spec/schema_backends/base_spec.rb +2 -2
  36. data/spec/schema_backends/proto_schema_registry_spec.rb +19 -222
  37. data/spec/snapshots/consumers-no-nest.snap +7 -7
  38. data/spec/snapshots/consumers.snap +7 -7
  39. data/spec/snapshots/consumers_and_producers-no-nest.snap +7 -7
  40. data/spec/snapshots/consumers_and_producers.snap +7 -7
  41. data/spec/snapshots/consumers_circular-no-nest.snap +7 -7
  42. data/spec/snapshots/consumers_circular.snap +7 -7
  43. data/spec/snapshots/consumers_complex_types-no-nest.snap +7 -7
  44. data/spec/snapshots/consumers_complex_types.snap +7 -7
  45. data/spec/snapshots/consumers_nested-no-nest.snap +7 -7
  46. data/spec/snapshots/consumers_nested.snap +7 -7
  47. data/spec/snapshots/namespace_folders.snap +7 -7
  48. data/spec/snapshots/namespace_map.snap +7 -7
  49. data/spec/snapshots/producers_with_key-no-nest.snap +7 -7
  50. data/spec/snapshots/producers_with_key.snap +7 -7
  51. data/spec/spec_helper.rb +1 -1
  52. metadata +35 -32
  53. data/CLAUDE.md +0 -270
  54. data/spec/gen/sample/v1/sample_key_pb.rb +0 -17
  55. data/spec/protos/sample/v1/sample_key.proto +0 -7
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: f8e48e6252ea21b5af3a7f2ec921f7a82db95e6286eaf52ac15f94ad8e1043d6
4
- data.tar.gz: 99bf9b5ecb064e644897e1ef2177704d460b9531a9fc51e551b8462f22fe1943
3
+ metadata.gz: 9811f05eec0d9b6777e2ed047a2e81b0a23bd770b52866bf8c8a79f84c8ec586
4
+ data.tar.gz: c84cba2ecf750f4349ba86e47ddab2d01a4dd8e5f8fd9ab008b3fcfeb14b1425
5
5
  SHA512:
6
- metadata.gz: 7cf3edfdf000781943adb58eddc2e73e2fa2c8b7cede12cfbebc1407235af33459b44402602a9e26534187f81df9ed67a38029ffc917d0c94e4e0a8924645929
7
- data.tar.gz: da90d82a5919b02893b7877e668a287f3deb64c766f6b5a5543d6217b2782c14ab4b0c87f87f0dcad8e537905b5b480d53f756530eb1b4c6e187c0569f13b0f0
6
+ metadata.gz: b8319ada2f2b715e750e7de5caf6c6117d6154da6277988e12851f420c69a2f3adc0ab30594a23bca86ec8ffa57ef2cc3e5ab9758421e26fbe91cf682ad28e04
7
+ data.tar.gz: 2174b8d776b547689422d6af381f423680d61df368be813e53f213eb8d084971b338a3382d8775e07ef324b9443fc1f5427fed834c54629d52c88728d016fd49
data/.rubocop.yml CHANGED
@@ -4,7 +4,7 @@ plugins:
4
4
  - rubocop-rspec
5
5
 
6
6
  AllCops:
7
- TargetRubyVersion: 3.0
7
+ TargetRubyVersion: 2.5
8
8
  Exclude:
9
9
  - lib/deimos/monkey_patches/*.rb
10
10
  - spec/gen/**/*.rb
data/CHANGELOG.md CHANGED
@@ -7,7 +7,9 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## UNRELEASED
9
9
 
10
- - Major change: Switch from using `avro_turf` and `proto_turf` to use `schema_registry_client`, which handles both Avro and Protobuf.
10
+ # 2.3.0 - 2026-01-13
11
+
12
+ - Feature: Support broker setting per topic in producer configs.
11
13
 
12
14
  # 2.2.2 - 2025-11-7
13
15
 
data/README.md CHANGED
@@ -94,6 +94,8 @@ Currently we have the following possible schema backends:
94
94
  * Protobuf Schema Registry (use Protobuf with the Confluent Schema Registry)
95
95
  * Mock (no actual encoding/decoding).
96
96
 
97
+ Note that to use Protobuf, you must include the [proto_turf](https://github.com/flipp-oss/proto_turf) gem in your Gemfile.
98
+
97
99
  Other possible schemas could [JSONSchema](https://json-schema.org/), etc. Feel free to
98
100
  contribute!
99
101
 
@@ -139,6 +141,12 @@ end
139
141
 
140
142
  Note that if you are using Protobuf, you need to pass a Protobuf message object as the payload - you can't use a bare hash.
141
143
 
144
+ ## Multiple clusters
145
+
146
+ If you have topics that are being routed to different clusters via Karafka configs, you can continue to make use of Deimos producers without having to instantiate the producer itself. Instead of calling `MyProducer.produce(message)`, you can call `Deimos.producer_for('MyTopic').produce(message)`.
147
+
148
+ Deimos will keep around one producer per broker server (i.e. `bootstrap.servers` config) that it sees on startup.
149
+
142
150
  ## Auto-added Fields
143
151
 
144
152
  If your schema has a field called `message_id`, and the payload you give
data/deimos-ruby.gemspec CHANGED
@@ -17,10 +17,10 @@ Gem::Specification.new do |spec|
17
17
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
18
18
  spec.require_paths = ['lib']
19
19
 
20
+ spec.add_dependency('avro_turf', '>= 1.4', '< 2')
20
21
  spec.add_dependency('benchmark', '~> 0.5')
21
22
  spec.add_dependency('fig_tree', '~> 0.2.0')
22
23
  spec.add_dependency('karafka', '~> 2.0')
23
- spec.add_dependency('schema_registry_client')
24
24
  spec.add_dependency('sigurd', '>= 0.1.0', '< 1.0')
25
25
 
26
26
  spec.add_development_dependency('activerecord-import')
@@ -33,6 +33,7 @@ Gem::Specification.new do |spec|
33
33
  spec.add_development_dependency('guard-rubocop', '~> 1')
34
34
  spec.add_development_dependency('karafka-testing', '~> 2.0')
35
35
  spec.add_development_dependency('pg', '~> 1.1')
36
+ spec.add_development_dependency('proto_turf')
36
37
  spec.add_development_dependency('rails', '~> 8.0')
37
38
  spec.add_development_dependency('rake', '~> 13')
38
39
  spec.add_development_dependency('rspec', '~> 3')
@@ -41,7 +42,6 @@ Gem::Specification.new do |spec|
41
42
  spec.add_development_dependency('rspec-snapshot', '~> 2.0')
42
43
  spec.add_development_dependency('rubocop', '~> 1.0')
43
44
  spec.add_development_dependency('rubocop-rspec', '3.8')
44
- spec.add_development_dependency('schema_registry_client')
45
45
  spec.add_development_dependency('sord', '>= 5.0')
46
46
  spec.add_development_dependency('sqlite3', '~> 2.7')
47
47
  spec.add_development_dependency('steep', '~> 1.0')
@@ -6,7 +6,7 @@ module Deimos
6
6
  class Kafka < Base
7
7
  # :nodoc:
8
8
  def self.execute(producer_class:, messages:)
9
- Karafka.producer.produce_many_sync(messages)
9
+ Deimos.producer_for(producer_class.topic).produce_many_sync(messages)
10
10
  end
11
11
  end
12
12
  end
@@ -4,9 +4,10 @@ module Deimos
4
4
  module Backends
5
5
  # Backend which produces to Kafka via an async producer.
6
6
  class KafkaAsync < Base
7
+
7
8
  # :nodoc:
8
9
  def self.execute(producer_class:, messages:)
9
- Karafka.producer.produce_many_async(messages)
10
+ Deimos.producer_for(producer_class.topic).produce_many_async(messages)
10
11
  end
11
12
  end
12
13
  end
@@ -43,7 +43,7 @@ module Deimos
43
43
  'Please provide a directory.'
44
44
  end
45
45
 
46
- Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].
46
+ Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.
47
47
  each { |f| require f }
48
48
  rescue LoadError
49
49
  raise 'Cannot load schema classes. Please regenerate classes with' \
@@ -83,7 +83,7 @@ module Deimos
83
83
  nil
84
84
  else
85
85
  encoder.encode(message.payload,
86
- topic: "#{Deimos.config.producers.topic_prefix}#{config.name}")
86
+ topic: "#{Deimos.config.producers.topic_prefix}#{config.name}-value")
87
87
  end
88
88
  end
89
89
 
@@ -96,7 +96,7 @@ module Deimos
96
96
  if config.deserializers[:key].respond_to?(:encode_key)
97
97
  config.deserializers[:key].encode_key(key)
98
98
  elsif key
99
- config.deserializers[:payload].encode(key, is_key: true)
99
+ config.deserializers[:payload].encode(key)
100
100
  else
101
101
  key
102
102
  end
@@ -118,7 +118,7 @@ end
118
118
  unique_columns = column_names.map(&:to_s) -
119
119
  options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
120
120
  records = hashes_without_id.map do |hash|
121
- self.where(unique_columns.to_h { |c| [c, hash[c]] }).first
121
+ self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
122
122
  end
123
123
  self.kafka_producers.each { |p| p.send_events(records) }
124
124
  else
@@ -58,7 +58,9 @@ module Deimos
58
58
  end
59
59
  end
60
60
  Karafka::Setup::Config.setup if Karafka.producer.nil?
61
- Karafka.producer.monitor.subscribe(waterdrop_listener)
61
+ Deimos.waterdrop_producers.each do |producer|
62
+ producer.monitor.subscribe(waterdrop_listener)
63
+ end
62
64
  end
63
65
 
64
66
  # :nodoc:
@@ -1,7 +1,9 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'base'
4
- require 'schema_registry_client'
4
+ require 'avro'
5
+ require 'avro_turf'
6
+ require 'avro_turf/mutable_schema_store'
5
7
  require_relative 'avro_schema_coercer'
6
8
 
7
9
  module Deimos
@@ -13,7 +15,7 @@ module Deimos
13
15
  # @override
14
16
  def initialize(schema:, namespace:)
15
17
  super
16
- @schema_store = SchemaRegistry::AvroSchemaStore.new(path: Deimos.config.schema.path)
18
+ @schema_store = AvroTurf::MutableSchemaStore.new(path: Deimos.config.schema.path)
17
19
  end
18
20
 
19
21
  def supports_key_schemas?
@@ -29,7 +31,7 @@ module Deimos
29
31
  def encode_key(key_id, key, topic: nil)
30
32
  begin
31
33
  @key_schema ||= @schema_store.find("#{@schema}_key")
32
- rescue SchemaRegistry::SchemaNotFoundError
34
+ rescue AvroTurf::SchemaNotFoundError
33
35
  @key_schema = generate_key_schema(key_id)
34
36
  end
35
37
  field_name = _field_name_from_schema(@key_schema)
@@ -182,7 +184,7 @@ module Deimos
182
184
  # @return [Avro::Schema]
183
185
  def avro_schema(schema=nil)
184
186
  schema ||= @schema
185
- @schema_store.find("#{@namespace}.#{schema}")
187
+ @schema_store.find(schema, @namespace)
186
188
  end
187
189
 
188
190
  # @param value_schema [Hash]
@@ -8,24 +8,23 @@ module Deimos
8
8
  class AvroLocal < AvroBase
9
9
  # @override
10
10
  def decode_payload(payload, schema:)
11
- stream = StringIO.new(payload)
12
- schema = @schema_store.find("#{@namespace}.#{schema}")
13
- reader = Avro::IO::DatumReader.new(nil, schema)
14
- Avro::DataFile::Reader.new(stream, reader).first
11
+ avro_turf.decode(payload, schema_name: schema, namespace: @namespace)
15
12
  end
16
13
 
17
14
  # @override
18
- def encode_payload(payload, schema: nil, subject: nil)
19
- stream = StringIO.new
20
- schema = schema_store.find("#{@namespace}.#{schema}")
21
- writer = Avro::IO::DatumWriter.new(schema)
22
-
23
- dw = Avro::DataFile::Writer.new(stream, writer, schema)
24
- dw << payload.to_h
25
- dw.close
26
- stream.string
15
+ def encode_payload(payload, schema: nil, topic: nil)
16
+ avro_turf.encode(payload, schema_name: schema, namespace: @namespace)
27
17
  end
28
18
 
19
+ private
20
+
21
+ # @return [AvroTurf]
22
+ def avro_turf
23
+ @avro_turf ||= AvroTurf.new(
24
+ schemas_path: Deimos.config.schema.path,
25
+ schema_store: @schema_store
26
+ )
27
+ end
29
28
  end
30
29
  end
31
30
  end
@@ -1,6 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'avro_base'
4
+ require 'avro_turf/messaging'
4
5
 
5
6
  module Deimos
6
7
  module SchemaBackends
@@ -8,27 +9,27 @@ module Deimos
8
9
  class AvroSchemaRegistry < AvroBase
9
10
  # @override
10
11
  def decode_payload(payload, schema:)
11
- schema_registry.decode(payload.to_s)
12
+ avro_turf_messaging.decode(payload.to_s, schema_name: schema)
12
13
  end
13
14
 
14
15
  # @override
15
- def encode_payload(payload, schema: nil, subject: nil)
16
- schema_registry.encode(payload, subject: subject || schema, schema_name: "#{@namespace}.#{schema}")
16
+ def encode_payload(payload, schema: nil, topic: nil)
17
+ avro_turf_messaging.encode(payload, schema_name: schema, subject: topic || schema)
17
18
  end
18
19
 
19
20
  private
20
21
 
21
- # @return [SchemaRegistry::Client]
22
- def schema_registry
23
- @schema_registry ||= SchemaRegistry::Client.new(
24
- registry_url: Deimos.config.schema.registry_url,
25
- logger: Karafka.logger,
26
- user: Deimos.config.schema.user,
27
- password: Deimos.config.schema.password,
28
- schema_type: SchemaRegistry::Schema::Avro
29
- )
30
- SchemaRegistry.avro_schema_path = Deimos.config.schema.path
31
- @schema_registry
22
+ # @return [AvroTurf::Messaging]
23
+ def avro_turf_messaging
24
+ @avro_turf_messaging ||= AvroTurf::Messaging.new(
25
+ schema_store: @schema_store,
26
+ registry_url: Deimos.config.schema.registry_url,
27
+ schemas_path: Deimos.config.schema.path,
28
+ user: Deimos.config.schema.user,
29
+ password: Deimos.config.schema.password,
30
+ namespace: @namespace,
31
+ logger: Karafka.logger
32
+ )
32
33
  end
33
34
  end
34
35
  end
@@ -13,7 +13,7 @@ module Deimos
13
13
  end
14
14
 
15
15
  # @override
16
- def encode_payload(payload, schema: nil, subject: nil)
16
+ def encode_payload(payload, schema: nil, topic: nil)
17
17
  payload.to_h.with_indifferent_access.to_json
18
18
  end
19
19
  end
@@ -56,10 +56,9 @@ module Deimos
56
56
  # @param schema [String,Symbol]
57
57
  # @param topic [String]
58
58
  # @return [String]
59
- def encode(payload, schema: nil, topic: nil, is_key: false)
59
+ def encode(payload, schema: nil, topic: nil)
60
60
  validate(payload, schema: schema || @schema)
61
- subject = is_key ? "#{topic}-key" : "#{topic}-value"
62
- encode_payload(payload, schema: schema || @schema, subject: subject)
61
+ encode_payload(payload, schema: schema || @schema, topic: topic)
63
62
  end
64
63
 
65
64
  # Decode a payload with a schema. Public method.
@@ -115,9 +114,9 @@ module Deimos
115
114
  # Encode a payload. To be defined by subclass.
116
115
  # @param payload [Hash]
117
116
  # @param schema [String,Symbol]
118
- # @param subject [String]
117
+ # @param topic [String]
119
118
  # @return [String]
120
- def encode_payload(_payload, schema:, subject: nil)
119
+ def encode_payload(_payload, schema:, topic: nil)
121
120
  raise MissingImplementationError
122
121
  end
123
122
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, subject: nil)
18
+ def encode_payload(payload, schema:, topic: nil)
19
19
  payload.is_a?(String) ? 'payload-encoded' : payload.map { |k, v| [k, "encoded-#{v}"] }.to_json
20
20
  end
21
21
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, subject: nil)
18
+ def encode_payload(payload, schema:, topic: nil)
19
19
  payload.to_s
20
20
  end
21
21
 
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -24,51 +24,26 @@ module Deimos
24
24
  float: :float,
25
25
  message: :record
26
26
  }.freeze
27
-
28
27
  def proto_schema(schema=@schema)
29
- proto = Google::Protobuf::DescriptorPool.generated_pool.lookup(schema)
30
- if proto.nil?
31
- raise "Could not find Protobuf schema '#{schema}'."
32
- end
33
-
34
- proto
28
+ Google::Protobuf::DescriptorPool.generated_pool.lookup(schema)
35
29
  end
36
30
 
37
31
  # @override
38
32
  def encode_key(key_id, key, topic: nil)
39
- if key.respond_to?(:to_h)
40
- hash = if key_id
41
- key_id.to_s.split('.')[...-1].each do |k|
42
- key = key.with_indifferent_access[k]
43
- end
44
- key.to_h.with_indifferent_access.slice(key_id.split('.').last)
45
- else
46
- key.to_h.sort.to_h
47
- end
48
- self.encode_proto_key(hash, topic: topic, field: key_id)
49
- elsif key_id
50
- hash = { key_id.to_s.split('.').last => key }
51
- self.encode_proto_key(hash, topic: topic, field: key_id)
33
+ if key.is_a?(Hash)
34
+ key_id ? key.with_indifferent_access[key_id].to_s : key.sort.to_h.to_json
52
35
  else
53
36
  key.to_s
54
37
  end
55
38
  end
56
39
 
57
- # @param hash [Hash]
58
- # @return [String]
59
- def encode_proto_key(hash, topic: nil)
60
- hash.sort.to_h.to_json
61
- end
62
-
63
- def decode_proto_key(payload)
64
- JSON.parse(payload)
65
- rescue StandardError
66
- payload
67
- end
68
-
69
40
  # @override
70
41
  def decode_key(payload, key_id)
71
- val = decode_proto_key(payload)
42
+ val = begin
43
+ JSON.parse(payload)
44
+ rescue StandardError
45
+ payload
46
+ end
72
47
  key_id ? val[key_id.to_s] : val
73
48
  end
74
49
 
@@ -110,8 +85,8 @@ module Deimos
110
85
  :mock
111
86
  end
112
87
 
113
- def supports_key_schemas?
114
- false
88
+ def generate_key_schema(_field_name)
89
+ raise 'Protobuf cannot generate key schemas! Please use field_config :plain'
115
90
  end
116
91
 
117
92
  end
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -14,14 +14,14 @@ module Deimos
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, subject: nil)
17
+ def encode_payload(payload, schema: nil, topic: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
19
  proto_schema.msgclass.encode(msg)
20
20
  end
21
21
 
22
- # @return [SchemaRegistry::Client]
23
- def self.schema_registry
24
- @schema_registry ||= SchemaRegistry::Client.new(
22
+ # @return [ProtoTurf]
23
+ def self.proto_turf
24
+ @proto_turf ||= ProtoTurf.new(
25
25
  registry_url: Deimos.config.schema.registry_url,
26
26
  logger: Karafka.logger
27
27
  )
@@ -1,7 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'proto_base'
4
- require 'schema_registry_client'
4
+ require 'proto_turf'
5
5
 
6
6
  module Deimos
7
7
  module SchemaBackends
@@ -10,47 +10,22 @@ module Deimos
10
10
 
11
11
  # @override
12
12
  def decode_payload(payload, schema:)
13
- self.class.schema_registry.decode(payload)
13
+ self.class.proto_turf.decode(payload)
14
14
  end
15
15
 
16
16
  # @override
17
- def encode_payload(payload, schema: nil, subject: nil)
17
+ def encode_payload(payload, schema: nil, topic: nil)
18
18
  msg = payload.is_a?(Hash) ? proto_schema.msgclass.new(**payload) : payload
19
- encoder = subject&.ends_with?('-key') ? self.class.key_schema_registry : self.class.schema_registry
20
- encoder.encode(msg, subject: subject)
19
+ self.class.proto_turf.encode(msg, subject: topic)
21
20
  end
22
21
 
23
- # @override
24
- def encode_proto_key(key, topic: nil, field: nil)
25
- schema_text = SchemaRegistry::Output::JsonSchema.output(proto_schema.to_proto, path: field)
26
- self.class.key_schema_registry.encode(key, subject: "#{topic}-key", schema_text: schema_text)
27
- end
28
-
29
- # @override
30
- def decode_proto_key(payload)
31
- self.class.key_schema_registry.decode(payload)
32
- end
33
-
34
- # @return [SchemaRegistry::Client]
35
- def self.schema_registry
36
- @schema_registry ||= SchemaRegistry::Client.new(
22
+ # @return [ProtoTurf]
23
+ def self.proto_turf
24
+ @proto_turf ||= ProtoTurf.new(
37
25
  registry_url: Deimos.config.schema.registry_url,
38
- user: Deimos.config.schema.user,
39
- password: Deimos.config.schema.password,
40
26
  logger: Karafka.logger
41
27
  )
42
28
  end
43
-
44
- def self.key_schema_registry
45
- @key_schema_registry ||= SchemaRegistry::Client.new(
46
- registry_url: Deimos.config.schema.registry_url,
47
- user: Deimos.config.schema.user,
48
- password: Deimos.config.schema.password,
49
- logger: Karafka.logger,
50
- schema_type: SchemaRegistry::Schema::ProtoJsonSchema
51
- )
52
- end
53
-
54
29
  end
55
30
  end
56
31
  end
@@ -16,6 +16,14 @@ module Deimos
16
16
  def self.included(base)
17
17
  super
18
18
  base.include Karafka::Testing::RSpec::Helpers
19
+
20
+ # Ensure that we only use Karafka.producer, not the producers we set up for multi-broker
21
+ # configs. Only Karafka.producer works with Karafka test helpers.
22
+ RSpec.configure do |config|
23
+ config.before(:each) do
24
+ allow(Deimos).to receive(:producer_for).and_return(Karafka.producer)
25
+ end
26
+ end
19
27
  end
20
28
 
21
29
  # @return [Array<Hash>]
@@ -36,7 +36,7 @@ module Deimos
36
36
  if self.key_field
37
37
  self.backend.encode_key(self.key_field, key, topic: @topic)
38
38
  else
39
- self.backend.encode(key, topic: @topic, is_key: true)
39
+ self.backend.encode(key, topic: @topic)
40
40
  end
41
41
  end
42
42
 
@@ -205,9 +205,9 @@ module Deimos
205
205
  batch_size = batch.size
206
206
  current_index = 0
207
207
 
208
- batch[current_index..].in_groups_of(batch_size, false).each do |group|
208
+ batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
209
209
  @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
210
- Karafka.producer.produce_many_sync(group)
210
+ Deimos.producer_for(@current_topic).produce_many_sync(group)
211
211
  current_index += group.size
212
212
  @logger.info("Sent #{group.size} messages to #{@current_topic}")
213
213
  end
@@ -1,5 +1,5 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  module Deimos
4
- VERSION = '2.3.0-beta4'
4
+ VERSION = '2.3.0'
5
5
  end
data/lib/deimos.rb CHANGED
@@ -142,23 +142,34 @@ module Deimos
142
142
  signal_handler.run!
143
143
  end
144
144
 
145
- def setup_karafka
146
- Karafka.producer.middleware.append(Deimos::ProducerMiddleware)
147
- # for multiple setup calls
148
- Karafka.producer.config.kafka =
149
- Karafka::Setup::AttributesMap.producer(Karafka::Setup::Config.config.kafka.dup)
150
- EVENT_TYPES.each { |type| Karafka.monitor.notifications_bus.register_event(type) }
151
-
152
- Karafka.producer.monitor.subscribe(ProducerMetricsListener.new)
145
+ def setup_producers
146
+ @producers = {}
147
+ producers_by_broker = {}
148
+ Deimos.karafka_configs.each do |topic|
149
+ broker = topic.kafka[:'bootstrap.servers']
150
+ producers_by_broker[broker] ||= ::WaterDrop::Producer.new do |p_config|
151
+ config_hash = Karafka::Setup::Config.config.kafka.merge(topic.kafka)
152
+ p_config.kafka = Karafka::Setup::AttributesMap.producer(config_hash)
153
+ end
154
+ @producers[topic.name] = producers_by_broker[broker]
155
+ end
156
+ end
153
157
 
154
- Karafka.producer.monitor.subscribe('error.occurred') do |event|
155
- if event.payload.key?(:messages)
156
- topic = event[:messages].first[:topic]
157
- config = Deimos.karafka_config_for(topic: topic)
158
- message = Deimos::Logging.messages_log_text(config&.payload_log, event[:messages])
159
- Karafka.logger.error("Error producing messages: #{event[:error].message} #{message.to_json}")
158
+ def setup_karafka
159
+ setup_producers
160
+ waterdrop_producers.each do |producer|
161
+ producer.middleware.append(Deimos::ProducerMiddleware)
162
+ producer.monitor.subscribe(ProducerMetricsListener.new)
163
+ producer.monitor.subscribe('error.occurred') do |event|
164
+ if event.payload.key?(:messages)
165
+ topic = event[:messages].first[:topic]
166
+ config = Deimos.karafka_config_for(topic: topic)
167
+ message = Deimos::Logging.messages_log_text(config&.payload_log, event[:messages])
168
+ Karafka.logger.error("Error producing messages: #{event[:error].message} #{message.to_json}")
169
+ end
160
170
  end
161
171
  end
172
+ EVENT_TYPES.each { |type| Karafka.monitor.notifications_bus.register_event(type) }
162
173
  end
163
174
 
164
175
  # @return [Array<Karafka::Routing::Topic]
@@ -176,7 +187,16 @@ module Deimos
176
187
  end
177
188
  end
178
189
 
179
- # @param handler_class [Class]
190
+ # @return [Array<::WaterDrop::Producer>]
191
+ def waterdrop_producers
192
+ (@producers.values + [Karafka.producer]).uniq
193
+ end
194
+
195
+ # @param topic [String]
196
+ def producer_for(topic)
197
+ @producers[topic] || Karafka.producer
198
+ end
199
+
180
200
  # @return [String,nil]
181
201
  def topic_for_consumer(handler_class)
182
202
  Deimos.karafka_configs.each do |topic|
@@ -48,7 +48,7 @@ module Deimos
48
48
  # @return [String]
49
49
  def schema
50
50
  last_dot = self.full_schema.rindex('.')
51
- self.full_schema[(last_dot + 1)..]
51
+ self.full_schema[(last_dot + 1)..-1]
52
52
  end
53
53
 
54
54
  # @return [String]