deimos-ruby 2.2.3.pre.beta2 → 2.3.0.pre.beta1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (41)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +1 -1
  4. data/CLAUDE.md +270 -0
  5. data/README.md +0 -2
  6. data/deimos-ruby.gemspec +2 -2
  7. data/lib/deimos/backends/kafka.rb +1 -1
  8. data/lib/deimos/backends/kafka_async.rb +1 -2
  9. data/lib/deimos/config/configuration.rb +1 -1
  10. data/lib/deimos/ext/producer_middleware.rb +2 -2
  11. data/lib/deimos/kafka_source.rb +1 -1
  12. data/lib/deimos/metrics/datadog.rb +1 -3
  13. data/lib/deimos/schema_backends/avro_base.rb +4 -6
  14. data/lib/deimos/schema_backends/avro_local.rb +13 -12
  15. data/lib/deimos/schema_backends/avro_schema_registry.rb +14 -15
  16. data/lib/deimos/schema_backends/avro_validation.rb +1 -1
  17. data/lib/deimos/schema_backends/base.rb +5 -4
  18. data/lib/deimos/schema_backends/mock.rb +1 -1
  19. data/lib/deimos/schema_backends/plain.rb +1 -1
  20. data/lib/deimos/schema_backends/proto_base.rb +34 -10
  21. data/lib/deimos/schema_backends/proto_local.rb +5 -5
  22. data/lib/deimos/schema_backends/proto_schema_registry.rb +27 -6
  23. data/lib/deimos/test_helpers.rb +0 -5
  24. data/lib/deimos/transcoder.rb +1 -1
  25. data/lib/deimos/utils/outbox_producer.rb +2 -2
  26. data/lib/deimos/version.rb +1 -1
  27. data/lib/deimos.rb +15 -35
  28. data/lib/generators/deimos/active_record_generator.rb +1 -1
  29. data/lib/generators/deimos/schema_class_generator.rb +3 -3
  30. data/lib/generators/deimos/v2_generator.rb +2 -2
  31. data/spec/deimos_spec.rb +0 -32
  32. data/spec/gen/sample/v1/sample_key_pb.rb +17 -0
  33. data/spec/generators/schema_class_generator_spec.rb +1 -1
  34. data/spec/protos/sample/v1/sample_key.proto +7 -0
  35. data/spec/schema_backends/avro_base_shared.rb +1 -1
  36. data/spec/schema_backends/avro_local_spec.rb +1 -8
  37. data/spec/schema_backends/avro_schema_registry_spec.rb +7 -7
  38. data/spec/schema_backends/base_spec.rb +2 -2
  39. data/spec/schema_backends/proto_schema_registry_spec.rb +222 -19
  40. data/spec/spec_helper.rb +1 -1
  41. metadata +32 -35
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: bf4ddbfc7afab6d397364f0108d2eb845e2b646e8615783d2376b92edc34d380
- data.tar.gz: 5f3a404983d2dfa8f0c5324d8ba4d728b1f976940b9242756d81fcc53a6a6cab
+ metadata.gz: fa14b17ba7b1cef1f42545415c749c2ae6681aaae7d65497a80820ede0d7d506
+ data.tar.gz: 238d530e1be139a8985a27f29dde91572dc361fc43ab1e0a8a3fc479a2eaf2ab
  SHA512:
- metadata.gz: db87d1ed05ccae6430adfd1749902b319e53b7039f29512fb8335cceef4395e4ad26ad0a6437b87ec622f301923a3b27f9855471f3b8a1b5c5a86e4991d51dc0
- data.tar.gz: 996d61652323b29bf40152ccb9fb71c74b050948bc1363b0d1a26191e8595240a20344b6256a0cfa6821ae31c7f1ee64501b54bfb071564bb06abc81e831a790
+ metadata.gz: 3ce9fa60268a647866e0f88253e300bf8a02ff675f8b05380815a593d79911e22864648c53602eab493e3900f96d0403e11db54a1af873f45347a06e77a32821
+ data.tar.gz: 8a14f011128bfc9c5ed1693a012dc6aad3dfe2a344f56e6a46c0c18e77d9f8f3e3d0163704ffa562e9feee4417865a81c5a126dc6147832259b87a240a600993
data/.rubocop.yml CHANGED
@@ -4,7 +4,7 @@ plugins:
  - rubocop-rspec

  AllCops:
-   TargetRubyVersion: 2.5
+   TargetRubyVersion: 3.0
    Exclude:
      - lib/deimos/monkey_patches/*.rb
      - spec/gen/**/*.rb
data/CHANGELOG.md CHANGED
@@ -7,7 +7,7 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0

  ## UNRELEASED

- - Feature: Support broker setting per topic in producer configs.
+ - Major change: Switch from using `avro_turf` and `proto_turf` to use `schema_registry_client`, which handles both Avro and Protobuf.

  # 2.2.2 - 2025-11-7

data/CLAUDE.md ADDED
@@ -0,0 +1,270 @@
+ # CLAUDE.md
+
+ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
+
+ ## Overview
+
+ Deimos is a Ruby framework for Kafka development that marries Kafka with schema definitions (Avro/Protobuf) and ActiveRecord, and provides a comprehensive toolbox. Built on top of Karafka (which itself builds on RDKafka), it provides schema encoding/decoding, database integration, metrics, tracing, and test helpers.
+
+ ## Development Commands
+
+ ### Testing
+
+ ```bash
+ # Run all tests
+ bundle exec rspec
+
+ # Run a single test file
+ bundle exec rspec spec/path/to/file_spec.rb
+
+ # Run a specific test
+ bundle exec rspec spec/path/to/file_spec.rb:LINE_NUMBER
+ ```
+
+ ### Linting
+
+ ```bash
+ # Run the RuboCop linter
+ bundle exec rubocop
+
+ # Auto-correct issues
+ bundle exec rubocop -a
+ ```
+
+ ### Schema Class Generation
+
+ When modifying schema-related code, you may need to regenerate the test schema classes:
+
+ ```bash
+ # Regenerate test schema classes (used after schema changes)
+ bundle exec ./regenerate_test_schema_classes.rb
+
+ # Generate Protobuf classes
+ protoc -I spec/protos --ruby_out=spec/gen --ruby_opt=paths=source_relative spec/protos/**/*.proto
+ ```
+
+ ### Rake Tasks
+
+ ```bash
+ # Start the Deimos consumer (in a Rails environment)
+ rake deimos:start
+
+ # Start the outbox backend producer
+ rake deimos:outbox
+
+ # Start the database poller
+ rake deimos:db_poller
+
+ # Generate schema classes
+ rake deimos:generate_schema_classes
+ ```
+
+ ## Architecture
+
+ ### Layer Structure
+
+ Deimos is built in layers:
+ 1. **RDKafka** - Low-level Kafka client providing producer/consumer APIs
+ 2. **Karafka** - Threaded consumer wrapper with lifecycle management
+ 3. **Deimos** - Full framework with schema integration, database features, metrics, and utilities
+
+ ### Key Directories
+
+ - `lib/deimos/` - Core Deimos code
+ - `lib/deimos/backends/` - Producer backends (kafka, kafka_async, outbox, test)
+ - `lib/deimos/schema_backends/` - Schema handlers (Avro local/registry/validation, Protobuf, plain, mock)
+ - `lib/deimos/metrics/` - Metrics providers (DataDog, mock)
+ - `lib/deimos/tracing/` - Tracing providers (DataDog, mock)
+ - `lib/deimos/utils/` - Utilities (DB poller, outbox producer, etc.)
+ - `lib/deimos/config/` - Configuration classes
+ - `lib/deimos/ext/` - Extensions to Karafka routing
+ - `lib/generators/` - Rails generators for migrations
+ - `lib/tasks/` - Rake tasks
+
+ ### Core Concepts
+
+ #### Schema Backends
+
+ Schema backends encode/decode payloads. All backends must implement:
+ - `encode(payload, topic:)` - Encode the payload to a binary/string format
+ - `decode(payload)` - Decode a binary/string payload to a hash
+ - `validate(payload)` - Validate the payload against the schema
+ - `coerce(payload)` - Coerce the payload to match schema types
+ - `schema_fields` - List the fields in the schema (used with ActiveRecord)
+ - A `mock` variant of the backend, for testing
+
+ Available backends: `:avro_local`, `:avro_schema_registry`, `:avro_validation`, `:proto_schema_registry`, `:proto_local`, `:mock`, `:plain`
+
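For orientation, a minimal custom backend might look like the following. This is a hypothetical sketch, not part of the gem; the `encode_payload`/`decode_payload` signatures follow `lib/deimos/schema_backends/base.rb` as changed in this diff, while the class name and bodies are illustrative.

```ruby
require 'json'

module Deimos
  module SchemaBackends
    # Hypothetical pass-through backend that treats payloads as JSON.
    class JsonPassthrough < Base
      # @override - signature matches base.rb in this release
      def encode_payload(payload, schema:, subject: nil)
        payload.to_h.to_json
      end

      # @override
      def decode_payload(payload, schema:)
        JSON.parse(payload)
      end

      # No-op validation and coercion for this sketch.
      def validate(_payload, schema:); end

      def coerce(payload)
        payload
      end

      def schema_fields
        []
      end
    end
  end
end
```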
+ #### Producer Backends
+
+ Producer backends determine how messages are sent. All backends inherit from `Deimos::Backends::Base` and implement `self.execute(producer_class:, messages:)`.
+
+ Available backends:
+ - `:kafka` - Send directly to Kafka (default)
+ - `:kafka_async` - Async variant of the kafka backend
+ - `:outbox` - Transactional outbox pattern (save to DB, send async)
+ - `:test` - For testing (stores messages in memory)
+
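A custom backend can be very small. The sketch below is hypothetical; the `self.execute(producer_class:, messages:)` signature is taken from the kafka backend shown later in this diff, and the logging body is illustrative.

```ruby
module Deimos
  module Backends
    # Hypothetical backend that logs messages instead of sending them.
    class Logging < Base
      # :nodoc:
      def self.execute(producer_class:, messages:)
        messages.each do |message|
          Karafka.logger.info("Would send to #{message[:topic]}: #{message[:payload].inspect}")
        end
      end
    end
  end
end
```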
+ #### Consumer Types
+
+ - `Deimos::Consumer` - Base consumer class
+   - Per-message: Override `consume_message(message)` and set `each_message true`
+   - Batch: Override `consume_batch` (receives a `messages` collection)
+
+ - `Deimos::ActiveRecordConsumer` - Automatically saves/updates ActiveRecord models
+   - Per-message mode: Uses `fetch_record`, `assign_key`, `destroy_record`
+   - Batch mode: Uses `activerecord-import` for bulk operations
+   - Override `record_attributes(payload, key)` to customize attributes
+
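A minimal per-message consumer, as a sketch (the class name and body are illustrative; `each_message true` is set in the topic's routing config, per the Karafka Integration section below):

```ruby
class MyConsumer < Deimos::Consumer
  # Called once per decoded message when the topic sets `each_message true`.
  def consume_message(message)
    Karafka.logger.info("Consumed #{message.payload.inspect} (key: #{message.key.inspect})")
  end
end
```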
+ #### Producers
+
+ - `Deimos::Producer` - Base producer class
+   - Call `self.produce([{payload: ..., key: ..., topic: ...}])`
+   - Override `partition_key(payload)` for custom partitioning
+
+ - `Deimos::ActiveRecordProducer` - Produces from ActiveRecord models
+   - Set `record_class Widget, refetch: false`
+   - Override `generate_payload(attributes, record)` to customize the payload
+   - Override `watched_attributes(record)` to add non-schema fields
+
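A minimal producer, as a sketch. The `produce` call form is from the list above; the class name, topic, and payload shape are illustrative:

```ruby
class MyProducer < Deimos::Producer
  # Convenience wrapper around the produce API shown above.
  def self.send_widget(widget)
    produce([{ payload: { 'id' => widget.id, 'name' => widget.name },
               key: widget.id,
               topic: 'my-topic' }])
  end
end
```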
+ #### Key Configuration
+
+ Every producer must define `key_config`:
+ - `key_config none: true` - No keys (events)
+ - `key_config plain: true` - Unencoded keys (legacy)
+ - `key_config schema: 'MySchema-key'` - Use an existing key schema
+ - `key_config field: 'my_id'` - Auto-generate a key schema from a value field
+
+ #### KafkaSource Mixin
+
+ The `Deimos::KafkaSource` mixin adds callbacks to ActiveRecord models to automatically send Kafka messages on save/destroy:
+
+ ```ruby
+ class Widget < ActiveRecord::Base
+   include Deimos::KafkaSource
+
+   def self.kafka_producers
+     [MyProducer]
+   end
+
+   def self.kafka_config
+     { update: true, delete: true, import: true, create: true }
+   end
+ end
+ ```
+
+ #### Outbox Pattern (Database Backend)
+
+ The outbox pattern provides transactional guarantees:
+ 1. Messages are validated, encoded, and saved to the `kafka_messages` table
+ 2. A separate thread pool (via `Deimos::Utils::OutboxProducer`) reads from the DB and sends to Kafka
+ 3. Uses the `kafka_topic_info` table for topic-level locking
+ 4. Runs via `rake deimos:outbox` or `Deimos.start_outbox_backend!(thread_count: N)`
+
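Putting the two halves together, a sketch (the config setting and the `start_outbox_backend!` call are both from this file; the thread count is illustrative):

```ruby
# 1. Route produced messages into the kafka_messages table.
Deimos.configure do |config|
  config.producers.backend = :outbox
end

# 2. In a separate process, drain the outbox and send to Kafka
#    (equivalent to `rake deimos:outbox`).
Deimos.start_outbox_backend!(thread_count: 2)
```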
+ #### Database Poller
+
+ Polls database tables and produces messages:
+ 1. Configure with `Deimos.configure { db_poller { producer_class 'MyProducer' } }`
+ 2. Two modes:
+    - Time-based (default): Uses the `updated_at` and `id` columns
+    - State-based: Updates a state column after publishing
+ 3. Tracks progress in the `poll_info` table
+ 4. Runs via `rake deimos:db_poller`
+
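An expanded block form of the one-liner in step 1. Note that only `producer_class` appears in this file; `run_every` and `mode` are assumed setting names and may differ in `lib/deimos/config/configuration.rb`:

```ruby
Deimos.configure do
  db_poller do
    producer_class 'MyProducer'
    run_every 60      # assumed setting: poll interval in seconds
    mode :time_based  # assumed setting: :time_based (default) or :state_based
  end
end
```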
+ ### Configuration
+
+ Configuration uses the `fig_tree` gem. See `lib/deimos/config/configuration.rb` for the schema. Configure via:
+
+ ```ruby
+ Deimos.configure do |config|
+   config.producers.backend = :outbox
+   config.schema.backend = :avro_schema_registry
+   config.schema.registry_url = 'http://localhost:8081'
+ end
+ ```
+
+ ### Karafka Integration
+
+ Deimos extends Karafka routing with schema configuration:
+
+ ```ruby
+ Karafka::App.routes.draw do
+   topic 'my-topic' do
+     consumer MyConsumer
+     schema 'MySchema'
+     namespace 'com.my-namespace'
+     key_config field: 'test_id'
+   end
+ end
+ ```
+
+ The `lib/deimos/ext/` directory contains the routing extensions that add schema-aware capabilities to Karafka's routing DSL.
+
+ ### Shared Configuration
+
+ Both producers and consumers use the `SharedConfig` module to standardize schema settings, topic configuration, key handling, etc.
+
+ ### Testing
+
+ Test helpers in `lib/deimos/test_helpers.rb` provide:
+ - `test_consume_message(consumer, payload)` - Test message consumption
+ - `test_consume_batch(consumer, payloads)` - Test batch consumption
+ - `expect(topic).to have_sent(payload, key, partition_key, headers)` - Assert messages were sent
+ - `Deimos::TestHelpers.sent_messages` - Inspect sent messages
+
+ Configure for tests:
+ ```ruby
+ Deimos.config.schema.backend = :avro_validation # Validates but doesn't encode
+ Deimos.config.producers.backend = :test # Stores in memory
+ ```
+
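A hypothetical spec tying these helpers together (the consumer/producer names, topic, and payloads are illustrative; the helper calls follow the list above):

```ruby
RSpec.describe MyConsumer do
  include Deimos::TestHelpers

  it 'consumes a message' do
    test_consume_message(described_class, { 'id' => 1, 'name' => 'widget' })
  end

  it 'sends a message' do
    MyProducer.produce([{ payload: { 'id' => 1 }, topic: 'my-topic' }])
    expect('my-topic').to have_sent({ 'id' => 1 })
  end
end
```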
+ ### Instrumentation
+
+ Deimos sends events through Karafka's instrumentation monitor:
+ - `deimos.encode_message` - Schema encoding
+ - `deimos.outbox.produce` - Outbox messages processed
+ - `deimos.ar_consumer.consume_batch` - ActiveRecord batch consumption
+ - `deimos.batch_consumption.valid_records` - Valid records upserted
+ - `deimos.batch_consumption.invalid_records` - Invalid records rejected
+
+ Subscribe with: `Karafka.monitor.subscribe('event.name') { |event| ... }`
+
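For example (the event name is from the list above; the handler body is illustrative):

```ruby
Karafka.monitor.subscribe('deimos.encode_message') do |event|
  # event.payload carries the instrumentation details for this event.
  Karafka.logger.debug("deimos.encode_message: #{event.payload.inspect}")
end
```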
+ ## Database Integration
+
+ Deimos uses three database tables (created via generators):
+
+ - `kafka_messages` - Stores outbox messages (topic, key, payload, partition_key)
+ - `kafka_topic_info` - Locks topics for outbox processing
+ - `poll_info` - Tracks database poller progress
+
+ ## Special Considerations
+
+ ### Schema Changes
+
+ When adding or modifying schemas:
+ 1. Update the schema files in the configured schema path
+ 2. Run `rake deimos:generate_schema_classes` if using schema classes
+ 3. Regenerate the test schemas with `./regenerate_test_schema_classes.rb`
+
+ ### Protobuf
+
+ - Requires the `schema_registry_client` gem in your Gemfile
+ - Protobuf payloads must be Protobuf message objects, not hashes
+ - Protobuf should NOT be used for keys (unstable binary encoding)
+
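To illustrate the second point (the message class here is a hypothetical generated `Google::Protobuf` class, not one from this repo):

```ruby
# Correct: pass a generated Google::Protobuf message object.
payload = MyNamespace::MyMessage.new(id: 123, name: 'widget')
MyProducer.produce([{ payload: payload, topic: 'my-topic' }])

# Incorrect for the proto backends: a plain hash.
# MyProducer.produce([{ payload: { id: 123 }, topic: 'my-topic' }])
```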
+ ### ActiveRecord Batch Consumers
+
+ - Only supports primary keys as identifiers by default
+ - Skips ActiveRecord callbacks (uses `activerecord-import` for bulk SQL)
+ - Set `compacted false` to process all messages instead of only the last per key
+ - Requires the `bulk_import_id_column` config when saving to multiple tables
+
+ ### Error Handling
+
+ - Set `config.consumers.reraise_errors = false` to swallow non-fatal errors
+ - Define fatal errors via `config.fatal_error` (global) or `fatal_error?(exception, payload, metadata)` (per-consumer)
+ - This prevents the consumer from getting stuck on bad messages
+
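A sketch of the per-consumer hook (the predicate signature is from the bullet above; the chosen error class is illustrative):

```ruby
Deimos.configure do |config|
  config.consumers.reraise_errors = false
end

class MyConsumer < Deimos::Consumer
  # Crash loudly on unrecoverable errors instead of skipping the message.
  def fatal_error?(exception, _payload, _metadata)
    exception.is_a?(ActiveRecord::StatementInvalid)
  end
end
```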
+ ## Integration Tests
+
+ Integration tests run against real databases (PostgreSQL, MySQL/Trilogy, SQLite). They are marked with `:integration` metadata and use different database configs from `DbConfigs::DB_OPTIONS`.
data/README.md CHANGED
@@ -94,8 +94,6 @@ Currently we have the following possible schema backends:
  * Protobuf Schema Registry (use Protobuf with the Confluent Schema Registry)
  * Mock (no actual encoding/decoding).

- Note that to use Protobuf, you must include the [proto_turf](https://github.com/flipp-oss/proto_turf) gem in your Gemfile.
-
  Other possible schemas could [JSONSchema](https://json-schema.org/), etc. Feel free to
  contribute!

data/deimos-ruby.gemspec CHANGED
@@ -17,10 +17,10 @@ Gem::Specification.new do |spec|
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

- spec.add_dependency('avro_turf', '>= 1.4', '< 2')
  spec.add_dependency('benchmark', '~> 0.5')
  spec.add_dependency('fig_tree', '~> 0.2.0')
  spec.add_dependency('karafka', '~> 2.0')
+ spec.add_dependency('schema_registry_client')
  spec.add_dependency('sigurd', '>= 0.1.0', '< 1.0')

  spec.add_development_dependency('activerecord-import')
@@ -33,7 +33,6 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency('guard-rubocop', '~> 1')
  spec.add_development_dependency('karafka-testing', '~> 2.0')
  spec.add_development_dependency('pg', '~> 1.1')
- spec.add_development_dependency('proto_turf')
  spec.add_development_dependency('rails', '~> 8.0')
  spec.add_development_dependency('rake', '~> 13')
  spec.add_development_dependency('rspec', '~> 3')
@@ -42,6 +41,7 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency('rspec-snapshot', '~> 2.0')
  spec.add_development_dependency('rubocop', '~> 1.0')
  spec.add_development_dependency('rubocop-rspec', '3.8')
+ spec.add_development_dependency('schema_registry_client')
  spec.add_development_dependency('sord', '>= 5.0')
  spec.add_development_dependency('sqlite3', '~> 2.7')
  spec.add_development_dependency('steep', '~> 1.0')
data/lib/deimos/backends/kafka.rb CHANGED
@@ -6,7 +6,7 @@ module Deimos
      class Kafka < Base
        # :nodoc:
        def self.execute(producer_class:, messages:)
-         Deimos.producer_for(producer_class.topic).produce_many_sync(messages)
+         Karafka.producer.produce_many_sync(messages)
        end
      end
    end
data/lib/deimos/backends/kafka_async.rb CHANGED
@@ -4,10 +4,9 @@ module Deimos
    module Backends
      # Backend which produces to Kafka via an async producer.
      class KafkaAsync < Base
-
        # :nodoc:
        def self.execute(producer_class:, messages:)
-         Deimos.producer_for(producer_class.topic).produce_many_async(messages)
+         Karafka.producer.produce_many_async(messages)
        end
      end
    end
data/lib/deimos/config/configuration.rb CHANGED
@@ -43,7 +43,7 @@ module Deimos
            'Please provide a directory.'
      end

-     Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.
+     Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].
        each { |f| require f }
    rescue LoadError
      raise 'Cannot load schema classes. Please regenerate classes with' \
data/lib/deimos/ext/producer_middleware.rb CHANGED
@@ -83,7 +83,7 @@ module Deimos
          nil
        else
          encoder.encode(message.payload,
-                        topic: "#{Deimos.config.producers.topic_prefix}#{config.name}-value")
+                        topic: "#{Deimos.config.producers.topic_prefix}#{config.name}")
        end
      end

@@ -96,7 +96,7 @@
        if config.deserializers[:key].respond_to?(:encode_key)
          config.deserializers[:key].encode_key(key)
        elsif key
-         config.deserializers[:payload].encode(key)
+         config.deserializers[:payload].encode(key, is_key: true)
        else
          key
        end
data/lib/deimos/kafka_source.rb CHANGED
@@ -118,7 +118,7 @@ end
        unique_columns = column_names.map(&:to_s) -
                         options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
        records = hashes_without_id.map do |hash|
-         self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
+         self.where(unique_columns.to_h { |c| [c, hash[c]] }).first
        end
        self.kafka_producers.each { |p| p.send_events(records) }
      else
data/lib/deimos/metrics/datadog.rb CHANGED
@@ -58,9 +58,7 @@ module Deimos
          end
        end
        Karafka::Setup::Config.setup if Karafka.producer.nil?
-       Deimos.waterdrop_producers.each do |producer|
-         producer.monitor.subscribe(waterdrop_listener)
-       end
+       Karafka.producer.monitor.subscribe(waterdrop_listener)
      end

      # :nodoc:
data/lib/deimos/schema_backends/avro_base.rb CHANGED
@@ -1,9 +1,7 @@
  # frozen_string_literal: true

  require_relative 'base'
- require 'avro'
- require 'avro_turf'
- require 'avro_turf/mutable_schema_store'
+ require 'schema_registry_client'
  require_relative 'avro_schema_coercer'

  module Deimos
@@ -15,7 +13,7 @@ module Deimos
      # @override
      def initialize(schema:, namespace:)
        super
-       @schema_store = AvroTurf::MutableSchemaStore.new(path: Deimos.config.schema.path)
+       @schema_store = SchemaRegistry::AvroSchemaStore.new(path: Deimos.config.schema.path)
      end

      def supports_key_schemas?
@@ -31,7 +29,7 @@ module Deimos
      def encode_key(key_id, key, topic: nil)
        begin
          @key_schema ||= @schema_store.find("#{@schema}_key")
-       rescue AvroTurf::SchemaNotFoundError
+       rescue SchemaRegistry::SchemaNotFoundError
          @key_schema = generate_key_schema(key_id)
        end
        field_name = _field_name_from_schema(@key_schema)
@@ -184,7 +182,7 @@ module Deimos
      # @return [Avro::Schema]
      def avro_schema(schema=nil)
        schema ||= @schema
-       @schema_store.find(schema, @namespace)
+       @schema_store.find("#{@namespace}.#{schema}")
      end

      # @param value_schema [Hash]
data/lib/deimos/schema_backends/avro_local.rb CHANGED
@@ -8,23 +8,24 @@ module Deimos
    class AvroLocal < AvroBase
      # @override
      def decode_payload(payload, schema:)
-       avro_turf.decode(payload, schema_name: schema, namespace: @namespace)
+       stream = StringIO.new(payload)
+       schema = @schema_store.find("#{@namespace}.#{schema}")
+       reader = Avro::IO::DatumReader.new(nil, schema)
+       Avro::DataFile::Reader.new(stream, reader).first
      end

      # @override
-     def encode_payload(payload, schema: nil, topic: nil)
-       avro_turf.encode(payload, schema_name: schema, namespace: @namespace)
-     end
-
-     private

-     # @return [AvroTurf]
-     def avro_turf
-       @avro_turf ||= AvroTurf.new(
-         schemas_path: Deimos.config.schema.path,
-         schema_store: @schema_store
-       )
+     def encode_payload(payload, schema: nil, subject: nil)
+       stream = StringIO.new
+       schema = schema_store.find("#{@namespace}.#{schema}")
+       writer = Avro::IO::DatumWriter.new(schema)

+       dw = Avro::DataFile::Writer.new(stream, writer, schema)
+       dw << payload.to_h
+       dw.close
+       stream.string
      end
+
    end
  end
end
data/lib/deimos/schema_backends/avro_schema_registry.rb CHANGED
@@ -1,7 +1,6 @@
  # frozen_string_literal: true

  require_relative 'avro_base'
- require 'avro_turf/messaging'

  module Deimos
    module SchemaBackends
@@ -9,27 +8,27 @@ module Deimos
      class AvroSchemaRegistry < AvroBase
        # @override
        def decode_payload(payload, schema:)
-         avro_turf_messaging.decode(payload.to_s, schema_name: schema)
+         schema_registry.decode(payload.to_s)
        end

        # @override
-       def encode_payload(payload, schema: nil, topic: nil)
-         avro_turf_messaging.encode(payload, schema_name: schema, subject: topic || schema)
+       def encode_payload(payload, schema: nil, subject: nil)
+         schema_registry.encode(payload, subject: subject || schema, schema_name: "#{@namespace}.#{schema}")
        end

        private

-       # @return [AvroTurf::Messaging]
-       def avro_turf_messaging
-         @avro_turf_messaging ||= AvroTurf::Messaging.new(
-           schema_store: @schema_store,
-           registry_url: Deimos.config.schema.registry_url,
-           schemas_path: Deimos.config.schema.path,
-           user: Deimos.config.schema.user,
-           password: Deimos.config.schema.password,
-           namespace: @namespace,
-           logger: Karafka.logger
-         )
+       # @return [SchemaRegistry::Client]
+       def schema_registry
+         @schema_registry ||= SchemaRegistry::Client.new(
+           registry_url: Deimos.config.schema.registry_url,
+           logger: Karafka.logger,
+           user: Deimos.config.schema.user,
+           password: Deimos.config.schema.password,
+           schema_type: SchemaRegistry::Schema::Avro
+         )
+         SchemaRegistry.avro_schema_path = Deimos.config.schema.path
+         @schema_registry
        end
      end
    end
data/lib/deimos/schema_backends/avro_validation.rb CHANGED
@@ -13,7 +13,7 @@ module Deimos
      end

      # @override
-     def encode_payload(payload, schema: nil, topic: nil)
+     def encode_payload(payload, schema: nil, subject: nil)
        payload.to_h.with_indifferent_access.to_json
      end
    end
data/lib/deimos/schema_backends/base.rb CHANGED
@@ -56,9 +56,10 @@ module Deimos
      # @param schema [String,Symbol]
      # @param topic [String]
      # @return [String]
-     def encode(payload, schema: nil, topic: nil)
+     def encode(payload, schema: nil, topic: nil, is_key: false)
        validate(payload, schema: schema || @schema)
-       encode_payload(payload, schema: schema || @schema, topic: topic)
+       subject = is_key ? "#{topic}-key" : "#{topic}-value"
+       encode_payload(payload, schema: schema || @schema, subject: subject)
      end

      # Decode a payload with a schema. Public method.
@@ -114,9 +115,9 @@
      # Encode a payload. To be defined by subclass.
      # @param payload [Hash]
      # @param schema [String,Symbol]
-     # @param topic [String]
+     # @param subject [String]
      # @return [String]
-     def encode_payload(_payload, schema:, topic: nil)
+     def encode_payload(_payload, schema:, subject: nil)
        raise MissingImplementationError
      end

data/lib/deimos/schema_backends/mock.rb CHANGED
@@ -15,7 +15,7 @@ module Deimos
      end

      # @override
-     def encode_payload(payload, schema:, topic: nil)
+     def encode_payload(payload, schema:, subject: nil)
        payload.is_a?(String) ? 'payload-encoded' : payload.map { |k, v| [k, "encoded-#{v}"] }.to_json
      end

data/lib/deimos/schema_backends/plain.rb CHANGED
@@ -15,7 +15,7 @@ module Deimos
      end

      # @override
-     def encode_payload(payload, schema:, topic: nil)
+     def encode_payload(payload, schema:, subject: nil)
        payload.to_s
      end

data/lib/deimos/schema_backends/proto_base.rb CHANGED
@@ -1,7 +1,7 @@
  # frozen_string_literal: true

  require_relative 'base'
- require 'proto_turf'
+ require 'schema_registry_client'

  module Deimos
    module SchemaBackends
@@ -24,26 +24,46 @@ module Deimos
        float: :float,
        message: :record
      }.freeze
+
      def proto_schema(schema=@schema)
        Google::Protobuf::DescriptorPool.generated_pool.lookup(schema)
      end

      # @override
      def encode_key(key_id, key, topic: nil)
-       if key.is_a?(Hash)
-         key_id ? key.with_indifferent_access[key_id].to_s : key.sort.to_h.to_json
+       if key.respond_to?(:to_h)
+         hash = if key_id
+                  key_id.to_s.split('.')[...-1].each do |k|
+                    key = key.with_indifferent_access[k]
+                  end
+                  key.to_h.with_indifferent_access.slice(key_id.split('.').last)
+                else
+                  key.to_h.sort.to_h
+                end
+         self.encode_proto_key(hash, topic: topic, field: key_id)
+       elsif key_id
+         hash = { key_id.to_s.split('.').last => key }
+         self.encode_proto_key(hash, topic: topic, field: key_id)
        else
          key.to_s
        end
      end

+     # @param hash [Hash]
+     # @return [String]
+     def encode_proto_key(hash, topic: nil)
+       hash.sort.to_h.to_json
+     end
+
+     def decode_proto_key(payload)
+       JSON.parse(payload)
+     rescue StandardError
+       payload
+     end
+
      # @override
      def decode_key(payload, key_id)
-       val = begin
-         JSON.parse(payload)
-       rescue StandardError
-         payload
-       end
+       val = decode_proto_key(payload)
        key_id ? val[key_id.to_s] : val
      end

@@ -85,8 +105,12 @@ module Deimos
        :mock
      end

-     def generate_key_schema(_field_name)
-       raise 'Protobuf cannot generate key schemas! Please use field_config :plain'
+     def generate_key_schema(field_name)
+       SchemaRegistry::Output::JsonSchema.output(proto_schema, field: field_name)
+     end
+
+     def supports_key_schemas?
+       true
      end

    end