deimos-ruby 2.3.0 → 2.4.0.pre.beta2

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (38) hide show
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +1 -1
  3. data/CHANGELOG.md +2 -0
  4. data/CLAUDE.md +270 -0
  5. data/README.md +10 -5
  6. data/deimos-ruby.gemspec +2 -2
  7. data/lib/deimos/config/configuration.rb +1 -1
  8. data/lib/deimos/ext/producer_middleware.rb +2 -2
  9. data/lib/deimos/kafka_source.rb +1 -1
  10. data/lib/deimos/message.rb +11 -2
  11. data/lib/deimos/schema_backends/avro_base.rb +4 -6
  12. data/lib/deimos/schema_backends/avro_local.rb +13 -12
  13. data/lib/deimos/schema_backends/avro_schema_registry.rb +14 -15
  14. data/lib/deimos/schema_backends/avro_validation.rb +1 -1
  15. data/lib/deimos/schema_backends/base.rb +5 -4
  16. data/lib/deimos/schema_backends/mock.rb +1 -1
  17. data/lib/deimos/schema_backends/plain.rb +1 -1
  18. data/lib/deimos/schema_backends/proto_base.rb +36 -11
  19. data/lib/deimos/schema_backends/proto_local.rb +5 -5
  20. data/lib/deimos/schema_backends/proto_schema_registry.rb +37 -7
  21. data/lib/deimos/test_helpers.rb +7 -0
  22. data/lib/deimos/transcoder.rb +1 -1
  23. data/lib/deimos/utils/outbox_producer.rb +1 -1
  24. data/lib/deimos/version.rb +1 -1
  25. data/lib/deimos.rb +9 -1
  26. data/lib/generators/deimos/active_record_generator.rb +1 -1
  27. data/lib/generators/deimos/schema_class_generator.rb +3 -3
  28. data/lib/generators/deimos/v2_generator.rb +2 -2
  29. data/spec/gen/sample/v1/sample_key_pb.rb +17 -0
  30. data/spec/generators/schema_class_generator_spec.rb +1 -1
  31. data/spec/protos/sample/v1/sample_key.proto +7 -0
  32. data/spec/schema_backends/avro_base_shared.rb +1 -1
  33. data/spec/schema_backends/avro_local_spec.rb +1 -8
  34. data/spec/schema_backends/avro_schema_registry_spec.rb +7 -7
  35. data/spec/schema_backends/base_spec.rb +2 -2
  36. data/spec/schema_backends/proto_schema_registry_spec.rb +222 -19
  37. data/spec/spec_helper.rb +1 -1
  38. metadata +32 -35
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA256:
3
- metadata.gz: 9811f05eec0d9b6777e2ed047a2e81b0a23bd770b52866bf8c8a79f84c8ec586
4
- data.tar.gz: c84cba2ecf750f4349ba86e47ddab2d01a4dd8e5f8fd9ab008b3fcfeb14b1425
3
+ metadata.gz: bc23c10fe732ab1eb2b6c0e2daf5bdb943a939510624b821456d0d383224f579
4
+ data.tar.gz: 84fdb7e495d76bb27b0592572f13249a7c8e28c3d123c68d15ac5d30ff883421
5
5
  SHA512:
6
- metadata.gz: b8319ada2f2b715e750e7de5caf6c6117d6154da6277988e12851f420c69a2f3adc0ab30594a23bca86ec8ffa57ef2cc3e5ab9758421e26fbe91cf682ad28e04
7
- data.tar.gz: 2174b8d776b547689422d6af381f423680d61df368be813e53f213eb8d084971b338a3382d8775e07ef324b9443fc1f5427fed834c54629d52c88728d016fd49
6
+ metadata.gz: 9b47d42b33e3c66ed3fd852570490e6a719d55c6709b706b38b58144a192b40a5edef6c6b5be516c89c3798069ea22f6be2134e1000f41ba3110f92b5face15d
7
+ data.tar.gz: 3fea215d836fbcc1e10df7971e7112194ada61a95dab4be1524cafc275d523b7ee186887298a0a89e162945aeaf2b83d5826031844e9b26dd25039d7797d6c75
data/.rubocop.yml CHANGED
@@ -4,7 +4,7 @@ plugins:
4
4
  - rubocop-rspec
5
5
 
6
6
  AllCops:
7
- TargetRubyVersion: 2.5
7
+ TargetRubyVersion: 3.0
8
8
  Exclude:
9
9
  - lib/deimos/monkey_patches/*.rb
10
10
  - spec/gen/**/*.rb
data/CHANGELOG.md CHANGED
@@ -7,6 +7,8 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
7
7
 
8
8
  ## UNRELEASED
9
9
 
10
+ - Major change: Switch from using `avro_turf` and `proto_turf` to `schema_registry_client`, which handles both Avro and Protobuf.
11
+
10
12
  # 2.3.0 - 2026-01-13
11
13
 
12
14
  - Feature: Support broker setting per topic in producer configs.
data/CLAUDE.md ADDED
@@ -0,0 +1,270 @@
1
+ # CLAUDE.md
2
+
3
+ This file provides guidance to Claude Code (claude.ai/code) when working with code in this repository.
4
+
5
+ ## Overview
6
+
7
+ Deimos is a Ruby framework for Kafka development that marries Kafka with schema definitions (Avro/Protobuf), ActiveRecord, and provides a comprehensive toolbox. Built on top of Karafka (which itself builds on RDKafka), it provides schema encoding/decoding, database integration, metrics, tracing, and test helpers.
8
+
9
+ ## Development Commands
10
+
11
+ ### Testing
12
+
13
+ ```bash
14
+ # Run all tests
15
+ bundle exec rspec
16
+
17
+ # Run a single test file
18
+ bundle exec rspec spec/path/to/file_spec.rb
19
+
20
+ # Run a specific test
21
+ bundle exec rspec spec/path/to/file_spec.rb:LINE_NUMBER
22
+ ```
23
+
24
+ ### Linting
25
+
26
+ ```bash
27
+ # Run Rubocop linter
28
+ bundle exec rubocop
29
+
30
+ # Auto-correct issues
31
+ bundle exec rubocop -a
32
+ ```
33
+
34
+ ### Schema Class Generation
35
+
36
+ When modifying schema-related code, you may need to regenerate test schema classes:
37
+
38
+ ```bash
39
+ # Regenerate test schema classes (used after schema changes)
40
+ bundle exec ./regenerate_test_schema_classes.rb
41
+
42
+ # Generate Protobuf classes
43
+ protoc -I spec/protos --ruby_out=spec/gen --ruby_opt=paths=source_relative spec/protos/**/*.proto
44
+ ```
45
+
46
+ ### Rake Tasks
47
+
48
+ ```bash
49
+ # Start Deimos consumer (in Rails environment)
50
+ rake deimos:start
51
+
52
+ # Start outbox backend producer
53
+ rake deimos:outbox
54
+
55
+ # Start database poller
56
+ rake deimos:db_poller
57
+
58
+ # Generate schema classes
59
+ rake deimos:generate_schema_classes
60
+ ```
61
+
62
+ ## Architecture
63
+
64
+ ### Layer Structure
65
+
66
+ Deimos is built in layers:
67
+ 1. **RDKafka** - Low-level Kafka client providing producer/consumer APIs
68
+ 2. **Karafka** - Threaded consumers wrapper with lifecycle management
69
+ 3. **Deimos** - Full framework with schema integration, database features, metrics, and utilities
70
+
71
+ ### Key Directories
72
+
73
+ - `lib/deimos/` - Core Deimos code
74
+ - `lib/deimos/backends/` - Producer backends (kafka, kafka_async, outbox, test)
75
+ - `lib/deimos/schema_backends/` - Schema handlers (Avro local/registry/validation, Protobuf, plain, mock)
76
+ - `lib/deimos/metrics/` - Metrics providers (DataDog, mock)
77
+ - `lib/deimos/tracing/` - Tracing providers (DataDog, mock)
78
+ - `lib/deimos/utils/` - Utilities (DB poller, outbox producer, etc.)
79
+ - `lib/deimos/config/` - Configuration classes
80
+ - `lib/deimos/ext/` - Extensions to Karafka routing
81
+ - `lib/generators/` - Rails generators for migrations
82
+ - `lib/tasks/` - Rake tasks
83
+
84
+ ### Core Concepts
85
+
86
+ #### Schema Backends
87
+
88
+ Schema backends encode/decode payloads. All backends must implement:
89
+ - `encode(payload, topic:)` - Encode payload to binary/string format
90
+ - `decode(payload)` - Decode binary/string to hash
91
+ - `validate(payload)` - Validate payload against schema
92
+ - `coerce(payload)` - Coerce payload to match schema types
93
+ - `schema_fields` - List fields in schema (used with ActiveRecord)
94
+ - Define a `mock` backend for testing
95
+
96
+ Available backends: `:avro_local`, `:avro_schema_registry`, `:avro_validation`, `:proto_schema_registry`, `:proto_local`, `:mock`, `:plain`
97
+
98
+ #### Producer Backends
99
+
100
+ Producer backends determine how messages are sent. All backends inherit from `Deimos::Backends::Base` and implement `execute(messages)`.
101
+
102
+ Available backends:
103
+ - `:kafka` - Send directly to Kafka (default)
104
+ - `:kafka_async` - Async variant of kafka backend
105
+ - `:outbox` - Transactional outbox pattern (save to DB, send async)
106
+ - `:test` - For testing (stores messages in memory)
107
+
108
+ #### Consumer Types
109
+
110
+ - `Deimos::Consumer` - Base consumer class
111
+ - Per-message: Override `consume_message(message)` and set `each_message true`
112
+ - Batch: Override `consume_batch` (receives `messages` collection)
113
+
114
+ - `Deimos::ActiveRecordConsumer` - Automatically saves/updates ActiveRecord models
115
+ - Per-message mode: Uses `fetch_record`, `assign_key`, `destroy_record`
116
+ - Batch mode: Uses `activerecord-import` for bulk operations
117
+ - Override `record_attributes(payload, key)` to customize attributes
118
+
119
+ #### Producers
120
+
121
+ - `Deimos::Producer` - Base producer class
122
+ - Call `self.produce([{payload: ..., key: ..., topic: ...}])`
123
+ - Override `partition_key(payload)` for custom partitioning
124
+
125
+ - `Deimos::ActiveRecordProducer` - Produces from ActiveRecord models
126
+ - Set `record_class Widget, refetch: false`
127
+ - Override `generate_payload(attributes, record)` to customize payload
128
+ - Override `watched_attributes(record)` to add non-schema fields
129
+
130
+ #### Key Configuration
131
+
132
+ Every producer must define `key_config`:
133
+ - `key_config none: true` - No keys (events)
134
+ - `key_config plain: true` - Unencoded keys (legacy)
135
+ - `key_config schema: 'MySchema-key'` - Use existing key schema
136
+ - `key_config field: 'my_id'` - Auto-generate key schema from value field
137
+
138
+ #### KafkaSource Mixin
139
+
140
+ The `Deimos::KafkaSource` mixin adds callbacks to ActiveRecord models to automatically send Kafka messages on save/destroy:
141
+
142
+ ```ruby
143
+ class Widget < ActiveRecord::Base
144
+ include Deimos::KafkaSource
145
+
146
+ def self.kafka_producers
147
+ [MyProducer]
148
+ end
149
+
150
+ def self.kafka_config
151
+ { update: true, delete: true, import: true, create: true }
152
+ end
153
+ end
154
+ ```
155
+
156
+ #### Outbox Pattern (Database Backend)
157
+
158
+ The outbox pattern provides transactional guarantees:
159
+ 1. Messages are validated, encoded, and saved to `kafka_messages` table
160
+ 2. Separate thread pool (via `Deimos::Utils::OutboxProducer`) reads from DB and sends to Kafka
161
+ 3. Uses `kafka_topic_info` table for topic-level locking
162
+ 4. Runs via `rake deimos:outbox` or `Deimos.start_outbox_backend!(thread_count: N)`
163
+
164
+ #### Database Poller
165
+
166
+ Polls database tables and produces messages:
167
+ 1. Configure with `Deimos.configure { db_poller { producer_class 'MyProducer' } }`
168
+ 2. Two modes:
169
+ - Time-based (default): Uses `updated_at` and `id` columns
170
+ - State-based: Updates state column after publishing
171
+ 3. Tracks progress in `poll_info` table
172
+ 4. Runs via `rake deimos:db_poller`
173
+
174
+ ### Configuration
175
+
176
+ Configuration uses the `fig_tree` gem. See `lib/deimos/config/configuration.rb` for the schema. Configure via:
177
+
178
+ ```ruby
179
+ Deimos.configure do |config|
180
+ config.producers.backend = :outbox
181
+ config.schema.backend = :avro_schema_registry
182
+ config.schema.registry_url = 'http://localhost:8081'
183
+ end
184
+ ```
185
+
186
+ ### Karafka Integration
187
+
188
+ Deimos extends Karafka routing with schema configuration:
189
+
190
+ ```ruby
191
+ Karafka::App.routes.draw do
192
+ topic 'my-topic' do
193
+ consumer MyConsumer
194
+ schema 'MySchema'
195
+ namespace 'com.my-namespace'
196
+ key_config field: 'test_id'
197
+ end
198
+ end
199
+ ```
200
+
201
+ The `lib/deimos/ext/` directory contains the routing extensions that add schema-aware capabilities to Karafka's routing DSL.
202
+
203
+ ### Shared Configuration
204
+
205
+ Both producers and consumers use the `SharedConfig` module to standardize schema settings, topic configuration, key handling, etc.
206
+
207
+ ### Testing
208
+
209
+ Test helpers in `lib/deimos/test_helpers.rb` provide:
210
+ - `test_consume_message(consumer, payload)` - Test message consumption
211
+ - `test_consume_batch(consumer, payloads)` - Test batch consumption
212
+ - `expect(topic).to have_sent(payload, key, partition_key, headers)` - Assert messages sent
213
+ - `Deimos::TestHelpers.sent_messages` - Inspect sent messages
214
+
215
+ Configure for tests:
216
+ ```ruby
217
+ Deimos.config.schema.backend = :avro_validation # Validates but doesn't encode
218
+ Deimos.config.producers.backend = :test # Stores in memory
219
+ ```
220
+
221
+ ### Instrumentation
222
+
223
+ Deimos sends events through Karafka's instrumentation monitor:
224
+ - `deimos.encode_message` - Schema encoding
225
+ - `deimos.outbox.produce` - Outbox messages processed
226
+ - `deimos.ar_consumer.consume_batch` - ActiveRecord batch consumption
227
+ - `deimos.batch_consumption.valid_records` - Valid records upserted
228
+ - `deimos.batch_consumption.invalid_records` - Invalid records rejected
229
+
230
+ Subscribe with: `Karafka.monitor.subscribe('event.name') { |event| ... }`
231
+
232
+ ## Database Integration
233
+
234
+ Deimos uses three primary database tables (created via generators):
235
+
236
+ - `kafka_messages` - Stores outbox messages (topic, key, payload, partition_key)
237
+ - `kafka_topic_info` - Locks topics for outbox processing
238
+ - `poll_info` - Tracks database poller progress
239
+
240
+ ## Special Considerations
241
+
242
+ ### Schema Changes
243
+
244
+ When adding/modifying schemas:
245
+ 1. Update schema files in configured schema path
246
+ 2. Run `rake deimos:generate_schema_classes` if using schema classes
247
+ 3. Regenerate test schemas with `./regenerate_test_schema_classes.rb`
248
+
249
+ ### Protobuf
250
+
251
+ - Requires `schema_registry_client` gem in Gemfile
252
+ - Protobuf payloads must be Protobuf message objects, not hashes
253
+ - Protobuf should NOT be used for keys (unstable binary encoding)
254
+
255
+ ### ActiveRecord Batch Consumers
256
+
257
+ - Only supports primary keys as identifiers by default
258
+ - Skips ActiveRecord callbacks (uses `activerecord-import` for bulk SQL)
259
+ - Set `compacted false` to process all messages vs. only last per key
260
+ - Requires `bulk_import_id_column` config when saving to multiple tables
261
+
262
+ ### Error Handling
263
+
264
+ - Set `config.consumers.reraise_errors = false` to swallow non-fatal errors
265
+ - Define fatal errors via `config.fatal_error` (global) or `fatal_error?(exception, payload, metadata)` (per-consumer)
266
+ - Prevents consumer from getting stuck on bad messages
267
+
268
+ ## Integration Tests
269
+
270
+ Integration tests run against real databases (PostgreSQL, MySQL/Trilogy, SQLite). They are marked with `:integration` metadata and use different database configs from `DbConfigs::DB_OPTIONS`.
data/README.md CHANGED
@@ -91,12 +91,11 @@ Currently we have the following possible schema backends:
91
91
  * Avro Schema Registry (use the Confluent Schema Registry)
92
92
  * Avro Validation (validate using an Avro schema but leave decoded - this is useful
93
93
  for unit testing and development)
94
+ * Protobuf Local (use pure Protobuf)
94
95
  * Protobuf Schema Registry (use Protobuf with the Confluent Schema Registry)
95
96
  * Mock (no actual encoding/decoding).
96
97
 
97
- Note that to use Protobuf, you must include the [proto_turf](https://github.com/flipp-oss/proto_turf) gem in your Gemfile.
98
-
99
- Other possible schemas could [JSONSchema](https://json-schema.org/), etc. Feel free to
98
+ Other possible schemas could include [JSONSchema](https://json-schema.org/), etc. Feel free to
100
99
  contribute!
101
100
 
102
101
  To create a new schema backend, please see the existing examples [here](lib/deimos/schema_backends).
@@ -283,7 +282,7 @@ MyProducer.publish({
283
282
  ```
284
283
 
285
284
  > [!IMPORTANT]
286
- > Protobuf should *not* be used as a key schema, since the binary encoding is [unstable](https://protobuf.dev/programming-guides/encoding/#implications) and may break partitioning. Deimos will automatically convert key fields to plain values and key hashes to JSON.
285
+ > Protobuf should *not* be used as a key schema, since the binary encoding is [unstable](https://protobuf.dev/programming-guides/encoding/#implications) and may break partitioning. Deimos will automatically convert keys to sorted JSON, and will use JSON Schema in the schema registry.
287
286
 
288
287
  ## Instrumentation
289
288
 
@@ -1012,7 +1011,13 @@ end
1012
1011
  # test can have the same settings every time it is run
1013
1012
  after(:each) do
1014
1013
  Deimos.config.reset!
1015
- Deimos.config.schema.backend = :avro_validation
1014
+ # set specific settings here
1015
+ Deimos.config.schema.path = 'my/schema/path'
1016
+ end
1017
+
1018
+ around(:each) do |ex|
1019
+ # replace e.g. avro_schema_registry with avro_validation, proto_schema_registry with proto_local
1020
+ Deimos::TestHelpers.with_mock_backends { ex.run }
1016
1021
  end
1017
1022
  ```
1018
1023
 
data/deimos-ruby.gemspec CHANGED
@@ -17,10 +17,10 @@ Gem::Specification.new do |spec|
17
17
  spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
18
18
  spec.require_paths = ['lib']
19
19
 
20
- spec.add_dependency('avro_turf', '>= 1.4', '< 2')
21
20
  spec.add_dependency('benchmark', '~> 0.5')
22
21
  spec.add_dependency('fig_tree', '~> 0.2.0')
23
22
  spec.add_dependency('karafka', '~> 2.0')
23
+ spec.add_dependency('schema_registry_client')
24
24
  spec.add_dependency('sigurd', '>= 0.1.0', '< 1.0')
25
25
 
26
26
  spec.add_development_dependency('activerecord-import')
@@ -33,7 +33,6 @@ Gem::Specification.new do |spec|
33
33
  spec.add_development_dependency('guard-rubocop', '~> 1')
34
34
  spec.add_development_dependency('karafka-testing', '~> 2.0')
35
35
  spec.add_development_dependency('pg', '~> 1.1')
36
- spec.add_development_dependency('proto_turf')
37
36
  spec.add_development_dependency('rails', '~> 8.0')
38
37
  spec.add_development_dependency('rake', '~> 13')
39
38
  spec.add_development_dependency('rspec', '~> 3')
@@ -42,6 +41,7 @@ Gem::Specification.new do |spec|
42
41
  spec.add_development_dependency('rspec-snapshot', '~> 2.0')
43
42
  spec.add_development_dependency('rubocop', '~> 1.0')
44
43
  spec.add_development_dependency('rubocop-rspec', '3.8')
44
+ spec.add_development_dependency('schema_registry_client')
45
45
  spec.add_development_dependency('sord', '>= 5.0')
46
46
  spec.add_development_dependency('sqlite3', '~> 2.7')
47
47
  spec.add_development_dependency('steep', '~> 1.0')
@@ -43,7 +43,7 @@ module Deimos
43
43
  'Please provide a directory.'
44
44
  end
45
45
 
46
- Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].sort.
46
+ Dir["./#{Deimos.config.schema.generated_class_path}/**/*.rb"].
47
47
  each { |f| require f }
48
48
  rescue LoadError
49
49
  raise 'Cannot load schema classes. Please regenerate classes with' \
@@ -83,7 +83,7 @@ module Deimos
83
83
  nil
84
84
  else
85
85
  encoder.encode(message.payload,
86
- topic: "#{Deimos.config.producers.topic_prefix}#{config.name}-value")
86
+ topic: "#{Deimos.config.producers.topic_prefix}#{config.name}")
87
87
  end
88
88
  end
89
89
 
@@ -96,7 +96,7 @@ module Deimos
96
96
  if config.deserializers[:key].respond_to?(:encode_key)
97
97
  config.deserializers[:key].encode_key(key)
98
98
  elsif key
99
- config.deserializers[:payload].encode(key)
99
+ config.deserializers[:payload].encode(key, is_key: true)
100
100
  else
101
101
  key
102
102
  end
@@ -118,7 +118,7 @@ end
118
118
  unique_columns = column_names.map(&:to_s) -
119
119
  options[:on_duplicate_key_update].map(&:to_s) - %w(id created_at)
120
120
  records = hashes_without_id.map do |hash|
121
- self.where(unique_columns.map { |c| [c, hash[c]] }.to_h).first
121
+ self.where(unique_columns.to_h { |c| [c, hash[c]] }).first
122
122
  end
123
123
  self.kafka_producers.each { |p| p.send_events(records) }
124
124
  else
@@ -38,10 +38,19 @@ module Deimos
38
38
  def add_fields(fields)
39
39
  return if @payload.to_h.with_indifferent_access.except(:payload_key, :partition_key).blank?
40
40
 
41
- if fields.include?('message_id')
41
+ if @payload.respond_to?(:message_id)
42
+ if fields.include?('message_id') && @payload.message_id.blank?
43
+ @payload.message_id = SecureRandom.uuid
44
+ end
45
+ elsif fields.include?('message_id')
42
46
  @payload['message_id'] ||= SecureRandom.uuid
43
47
  end
44
- if fields.include?('timestamp')
48
+
49
+ if @payload.respond_to?(:timestamp)
50
+ if fields.include?('timestamp') && @payload.timestamp.blank?
51
+ @payload.timestamp = Time.now.in_time_zone.to_s
52
+ end
53
+ elsif fields.include?('timestamp')
45
54
  @payload['timestamp'] ||= Time.now.in_time_zone.to_s
46
55
  end
47
56
  end
@@ -1,9 +1,7 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'base'
4
- require 'avro'
5
- require 'avro_turf'
6
- require 'avro_turf/mutable_schema_store'
4
+ require 'schema_registry_client'
7
5
  require_relative 'avro_schema_coercer'
8
6
 
9
7
  module Deimos
@@ -15,7 +13,7 @@ module Deimos
15
13
  # @override
16
14
  def initialize(schema:, namespace:)
17
15
  super
18
- @schema_store = AvroTurf::MutableSchemaStore.new(path: Deimos.config.schema.path)
16
+ @schema_store = SchemaRegistry::AvroSchemaStore.new(path: Deimos.config.schema.path)
19
17
  end
20
18
 
21
19
  def supports_key_schemas?
@@ -31,7 +29,7 @@ module Deimos
31
29
  def encode_key(key_id, key, topic: nil)
32
30
  begin
33
31
  @key_schema ||= @schema_store.find("#{@schema}_key")
34
- rescue AvroTurf::SchemaNotFoundError
32
+ rescue SchemaRegistry::SchemaNotFoundError
35
33
  @key_schema = generate_key_schema(key_id)
36
34
  end
37
35
  field_name = _field_name_from_schema(@key_schema)
@@ -184,7 +182,7 @@ module Deimos
184
182
  # @return [Avro::Schema]
185
183
  def avro_schema(schema=nil)
186
184
  schema ||= @schema
187
- @schema_store.find(schema, @namespace)
185
+ @schema_store.find("#{@namespace}.#{schema}")
188
186
  end
189
187
 
190
188
  # @param value_schema [Hash]
@@ -8,23 +8,24 @@ module Deimos
8
8
  class AvroLocal < AvroBase
9
9
  # @override
10
10
  def decode_payload(payload, schema:)
11
- avro_turf.decode(payload, schema_name: schema, namespace: @namespace)
11
+ stream = StringIO.new(payload)
12
+ schema = @schema_store.find("#{@namespace}.#{schema}")
13
+ reader = Avro::IO::DatumReader.new(nil, schema)
14
+ Avro::DataFile::Reader.new(stream, reader).first
12
15
  end
13
16
 
14
17
  # @override
15
- def encode_payload(payload, schema: nil, topic: nil)
16
- avro_turf.encode(payload, schema_name: schema, namespace: @namespace)
17
- end
18
-
19
- private
18
+ def encode_payload(payload, schema: nil, subject: nil)
19
+ stream = StringIO.new
20
+ schema = schema_store.find("#{@namespace}.#{schema}")
21
+ writer = Avro::IO::DatumWriter.new(schema)
20
22
 
21
- # @return [AvroTurf]
22
- def avro_turf
23
- @avro_turf ||= AvroTurf.new(
24
- schemas_path: Deimos.config.schema.path,
25
- schema_store: @schema_store
26
- )
23
+ dw = Avro::DataFile::Writer.new(stream, writer, schema)
24
+ dw << payload.to_h
25
+ dw.close
26
+ stream.string
27
27
  end
28
+
28
29
  end
29
30
  end
30
31
  end
@@ -1,7 +1,6 @@
1
1
  # frozen_string_literal: true
2
2
 
3
3
  require_relative 'avro_base'
4
- require 'avro_turf/messaging'
5
4
 
6
5
  module Deimos
7
6
  module SchemaBackends
@@ -9,27 +8,27 @@ module Deimos
9
8
  class AvroSchemaRegistry < AvroBase
10
9
  # @override
11
10
  def decode_payload(payload, schema:)
12
- avro_turf_messaging.decode(payload.to_s, schema_name: schema)
11
+ schema_registry.decode(payload.to_s)
13
12
  end
14
13
 
15
14
  # @override
16
- def encode_payload(payload, schema: nil, topic: nil)
17
- avro_turf_messaging.encode(payload, schema_name: schema, subject: topic || schema)
15
+ def encode_payload(payload, schema: nil, subject: nil)
16
+ schema_registry.encode(payload, subject: subject || schema, schema_name: "#{@namespace}.#{schema}")
18
17
  end
19
18
 
20
19
  private
21
20
 
22
- # @return [AvroTurf::Messaging]
23
- def avro_turf_messaging
24
- @avro_turf_messaging ||= AvroTurf::Messaging.new(
25
- schema_store: @schema_store,
26
- registry_url: Deimos.config.schema.registry_url,
27
- schemas_path: Deimos.config.schema.path,
28
- user: Deimos.config.schema.user,
29
- password: Deimos.config.schema.password,
30
- namespace: @namespace,
31
- logger: Karafka.logger
32
- )
21
+ # @return [SchemaRegistry::Client]
22
+ def schema_registry
23
+ @schema_registry ||= SchemaRegistry::Client.new(
24
+ registry_url: Deimos.config.schema.registry_url,
25
+ logger: Karafka.logger,
26
+ user: Deimos.config.schema.user,
27
+ password: Deimos.config.schema.password,
28
+ schema_type: SchemaRegistry::Schema::Avro
29
+ )
30
+ SchemaRegistry.avro_schema_path = Deimos.config.schema.path
31
+ @schema_registry
33
32
  end
34
33
  end
35
34
  end
@@ -13,7 +13,7 @@ module Deimos
13
13
  end
14
14
 
15
15
  # @override
16
- def encode_payload(payload, schema: nil, topic: nil)
16
+ def encode_payload(payload, schema: nil, subject: nil)
17
17
  payload.to_h.with_indifferent_access.to_json
18
18
  end
19
19
  end
@@ -56,9 +56,10 @@ module Deimos
56
56
  # @param schema [String,Symbol]
57
57
  # @param topic [String]
58
58
  # @return [String]
59
- def encode(payload, schema: nil, topic: nil)
59
+ def encode(payload, schema: nil, topic: nil, is_key: false)
60
60
  validate(payload, schema: schema || @schema)
61
- encode_payload(payload, schema: schema || @schema, topic: topic)
61
+ subject = is_key ? "#{topic}-key" : "#{topic}-value"
62
+ encode_payload(payload, schema: schema || @schema, subject: subject)
62
63
  end
63
64
 
64
65
  # Decode a payload with a schema. Public method.
@@ -114,9 +115,9 @@ module Deimos
114
115
  # Encode a payload. To be defined by subclass.
115
116
  # @param payload [Hash]
116
117
  # @param schema [String,Symbol]
117
- # @param topic [String]
118
+ # @param subject [String]
118
119
  # @return [String]
119
- def encode_payload(_payload, schema:, topic: nil)
120
+ def encode_payload(_payload, schema:, subject: nil)
120
121
  raise MissingImplementationError
121
122
  end
122
123
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, topic: nil)
18
+ def encode_payload(payload, schema:, subject: nil)
19
19
  payload.is_a?(String) ? 'payload-encoded' : payload.map { |k, v| [k, "encoded-#{v}"] }.to_json
20
20
  end
21
21
 
@@ -15,7 +15,7 @@ module Deimos
15
15
  end
16
16
 
17
17
  # @override
18
- def encode_payload(payload, schema:, topic: nil)
18
+ def encode_payload(payload, schema:, subject: nil)
19
19
  payload.to_s
20
20
  end
21
21