deimos-ruby 1.8.2.pre.beta1 → 1.8.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (40)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +48 -0
  3. data/Gemfile.lock +84 -79
  4. data/README.md +41 -3
  5. data/deimos-ruby.gemspec +2 -2
  6. data/docs/CONFIGURATION.md +1 -0
  7. data/docs/INTEGRATION_TESTS.md +52 -0
  8. data/docs/PULL_REQUEST_TEMPLATE.md +1 -0
  9. data/docs/UPGRADING.md +128 -0
  10. data/lib/deimos/active_record_consume/message_consumption.rb +9 -0
  11. data/lib/deimos/active_record_consumer.rb +8 -0
  12. data/lib/deimos/backends/db.rb +10 -1
  13. data/lib/deimos/config/configurable.rb +12 -0
  14. data/lib/deimos/config/configuration.rb +4 -0
  15. data/lib/deimos/config/phobos_config.rb +4 -1
  16. data/lib/deimos/kafka_source.rb +3 -2
  17. data/lib/deimos/kafka_topic_info.rb +2 -5
  18. data/lib/deimos/producer.rb +5 -3
  19. data/lib/deimos/schema_backends/avro_schema_coercer.rb +5 -3
  20. data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -1
  21. data/lib/deimos/utils/db_poller.rb +2 -1
  22. data/lib/deimos/utils/db_producer.rb +5 -1
  23. data/lib/deimos/utils/inline_consumer.rb +9 -3
  24. data/lib/deimos/utils/schema_controller_mixin.rb +5 -1
  25. data/lib/deimos/version.rb +1 -1
  26. data/spec/active_record_consumer_spec.rb +13 -0
  27. data/spec/backends/db_spec.rb +6 -0
  28. data/spec/config/configuration_spec.rb +15 -0
  29. data/spec/generators/active_record_generator_spec.rb +1 -1
  30. data/spec/kafka_source_spec.rb +83 -0
  31. data/spec/kafka_topic_info_spec.rb +6 -6
  32. data/spec/producer_spec.rb +49 -0
  33. data/spec/schema_backends/avro_base_shared.rb +26 -1
  34. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  35. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  36. data/spec/spec_helper.rb +1 -1
  37. data/spec/utils/db_producer_spec.rb +27 -0
  38. data/spec/utils/inline_consumer_spec.rb +31 -0
  39. data/spec/utils/schema_controller_mixin_spec.rb +16 -0
  40. metadata +21 -13
data/spec/backends/db_spec.rb CHANGED
@@ -43,6 +43,12 @@ each_db_config(Deimos::Backends::Db) do
       described_class.publish(producer_class: MyNoKeyProducer,
                               messages: [messages.first])
       expect(Deimos::KafkaMessage.count).to eq(4)
+    end
 
+    it 'should add messages with Hash keys with JSON encoding' do
+      described_class.publish(producer_class: MyProducer,
+                              messages: [build_message({ foo: 0 }, 'my-topic', { 'test_id' => 0 })])
+      expect(Deimos::KafkaMessage.count).to eq(1)
+      expect(Deimos::KafkaMessage.last.partition_key).to eq(%(---\ntest_id: 0\n))
     end
   end
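Note on the db backend example above: the expected partition_key string is simply Ruby's YAML dump of the hash key, which can be verified in isolation. A minimal sketch in plain Ruby (not Deimos code):

    require 'yaml'

    # The partition_key expectation in the new example matches Hash#to_yaml output.
    { 'test_id' => 0 }.to_yaml
    # => "---\ntest_id: 0\n"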
data/spec/config/configuration_spec.rb CHANGED
@@ -6,6 +6,14 @@ class MyConfigConsumer < Deimos::Consumer
   def consume
   end
 end
+
+# Mock consumer 2
+class MyConfigConsumer2 < Deimos::Consumer
+  # :no-doc:
+  def consume
+  end
+end
+
 describe Deimos, 'configuration' do
   it 'should configure with deprecated fields' do
     logger = Logger.new(nil)
@@ -171,6 +179,13 @@ describe Deimos, 'configuration' do
         offset_retention_time 13
         heartbeat_interval 13
       end
+      consumer do
+        disabled true
+        class_name 'MyConfigConsumer2'
+        schema 'blah2'
+        topic 'blah2'
+        group_id 'myconsumerid2'
+      end
     end
 
     expect(described_class.config.phobos_config).
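The second configuration hunk exercises the new `disabled` flag on a consumer block. A minimal sketch of the same setting in an application's Deimos configuration, reusing the names from the spec (the effect on the generated Phobos config is what the surrounding expectation checks; treat the code comment as an assumption):

    Deimos.configure do
      consumer do
        class_name 'MyConfigConsumer2'
        schema 'blah2'
        topic 'blah2'
        group_id 'myconsumerid2'
        disabled true # presumably kept out of the generated Phobos listener config
      end
    end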
data/spec/generators/active_record_generator_spec.rb CHANGED
@@ -16,7 +16,7 @@ RSpec.describe Deimos::Generators::ActiveRecordGenerator do
     files = Dir['db/migrate/*.rb']
     expect(files.length).to eq(1)
     results = <<~MIGRATION
-      class CreateGeneratedTable < ActiveRecord::Migration[6.0]
+      class CreateGeneratedTable < ActiveRecord::Migration[6.1]
         def up
           if table_exists?(:generated_table)
             warn "generated_table already exists, exiting"
data/spec/kafka_source_spec.rb CHANGED
@@ -225,5 +225,88 @@ module KafkaSourceSpec
       expect(Deimos::KafkaMessage.count).to eq(0)
     end
   end
+
+  context 'with import hooks disabled' do
+    before(:each) do
+      # Dummy class we can include the mixin in. Has a backing table created
+      # earlier and has the import hook disabled
+      class WidgetNoImportHook < ActiveRecord::Base
+        include Deimos::KafkaSource
+        self.table_name = 'widgets'
+
+        # :nodoc:
+        def self.kafka_config
+          {
+            update: true,
+            delete: true,
+            import: false,
+            create: true
+          }
+        end
+
+        # :nodoc:
+        def self.kafka_producers
+          [WidgetProducer]
+        end
+      end
+      WidgetNoImportHook.reset_column_information
+    end
+
+    it 'should not fail when bulk-importing with existing records' do
+      widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+      widget2 = WidgetNoImportHook.create(widget_id: 2, name: 'Widget 2')
+      widget1.name = 'New Widget No Import Hook 1'
+      widget2.name = 'New Widget No Import Hook 2'
+
+      expect {
+        WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id name))
+      }.not_to raise_error
+
+      expect('my-topic').not_to have_sent({
+        widget_id: 1,
+        name: 'New Widget No Import Hook 1',
+        id: widget1.id,
+        created_at: anything,
+        updated_at: anything
+      }, widget1.id)
+      expect('my-topic').not_to have_sent({
+        widget_id: 2,
+        name: 'New Widget No Import Hook 2',
+        id: widget2.id,
+        created_at: anything,
+        updated_at: anything
+      }, widget2.id)
+    end
+
+    it 'should not fail when mixing existing and new records' do
+      widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+      expect('my-topic').to have_sent({
+        widget_id: 1,
+        name: 'Widget 1',
+        id: widget1.id,
+        created_at: anything,
+        updated_at: anything
+      }, widget1.id)
+
+      widget2 = WidgetNoImportHook.new(widget_id: 2, name: 'Widget 2')
+      widget1.name = 'New Widget 1'
+      WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id))
+      widgets = WidgetNoImportHook.all
+      expect('my-topic').not_to have_sent({
+        widget_id: 1,
+        name: 'New Widget 1',
+        id: widgets[0].id,
+        created_at: anything,
+        updated_at: anything
+      }, widgets[0].id)
+      expect('my-topic').not_to have_sent({
+        widget_id: 2,
+        name: 'Widget 2',
+        id: widgets[1].id,
+        created_at: anything,
+        updated_at: anything
+      }, widgets[1].id)
+    end
+  end
 end
 end
data/spec/kafka_topic_info_spec.rb CHANGED
@@ -51,13 +51,13 @@ each_db_config(Deimos::KafkaTopicInfo) do
     expect(record.locked_at).to eq(nil)
     expect(record.error).to eq(false)
     expect(record.retries).to eq(0)
-    expect(record.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+    expect(record.last_processed_at.in_time_zone.to_s).to eq(Time.zone.now.to_s)
     record = Deimos::KafkaTopicInfo.last
     expect(record.locked_by).not_to eq(nil)
     expect(record.locked_at).not_to eq(nil)
     expect(record.error).not_to eq(false)
     expect(record.retries).not_to eq(0)
-    expect(record.last_processed_at.to_s).to eq(20.seconds.ago.to_s)
+    expect(record.last_processed_at.in_time_zone.to_s).to eq(20.seconds.ago.to_s)
   end
 end
 
@@ -70,11 +70,11 @@ each_db_config(Deimos::KafkaTopicInfo) do
                                      locked_by: 'me', locked_at: 1.minute.ago)
 
     expect(Deimos::KafkaTopicInfo.count).to eq(3)
-    Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.to_s).to eq(old_time) }
+    Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.in_time_zone.to_s).to eq(old_time) }
     Deimos::KafkaTopicInfo.ping_empty_topics(%w(topic1))
-    expect(t1.reload.last_processed_at.to_s).to eq(old_time) # was passed as an exception
-    expect(t2.reload.last_processed_at.to_s).to eq(Time.zone.now.to_s)
-    expect(t3.reload.last_processed_at.to_s).to eq(old_time) # is locked
+    expect(t1.reload.last_processed_at.in_time_zone.to_s).to eq(old_time) # was passed as an exception
+    expect(t2.reload.last_processed_at.in_time_zone.to_s).to eq(Time.zone.now.to_s)
+    expect(t3.reload.last_processed_at.in_time_zone.to_s).to eq(old_time) # is locked
   end
 end
 
data/spec/producer_spec.rb CHANGED
@@ -64,6 +64,14 @@ module ProducerTest
     end
     stub_const('MyErrorProducer', producer_class)
 
+    producer_class = Class.new(Deimos::Producer) do
+      schema 'MySchema'
+      namespace 'com.my-namespace'
+      topic nil
+      key_config none: true
+    end
+    stub_const('MyNoTopicProducer', producer_class)
+
   end
 
   it 'should fail on invalid message with error handler' do
@@ -228,6 +236,7 @@ module ProducerTest
     end
 
     it 'should encode the key' do
+      Deimos.configure { |c| c.producers.topic_prefix = nil }
       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'my-topic-key')
       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'my-topic-key')
       expect(MyProducer.encoder).to receive(:encode).with({
@@ -245,6 +254,21 @@ module ProducerTest
       )
     end
 
+    it 'should encode the key with topic prefix' do
+      Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }
+      expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'prefix.my-topic-key')
+      expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'prefix.my-topic-key')
+      expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'foo',
+                                                            'some_int' => 123 },
+                                                          { topic: 'prefix.my-topic-value' })
+      expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'bar',
+                                                            'some_int' => 124 },
+                                                          { topic: 'prefix.my-topic-value' })
+
+      MyProducer.publish_list([{ 'test_id' => 'foo', 'some_int' => 123 },
+                               { 'test_id' => 'bar', 'some_int' => 124 }])
+    end
+
     it 'should not encode with plaintext key' do
       expect(MyNonEncodedProducer.key_encoder).not_to receive(:encode_key)
 
@@ -296,6 +320,31 @@ module ProducerTest
       )
     end
 
+    it 'should raise error if blank topic is passed in explicitly' do
+      expect {
+        MyProducer.publish_list(
+          [{ 'test_id' => 'foo',
+             'some_int' => 123 },
+           { 'test_id' => 'bar',
+             'some_int' => 124 }],
+          topic: ''
+        )
+      }.to raise_error(RuntimeError,
+                       'Topic not specified. Please specify the topic.')
+    end
+
+    it 'should raise error if the producer has not been initialized with a topic' do
+      expect {
+        MyNoTopicProducer.publish_list(
+          [{ 'test_id' => 'foo',
+             'some_int' => 123 },
+           { 'test_id' => 'bar',
+             'some_int' => 124 }]
+        )
+      }.to raise_error(RuntimeError,
+                       'Topic not specified. Please specify the topic.')
+    end
+
     it 'should error with nothing set' do
       expect {
         MyErrorProducer.publish_list(
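A short usage sketch for the topic-prefix behaviour the new producer examples cover, reusing the MyProducer stub from this spec file; with `producers.topic_prefix` set, both the payload and the Avro key are encoded against the prefixed topic:

    Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }

    # MyProducer is declared with `topic 'my-topic'`; with the prefix configured, the
    # spec above expects encoding against 'prefix.my-topic-value' and 'prefix.my-topic-key'.
    MyProducer.publish_list([{ 'test_id' => 'foo', 'some_int' => 123 }])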
data/spec/schema_backends/avro_base_shared.rb CHANGED
@@ -42,6 +42,20 @@ RSpec.shared_examples_for('an Avro backend') do
         {
           'name' => 'union-int-field',
           'type' => %w(null int)
+        },
+        {
+          'name' => 'timestamp-millis-field',
+          'type' => {
+            'type' => 'long',
+            'logicalType' => 'timestamp-millis'
+          }
+        },
+        {
+          'name' => 'timestamp-micros-field',
+          'type' => {
+            'type' => 'long',
+            'logicalType' => 'timestamp-micros'
+          }
         }
       ]
     }
@@ -95,7 +109,9 @@ RSpec.shared_examples_for('an Avro backend') do
       'string-field' => 'hi mom',
       'boolean-field' => true,
       'union-field' => nil,
-      'union-int-field' => nil
+      'union-int-field' => nil,
+      'timestamp-millis-field' => Time.utc(2020, 11, 12, 13, 14, 15, 909_090),
+      'timestamp-micros-field' => Time.utc(2020, 11, 12, 13, 14, 15, 909_090)
     }
   end
 
@@ -169,6 +185,15 @@ RSpec.shared_examples_for('an Avro backend') do
       expect(result['union-field']).to eq('itsme')
     end
 
+    it 'should not convert timestamp-millis' do
+      result = backend.coerce(payload)
+      expect(result['timestamp-millis-field']).to eq(Time.utc(2020, 11, 12, 13, 14, 15, 909_090))
+    end
+
+    it 'should not convert timestamp-micros' do
+      result = backend.coerce(payload)
+      expect(result['timestamp-micros-field']).to eq(Time.utc(2020, 11, 12, 13, 14, 15, 909_090))
+    end
   end
 
 end
data/spec/schemas/com/my-namespace/request/CreateTopic.avsc ADDED
@@ -0,0 +1,11 @@
+{
+  "namespace": "com.my-namespace.request",
+  "name": "CreateTopic",
+  "type": "record",
+  "fields": [
+    {
+      "name": "request_id",
+      "type": "string"
+    }
+  ]
+}
data/spec/schemas/com/my-namespace/response/CreateTopic.avsc ADDED
@@ -0,0 +1,11 @@
+{
+  "namespace": "com.my-namespace.response",
+  "name": "CreateTopic",
+  "type": "record",
+  "fields": [
+    {
+      "name": "response_id",
+      "type": "string"
+    }
+  ]
+}
data/spec/spec_helper.rb CHANGED
@@ -87,7 +87,7 @@ module DbConfigs
       port: 3306,
       username: 'root',
       database: 'test',
-      host: ENV['MYSQL_HOST'] || 'localhost'
+      host: ENV['MYSQL_HOST'] || '127.0.0.1'
     },
     {
       adapter: 'sqlite3',
data/spec/utils/db_producer_spec.rb CHANGED
@@ -96,7 +96,32 @@ each_db_config(Deimos::Utils::DbProducer) do
       expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).once
       expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).once
       expect(phobos_producer).to have_received(:publish_list).with(['A']).once
+    end
+
+    it 'should not resend batches of sent messages' do
+      allow(phobos_producer).to receive(:publish_list) do |group|
+        raise Kafka::BufferOverflow if group.any?('A') && group.size >= 1000
+        raise Kafka::BufferOverflow if group.any?('BIG') && group.size >= 10
+      end
+      allow(Deimos.config.metrics).to receive(:increment)
+      batch = ['A'] * 450 + ['BIG'] * 550
+      producer.produce_messages(batch)
+
+      expect(phobos_producer).to have_received(:publish_list).with(batch)
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).exactly(4).times
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 50 + ['BIG'] * 50)
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(5).times
+      expect(phobos_producer).to have_received(:publish_list).with(['BIG'] * 1).exactly(550).times
 
+      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                      tags: %w(status:success topic:),
+                                                                      by: 100).exactly(4).times
+      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                      tags: %w(status:success topic:),
+                                                                      by: 10).exactly(5).times
+      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                      tags: %w(status:success topic:),
+                                                                      by: 1).exactly(550).times
     end
 
     describe '#compact_messages' do
@@ -289,6 +314,8 @@ each_db_config(Deimos::Utils::DbProducer) do
         message: "mess#{i}",
         partition_key: "key#{i}"
       )
+    end
+    (5..8).each do |i|
       Deimos::KafkaMessage.create!(
         id: i,
         topic: 'my-topic2',
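The 'should not resend batches of sent messages' example above drives a shrink-and-retry pattern: on Kafka::BufferOverflow the producer retries with progressively smaller batch sizes, but only for messages that have not yet been sent. A hypothetical sketch of that shape (not the actual Deimos::Utils::DbProducer code; assumes ruby-kafka is loaded and `publisher` responds to #publish_list):

    BATCH_SIZES = [1000, 100, 10, 1].freeze

    def produce_with_shrinking_batches(publisher, messages, size_index = 0)
      messages.each_slice(BATCH_SIZES[size_index]).with_index do |slice, i|
        publisher.publish_list(slice)
      rescue Kafka::BufferOverflow
        # Slices already published are not resent; only the unsent remainder is
        # retried at the next-smaller batch size.
        return produce_with_shrinking_batches(publisher,
                                              messages.drop(i * BATCH_SIZES[size_index]),
                                              size_index + 1)
      end
    end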
data/spec/utils/inline_consumer_spec.rb ADDED
@@ -0,0 +1,31 @@
+# frozen_string_literal: true
+
+describe Deimos::Utils::SeekListener do
+
+  describe '#start_listener' do
+    let(:consumer) { instance_double(Kafka::Consumer) }
+    let(:handler) { class_double(Deimos::Utils::MessageBankHandler) }
+
+    before(:each) do
+      allow(handler).to receive(:start)
+      allow(consumer).to receive(:subscribe)
+      allow_any_instance_of(Phobos::Listener).to receive(:create_kafka_consumer).and_return(consumer)
+      allow_any_instance_of(Kafka::Client).to receive(:last_offset_for).and_return(100)
+      stub_const('Deimos::Utils::SeekListener::MAX_SEEK_RETRIES', 2)
+    end
+
+    it 'should seek offset' do
+      allow(consumer).to receive(:seek)
+      expect(consumer).to receive(:seek).once
+      seek_listener = described_class.new({ handler: handler, group_id: 999, topic: 'test_topic' })
+      seek_listener.start_listener
+    end
+
+    it 'should retry on errors when seeking offset' do
+      allow(consumer).to receive(:seek).and_raise(StandardError)
+      expect(consumer).to receive(:seek).twice
+      seek_listener = described_class.new({ handler: handler, group_id: 999, topic: 'test_topic' })
+      seek_listener.start_listener
+    end
+  end
+end
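The new SeekListener spec stubs MAX_SEEK_RETRIES to 2 and expects `seek` to be attempted twice when it raises. A generic sketch of that retry shape (hypothetical helper, not the actual Deimos implementation; `consumer` is assumed to be a ruby-kafka consumer responding to #seek):

    def seek_with_retries(consumer, topic, partition, offset, max_retries)
      attempts = 0
      begin
        attempts += 1
        consumer.seek(topic, partition, offset)
      rescue StandardError
        retry if attempts < max_retries
        # after max_retries failed attempts, give up and let the listener start normally
      end
    end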
data/spec/utils/schema_controller_mixin_spec.rb CHANGED
@@ -17,6 +17,7 @@ RSpec.describe Deimos::Utils::SchemaControllerMixin, type: :controller do
     request_namespace 'com.my-namespace.request'
     response_namespace 'com.my-namespace.response'
     schemas :index, :show
+    schemas create: 'CreateTopic'
     schemas :update, request: 'UpdateRequest', response: 'UpdateResponse'
 
     # :nodoc:
@@ -29,6 +30,11 @@ RSpec.describe Deimos::Utils::SchemaControllerMixin, type: :controller do
       render_schema({ 'response_id' => payload[:request_id] + ' dad' })
     end
 
+    # :nodoc:
+    def create
+      render_schema({ 'response_id' => payload[:request_id] + ' bro' })
+    end
+
     # :nodoc:
     def update
       render_schema({ 'update_response_id' => payload[:update_request_id] + ' sis' })
@@ -65,4 +71,14 @@ RSpec.describe Deimos::Utils::SchemaControllerMixin, type: :controller do
     expect(response_backend.decode(response.body)).to eq({ 'update_response_id' => 'hi sis' })
   end
 
+  it 'should render the correct response for create' do
+    request_backend = Deimos.schema_backend(schema: 'CreateTopic',
+                                            namespace: 'com.my-namespace.request')
+    response_backend = Deimos.schema_backend(schema: 'CreateTopic',
+                                             namespace: 'com.my-namespace.response')
+    request.content_type = 'avro/binary'
+    post :create, params: { id: 1 }, body: request_backend.encode({ 'request_id' => 'hi' })
+    expect(response_backend.decode(response.body)).to eq({ 'response_id' => 'hi bro' })
+  end
+
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: deimos-ruby
 version: !ruby/object:Gem::Version
-  version: 1.8.2.pre.beta1
+  version: 1.8.5
 platform: ruby
 authors:
 - Daniel Orner
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2020-09-09 00:00:00.000000000 Z
+date: 2021-01-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: avro_turf
@@ -280,30 +280,30 @@ dependencies:
   name: rubocop
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - '='
       - !ruby/object:Gem::Version
-        version: '0.72'
+        version: 0.88.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - '='
       - !ruby/object:Gem::Version
-        version: '0.72'
+        version: 0.88.0
 - !ruby/object:Gem::Dependency
   name: rubocop-rspec
   requirement: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - '='
       - !ruby/object:Gem::Version
-        version: '1.27'
+        version: 1.42.0
   type: :development
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
-    - - "~>"
+    - - '='
      - !ruby/object:Gem::Version
-        version: '1.27'
+        version: 1.42.0
 - !ruby/object:Gem::Dependency
   name: sqlite3
   requirement: !ruby/object:Gem::Requirement
@@ -348,7 +348,9 @@ files:
 - docs/ARCHITECTURE.md
 - docs/CONFIGURATION.md
 - docs/DATABASE_BACKEND.md
+- docs/INTEGRATION_TESTS.md
 - docs/PULL_REQUEST_TEMPLATE.md
+- docs/UPGRADING.md
 - lib/deimos.rb
 - lib/deimos/active_record_consume/batch_consumption.rb
 - lib/deimos/active_record_consume/batch_slicer.rb
@@ -453,14 +455,17 @@ files:
 - spec/schemas/com/my-namespace/Wibble.avsc
 - spec/schemas/com/my-namespace/Widget.avsc
 - spec/schemas/com/my-namespace/WidgetTheSecond.avsc
+- spec/schemas/com/my-namespace/request/CreateTopic.avsc
 - spec/schemas/com/my-namespace/request/Index.avsc
 - spec/schemas/com/my-namespace/request/UpdateRequest.avsc
+- spec/schemas/com/my-namespace/response/CreateTopic.avsc
 - spec/schemas/com/my-namespace/response/Index.avsc
 - spec/schemas/com/my-namespace/response/UpdateResponse.avsc
 - spec/spec_helper.rb
 - spec/utils/db_poller_spec.rb
 - spec/utils/db_producer_spec.rb
 - spec/utils/deadlock_retry_spec.rb
+- spec/utils/inline_consumer_spec.rb
 - spec/utils/lag_reporter_spec.rb
 - spec/utils/platform_schema_validation_spec.rb
 - spec/utils/schema_controller_mixin_spec.rb
@@ -483,11 +488,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
       version: '0'
 required_rubygems_version: !ruby/object:Gem::Requirement
   requirements:
-  - - ">"
+  - - ">="
     - !ruby/object:Gem::Version
-      version: 1.3.1
+      version: '0'
 requirements: []
-rubygems_version: 3.1.3
+rubygems_version: 3.0.9
 signing_key:
 specification_version: 4
 summary: Kafka libraries for Ruby.
@@ -534,14 +539,17 @@ test_files:
 - spec/schemas/com/my-namespace/Wibble.avsc
 - spec/schemas/com/my-namespace/Widget.avsc
 - spec/schemas/com/my-namespace/WidgetTheSecond.avsc
+- spec/schemas/com/my-namespace/request/CreateTopic.avsc
 - spec/schemas/com/my-namespace/request/Index.avsc
 - spec/schemas/com/my-namespace/request/UpdateRequest.avsc
+- spec/schemas/com/my-namespace/response/CreateTopic.avsc
 - spec/schemas/com/my-namespace/response/Index.avsc
 - spec/schemas/com/my-namespace/response/UpdateResponse.avsc
 - spec/spec_helper.rb
 - spec/utils/db_poller_spec.rb
 - spec/utils/db_producer_spec.rb
 - spec/utils/deadlock_retry_spec.rb
+- spec/utils/inline_consumer_spec.rb
 - spec/utils/lag_reporter_spec.rb
 - spec/utils/platform_schema_validation_spec.rb
 - spec/utils/schema_controller_mixin_spec.rb