deimos-ruby 1.8.2 → 1.8.3

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 237cb6cd22b6a057003fedae1caeec81c999a3d9b90d837bb8b0a4add43ca4e2
- data.tar.gz: fab51933cdcd5c5fd10b8c92bbc67854fe56f23cdbec41660579f18b9075e09c
+ metadata.gz: 2badd671866a8fbde743e03acbc2da623279ac08abd5ed983ae9a15a0bef415d
+ data.tar.gz: e4b3cd2e80b13fc00ff54ebc85b59ab2e1daad052ab170dafd9a75f57eb80ce3
  SHA512:
- metadata.gz: 193b9b593b4f92edecc6c239eb6106fbe40e0acd1453694508f3fb02525166c39e4f1eba71230362493cd1647ab19e98a63ecf55f5371b844e9d0c38ffa5b2ea
- data.tar.gz: e7d42ec7ec2bc864735c9230a925685935678d2dd0f3918426926dc9f358d5eb810dd5b324bcacaf9201c78bdfbbfa58037a4cac4cf4ca66257737440b3d2c97
+ metadata.gz: 98b41fa7354e624a4538ac5006a091e1a144a2caaa57e2ef77c214e350cd8dcbf2b126c01a3663365d28bbed19baa7525e3c52390cc60f6378537ad8fe6bee12
+ data.tar.gz: 208ba417a35c7042155383d5c0eb9d9e35be95c0459cf2e58cdc24168c8fdf30af16434fcc66c612d197aab38279d790907f146e19b6a36157cde008c5aa60a3
@@ -7,6 +7,15 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0
 
  ## UNRELEASED
 
+ ## 1.8.3 - 2020-11-18
+
+ ### Fixes :wrench:
+ - Do not resend already sent messages when splitting up batches
+   (fixes [#24](https://github.com/flipp-oss/deimos/issues/24))
+ - KafkaSource crashing on bulk-imports if import hooks are disabled
+   (fixes [#73](https://github.com/flipp-oss/deimos/issues/73))
+ - #96 Use string-safe encoding for partition keys
+
  ## 1.8.2 - 2020-09-25
 
  ### Features :star:
data/README.md CHANGED
@@ -42,6 +42,7 @@ Please see the following for further information not covered by this readme:
  * [Configuration Reference](docs/CONFIGURATION.md)
  * [Database Backend Feature](docs/DATABASE_BACKEND.md)
  * [Upgrading Deimos](docs/UPGRADING.md)
+ * [Contributing to Integration Tests](docs/INTEGRATION_TESTS.md)
 
  # Installation
 
@@ -0,0 +1,52 @@
+ # Running Integration Tests
+
+ This repo includes integration tests in the [spec/utils](spec/utils) directory.
+ These tests cover Deimos features that require a database integration, such as:
+ * [Database Poller](README.md#database-poller)
+ * [Database Backend](docs/DATABASE_BACKEND.md)
+ * [Deadlock Retrying](lib/deimos/utils/deadlock_retry.rb)
+
+ You will need to set up the following databases to develop and run the tests in these suites:
+ * [SQLite](#sqlite)
+ * [MySQL](#mysql)
+ * [PostgreSQL](#postgresql)
+
+ ## SQLite
+ This database is covered by the `sqlite3` gem; no separate server setup is required.
+
+ ## MySQL
+ ### Setting up a local MySQL server (Mac)
+ ```bash
+ # Install MySQL (optionally, choose a version you are comfortable with)
+ brew install mysql
+ # Start MySQL now and automatically after rebooting your machine
+ brew services start mysql
+
+ # Clean up once you are done with MySQL
+ brew services stop mysql
+ ```
+
+ ## PostgreSQL
+ ### Setting up a local PostgreSQL server (Mac)
+ ```bash
+ # Install postgres if it's not already installed
+ brew install postgres
+
+ # Initialize and start up the postgres db
+ brew services start postgres
+ initdb /usr/local/var/postgres
+ # Create the default superuser
+ # When prompted, use the password "root"
+ createuser -s --password postgres
+
+ # Clean up once done with Postgres
+ killall postgres
+ brew services stop postgres
+ ```
+
+ ## Running Integration Tests
+ You must specify the tag "integration" when running these test suites.
+ This can be done through the CLI with the `--tag integration` argument:
+ ```bash
+ rspec spec/utils/ --tag integration
+ ```
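
The `integration` tag referenced in this new doc is ordinary RSpec metadata. As a hypothetical illustration (not part of this diff), a spec in `spec/utils/` would opt in by tagging its example group, roughly like this:

```ruby
# Hypothetical spec file, e.g. spec/utils/example_integration_spec.rb
require 'spec_helper'

# Tagging the group with :integration attaches `integration: true` metadata,
# so `rspec --tag integration` filters the run down to groups tagged this way.
RSpec.describe 'a database-backed feature', :integration do
  it 'can talk to the configured test database' do
    expect(ActiveRecord::Base.connection.active?).to be(true)
  end
end
```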
@@ -14,7 +14,7 @@ module Deimos
  message = Deimos::KafkaMessage.new(
    message: m.encoded_payload ? m.encoded_payload.to_s.b : nil,
    topic: m.topic,
-   partition_key: m.partition_key || m.key
+   partition_key: partition_key_for(m)
  )
  message.key = m.encoded_key.to_s.b unless producer_class.config[:no_keys]
  message
@@ -26,6 +26,15 @@ module Deimos
    by: records.size
  )
  end
+
+ # @param message [Deimos::Message]
+ # @return [String] the partition key to use for this message
+ def partition_key_for(message)
+   return message.partition_key if message.partition_key.present?
+   return message.key unless message.key.is_a?(Hash)
+
+   message.key.to_yaml
+ end
  end
  end
  end
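
To see what the new `partition_key_for` logic produces, here is a minimal standalone sketch; the `Message` struct and the plain presence check stand in for `Deimos::Message` and ActiveSupport's `present?`, and are not part of the gem:

```ruby
require 'yaml'

# Stand-in for Deimos::Message with only the two readers the logic needs.
Message = Struct.new(:partition_key, :key)

def partition_key_for(message)
  # Prefer an explicit partition key when one was set.
  return message.partition_key if message.partition_key && !message.partition_key.to_s.empty?
  # String (or nil) keys can be used directly.
  return message.key unless message.key.is_a?(Hash)

  # Hash keys get a stable string encoding instead of relying on Hash#to_s.
  message.key.to_yaml
end

partition_key_for(Message.new('pk-1', 'ignored'))        # => "pk-1"
partition_key_for(Message.new(nil, 'plain-key'))         # => "plain-key"
partition_key_for(Message.new(nil, { 'test_id' => 0 }))  # => "---\ntest_id: 0\n"
```

This matches the spec added further down, which expects the stored `partition_key` for a Hash key to be the YAML string `"---\ntest_id: 0\n"`.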
@@ -15,6 +15,11 @@ module Deimos
  # enabled true
  # ca_cert_file 'my_file'
  # end
+ # config.kafka do
+ #   ssl do
+ #     enabled true
+ #   end
+ # end
  # end
  # - Allows for arrays of configurations:
  # Deimos.configure do |config|
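
The comment lines added above document the nested block form of the Kafka SSL settings. Spelled out as configuration code, and mirroring only what the comment itself shows, the usage would look like:

```ruby
# Sketch that mirrors the comment added above; option names beyond `enabled`
# are not shown here because they are not part of this diff.
Deimos.configure do |config|
  config.kafka do
    ssl do
      enabled true
    end
  end
end
```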
@@ -88,8 +88,9 @@ module Deimos
  array_of_attributes,
  options={})
  results = super
- return unless self.kafka_config[:import]
- return if array_of_attributes.empty?
+ if !self.kafka_config[:import] || array_of_attributes.empty?
+   return results
+ end
 
  # This will contain an array of hashes, where each hash is the actual
  # attribute hash that created the object.
@@ -190,11 +190,14 @@ module Deimos
  end
  end
 
+ # Produce messages in batches, reducing the size to 1/10 if the batch is too
+ # large. Does not retry batches of messages that have already been sent.
  # @param batch [Array<Hash>]
  def produce_messages(batch)
    batch_size = batch.size
+   current_index = 0
    begin
-     batch.in_groups_of(batch_size, false).each do |group|
+     batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
        @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
        producer.publish_list(group)
        Deimos.config.metrics&.increment(
@@ -202,6 +205,7 @@ module Deimos
          tags: %W(status:success topic:#{@current_topic}),
          by: group.size
        )
+       current_index += group.size
        @logger.info("Sent #{group.size} messages to #{@current_topic}")
      end
    rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge,
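
The `current_index` bookkeeping above is what stops groups that were already published from being sent again: when the rescue clause shrinks the batch size and retries, the slice resumes after the last successfully published group. Below is a stripped-down sketch of that control flow, using a generic publisher callable and a plain error in place of Phobos and the Kafka exception classes:

```ruby
# Minimal sketch of the retry strategy, not the gem's actual implementation:
# publish in groups, shrink the group size to 1/10 on failure, and resume
# from the first unsent message rather than the start of the batch.
def produce_in_groups(batch, publisher)
  group_size = batch.size
  current_index = 0
  begin
    batch[current_index..-1].each_slice(group_size) do |group|
      publisher.call(group)          # may raise an overflow-style error
      current_index += group.size    # advance only after the group is sent
    end
  rescue StandardError
    raise if group_size == 1         # nothing smaller left to try
    group_size = [group_size / 10, 1].max
    retry
  end
end

# Example: a publisher that rejects groups larger than 100 messages.
sent = []
publisher = ->(group) { raise 'overflow' if group.size > 100; sent.concat(group) }
produce_in_groups(Array.new(450, 'A'), publisher)
sent.size # => 450, and no message is published twice
```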
@@ -1,5 +1,5 @@
  # frozen_string_literal: true
 
  module Deimos
-   VERSION = '1.8.2'
+   VERSION = '1.8.3'
  end
@@ -43,6 +43,12 @@ each_db_config(Deimos::Backends::Db) do
  described_class.publish(producer_class: MyNoKeyProducer,
                          messages: [messages.first])
  expect(Deimos::KafkaMessage.count).to eq(4)
+ end
 
+ it 'should add messages with Hash keys using YAML encoding' do
+   described_class.publish(producer_class: MyProducer,
+                           messages: [build_message({ foo: 0 }, 'my-topic', { 'test_id' => 0 })])
+   expect(Deimos::KafkaMessage.count).to eq(1)
+   expect(Deimos::KafkaMessage.last.partition_key).to eq(%(---\ntest_id: 0\n))
  end
  end
@@ -225,5 +225,88 @@ module KafkaSourceSpec
  expect(Deimos::KafkaMessage.count).to eq(0)
  end
  end
+
+ context 'with import hooks disabled' do
+   before(:each) do
+     # Dummy class we can include the mixin in. Has a backing table created
+     # earlier and has the import hook disabled.
+     class WidgetNoImportHook < ActiveRecord::Base
+       include Deimos::KafkaSource
+       self.table_name = 'widgets'
+
+       # :nodoc:
+       def self.kafka_config
+         {
+           update: true,
+           delete: true,
+           import: false,
+           create: true
+         }
+       end
+
+       # :nodoc:
+       def self.kafka_producers
+         [WidgetProducer]
+       end
+     end
+     WidgetNoImportHook.reset_column_information
+   end
+
+   it 'should not fail when bulk-importing with existing records' do
+     widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+     widget2 = WidgetNoImportHook.create(widget_id: 2, name: 'Widget 2')
+     widget1.name = 'New Widget No Import Hook 1'
+     widget2.name = 'New Widget No Import Hook 2'
+
+     expect {
+       WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id name))
+     }.not_to raise_error
+
+     expect('my-topic').not_to have_sent({
+       widget_id: 1,
+       name: 'New Widget No Import Hook 1',
+       id: widget1.id,
+       created_at: anything,
+       updated_at: anything
+     }, widget1.id)
+     expect('my-topic').not_to have_sent({
+       widget_id: 2,
+       name: 'New Widget No Import Hook 2',
+       id: widget2.id,
+       created_at: anything,
+       updated_at: anything
+     }, widget2.id)
+   end
+
+   it 'should not fail when mixing existing and new records' do
+     widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+     expect('my-topic').to have_sent({
+       widget_id: 1,
+       name: 'Widget 1',
+       id: widget1.id,
+       created_at: anything,
+       updated_at: anything
+     }, widget1.id)
+
+     widget2 = WidgetNoImportHook.new(widget_id: 2, name: 'Widget 2')
+     widget1.name = 'New Widget 1'
+     WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id))
+     widgets = WidgetNoImportHook.all
+     expect('my-topic').not_to have_sent({
+       widget_id: 1,
+       name: 'New Widget 1',
+       id: widgets[0].id,
+       created_at: anything,
+       updated_at: anything
+     }, widgets[0].id)
+     expect('my-topic').not_to have_sent({
+       widget_id: 2,
+       name: 'Widget 2',
+       id: widgets[1].id,
+       created_at: anything,
+       updated_at: anything
+     }, widgets[1].id)
+   end
+ end
  end
  end
@@ -96,7 +96,32 @@ each_db_config(Deimos::Utils::DbProducer) do
  expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).once
  expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).once
  expect(phobos_producer).to have_received(:publish_list).with(['A']).once
+ end
+
+ it 'should not resend batches of sent messages' do
+   allow(phobos_producer).to receive(:publish_list) do |group|
+     raise Kafka::BufferOverflow if group.any?('A') && group.size >= 1000
+     raise Kafka::BufferOverflow if group.any?('BIG') && group.size >= 10
+   end
+   allow(Deimos.config.metrics).to receive(:increment)
+   batch = ['A'] * 450 + ['BIG'] * 550
+   producer.produce_messages(batch)
+
+   expect(phobos_producer).to have_received(:publish_list).with(batch)
+   expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).exactly(4).times
+   expect(phobos_producer).to have_received(:publish_list).with(['A'] * 50 + ['BIG'] * 50)
+   expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(5).times
+   expect(phobos_producer).to have_received(:publish_list).with(['BIG'] * 1).exactly(550).times
 
+   expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                   tags: %w(status:success topic:),
+                                                                   by: 100).exactly(4).times
+   expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                   tags: %w(status:success topic:),
+                                                                   by: 10).exactly(5).times
+   expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                   tags: %w(status:success topic:),
+                                                                   by: 1).exactly(550).times
  end
 
  describe '#compact_messages' do
@@ -289,6 +314,8 @@ each_db_config(Deimos::Utils::DbProducer) do
      message: "mess#{i}",
      partition_key: "key#{i}"
    )
+ end
+ (5..8).each do |i|
    Deimos::KafkaMessage.create!(
      id: i,
      topic: 'my-topic2',
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: deimos-ruby
  version: !ruby/object:Gem::Version
-   version: 1.8.2
+   version: 1.8.3
  platform: ruby
  authors:
  - Daniel Orner
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-09-25 00:00:00.000000000 Z
+ date: 2020-11-18 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: avro_turf
@@ -348,6 +348,7 @@ files:
  - docs/ARCHITECTURE.md
  - docs/CONFIGURATION.md
  - docs/DATABASE_BACKEND.md
+ - docs/INTEGRATION_TESTS.md
  - docs/PULL_REQUEST_TEMPLATE.md
  - docs/UPGRADING.md
  - lib/deimos.rb