deimos-ruby 1.8.2.pre.beta2 → 1.8.6

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: b773de54759babb229e8f3bc6b42931dab7695b7197d601ad4ae08015c0b401f
- data.tar.gz: 4af1aa98ffdfc013c4cac02fa316371ad138ec0d807ed58426a7ad57d6c1e719
+ metadata.gz: 65a333a1b518855f322e74ee096dc555c70dffa1b258d406bdea574584336f74
+ data.tar.gz: 6e6b64330307f03ed0311c96ff897c6573500a1de9cac997e4fade0b5987e6d9
  SHA512:
- metadata.gz: 319d01a4c42c5efe72bd4959fb57548102c3f2b4305f3d94510747fd372d516ebad3e34ff74f9e6744641728efce9eb7a20f2d89c37d6bd90847253698a05238
- data.tar.gz: 9e6999cca87ca448167a9e61a0bfa5914b36fd74c053f132bd02ab5200be840984546469affccfd07ce77d4d098a2096b13e42febd4ce7059ec882c436580cd8
+ metadata.gz: dd188a47a95c60a1d129314da2eec7f80cd842eb713e975d582edfa07a3cf4e966cd4433082439d162e0a29b84d8c72b92408f936eeedd3d9f058c002581d1da
+ data.tar.gz: 0c2a7e11df203cda96dba6ca16809c88e18d149b8e23a05d76641cd1039a70284315b497ca324155f4fb060099a3dd32be83b9fe9da5be5708cbc91954dfeddc
data/CHANGELOG.md CHANGED
@@ -7,6 +7,50 @@ and this project adheres to [Semantic Versioning](https://semver.org/spec/v2.0.0.html)

  ## UNRELEASED

+ ## 1.8.6 - 2021-01-14
+
+ ### Fixes :wrench:
+ - Fix for configuration bug with Ruby 3.0 (pass keyword arguments with `**` instead of a hash)
+
+ ## 1.8.5 - 2021-01-13
+
+ ### Fixes :wrench:
+ - Fixes for Rails 6.1 (remove usage of `update_attributes!`)
+
+ ## 1.8.4 - 2020-12-02
+
+ ### Features :star:
+ - Add overridable `process_message?` method to ActiveRecordConsumer to allow skipping the saving/updating of records
+
+ ### Fixes :wrench:
+
+ - Do not apply type coercion to `timestamp-millis` and `timestamp-micros` logical types (fixes [#97](https://github.com/flipp-oss/deimos/issues/97))
+
+ ## 1.8.3 - 2020-11-18
+
+ ### Fixes :wrench:
+ - Do not resend already-sent messages when splitting up batches
+ (fixes [#24](https://github.com/flipp-oss/deimos/issues/24))
+ - Fix KafkaSource crashing on bulk imports if import hooks are disabled
+ (fixes [#73](https://github.com/flipp-oss/deimos/issues/73))
+ - Use string-safe encoding for partition keys (fixes [#96](https://github.com/flipp-oss/deimos/issues/96))
+ - Retry on offset seek failures in inline consumer
+ (fixes [#5](https://github.com/flipp-oss/deimos/issues/5))
+
+ ## 1.8.2 - 2020-09-25
+
+ ### Features :star:
+ - Add "disabled" config field to consumers to allow disabling
+ individual consumers without having to comment out their
+ entries and possibly affecting unit tests.
+
+ ### Fixes :wrench:
+ - Prepend topic_prefix while encoding messages
+ (fixes [#37](https://github.com/flipp-oss/deimos/issues/37))
+ - Raise error if producing without a topic
+ (fixes [#50](https://github.com/flipp-oss/deimos/issues/50))
+ - Don't try to load producers/consumers when running rake tasks involving webpacker or assets
+
  ## 1.8.2-beta2 - 2020-09-15

  ### Features :star:
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- deimos-ruby (1.8.2.pre.beta2)
+ deimos-ruby (1.8.6)
  avro_turf (~> 0.11)
  phobos (~> 1.9)
  ruby-kafka (~> 0.7)
@@ -10,63 +10,67 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- actioncable (6.0.3.2)
- actionpack (= 6.0.3.2)
+ actioncable (6.1.1)
+ actionpack (= 6.1.1)
+ activesupport (= 6.1.1)
  nio4r (~> 2.0)
  websocket-driver (>= 0.6.1)
- actionmailbox (6.0.3.2)
- actionpack (= 6.0.3.2)
- activejob (= 6.0.3.2)
- activerecord (= 6.0.3.2)
- activestorage (= 6.0.3.2)
- activesupport (= 6.0.3.2)
+ actionmailbox (6.1.1)
+ actionpack (= 6.1.1)
+ activejob (= 6.1.1)
+ activerecord (= 6.1.1)
+ activestorage (= 6.1.1)
+ activesupport (= 6.1.1)
  mail (>= 2.7.1)
- actionmailer (6.0.3.2)
- actionpack (= 6.0.3.2)
- actionview (= 6.0.3.2)
- activejob (= 6.0.3.2)
+ actionmailer (6.1.1)
+ actionpack (= 6.1.1)
+ actionview (= 6.1.1)
+ activejob (= 6.1.1)
+ activesupport (= 6.1.1)
  mail (~> 2.5, >= 2.5.4)
  rails-dom-testing (~> 2.0)
- actionpack (6.0.3.2)
- actionview (= 6.0.3.2)
- activesupport (= 6.0.3.2)
- rack (~> 2.0, >= 2.0.8)
+ actionpack (6.1.1)
+ actionview (= 6.1.1)
+ activesupport (= 6.1.1)
+ rack (~> 2.0, >= 2.0.9)
  rack-test (>= 0.6.3)
  rails-dom-testing (~> 2.0)
  rails-html-sanitizer (~> 1.0, >= 1.2.0)
- actiontext (6.0.3.2)
- actionpack (= 6.0.3.2)
- activerecord (= 6.0.3.2)
- activestorage (= 6.0.3.2)
- activesupport (= 6.0.3.2)
+ actiontext (6.1.1)
+ actionpack (= 6.1.1)
+ activerecord (= 6.1.1)
+ activestorage (= 6.1.1)
+ activesupport (= 6.1.1)
  nokogiri (>= 1.8.5)
- actionview (6.0.3.2)
- activesupport (= 6.0.3.2)
+ actionview (6.1.1)
+ activesupport (= 6.1.1)
  builder (~> 3.1)
  erubi (~> 1.4)
  rails-dom-testing (~> 2.0)
  rails-html-sanitizer (~> 1.1, >= 1.2.0)
- activejob (6.0.3.2)
- activesupport (= 6.0.3.2)
+ activejob (6.1.1)
+ activesupport (= 6.1.1)
  globalid (>= 0.3.6)
- activemodel (6.0.3.2)
- activesupport (= 6.0.3.2)
- activerecord (6.0.3.2)
- activemodel (= 6.0.3.2)
- activesupport (= 6.0.3.2)
- activerecord-import (1.0.5)
+ activemodel (6.1.1)
+ activesupport (= 6.1.1)
+ activerecord (6.1.1)
+ activemodel (= 6.1.1)
+ activesupport (= 6.1.1)
+ activerecord-import (1.0.7)
  activerecord (>= 3.2)
- activestorage (6.0.3.2)
- actionpack (= 6.0.3.2)
- activejob (= 6.0.3.2)
- activerecord (= 6.0.3.2)
+ activestorage (6.1.1)
+ actionpack (= 6.1.1)
+ activejob (= 6.1.1)
+ activerecord (= 6.1.1)
+ activesupport (= 6.1.1)
  marcel (~> 0.3.1)
- activesupport (6.0.3.2)
+ mimemagic (~> 0.3.2)
+ activesupport (6.1.1)
  concurrent-ruby (~> 1.0, >= 1.0.2)
- i18n (>= 0.7, < 2)
- minitest (~> 5.1)
- tzinfo (~> 1.1)
- zeitwerk (~> 2.2, >= 2.2.2)
+ i18n (>= 1.6, < 2)
+ minitest (>= 5.1)
+ tzinfo (~> 2.0)
+ zeitwerk (~> 2.3)
  ast (2.4.1)
  avro (1.9.2)
  multi_json
@@ -83,11 +87,11 @@ GEM
  ddtrace (0.37.0)
  msgpack
  diff-lcs (1.4.4)
- digest-crc (0.6.1)
- rake (~> 13.0)
+ digest-crc (0.6.3)
+ rake (>= 12.0.0, < 14.0.0)
  dogstatsd-ruby (4.8.1)
- erubi (1.9.0)
- excon (0.76.0)
+ erubi (1.10.0)
+ excon (0.78.1)
  exponential-backoff (0.0.4)
  ffi (1.13.1)
  formatador (0.2.5)
@@ -110,7 +114,7 @@ GEM
  guard-rubocop (1.3.0)
  guard (~> 2.0)
  rubocop (~> 0.20)
- i18n (1.8.4)
+ i18n (1.8.7)
  concurrent-ruby (~> 1.0)
  listen (3.2.1)
  rb-fsevent (~> 0.10, >= 0.10.3)
@@ -119,7 +123,7 @@ GEM
  logging (2.3.0)
  little-plugger (~> 1.1)
  multi_json (~> 1.14)
- loofah (2.6.0)
+ loofah (2.8.0)
  crass (~> 1.0.2)
  nokogiri (>= 1.5.9)
  lumberjack (1.2.6)
@@ -130,15 +134,16 @@ GEM
  method_source (1.0.0)
  mimemagic (0.3.5)
  mini_mime (1.0.2)
- mini_portile2 (2.4.0)
- minitest (5.14.1)
+ mini_portile2 (2.5.0)
+ minitest (5.14.3)
  msgpack (1.3.3)
  multi_json (1.15.0)
  mysql2 (0.5.3)
  nenv (0.3.0)
- nio4r (2.5.2)
- nokogiri (1.10.10)
- mini_portile2 (~> 2.4.0)
+ nio4r (2.5.4)
+ nokogiri (1.11.1)
+ mini_portile2 (~> 2.5.0)
+ racc (~> 1.4)
  notiffany (0.1.3)
  nenv (~> 0.1)
  shellany (~> 0.0)
@@ -157,37 +162,38 @@ GEM
  pry (0.13.1)
  coderay (~> 1.1)
  method_source (~> 1.0)
+ racc (1.5.2)
  rack (2.2.3)
  rack-test (1.1.0)
  rack (>= 1.0, < 3)
- rails (6.0.3.2)
- actioncable (= 6.0.3.2)
- actionmailbox (= 6.0.3.2)
- actionmailer (= 6.0.3.2)
- actionpack (= 6.0.3.2)
- actiontext (= 6.0.3.2)
- actionview (= 6.0.3.2)
- activejob (= 6.0.3.2)
- activemodel (= 6.0.3.2)
- activerecord (= 6.0.3.2)
- activestorage (= 6.0.3.2)
- activesupport (= 6.0.3.2)
- bundler (>= 1.3.0)
- railties (= 6.0.3.2)
+ rails (6.1.1)
+ actioncable (= 6.1.1)
+ actionmailbox (= 6.1.1)
+ actionmailer (= 6.1.1)
+ actionpack (= 6.1.1)
+ actiontext (= 6.1.1)
+ actionview (= 6.1.1)
+ activejob (= 6.1.1)
+ activemodel (= 6.1.1)
+ activerecord (= 6.1.1)
+ activestorage (= 6.1.1)
+ activesupport (= 6.1.1)
+ bundler (>= 1.15.0)
+ railties (= 6.1.1)
  sprockets-rails (>= 2.0.0)
  rails-dom-testing (2.0.3)
  activesupport (>= 4.2.0)
  nokogiri (>= 1.6)
  rails-html-sanitizer (1.3.0)
  loofah (~> 2.3)
- railties (6.0.3.2)
- actionpack (= 6.0.3.2)
- activesupport (= 6.0.3.2)
+ railties (6.1.1)
+ actionpack (= 6.1.1)
+ activesupport (= 6.1.1)
  method_source
  rake (>= 0.8.7)
- thor (>= 0.20.3, < 2.0)
+ thor (~> 1.0)
  rainbow (3.0.0)
- rake (13.0.1)
+ rake (13.0.3)
  rb-fsevent (0.10.4)
  rb-inotify (0.10.1)
  ffi (~> 1.0)
@@ -239,20 +245,19 @@ GEM
  sprockets (4.0.2)
  concurrent-ruby (~> 1.0)
  rack (> 1, < 3)
- sprockets-rails (3.2.1)
+ sprockets-rails (3.2.2)
  actionpack (>= 4.0)
  activesupport (>= 4.0)
  sprockets (>= 3.0.0)
  sqlite3 (1.4.2)
  thor (1.0.1)
- thread_safe (0.3.6)
- tzinfo (1.2.7)
- thread_safe (~> 0.1)
+ tzinfo (2.0.4)
+ concurrent-ruby (~> 1.0)
  unicode-display_width (1.7.0)
  websocket-driver (0.7.3)
  websocket-extensions (>= 0.1.0)
  websocket-extensions (0.1.5)
- zeitwerk (2.4.0)
+ zeitwerk (2.4.2)

  PLATFORMS
  ruby
@@ -274,9 +279,9 @@ DEPENDENCIES
  rspec (~> 3)
  rspec-rails (~> 4)
  rspec_junit_formatter (~> 0.3)
- rubocop (~> 0.72)
- rubocop-rspec (~> 1.27)
+ rubocop (= 0.88.0)
+ rubocop-rspec (= 1.42.0)
  sqlite3 (~> 1.3)

  BUNDLED WITH
- 2.1.4
+ 2.2.5
data/README.md CHANGED
@@ -42,6 +42,7 @@ Please see the following for further information not covered by this readme:
  * [Configuration Reference](docs/CONFIGURATION.md)
  * [Database Backend Feature](docs/DATABASE_BACKEND.md)
  * [Upgrading Deimos](docs/UPGRADING.md)
+ * [Contributing to Integration Tests](docs/INTEGRATION_TESTS.md)

  # Installation

@@ -625,6 +626,15 @@ class MyConsumer < Deimos::ActiveRecordConsumer
  def record_key(payload)
  super
  end
+
+ # Optional override, returns true by default.
+ # When this method returns true, a record corresponding to the message
+ # is created/updated.
+ # When this method returns false, message processing is skipped and a
+ # corresponding record will NOT be created/updated.
+ def process_message?(payload)
+ super
+ end
  end
  ```

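The `process_message?` hook makes "consume but don't persist" logic declarative. A minimal sketch of an override, assuming a hypothetical `Widget` record class and a hypothetical `test_data` flag in the payload:

```ruby
class MyConsumer < Deimos::ActiveRecordConsumer
  record_class Widget

  # Skip persisting messages flagged as test data. Returning false skips the
  # create/update for this message, but its offset is still committed, so the
  # consumer does not reprocess it.
  def process_message?(payload)
    payload['test_data'] != true
  end
end
```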
@@ -38,7 +38,7 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency('rspec', '~> 3')
  spec.add_development_dependency('rspec_junit_formatter', '~>0.3')
  spec.add_development_dependency('rspec-rails', '~> 4')
- spec.add_development_dependency('rubocop', '~> 0.72')
- spec.add_development_dependency('rubocop-rspec', '~> 1.27')
+ spec.add_development_dependency('rubocop', '0.88.0')
+ spec.add_development_dependency('rubocop-rspec', '1.42.0')
  spec.add_development_dependency('sqlite3', '~> 1.3')
  end
@@ -79,6 +79,7 @@ topic|nil|Topic to produce to.
  schema|nil|This is optional but strongly recommended for testing purposes; this will validate against a local schema file used as the reader schema, as well as allowing you to write tests against this schema. This is recommended since it ensures you are always getting the values you expect.
  namespace|nil|Namespace of the schema to use when finding it locally.
  key_config|nil|Configuration hash for message keys. See [Kafka Message Keys](../README.md#installation)
+ disabled|false|Set to true to skip starting an actual listener for this consumer on startup.
  group_id|nil|ID of the consumer group.
  max_concurrency|1|Number of threads created for this listener. Each thread will behave as an independent consumer. They don't share any state.
  start_from_beginning|true|Once the consumer group has checkpointed its progress in the topic's partitions, the consumers will always start from the checkpointed offsets, regardless of config. As such, this setting only applies when the consumer initially starts consuming from a topic.
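The `disabled` flag slots in next to the other consumer settings. A sketch of its use in a `Deimos.configure` block, mirroring the syntax of the configuration spec further down in this diff (the class, topic, and schema names are placeholders):

```ruby
Deimos.configure do
  consumer do
    class_name 'MyConsumer'
    topic 'my-topic'
    schema 'MySchema'
    namespace 'com.my-namespace'
    key_config field: :test_id
    # No listener is started for this consumer, but the entry stays in
    # place, so unit tests that reference it keep working.
    disabled true
  end
end
```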
@@ -0,0 +1,52 @@
+ # Running Integration Tests
+
+ This repo includes integration tests in the [spec/utils](spec/utils) directory.
+ These cover Deimos features that rely on a database integration, such as:
+ * [Database Poller](README.md#database-poller)
+ * [Database Backend](docs/DATABASE_BACKEND.md)
+ * [Deadlock Retrying](lib/deimos/utils/deadlock_retry.rb)
+
+ You will need to set up the following databases to develop and run these test suites:
+ * [SQLite](#sqlite)
+ * [MySQL](#mysql)
+ * [PostgreSQL](#postgresql)
+
+ ## SQLite
+ This database is covered through the `sqlite3` gem.
+
+ ## MySQL
+ ### Setting up a local MySQL server (Mac)
+ ```bash
+ # Download MySQL (optionally, choose a version you are comfortable with)
+ brew install mysql
+ # Start automatically after rebooting your machine
+ brew services start mysql
+
+ # Clean up once you are done with MySQL
+ brew services stop mysql
+ ```
+
+ ## PostgreSQL
+ ### Setting up a local PostgreSQL server (Mac)
+ ```bash
+ # Install postgres if it's not already installed
+ brew install postgres
+
+ # Initialize the postgres db, then start it up
+ initdb /usr/local/var/postgres
+ brew services start postgres
+ # Create the default database and user
+ # Use the password "root"
+ createuser -s --password postgres
+
+ # Clean up once done with Postgres
+ killall postgres
+ brew services stop postgres
+ ```
+
+ ## Running Integration Tests
+ You must specify the tag "integration" when running these test suites.
+ This can be done through the CLI with the `--tag integration` argument:
+ ```bash
+ rspec spec/utils/ --tag integration
+ ```
@@ -28,6 +28,7 @@ Please describe the tests that you ran to verify your changes. Provide instructions
  - [ ] I have performed a self-review of my own code
  - [ ] I have commented my code, particularly in hard-to-understand areas
  - [ ] I have made corresponding changes to the documentation
+ - [ ] I have added a line in the CHANGELOG describing this change, under the UNRELEASED heading
  - [ ] My changes generate no new warnings
  - [ ] I have added tests that prove my fix is effective or that my feature works
  - [ ] New and existing unit tests pass locally with my changes
@@ -26,6 +26,15 @@ module Deimos

  # :nodoc:
  def consume(payload, metadata)
+ unless self.process_message?(payload)
+ Deimos.config.logger.debug(
+ message: 'Skipping processing of message',
+ payload: payload,
+ metadata: metadata
+ )
+ return
+ end
+
  key = metadata.with_indifferent_access[:key]
  klass = self.class.config[:record_class]
  record = fetch_record(klass, (payload || {}).with_indifferent_access, key)
@@ -55,5 +55,13 @@ module Deimos
  def record_attributes(payload, _key=nil)
  @converter.convert(payload)
  end
+
+ # Override this method to conditionally save records
+ # @param payload [Hash] The kafka message as a hash
+ # @return [Boolean] if true, the record is created/updated.
+ # If false, record processing is skipped but the message offset is still committed.
+ def process_message?(_payload)
+ true
+ end
  end
  end
@@ -14,7 +14,7 @@ module Deimos
  message = Deimos::KafkaMessage.new(
  message: m.encoded_payload ? m.encoded_payload.to_s.b : nil,
  topic: m.topic,
- partition_key: m.partition_key || m.key
+ partition_key: partition_key_for(m)
  )
  message.key = m.encoded_key.to_s.b unless producer_class.config[:no_keys]
  message
@@ -26,6 +26,15 @@ module Deimos
  by: records.size
  )
  end
+
+ # @param message [Deimos::Message]
+ # @return [String] the partition key to use for this message
+ def partition_key_for(message)
+ return message.partition_key if message.partition_key.present?
+ return message.key unless message.key.is_a?(Hash)
+
+ message.key.to_yaml
+ end
  end
  end
  end
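The new `partition_key_for` helper falls back to a YAML dump only when the key is a Hash, since a Hash cannot be stored directly in the string `partition_key` column. The expected output, matching the db backend spec later in this diff:

```ruby
require 'yaml'

# Explicit partition keys and scalar keys pass through unchanged;
# a Hash key is serialized to a stable string form.
{ 'test_id' => 0 }.to_yaml # => "---\ntest_id: 0\n"
```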
@@ -15,6 +15,11 @@ module Deimos
  # enabled true
  # ca_cert_file 'my_file'
  # end
+ # config.kafka do
+ # ssl do
+ # enabled true
+ # end
+ # end
  # end
  # - Allows for arrays of configurations:
  # Deimos.configure do |config|
@@ -245,6 +250,13 @@ module Deimos

  # Configure the settings with values.
  def configure(&block)
+ if defined?(Rake) && defined?(Rake.application)
+ tasks = Rake.application.top_level_tasks
+ if tasks.any? { |t| %w(assets webpacker yarn).include?(t.split(':').first) }
+ puts 'Skipping Deimos configuration since we are in JS/CSS compilation'
+ return
+ end
+ end
  config.run_callbacks(:configure) do
  config.instance_eval(&block)
  end
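The guard only looks at the namespace prefix of each top-level Rake task. A standalone illustration of the predicate, with a hardcoded task list standing in for `Rake.application.top_level_tasks`:

```ruby
# Task names as Rake reports them for `rake assets:precompile webpacker:compile`
tasks = %w(assets:precompile webpacker:compile)

# Same check as in the diff: compare only the part before the first colon.
skip = tasks.any? { |t| %w(assets webpacker yarn).include?(t.split(':').first) }
skip # => true, so configure returns before loading producers/consumers
```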
@@ -67,7 +67,7 @@ module Deimos
  topic(kafka_config.topic) if kafka_config.topic.present? && klass.respond_to?(:topic)
  schema(kafka_config.schema) if kafka_config.schema.present?
  namespace(kafka_config.namespace) if kafka_config.namespace.present?
- key_config(kafka_config.key_config) if kafka_config.key_config.present?
+ key_config(**kafka_config.key_config) if kafka_config.key_config.present?
  end
  end

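This one-character change is the Ruby 3.0 fix from the 1.8.6 changelog: Ruby 3 no longer converts a positional Hash into keyword arguments, so the Hash must be splatted with `**`. A minimal reproduction (the keyword signature below is illustrative, not copied from the gem):

```ruby
# Illustrative method that accepts only keyword arguments.
def key_config(field: nil, plain: false, none: false)
  field
end

opts = { field: :test_id }

key_config(**opts) # => :test_id on Ruby 2.7 and 3.x alike
key_config(opts)   # ArgumentError on Ruby 3.0+: a positional Hash is no
                   # longer implicitly converted to keyword arguments
```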
@@ -319,6 +319,10 @@ module Deimos
  # Key configuration (see docs).
  # @return [Hash]
  setting :key_config
+ # Set to true to ignore the consumer in the Phobos config and not actually start up a
+ # listener.
+ # @return [Boolean]
+ setting :disabled, false

  # These are the phobos "listener" configs. See CONFIGURATION.md for more
  # info.
@@ -63,8 +63,10 @@ module Deimos
  }

  p_config[:listeners] = self.consumer_objects.map do |consumer|
+ next nil if consumer.disabled
+
  hash = consumer.to_h.reject do |k, _|
- %i(class_name schema namespace key_config backoff).include?(k)
+ %i(class_name schema namespace key_config backoff disabled).include?(k)
  end
  hash = hash.map { |k, v| [k, v.is_a?(Symbol) ? v.to_s : v] }.to_h
  hash[:handler] = consumer.class_name
@@ -73,6 +75,7 @@ module Deimos
  end
  hash
  end
+ p_config[:listeners].compact!

  if self.kafka.ssl.enabled
  %w(ca_cert client_cert client_cert_key).each do |key|
@@ -88,8 +88,9 @@ module Deimos
  array_of_attributes,
  options={})
  results = super
- return unless self.kafka_config[:import]
- return if array_of_attributes.empty?
+ if !self.kafka_config[:import] || array_of_attributes.empty?
+ return results
+ end

  # This will contain an array of hashes, where each hash is the actual
  # attribute hash that created the object.
@@ -85,11 +85,8 @@ module Deimos
  locked_at: Time.zone.now,
  error: true,
  retries: record.retries + 1 }
- if ActiveRecord::VERSION::MAJOR >= 4
- record.update!(attr_hash)
- else
- record.update_attributes!(attr_hash)
- end
+ record.attributes = attr_hash
+ record.save!
  end

  # Update the locked_at timestamp to indicate that the producer is still
@@ -104,6 +104,8 @@ module Deimos
  Deimos.config.producers.disabled ||
  Deimos.producers_disabled?(self)

+ raise 'Topic not specified. Please specify the topic.' if topic.blank?
+
  backend_class = determine_backend_class(sync, force_send)
  Deimos.instrument(
  'encode_messages',
@@ -183,7 +185,7 @@ module Deimos
  nil
  else
  encoder.encode(message.payload,
- topic: "#{config[:topic]}-value")
+ topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-value")
  end
  end

@@ -201,9 +203,9 @@ module Deimos
  end

  if config[:key_field]
- encoder.encode_key(config[:key_field], key, topic: "#{config[:topic]}-key")
+ encoder.encode_key(config[:key_field], key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
  elsif config[:key_schema]
- key_encoder.encode(key, topic: "#{config[:topic]}-key")
+ key_encoder.encode(key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
  else
  key
  end
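With this change, the schema-registry subject used for encoding matches the topic the message is actually produced to. Following the configuration syntax used in the specs below:

```ruby
Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }

# A producer declared with `topic 'my-topic'` now encodes payloads against
# the subject 'prefix.my-topic-value' and keys against 'prefix.my-topic-key',
# rather than the unprefixed 'my-topic-value' / 'my-topic-key'.
```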
@@ -44,9 +44,11 @@ module Deimos

  case field_type
  when :int, :long
- if val.is_a?(Integer) ||
- _is_integer_string?(val) ||
- int_classes.any? { |klass| val.is_a?(klass) }
+ if %w(timestamp-millis timestamp-micros).include?(type.logical_type)
+ val
+ elsif val.is_a?(Integer) ||
+ _is_integer_string?(val) ||
+ int_classes.any? { |klass| val.is_a?(klass) }
  val.to_i
  else
  val # this will fail
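The guard exists because a `Time` headed for a `timestamp-millis` or `timestamp-micros` field must reach the Avro encoder untouched; coercing it with `to_i` would truncate it to whole seconds. A quick sketch of what the old coercion lost:

```ruby
t = Time.utc(2020, 11, 12, 13, 14, 15, 909_090)

t.to_i # => 1605186855, whole seconds only; the 909090 microseconds are gone

# With the guard, the Time object passes through unchanged and the Avro
# logical-type encoder keeps full millis/micros precision.
```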
@@ -142,7 +142,8 @@ module Deimos
  last_id = record.public_send(id_method)
  last_updated_at = last_updated(record)
  @producer.send_events(batch)
- @info.update_attributes!(last_sent: last_updated_at, last_sent_id: last_id)
+ @info.attributes = { last_sent: last_updated_at, last_sent_id: last_id }
+ @info.save!
  end
  end
  end
@@ -190,11 +190,14 @@ module Deimos
  end
  end

+ # Produce messages in batches, reducing the batch size by a factor of 10
+ # if the batch is too large. Does not retry messages that have already been sent.
  # @param batch [Array<Hash>]
  def produce_messages(batch)
  batch_size = batch.size
+ current_index = 0
  begin
- batch.in_groups_of(batch_size, false).each do |group|
+ batch[current_index..-1].in_groups_of(batch_size, false).each do |group|
  @logger.debug("Publishing #{group.size} messages to #{@current_topic}")
  producer.publish_list(group)
  Deimos.config.metrics&.increment(
@@ -202,6 +205,7 @@ module Deimos
  tags: %W(status:success topic:#{@current_topic}),
  by: group.size
  )
+ current_index += group.size
  @logger.info("Sent #{group.size} messages to #{@current_topic}")
  end
  rescue Kafka::BufferOverflow, Kafka::MessageSizeTooLarge,
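The fix for issue #24 in miniature: `current_index` records how much of the batch has been durably published, so after a buffer overflow only the unsent tail is retried at a smaller size. A standalone sketch of the control flow (hypothetical `publish` callable and error class; the real method's handling of single oversized messages is omitted):

```ruby
class BufferOverflow < StandardError; end

def produce(batch, publish)
  batch_size = batch.size
  current_index = 0
  begin
    batch[current_index..-1].each_slice(batch_size) do |group|
      publish.call(group)         # may raise BufferOverflow
      current_index += group.size # this group is now durably sent
    end
  rescue BufferOverflow
    batch_size = (batch_size / 10.0).ceil
    retry # restarts at current_index, so sent groups are not re-published
  end
end
```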
@@ -6,6 +6,7 @@ module Deimos
  module Utils
  # Listener that can seek to get the last X messages in a topic.
  class SeekListener < Phobos::Listener
+ MAX_SEEK_RETRIES = 3
  attr_accessor :num_messages

  # :nodoc:
@@ -13,8 +14,10 @@ module Deimos
  @num_messages ||= 10
  @consumer = create_kafka_consumer
  @consumer.subscribe(topic, @subscribe_opts)
+ attempt = 0

  begin
+ attempt += 1
  last_offset = @kafka_client.last_offset_for(topic, 0)
  offset = last_offset - num_messages
  if offset.positive?
@@ -22,7 +25,11 @@ module Deimos
  @consumer.seek(topic, 0, offset)
  end
  rescue StandardError => e
- "Could not seek to offset: #{e.message}"
+ if attempt < MAX_SEEK_RETRIES
+ sleep(1.seconds * attempt)
+ retry
+ end
+ log_error("Could not seek to offset: #{e.message} after #{MAX_SEEK_RETRIES} retries", listener_metadata)
  end

  instrument('listener.start_handler', listener_metadata) do
@@ -50,7 +57,6 @@ module Deimos

  # :nodoc:
  def consume(payload, metadata)
- puts "Got #{payload}"
  self.class.total_messages << {
  key: metadata[:key],
  payload: payload
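The seek retry is a plain `begin`/`rescue`/`retry` loop with linear backoff (ActiveSupport's `1.seconds * attempt` sleeps 1s, then 2s). The same pattern in isolation, with a hypothetical `seek_to_offset` standing in for the Kafka call:

```ruby
MAX_SEEK_RETRIES = 3
attempt = 0
begin
  attempt += 1
  seek_to_offset # hypothetical; raises on failure
rescue StandardError => e
  if attempt < MAX_SEEK_RETRIES
    sleep(attempt) # linear backoff: 1s after the first failure, 2s after the second
    retry
  end
  # Three attempts total (MAX_SEEK_RETRIES), then give up and log.
  warn "Could not seek to offset: #{e.message} after #{MAX_SEEK_RETRIES} retries"
end
```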
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module Deimos
- VERSION = '1.8.2-beta2'
+ VERSION = '1.8.6'
  end
@@ -137,5 +137,18 @@ module ActiveRecordConsumerTest
  expect(Widget.find_by_test_id('id1').some_int).to eq(3)
  expect(Widget.find_by_test_id('id2').some_int).to eq(4)
  end
+
+ it 'should not create a record if process_message? returns false' do
+ MyConsumer.any_instance.stub(:process_message?).and_return(false)
+ expect(Widget.count).to eq(0)
+ test_consume_message(MyConsumer, {
+ test_id: 'abc',
+ some_int: 3,
+ updated_at: 1.day.ago.to_i,
+ some_datetime_int: Time.zone.now.to_i,
+ timestamp: 2.minutes.ago.to_s
+ }, { call_original: true, key: 5 })
+ expect(Widget.count).to eq(0)
+ end
  end
  end
@@ -43,6 +43,12 @@ each_db_config(Deimos::Backends::Db) do
  described_class.publish(producer_class: MyNoKeyProducer,
  messages: [messages.first])
  expect(Deimos::KafkaMessage.count).to eq(4)
+ end

+ it 'should add messages with Hash keys with YAML encoding' do
+ described_class.publish(producer_class: MyProducer,
+ messages: [build_message({ foo: 0 }, 'my-topic', { 'test_id' => 0 })])
+ expect(Deimos::KafkaMessage.count).to eq(1)
+ expect(Deimos::KafkaMessage.last.partition_key).to eq(%(---\ntest_id: 0\n))
+ end
  end
@@ -6,6 +6,14 @@ class MyConfigConsumer < Deimos::Consumer
  def consume
  end
  end
+
+ # Mock consumer 2
+ class MyConfigConsumer2 < Deimos::Consumer
+ # :nodoc:
+ def consume
+ end
+ end
+
  describe Deimos, 'configuration' do
  it 'should configure with deprecated fields' do
  logger = Logger.new(nil)
@@ -171,6 +179,13 @@ describe Deimos, 'configuration' do
  offset_retention_time 13
  heartbeat_interval 13
  end
+ consumer do
+ disabled true
+ class_name 'MyConfigConsumer2'
+ schema 'blah2'
+ topic 'blah2'
+ group_id 'myconsumerid2'
+ end
  end

  expect(described_class.config.phobos_config).
@@ -16,7 +16,7 @@ RSpec.describe Deimos::Generators::ActiveRecordGenerator do
  files = Dir['db/migrate/*.rb']
  expect(files.length).to eq(1)
  results = <<~MIGRATION
- class CreateGeneratedTable < ActiveRecord::Migration[6.0]
+ class CreateGeneratedTable < ActiveRecord::Migration[6.1]
  def up
  if table_exists?(:generated_table)
  warn "generated_table already exists, exiting"
@@ -225,5 +225,88 @@ module KafkaSourceSpec
  expect(Deimos::KafkaMessage.count).to eq(0)
  end
  end
+
+ context 'with import hooks disabled' do
+ before(:each) do
+ # Dummy class we can include the mixin in. Has a backing table created
+ # earlier and has the import hook disabled
+ class WidgetNoImportHook < ActiveRecord::Base
+ include Deimos::KafkaSource
+ self.table_name = 'widgets'
+
+ # :nodoc:
+ def self.kafka_config
+ {
+ update: true,
+ delete: true,
+ import: false,
+ create: true
+ }
+ end
+
+ # :nodoc:
+ def self.kafka_producers
+ [WidgetProducer]
+ end
+ end
+ WidgetNoImportHook.reset_column_information
+ end
+
+ it 'should not fail when bulk-importing with existing records' do
+ widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+ widget2 = WidgetNoImportHook.create(widget_id: 2, name: 'Widget 2')
+ widget1.name = 'New Widget No Import Hook 1'
+ widget2.name = 'New Widget No Import Hook 2'
+
+ expect {
+ WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id name))
+ }.not_to raise_error
+
+ expect('my-topic').not_to have_sent({
+ widget_id: 1,
+ name: 'New Widget No Import Hook 1',
+ id: widget1.id,
+ created_at: anything,
+ updated_at: anything
+ }, widget1.id)
+ expect('my-topic').not_to have_sent({
+ widget_id: 2,
+ name: 'New Widget No Import Hook 2',
+ id: widget2.id,
+ created_at: anything,
+ updated_at: anything
+ }, widget2.id)
+ end
+
+ it 'should not fail when mixing existing and new records' do
+ widget1 = WidgetNoImportHook.create(widget_id: 1, name: 'Widget 1')
+ expect('my-topic').to have_sent({
+ widget_id: 1,
+ name: 'Widget 1',
+ id: widget1.id,
+ created_at: anything,
+ updated_at: anything
+ }, widget1.id)
+
+ widget2 = WidgetNoImportHook.new(widget_id: 2, name: 'Widget 2')
+ widget1.name = 'New Widget 1'
+ WidgetNoImportHook.import([widget1, widget2], on_duplicate_key_update: %i(widget_id))
+ widgets = WidgetNoImportHook.all
+ expect('my-topic').not_to have_sent({
+ widget_id: 1,
+ name: 'New Widget 1',
+ id: widgets[0].id,
+ created_at: anything,
+ updated_at: anything
+ }, widgets[0].id)
+ expect('my-topic').not_to have_sent({
+ widget_id: 2,
+ name: 'Widget 2',
+ id: widgets[1].id,
+ created_at: anything,
+ updated_at: anything
+ }, widgets[1].id)
+ end
+ end
  end
  end
@@ -51,13 +51,13 @@ each_db_config(Deimos::KafkaTopicInfo) do
  expect(record.locked_at).to eq(nil)
  expect(record.error).to eq(false)
  expect(record.retries).to eq(0)
- expect(record.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+ expect(record.last_processed_at.in_time_zone.to_s).to eq(Time.zone.now.to_s)
  record = Deimos::KafkaTopicInfo.last
  expect(record.locked_by).not_to eq(nil)
  expect(record.locked_at).not_to eq(nil)
  expect(record.error).not_to eq(false)
  expect(record.retries).not_to eq(0)
- expect(record.last_processed_at.to_s).to eq(20.seconds.ago.to_s)
+ expect(record.last_processed_at.in_time_zone.to_s).to eq(20.seconds.ago.to_s)
  end
  end

@@ -70,11 +70,11 @@ each_db_config(Deimos::KafkaTopicInfo) do
  locked_by: 'me', locked_at: 1.minute.ago)

  expect(Deimos::KafkaTopicInfo.count).to eq(3)
- Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.to_s).to eq(old_time) }
+ Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.in_time_zone.to_s).to eq(old_time) }
  Deimos::KafkaTopicInfo.ping_empty_topics(%w(topic1))
- expect(t1.reload.last_processed_at.to_s).to eq(old_time) # was passed as an exception
- expect(t2.reload.last_processed_at.to_s).to eq(Time.zone.now.to_s)
- expect(t3.reload.last_processed_at.to_s).to eq(old_time) # is locked
+ expect(t1.reload.last_processed_at.in_time_zone.to_s).to eq(old_time) # was passed as an exception
+ expect(t2.reload.last_processed_at.in_time_zone.to_s).to eq(Time.zone.now.to_s)
+ expect(t3.reload.last_processed_at.in_time_zone.to_s).to eq(old_time) # is locked
  end
  end

@@ -64,6 +64,14 @@ module ProducerTest
  end
  stub_const('MyErrorProducer', producer_class)

+ producer_class = Class.new(Deimos::Producer) do
+ schema 'MySchema'
+ namespace 'com.my-namespace'
+ topic nil
+ key_config none: true
+ end
+ stub_const('MyNoTopicProducer', producer_class)
+

  end

  it 'should fail on invalid message with error handler' do
@@ -228,6 +236,7 @@ module ProducerTest
  end

  it 'should encode the key' do
+ Deimos.configure { |c| c.producers.topic_prefix = nil }
  expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'my-topic-key')
  expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'my-topic-key')
  expect(MyProducer.encoder).to receive(:encode).with({
@@ -245,6 +254,21 @@ module ProducerTest
  )
  end

+ it 'should encode the key with topic prefix' do
+ Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }
+ expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'prefix.my-topic-key')
+ expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'prefix.my-topic-key')
+ expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'foo',
+ 'some_int' => 123 },
+ { topic: 'prefix.my-topic-value' })
+ expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'bar',
+ 'some_int' => 124 },
+ { topic: 'prefix.my-topic-value' })
+
+ MyProducer.publish_list([{ 'test_id' => 'foo', 'some_int' => 123 },
+ { 'test_id' => 'bar', 'some_int' => 124 }])
+ end
+
  it 'should not encode with plaintext key' do
  expect(MyNonEncodedProducer.key_encoder).not_to receive(:encode_key)

@@ -296,6 +320,31 @@ module ProducerTest
  )
  end

+ it 'should raise error if blank topic is passed in explicitly' do
+ expect {
+ MyProducer.publish_list(
+ [{ 'test_id' => 'foo',
+ 'some_int' => 123 },
+ { 'test_id' => 'bar',
+ 'some_int' => 124 }],
+ topic: ''
+ )
+ }.to raise_error(RuntimeError,
+ 'Topic not specified. Please specify the topic.')
+ end
+
+ it 'should raise error if the producer has not been initialized with a topic' do
+ expect {
+ MyNoTopicProducer.publish_list(
+ [{ 'test_id' => 'foo',
+ 'some_int' => 123 },
+ { 'test_id' => 'bar',
+ 'some_int' => 124 }]
+ )
+ }.to raise_error(RuntimeError,
+ 'Topic not specified. Please specify the topic.')
+ end
+
  it 'should error with nothing set' do
  expect {
  MyErrorProducer.publish_list(
@@ -42,6 +42,20 @@ RSpec.shared_examples_for('an Avro backend') do
  {
  'name' => 'union-int-field',
  'type' => %w(null int)
+ },
+ {
+ 'name' => 'timestamp-millis-field',
+ 'type' => {
+ 'type' => 'long',
+ 'logicalType' => 'timestamp-millis'
+ }
+ },
+ {
+ 'name' => 'timestamp-micros-field',
+ 'type' => {
+ 'type' => 'long',
+ 'logicalType' => 'timestamp-micros'
+ }
  }
  ]
  }
@@ -95,7 +109,9 @@ RSpec.shared_examples_for('an Avro backend') do
  'string-field' => 'hi mom',
  'boolean-field' => true,
  'union-field' => nil,
- 'union-int-field' => nil
+ 'union-int-field' => nil,
+ 'timestamp-millis-field' => Time.utc(2020, 11, 12, 13, 14, 15, 909_090),
+ 'timestamp-micros-field' => Time.utc(2020, 11, 12, 13, 14, 15, 909_090)
  }
  end

@@ -169,6 +185,15 @@ RSpec.shared_examples_for('an Avro backend') do
  expect(result['union-field']).to eq('itsme')
  end

+ it 'should not convert timestamp-millis' do
+ result = backend.coerce(payload)
+ expect(result['timestamp-millis-field']).to eq(Time.utc(2020, 11, 12, 13, 14, 15, 909_090))
+ end
+
+ it 'should not convert timestamp-micros' do
+ result = backend.coerce(payload)
+ expect(result['timestamp-micros-field']).to eq(Time.utc(2020, 11, 12, 13, 14, 15, 909_090))
+ end
  end

  end
@@ -87,7 +87,7 @@ module DbConfigs
  port: 3306,
  username: 'root',
  database: 'test',
- host: ENV['MYSQL_HOST'] || 'localhost'
+ host: ENV['MYSQL_HOST'] || '127.0.0.1'
  },
  {
  adapter: 'sqlite3',
@@ -96,7 +96,32 @@ each_db_config(Deimos::Utils::DbProducer) do
  expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).once
  expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).once
  expect(phobos_producer).to have_received(:publish_list).with(['A']).once
+ end
+
+ it 'should not resend batches of sent messages' do
+ allow(phobos_producer).to receive(:publish_list) do |group|
+ raise Kafka::BufferOverflow if group.any?('A') && group.size >= 1000
+ raise Kafka::BufferOverflow if group.any?('BIG') && group.size >= 10
+ end
+ allow(Deimos.config.metrics).to receive(:increment)
+ batch = ['A'] * 450 + ['BIG'] * 550
+ producer.produce_messages(batch)
+
+ expect(phobos_producer).to have_received(:publish_list).with(batch)
+ expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).exactly(4).times
+ expect(phobos_producer).to have_received(:publish_list).with(['A'] * 50 + ['BIG'] * 50)
+ expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(5).times
+ expect(phobos_producer).to have_received(:publish_list).with(['BIG'] * 1).exactly(550).times

+ expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+ tags: %w(status:success topic:),
+ by: 100).exactly(4).times
+ expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+ tags: %w(status:success topic:),
+ by: 10).exactly(5).times
+ expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+ tags: %w(status:success topic:),
+ by: 1).exactly(550).times
  end

  describe '#compact_messages' do
@@ -289,6 +314,8 @@ each_db_config(Deimos::Utils::DbProducer) do
  message: "mess#{i}",
  partition_key: "key#{i}"
  )
+ end
+ (5..8).each do |i|
  Deimos::KafkaMessage.create!(
  id: i,
  topic: 'my-topic2',
@@ -0,0 +1,31 @@
+ # frozen_string_literal: true
+
+ describe Deimos::Utils::SeekListener do
+
+ describe '#start_listener' do
+ let(:consumer) { instance_double(Kafka::Consumer) }
+ let(:handler) { class_double(Deimos::Utils::MessageBankHandler) }
+
+ before(:each) do
+ allow(handler).to receive(:start)
+ allow(consumer).to receive(:subscribe)
+ allow_any_instance_of(Phobos::Listener).to receive(:create_kafka_consumer).and_return(consumer)
+ allow_any_instance_of(Kafka::Client).to receive(:last_offset_for).and_return(100)
+ stub_const('Deimos::Utils::SeekListener::MAX_SEEK_RETRIES', 2)
+ end
+
+ it 'should seek offset' do
+ allow(consumer).to receive(:seek)
+ expect(consumer).to receive(:seek).once
+ seek_listener = described_class.new({ handler: handler, group_id: 999, topic: 'test_topic' })
+ seek_listener.start_listener
+ end
+
+ it 'should retry on errors when seeking offset' do
+ allow(consumer).to receive(:seek).and_raise(StandardError)
+ expect(consumer).to receive(:seek).twice
+ seek_listener = described_class.new({ handler: handler, group_id: 999, topic: 'test_topic' })
+ seek_listener.start_listener
+ end
+ end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: deimos-ruby
  version: !ruby/object:Gem::Version
- version: 1.8.2.pre.beta2
+ version: 1.8.6
  platform: ruby
  authors:
  - Daniel Orner
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-09-15 00:00:00.000000000 Z
+ date: 2021-01-14 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: avro_turf
@@ -280,30 +280,30 @@ dependencies:
  name: rubocop
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '0.72'
+ version: 0.88.0
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '0.72'
+ version: 0.88.0
  - !ruby/object:Gem::Dependency
  name: rubocop-rspec
  requirement: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '1.27'
+ version: 1.42.0
  type: :development
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
- - - "~>"
+ - - '='
  - !ruby/object:Gem::Version
- version: '1.27'
+ version: 1.42.0
  - !ruby/object:Gem::Dependency
  name: sqlite3
  requirement: !ruby/object:Gem::Requirement
@@ -348,6 +348,7 @@ files:
  - docs/ARCHITECTURE.md
  - docs/CONFIGURATION.md
  - docs/DATABASE_BACKEND.md
+ - docs/INTEGRATION_TESTS.md
  - docs/PULL_REQUEST_TEMPLATE.md
  - docs/UPGRADING.md
  - lib/deimos.rb
@@ -464,6 +465,7 @@ files:
  - spec/utils/db_poller_spec.rb
  - spec/utils/db_producer_spec.rb
  - spec/utils/deadlock_retry_spec.rb
+ - spec/utils/inline_consumer_spec.rb
  - spec/utils/lag_reporter_spec.rb
  - spec/utils/platform_schema_validation_spec.rb
  - spec/utils/schema_controller_mixin_spec.rb
@@ -486,11 +488,11 @@ required_ruby_version: !ruby/object:Gem::Requirement
  version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
- - - ">"
+ - - ">="
  - !ruby/object:Gem::Version
- version: 1.3.1
+ version: '0'
  requirements: []
- rubygems_version: 3.1.2
+ rubygems_version: 3.0.9
  signing_key:
  specification_version: 4
  summary: Kafka libraries for Ruby.
@@ -547,6 +549,7 @@ test_files:
  - spec/utils/db_poller_spec.rb
  - spec/utils/db_producer_spec.rb
  - spec/utils/deadlock_retry_spec.rb
+ - spec/utils/inline_consumer_spec.rb
  - spec/utils/lag_reporter_spec.rb
  - spec/utils/platform_schema_validation_spec.rb
  - spec/utils/schema_controller_mixin_spec.rb