deimos-temp-fork 0.0.1

Files changed (146)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
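
The expanded diffs that follow add specs for the publish backends, the batch consumer, and the configuration DSL. For orientation, here is a minimal sketch (not part of this diff) of the producer/consumer DSL those specs exercise; the class names, topic and field values are illustrative, and the schema/namespace/topic/key_config macros and the consume hook are assumed to behave as the spec code below shows.

# Illustrative producer: class names, topic and payload fields are not from this diff.
class MyWidgetProducer < Deimos::Producer
  schema 'MySchema'
  namespace 'com.my-namespace'
  topic 'my-topic'
  key_config field: 'test_id'
end

# Illustrative consumer: called once per decoded message; metadata carries the key.
class MyWidgetConsumer < Deimos::Consumer
  schema 'MySchema'
  namespace 'com.my-namespace'
  key_config field: 'test_id'

  def consume(payload, metadata)
    Deimos.config.logger.info("Received #{payload} with key #{metadata[:key]}")
  end
end

# Publishing goes through whichever backend is configured (kafka, kafka_async, db, test).
MyWidgetProducer.publish('test_id' => 'foo', 'some_int' => 123)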
data/spec/backends/kafka_async_spec.rb
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ RSpec.describe Deimos::Backends::KafkaAsync do
+   include_context 'with publish_backend'
+   it 'should publish to Kafka asynchronously' do
+     producer = instance_double(Phobos::Producer::ClassMethods::PublicAPI)
+     expect(producer).to receive(:async_publish_list).with(messages.map(&:encoded_hash))
+     expect(described_class).to receive(:producer).and_return(producer)
+     described_class.publish(producer_class: MyProducer, messages: messages)
+   end
+ end
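
The spec above drives the async backend through its public entry point, publish(producer_class:, messages:), and expects the encoded hashes to reach Phobos via async_publish_list. A minimal sketch of selecting this backend at configuration time follows; it assumes config.producers.backend accepts the backend name, as the configuration spec further down reads it back (the symbol form is an assumption).

# Send produced messages through the async Kafka backend.
Deimos.configure do |config|
  config.producers.backend = :kafka_async
end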
data/spec/backends/kafka_spec.rb
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ RSpec.describe Deimos::Backends::Kafka do
+   include_context 'with publish_backend'
+   it 'should publish to Kafka synchronously' do
+     producer = instance_double(Phobos::Producer::ClassMethods::PublicAPI)
+     expect(producer).to receive(:publish_list).with(messages.map(&:encoded_hash))
+     expect(described_class).to receive(:producer).and_return(producer)
+     described_class.publish(producer_class: MyProducer, messages: messages)
+   end
+ end
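
Both backend specs exercise the same contract: a class-level publish(producer_class:, messages:) that receives already-encoded Deimos::Message objects and hands their encoded_hash payloads to Phobos. A hedged sketch of a custom backend against that contract follows; it assumes, as the shipped backends suggest, that Deimos::Backends::Base routes publish through an overridable execute class method, and the STDOUT destination is purely illustrative.

module Deimos
  module Backends
    # Illustrative backend: prints each encoded message instead of producing to Kafka.
    class Stdout < Base
      class << self
        def execute(producer_class:, messages:)
          messages.each do |message|
            puts "#{producer_class}: #{message.encoded_hash.inspect}"
          end
        end
      end
    end
  end
end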
data/spec/batch_consumer_spec.rb
@@ -0,0 +1,256 @@
+ # frozen_string_literal: true
+
+ # :nodoc:
+ module ConsumerTest
+   describe Deimos::Consumer, 'Batch Consumer' do
+
+     prepend_before(:each) do
+       # :nodoc:
+       consumer_class = Class.new(described_class) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         key_config field: 'test_id'
+
+         # :nodoc:
+         def consume_batch(_payloads, _metadata)
+           raise 'This should not be called unless call_original is set'
+         end
+       end
+       stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
+     end
+
+     let(:batch) do
+       [
+         { 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 456 }
+       ]
+     end
+
+     let(:invalid_payloads) do
+       batch.concat([{ 'invalid' => 'key' }])
+     end
+
+     it 'should provide backwards compatibility for BatchConsumer class' do
+       consumer_class = Class.new(Deimos::BatchConsumer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         key_config field: 'test_id'
+
+         # :nodoc:
+         def consume_batch(_payloads, _metadata)
+           raise 'This should not be called unless call_original is set'
+         end
+       end
+       stub_const('ConsumerTest::MyOldBatchConsumer', consumer_class)
+
+       test_consume_batch(MyOldBatchConsumer, batch) do |received, _metadata|
+         expect(received).to eq(batch)
+       end
+     end
+
+     it 'should consume a batch of messages' do
+       test_consume_batch(MyBatchConsumer, batch) do |received, _metadata|
+         expect(received).to eq(batch)
+       end
+     end
+
+     it 'should consume a message on a topic' do
+       test_consume_batch('my_batch_consume_topic', batch) do |received, _metadata|
+         expect(received).to eq(batch)
+       end
+     end
+
+     it 'should fail on an invalid message in the batch' do
+       test_consume_batch_invalid_message(MyBatchConsumer, batch.concat(invalid_payloads))
+     end
+
+     describe 'when reraising errors is disabled' do
+       before(:each) do
+         Deimos.configure { |config| config.consumers.reraise_errors = false }
+       end
+
+       it 'should not fail when before_consume_batch fails' do
+         expect {
+           test_consume_batch(
+             MyBatchConsumer,
+             batch,
+             skip_expectation: true
+           ) { raise 'OH NOES' }
+         }.not_to raise_error
+       end
+
+       it 'should not fail when consume_batch fails' do
+         expect {
+           test_consume_batch(
+             MyBatchConsumer,
+             invalid_payloads,
+             skip_expectation: true
+           )
+         }.not_to raise_error
+       end
+     end
+
+     describe 'decoding' do
+       let(:keys) do
+         batch.map { |v| v.slice('test_id') }
+       end
+
+       it 'should decode payloads for all messages in the batch' do
+         test_consume_batch('my_batch_consume_topic', batch) do |received, _metadata|
+           # Mock decoder simply returns the payload
+           expect(received).to eq(batch)
+         end
+       end
+
+       it 'should decode keys for all messages in the batch' do
+         expect_any_instance_of(ConsumerTest::MyBatchConsumer).
+           to receive(:decode_key).with(keys[0]).and_call_original
+         expect_any_instance_of(ConsumerTest::MyBatchConsumer).
+           to receive(:decode_key).with(keys[1]).and_call_original
+
+         test_consume_batch('my_batch_consume_topic', batch, keys: keys) do |_received, metadata|
+           # Mock decode_key extracts the value of the first field as the key
+           expect(metadata[:keys]).to eq(%w(foo bar))
+           expect(metadata[:first_offset]).to eq(1)
+         end
+       end
+
+       it 'should decode plain keys for all messages in the batch' do
+         consumer_class = Class.new(described_class) do
+           schema 'MySchema'
+           namespace 'com.my-namespace'
+           key_config plain: true
+         end
+         stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
+
+         test_consume_batch('my_batch_consume_topic', batch, keys: [1, 2]) do |_received, metadata|
+           expect(metadata[:keys]).to eq([1, 2])
+         end
+       end
+     end
+
+     describe 'timestamps' do
+       before(:each) do
+         # :nodoc:
+         consumer_class = Class.new(described_class) do
+           schema 'MySchemaWithDateTimes'
+           namespace 'com.my-namespace'
+           key_config plain: true
+
+           # :nodoc:
+           def consume_batch(_payloads, _metadata)
+             raise 'This should not be called unless call_original is set'
+           end
+         end
+         stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
+         allow(Deimos.config.metrics).to receive(:histogram)
+       end
+
+       let(:batch_with_time) do
+         [
+           {
+             'test_id' => 'foo',
+             'some_int' => 123,
+             'updated_at' => Time.now.to_i,
+             'timestamp' => 2.minutes.ago.to_s
+           },
+           {
+             'test_id' => 'bar',
+             'some_int' => 456,
+             'updated_at' => Time.now.to_i,
+             'timestamp' => 3.minutes.ago.to_s
+           }
+         ]
+       end
+
+       let(:invalid_times) do
+         [
+           {
+             'test_id' => 'baz',
+             'some_int' => 123,
+             'updated_at' => Time.now.to_i,
+             'timestamp' => 'yesterday morning'
+           },
+           {
+             'test_id' => 'ok',
+             'some_int' => 456,
+             'updated_at' => Time.now.to_i,
+             'timestamp' => ''
+           },
+           {
+             'test_id' => 'hello',
+             'some_int' => 456,
+             'updated_at' => Time.now.to_i,
+             'timestamp' => '1234567890'
+           }
+         ]
+       end
+
+       it 'should consume a batch' do
+         expect(Deimos.config.metrics).
+           to receive(:histogram).with('handler',
+                                       a_kind_of(Numeric),
+                                       tags: %w(time:time_delayed topic:my-topic)).twice
+
+         test_consume_batch('my_batch_consume_topic', batch_with_time) do |received, _metadata|
+           expect(received).to eq(batch_with_time)
+         end
+       end
+
+       it 'should fail nicely and ignore timestamps with the wrong format' do
+         batch = invalid_times.concat(batch_with_time)
+
+         expect(Deimos.config.metrics).
+           to receive(:histogram).with('handler',
+                                       a_kind_of(Numeric),
+                                       tags: %w(time:time_delayed topic:my-topic)).twice
+
+         test_consume_batch('my_batch_consume_topic', batch) do |received, _metadata|
+           expect(received).to eq(batch)
+         end
+       end
+     end
+
+     describe 'logging' do
+       before(:each) do
+         # :nodoc:
+         consumer_class = Class.new(described_class) do
+           schema 'MySchemaWithUniqueId'
+           namespace 'com.my-namespace'
+           key_config plain: true
+
+           # :nodoc:
+           def consume_batch(_payloads, _metadata)
+             raise 'This should not be called unless call_original is set'
+           end
+         end
+         stub_const('ConsumerTest::MyBatchConsumer', consumer_class)
+         allow(Deimos.config.metrics).to receive(:histogram)
+       end
+
+       it 'should log message identifiers' do
+         batch_with_message_id = [
+           { 'id' => 1, 'test_id' => 'foo', 'some_int' => 5,
+             'timestamp' => 3.minutes.ago.to_s, 'message_id' => 'one' },
+           { 'id' => 2, 'test_id' => 'bar', 'some_int' => 6,
+             'timestamp' => 2.minutes.ago.to_s, 'message_id' => 'two' }
+         ]
+
+         allow(Deimos.config.logger).
+           to receive(:info)
+
+         expect(Deimos.config.logger).
+           to receive(:info).
+           with(hash_including(
+             message_ids: [
+               { key: 1, message_id: 'one' },
+               { key: 2, message_id: 'two' }
+             ]
+           )).
+           twice
+
+         test_consume_batch('my_batch_consume_topic', batch_with_message_id, keys: [1, 2])
+       end
+     end
+   end
+ end
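
The batch consumer spec relies on stub classes whose consume_batch raises unless call_original is set; in an application, the handler implements consume_batch itself. A minimal sketch of such a consumer follows, mirroring the macros used above; the class name and logging are illustrative, and the assumption that metadata[:keys] lines up with payloads by index is taken from the decoding examples in the spec.

# Illustrative batch consumer; processes every decoded payload from a fetched batch in one call.
class WidgetBatchConsumer < Deimos::Consumer
  schema 'MySchema'
  namespace 'com.my-namespace'
  key_config field: 'test_id'

  def consume_batch(payloads, metadata)
    payloads.zip(metadata[:keys]).each do |payload, key|
      Deimos.config.logger.info("key=#{key} some_int=#{payload['some_int']}")
    end
  end
end

For batches to be delivered this way, the matching listener needs inline_batch delivery, which is the setting the configuration spec below expects for the my_batch_consume_topic listener.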
data/spec/config/configuration_spec.rb
@@ -0,0 +1,248 @@
+ # frozen_string_literal: true
+
+ # Mock consumer
+ class MyConfigConsumer < Deimos::Consumer
+   # :no-doc:
+   def consume
+   end
+ end
+
+ # Mock consumer 2
+ class MyConfigConsumer2 < Deimos::Consumer
+   # :no-doc:
+   def consume
+   end
+ end
+
+ describe Deimos, 'configuration' do
+   it 'should configure with deprecated fields' do
+     logger = Logger.new(nil)
+     described_class.configure do
+       kafka_logger logger
+       reraise_consumer_errors true
+       schema_registry_url 'http://schema.registry'
+       seed_broker 'whatever'
+       schema_path 'some_path'
+       producer_schema_namespace 'namespace'
+       producer_topic_prefix 'prefix'
+       disable_producers true
+       ssl_enabled true
+       ssl_ca_cert 'cert'
+       ssl_client_cert 'cert'
+       ssl_client_cert_key 'key'
+       publish_backend 'db'
+       report_lag true
+     end
+
+     expect(described_class.config.kafka.logger).to eq(logger)
+     expect(described_class.config.consumers.reraise_errors).to eq(true)
+     expect(described_class.config.schema.registry_url).to eq('http://schema.registry')
+     expect(described_class.config.kafka.seed_brokers).to eq('whatever')
+     expect(described_class.config.producers.schema_namespace).to eq('namespace')
+     expect(described_class.config.producers.topic_prefix).to eq('prefix')
+     expect(described_class.config.producers.disabled).to eq(true)
+     expect(described_class.config.kafka.ssl.enabled).to eq(true)
+     expect(described_class.config.kafka.ssl.ca_cert).to eq('cert')
+     expect(described_class.config.kafka.ssl.client_cert).to eq('cert')
+     expect(described_class.config.kafka.ssl.client_cert_key).to eq('key')
+     expect(described_class.config.producers.backend).to eq('db')
+     expect(described_class.config.consumers.report_lag).to eq(true)
+   end
+
+   it 'reads existing Phobos config YML files' do
+     described_class.config.reset!
+     described_class.configure { |c| c.phobos_config_file = File.join(File.dirname(__FILE__), '..', 'phobos.yml') }
+     expect(described_class.config.phobos_config).to match(
+       logger: an_instance_of(Logger),
+       backoff: { min_ms: 1000, max_ms: 60_000 },
+       consumer: {
+         session_timeout: 300,
+         offset_commit_interval: 10,
+         offset_commit_threshold: 0,
+         heartbeat_interval: 10
+       },
+       custom_kafka_logger: an_instance_of(Logger),
+       custom_logger: an_instance_of(Logger),
+       kafka: {
+         client_id: 'phobos',
+         connect_timeout: 15,
+         socket_timeout: 15,
+         ssl_verify_hostname: true,
+         seed_brokers: ['localhost:9092']
+       },
+       listeners: [
+         {
+           topic: 'my_consume_topic',
+           group_id: 'my_group_id',
+           max_concurrency: 1,
+           start_from_beginning: true,
+           max_bytes_per_partition: 524_288,
+           min_bytes: 1,
+           max_wait_time: 5,
+           force_encoding: nil,
+           delivery: 'batch',
+           session_timeout: 300,
+           offset_commit_interval: 10,
+           offset_commit_threshold: 0,
+           offset_retention_time: nil,
+           heartbeat_interval: 10,
+           handler: 'ConsumerTest::MyConsumer'
+         }, {
+           topic: 'my_batch_consume_topic',
+           group_id: 'my_batch_group_id',
+           max_concurrency: 1,
+           start_from_beginning: true,
+           max_bytes_per_partition: 500.kilobytes,
+           min_bytes: 1,
+           max_wait_time: 5,
+           force_encoding: nil,
+           delivery: 'inline_batch',
+           session_timeout: 300,
+           offset_commit_interval: 10,
+           offset_commit_threshold: 0,
+           offset_retention_time: nil,
+           heartbeat_interval: 10,
+           handler: 'ConsumerTest::MyBatchConsumer'
+         }
+       ],
+       producer: {
+         ack_timeout: 5,
+         required_acks: :all,
+         max_retries: 2,
+         retry_backoff: 1,
+         max_buffer_size: 10_000,
+         max_buffer_bytesize: 10_000_000,
+         compression_codec: nil,
+         compression_threshold: 1,
+         max_queue_size: 10_000,
+         delivery_threshold: 0,
+         delivery_interval: 0
+       }
+     )
+   end
+
+   specify '#phobos_config' do
+     logger1 = Logger.new(nil)
+     logger2 = Logger.new(nil)
+     described_class.config.reset!
+     described_class.configure do
+       phobos_logger logger1
+       kafka do
+         logger logger2
+         seed_brokers 'my-seed-brokers'
+         client_id 'phobos2'
+         connect_timeout 30
+         socket_timeout 30
+         ssl.enabled(true)
+         ssl.ca_cert('cert')
+         ssl.client_cert('cert')
+         ssl.client_cert_key('key')
+         ssl.verify_hostname(false)
+       end
+       consumers do
+         session_timeout 30
+         offset_commit_interval 5
+         offset_commit_threshold 0
+         heartbeat_interval 5
+         backoff 5..10
+       end
+       producers do
+         ack_timeout 3
+         required_acks 1
+         max_retries 1
+         retry_backoff 2
+         max_buffer_size 5
+         max_buffer_bytesize 5
+         compression_codec :snappy
+         compression_threshold 2
+         max_queue_size 10
+         delivery_threshold 1
+         delivery_interval 1
+         persistent_connections true
+       end
+       consumer do
+         class_name 'MyConfigConsumer'
+         schema 'blah'
+         topic 'blah'
+         group_id 'myconsumerid'
+         max_concurrency 1
+         start_from_beginning true
+         max_bytes_per_partition 10
+         min_bytes 5
+         max_wait_time 5
+         force_encoding true
+         delivery :message
+         backoff 100..200
+         session_timeout 10
+         offset_commit_interval 13
+         offset_commit_threshold 13
+         offset_retention_time 13
+         heartbeat_interval 13
+       end
+       consumer do
+         disabled true
+         class_name 'MyConfigConsumer2'
+         schema 'blah2'
+         topic 'blah2'
+         group_id 'myconsumerid2'
+       end
+     end
+
+     expect(described_class.config.phobos_config).
+       to match(
+         logger: an_instance_of(Logger),
+         backoff: { min_ms: 5, max_ms: 10 },
+         consumer: {
+           session_timeout: 30,
+           offset_commit_interval: 5,
+           offset_commit_threshold: 0,
+           heartbeat_interval: 5
+         },
+         custom_kafka_logger: logger2,
+         custom_logger: logger1,
+         kafka: {
+           client_id: 'phobos2',
+           connect_timeout: 30,
+           socket_timeout: 30,
+           ssl_ca_cert: 'cert',
+           ssl_client_cert: 'cert',
+           ssl_client_cert_key: 'key',
+           ssl_verify_hostname: false,
+           seed_brokers: ['my-seed-brokers']
+         },
+         listeners: [
+           {
+             topic: 'blah',
+             group_id: 'myconsumerid',
+             max_concurrency: 1,
+             start_from_beginning: true,
+             max_bytes_per_partition: 10,
+             min_bytes: 5,
+             max_wait_time: 5,
+             force_encoding: true,
+             delivery: 'message',
+             backoff: { min_ms: 100, max_ms: 200 },
+             session_timeout: 10,
+             offset_commit_interval: 13,
+             offset_commit_threshold: 13,
+             offset_retention_time: 13,
+             heartbeat_interval: 13,
+             handler: 'MyConfigConsumer'
+           }
+         ],
+         producer: {
+           ack_timeout: 3,
+           required_acks: 1,
+           max_retries: 1,
+           retry_backoff: 2,
+           max_buffer_size: 5,
+           max_buffer_bytesize: 5,
+           compression_codec: :snappy,
+           compression_threshold: 2,
+           max_queue_size: 10,
+           delivery_threshold: 1,
+           delivery_interval: 1
+         }
+       )
+   end
+ end
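
The #phobos_config example above exercises the block-style configuration DSL end to end. As a usage note, a trimmed-down application initializer built on the same DSL might look like the sketch below; broker addresses, group ids and class names are illustrative, and treating backend and topic_prefix as settable inside the producers block is an assumption based on the config.producers.* keys the deprecated-fields test reads back.

# Illustrative initializer using the block DSL shown in the spec above.
Deimos.configure do
  kafka do
    seed_brokers ['localhost:9092']
    client_id 'my-app'
  end

  producers do
    backend :kafka_async     # assumption: mirrors config.producers.backend
    topic_prefix 'my-app.'   # assumption: mirrors config.producers.topic_prefix
  end

  consumers do
    reraise_errors false
  end

  consumer do
    class_name 'WidgetBatchConsumer'
    topic 'my_batch_consume_topic'
    group_id 'my-app-batch-group'
    delivery :inline_batch
  end
end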