deimos-ruby 1.0.0.pre.beta22

This diff shows the content of publicly available package versions released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
Files changed (100)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +74 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +321 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +32 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +165 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +752 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-kafka.gemspec +42 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/DATABASE_BACKEND.md +147 -0
  22. data/docs/PULL_REQUEST_TEMPLATE.md +34 -0
  23. data/lib/deimos/active_record_consumer.rb +81 -0
  24. data/lib/deimos/active_record_producer.rb +64 -0
  25. data/lib/deimos/avro_data_coder.rb +89 -0
  26. data/lib/deimos/avro_data_decoder.rb +36 -0
  27. data/lib/deimos/avro_data_encoder.rb +51 -0
  28. data/lib/deimos/backends/db.rb +27 -0
  29. data/lib/deimos/backends/kafka.rb +27 -0
  30. data/lib/deimos/backends/kafka_async.rb +27 -0
  31. data/lib/deimos/configuration.rb +90 -0
  32. data/lib/deimos/consumer.rb +164 -0
  33. data/lib/deimos/instrumentation.rb +71 -0
  34. data/lib/deimos/kafka_message.rb +27 -0
  35. data/lib/deimos/kafka_source.rb +126 -0
  36. data/lib/deimos/kafka_topic_info.rb +86 -0
  37. data/lib/deimos/message.rb +74 -0
  38. data/lib/deimos/metrics/datadog.rb +47 -0
  39. data/lib/deimos/metrics/mock.rb +39 -0
  40. data/lib/deimos/metrics/provider.rb +38 -0
  41. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  42. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  43. data/lib/deimos/monkey_patches/ruby_kafka_heartbeat.rb +85 -0
  44. data/lib/deimos/monkey_patches/schema_store.rb +19 -0
  45. data/lib/deimos/producer.rb +218 -0
  46. data/lib/deimos/publish_backend.rb +30 -0
  47. data/lib/deimos/railtie.rb +8 -0
  48. data/lib/deimos/schema_coercer.rb +108 -0
  49. data/lib/deimos/shared_config.rb +59 -0
  50. data/lib/deimos/test_helpers.rb +356 -0
  51. data/lib/deimos/tracing/datadog.rb +35 -0
  52. data/lib/deimos/tracing/mock.rb +40 -0
  53. data/lib/deimos/tracing/provider.rb +31 -0
  54. data/lib/deimos/utils/db_producer.rb +122 -0
  55. data/lib/deimos/utils/executor.rb +117 -0
  56. data/lib/deimos/utils/inline_consumer.rb +144 -0
  57. data/lib/deimos/utils/lag_reporter.rb +182 -0
  58. data/lib/deimos/utils/platform_schema_validation.rb +0 -0
  59. data/lib/deimos/utils/signal_handler.rb +68 -0
  60. data/lib/deimos/version.rb +5 -0
  61. data/lib/deimos.rb +133 -0
  62. data/lib/generators/deimos/db_backend/templates/migration +24 -0
  63. data/lib/generators/deimos/db_backend/templates/rails3_migration +30 -0
  64. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  65. data/lib/tasks/deimos.rake +27 -0
  66. data/spec/active_record_consumer_spec.rb +81 -0
  67. data/spec/active_record_producer_spec.rb +107 -0
  68. data/spec/avro_data_decoder_spec.rb +18 -0
  69. data/spec/avro_data_encoder_spec.rb +37 -0
  70. data/spec/backends/db_spec.rb +35 -0
  71. data/spec/backends/kafka_async_spec.rb +11 -0
  72. data/spec/backends/kafka_spec.rb +11 -0
  73. data/spec/consumer_spec.rb +169 -0
  74. data/spec/deimos_spec.rb +120 -0
  75. data/spec/kafka_source_spec.rb +168 -0
  76. data/spec/kafka_topic_info_spec.rb +88 -0
  77. data/spec/phobos.bad_db.yml +73 -0
  78. data/spec/phobos.yml +73 -0
  79. data/spec/producer_spec.rb +397 -0
  80. data/spec/publish_backend_spec.rb +10 -0
  81. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  82. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  83. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  84. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  85. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  86. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  87. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  88. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  89. data/spec/spec_helper.rb +207 -0
  90. data/spec/updateable_schema_store_spec.rb +36 -0
  91. data/spec/utils/db_producer_spec.rb +259 -0
  92. data/spec/utils/executor_spec.rb +42 -0
  93. data/spec/utils/lag_reporter_spec.rb +69 -0
  94. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  95. data/spec/utils/signal_handler_spec.rb +16 -0
  96. data/support/deimos-solo.png +0 -0
  97. data/support/deimos-with-name-next.png +0 -0
  98. data/support/deimos-with-name.png +0 -0
  99. data/support/flipp-logo.png +0 -0
  100. metadata +452 -0
data/spec/spec_helper.rb
@@ -0,0 +1,207 @@
+# frozen_string_literal: true
+
+$LOAD_PATH.unshift(File.expand_path('../lib', __dir__))
+require 'active_record'
+require 'deimos'
+require 'deimos/metrics/mock'
+require 'deimos/tracing/mock'
+require 'deimos/test_helpers'
+require 'active_support/testing/time_helpers'
+
+# Helpers for Executor/DbProducer
+module TestRunners
+  # Execute a block until it stops failing. This is helpful for testing threads
+  # where we need to wait for them to continue but don't want to rely on
+  # sleeping for X seconds, which is crazy brittle and slow.
+  def wait_for
+    start_time = Time.now
+    begin
+      yield
+    rescue Exception # rubocop:disable Lint/RescueException
+      raise if Time.now - start_time > 2 # 2 seconds is probably plenty of time! <_<
+
+      sleep(0.1)
+      retry
+    end
+  end
+
+  # Test runner
+  class TestRunner
+    attr_accessor :id, :started, :stopped, :should_error
+    # :nodoc:
+    def initialize(id=nil)
+      @id = id
+    end
+
+    # :nodoc:
+    def start
+      if @should_error
+        @should_error = false
+        raise 'OH NOES'
+      end
+      @started = true
+    end
+
+    # :nodoc:
+    def stop
+      @stopped = true
+    end
+  end
+end
+
+# :nodoc:
+module DbConfigs
+  # @param payload [Hash]
+  # @param topic [String]
+  # @param key [String]
+  def build_message(payload, topic, key)
+    message = Deimos::Message.new(payload, Deimos::Producer,
+                                  topic: topic, key: key)
+    message.encoded_payload = message.payload
+    message.encoded_key = message.key
+    message
+  end
+
+  DB_OPTIONS = [
+    {
+      adapter: 'postgresql',
+      port: 5432,
+      username: 'postgres',
+      password: 'root',
+      database: 'postgres',
+      host: ENV['PG_HOST'] || 'localhost'
+    },
+    {
+      adapter: 'mysql2',
+      port: 3306,
+      username: 'root',
+      database: 'test',
+      host: ENV['MYSQL_HOST'] || 'localhost'
+    },
+    {
+      adapter: 'sqlite3',
+      database: 'test.sqlite3'
+    } # this one always needs to be last for non-integration tests
+  ].freeze
+
+  # For each config, run some tests.
+  def each_db_config(subject, &block)
+    DB_OPTIONS.each do |options|
+      describe subject, :integration, db_config: options do
+
+        include_context 'with DB'
+        describe options[:adapter] do # rubocop:disable RSpec/EmptyExampleGroup
+          self.instance_eval(&block)
+        end
+      end
+    end
+  end
+
+  # Set up the given database.
+  def setup_db(options)
+    ActiveRecord::Base.establish_connection(options)
+    migration_class_name = 'DbBackendMigration'
+    migration_version = '[5.2]'
+    migration = ERB.new(
+      File.read('lib/generators/deimos/db_backend/templates/migration')
+    ).result(binding)
+    eval(migration) # rubocop:disable Security/Eval
+    ActiveRecord::Migration.new.run(DbBackendMigration, direction: :up)
+
+    ActiveRecord::Base.descendants.each do |klass|
+      klass.reset_sequence_name if klass.respond_to?(:reset_sequence_name)
+      # reset internal variables - terrible hack to trick Rails into doing this
+      table_name = klass.table_name
+      klass.table_name = "#{table_name}2"
+      klass.table_name = table_name
+    end
+  end
+end
+
+RSpec.configure do |config|
+  config.extend(DbConfigs)
+  include DbConfigs
+  config.include TestRunners
+  config.full_backtrace = true
+
+  # true by default for RSpec 4.0
+  config.shared_context_metadata_behavior = :apply_to_host_groups
+
+  config.before(:all) do
+    Time.zone = 'EST'
+    ActiveRecord::Base.logger = Logger.new('/dev/null')
+    ActiveRecord::Base.establish_connection(
+      'adapter' => 'sqlite3',
+      'database' => 'test.sqlite3'
+    )
+  end
+  config.include Deimos::TestHelpers
+  config.include ActiveSupport::Testing::TimeHelpers
+  config.before(:suite) do
+    Time.zone = 'EST'
+    ActiveRecord::Base.logger = Logger.new('/dev/null')
+    setup_db(DbConfigs::DB_OPTIONS.last)
+    Deimos.configure do |fr_config|
+      fr_config.phobos_config_file = File.join(File.dirname(__FILE__), 'phobos.yml')
+      fr_config.schema_path = File.join(File.expand_path(__dir__), 'schemas')
+      fr_config.reraise_consumer_errors = true
+      fr_config.schema_registry_url = ENV['SCHEMA_REGISTRY'] || 'http://localhost:8081'
+      fr_config.seed_broker = ENV['KAFKA_SEED_BROKER'] || 'localhost:9092'
+      fr_config.logger = Logger.new('/dev/null')
+
+      # Use Mock Metrics and Tracing for rspecs
+      fr_config.metrics = Deimos::Metrics::Mock.new
+      fr_config.tracer = Deimos::Tracing::Mock.new
+    end
+  end
+
+  config.before(:each) do |ex|
+    stub_producers_and_consumers! unless ex.metadata[:integration]
+
+    @previous_config = Deimos.config.dup
+    @previous_phobos_config = Phobos.config.dup
+  end
+
+  config.after(:each) do
+    Deimos.config = @previous_config
+    Phobos.instance_variable_set(:@config, @previous_phobos_config)
+  end
+
+end
+
+RSpec.shared_context('with DB') do
+  before(:all) do
+    setup_db(self.class.metadata[:db_config] || DbConfigs::DB_OPTIONS.last)
+  end
+
+  after(:each) do
+    Deimos::KafkaMessage.delete_all
+    Deimos::KafkaTopicInfo.delete_all
+  end
+end
+
+RSpec.shared_context('with publish_backend') do
+  before(:each) do
+    producer_class = Class.new(Deimos::Producer) do
+      schema 'MySchema'
+      namespace 'com.my-namespace'
+      topic 'my-topic'
+      key_config field: 'test_id'
+    end
+    stub_const('MyProducer', producer_class)
+
+    producer_class = Class.new(Deimos::Producer) do
+      schema 'MySchema'
+      namespace 'com.my-namespace'
+      topic 'my-topic'
+      key_config none: true
+    end
+    stub_const('MyNoKeyProducer', producer_class)
+  end
+
+  let(:messages) do
+    (1..3).map do |i|
+      build_message({ foo: i }, 'my-topic', "foo#{i}")
+    end
+  end
+end
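Note on `setup_db` above: it renders the gem's generator template through ERB with `migration_class_name` and `migration_version` bound, `eval`s the result so that `DbBackendMigration` is defined, and then runs it. The template itself (`lib/generators/deimos/db_backend/templates/migration`, file 62 in the list above) is not reproduced in this section; the following is only a sketch of what the rendered migration could look like, with column names inferred from the `Deimos::KafkaMessage` and `Deimos::KafkaTopicInfo` models rather than taken from the actual template.

# Hypothetical rendering of the db_backend migration template; a sketch,
# not the gem's actual file. Table and column names are assumptions.
class DbBackendMigration < ActiveRecord::Migration[5.2]
  def up
    create_table :kafka_messages, force: true do |t|
      t.string :topic, null: false
      t.binary :message
      t.binary :key
      t.binary :partition_key
      t.timestamps
    end

    create_table :kafka_topic_info, force: true do |t|
      t.string :topic, null: false
      t.string :locked_by
      t.datetime :locked_at
      t.boolean :error, null: false, default: false
      t.integer :retries, null: false, default: 0
      t.timestamps
    end
  end

  def down
    drop_table :kafka_messages
    drop_table :kafka_topic_info
  end
end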
data/spec/updateable_schema_store_spec.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+describe AvroTurf::SchemaStore do
+
+  it 'should add an in-memory schema' do
+    schema_store = described_class.new(path: Deimos.config.schema_path)
+    schema_store.load_schemas!
+    found_schema = schema_store.find('MySchema', 'com.my-namespace').as_json
+    expect(found_schema['name']).to eq('MySchema')
+    expect(found_schema['namespace']).to eq('com.my-namespace')
+    expect(found_schema['fields'].size).to eq(2)
+    expect(found_schema['fields'][0]['type']['type_sym']).to eq('string')
+    expect(found_schema['fields'][0]['name']).to eq('test_id')
+    new_schema = {
+      'namespace' => 'com.my-namespace',
+      'name' => 'MyNewSchema',
+      'type' => 'record',
+      'doc' => 'Test schema',
+      'fields' => [
+        {
+          'name' => 'my_id',
+          'type' => 'int',
+          'doc' => 'test int'
+        }
+      ]
+    }
+    schema_store.add_schema(new_schema)
+    found_schema = schema_store.find('MyNewSchema', 'com.my-namespace').
+      as_json
+    expect(found_schema['name']).to eq('MyNewSchema')
+    expect(found_schema['namespace']).to eq('com.my-namespace')
+    expect(found_schema['fields'].size).to eq(1)
+    expect(found_schema['fields'][0]['type']['type_sym']).to eq('int')
+    expect(found_schema['fields'][0]['name']).to eq('my_id')
+  end
+end
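`add_schema` is not part of stock AvroTurf; it is supplied by the gem's `monkey_patches/schema_store.rb` (file 44 in the list above, +19 lines), which this section does not reproduce. A minimal sketch of what such a patch needs to do to satisfy this spec, assuming it mirrors how `AvroTurf::SchemaStore#find` caches schemas loaded from disk (a sketch, not the gem's verbatim code):

# Assumed implementation of the monkey patch the spec exercises: parse the
# hash into an Avro schema and register it in the store's @schemas cache,
# the same cache that #find consults.
class AvroTurf::SchemaStore
  def add_schema(schema_hash)
    fullname = Avro::Name.make_fullname(schema_hash['name'],
                                        schema_hash['namespace'])
    return @schemas[fullname] if @schemas.key?(fullname)

    # real_parse registers named schemas into the names hash it is given
    Avro::Schema.real_parse(schema_hash, @schemas)
  end
end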
data/spec/utils/db_producer_spec.rb
@@ -0,0 +1,259 @@
+# frozen_string_literal: true
+
+each_db_config(Deimos::Utils::DbProducer) do
+  let(:producer) do
+    producer = described_class.new
+    allow(producer).to receive(:sleep)
+    allow(producer).to receive(:producer).and_return(phobos_producer)
+    producer
+  end
+
+  let(:phobos_producer) do
+    pp = instance_double(Phobos::Producer::PublicAPI)
+    allow(pp).to receive(:publish_list)
+    pp
+  end
+
+  before(:each) do
+    stub_const('Deimos::Utils::DbProducer::BATCH_SIZE', 2)
+  end
+
+  specify '#process_next_messages' do
+    expect(producer).to receive(:retrieve_topics).and_return(%w(topic1 topic2))
+    expect(producer).to receive(:process_topic).twice
+    expect(producer).to receive(:sleep).with(0.5)
+    producer.process_next_messages
+  end
+
+  specify '#retrieve_topics' do
+    (1..3).each do |i|
+      Deimos::KafkaMessage.create!(topic: "topic#{i}",
+                                   key: 'blergkey',
+                                   message: 'blerg')
+    end
+    expect(producer.retrieve_topics).
+      to contain_exactly('topic1', 'topic2', 'topic3')
+  end
+
+  specify '#retrieve_messages' do
+    (1..3).each do |i|
+      Deimos::KafkaMessage.create!(topic: 'topic1',
+                                   message: 'blah',
+                                   key: "key#{i}")
+    end
+    stub_const('Deimos::Utils::DbProducer::BATCH_SIZE', 5)
+    producer.current_topic = 'topic1'
+    messages = producer.retrieve_messages
+    expect(messages.size).to eq(3)
+    expect(messages).to all(be_a_kind_of(Deimos::KafkaMessage))
+  end
+
+  describe '#produce_messages' do
+
+    it 'should produce normally' do
+      batch = ['A'] * 1000
+      expect(phobos_producer).to receive(:publish_list).with(batch).once
+      expect(Deimos.config.metrics).to receive(:increment).with('publish',
+                                                                tags: %w(status:success topic:),
+                                                                by: 1000).once
+      producer.produce_messages(batch)
+    end
+
+    it 'should split the batch size on buffer overflow' do
+      class_producer = double(Phobos::Producer::ClassMethods::PublicAPI, # rubocop:disable RSpec/VerifiedDoubles
+                              sync_producer_shutdown: nil)
+      allow(producer.class).to receive(:producer).and_return(class_producer)
+      expect(class_producer).to receive(:sync_producer_shutdown).twice
+      count = 0
+      allow(phobos_producer).to receive(:publish_list) do
+        count += 1
+        raise Kafka::BufferOverflow if count < 3
+      end
+      allow(Deimos.config.metrics).to receive(:increment)
+      batch = ['A'] * 1000
+      producer.produce_messages(batch)
+      expect(phobos_producer).to have_received(:publish_list).with(batch)
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100)
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).exactly(100).times
+      expect(Deimos.config.metrics).to have_received(:increment).with('publish',
+                                                                      tags: %w(status:success topic:),
+                                                                      by: 10).exactly(100).times
+    end
+
+    it "should raise an error if it can't split any more" do
+      allow(phobos_producer).to receive(:publish_list) do
+        raise Kafka::BufferOverflow
+      end
+      expect(Deimos.config.metrics).not_to receive(:increment)
+      batch = ['A'] * 1000
+      expect { producer.produce_messages(batch) }.to raise_error(Kafka::BufferOverflow)
+      expect(phobos_producer).to have_received(:publish_list).with(batch)
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 100).once
+      expect(phobos_producer).to have_received(:publish_list).with(['A'] * 10).once
+      expect(phobos_producer).to have_received(:publish_list).with(['A']).once
+
+    end
+  end
+
+  describe '#process_topic' do
+    before(:each) do
+      producer.id = 'abc'
+    end
+
+    it 'should do nothing if lock fails' do
+      expect(Deimos::KafkaTopicInfo).to receive(:lock).
+        with('my-topic', 'abc').and_return(false)
+      expect(producer).not_to receive(:retrieve_messages)
+      producer.process_topic('my-topic')
+    end
+
+    it 'should complete successfully' do
+      messages = (1..4).map do |i|
+        Deimos::KafkaMessage.new(
+          topic: 'my-topic',
+          message: "mess#{i}",
+          partition_key: "key#{i}"
+        )
+      end
+      expect(Deimos::KafkaTopicInfo).to receive(:lock).
+        with('my-topic', 'abc').and_return(true)
+      expect(producer).to receive(:retrieve_messages).ordered.
+        and_return(messages[0..1])
+      expect(producer).to receive(:produce_messages).ordered.with([
+        {
+          payload: 'mess1',
+          key: nil,
+          partition_key: 'key1',
+          topic: 'my-topic'
+        },
+        {
+          payload: 'mess2',
+          key: nil,
+          partition_key: 'key2',
+          topic: 'my-topic'
+        }
+      ])
+      expect(producer).to receive(:retrieve_messages).ordered.
+        and_return(messages[2..3])
+      expect(producer).to receive(:produce_messages).ordered.with([
+        {
+          payload: 'mess3',
+          partition_key: 'key3',
+          key: nil,
+          topic: 'my-topic'
+        },
+        {
+          payload: 'mess4',
+          partition_key: 'key4',
+          key: nil,
+          topic: 'my-topic'
+        }
+      ])
+      expect(producer).to receive(:retrieve_messages).ordered.
+        and_return([])
+      expect(Deimos::KafkaTopicInfo).to receive(:heartbeat).
+        with('my-topic', 'abc').twice
+      expect(Deimos::KafkaTopicInfo).to receive(:clear_lock).
+        with('my-topic', 'abc').once
+      producer.process_topic('my-topic')
+    end
+
+    it 'should register an error if it gets an error' do
+      expect(producer).to receive(:retrieve_messages).and_raise('OH NOES')
+      expect(Deimos::KafkaTopicInfo).to receive(:register_error).
+        with('my-topic', 'abc')
+      expect(producer).not_to receive(:produce_messages)
+      producer.process_topic('my-topic')
+    end
+
+    it 'should move on if it gets a partial batch' do
+      expect(producer).to receive(:retrieve_messages).ordered.
+        and_return([Deimos::KafkaMessage.new(
+                      topic: 'my-topic',
+                      message: 'mess1'
+                    )])
+      expect(producer).to receive(:produce_messages).once
+      producer.process_topic('my-topic')
+    end
+
+  end
+
+  example 'Full integration test' do
+    (1..4).each do |i|
+      (1..2).each do |j|
+        Deimos::KafkaMessage.create!(topic: "topic#{j}",
+                                     message: "mess#{i}",
+                                     partition_key: "key#{i}")
+        Deimos::KafkaMessage.create!(topic: "topic#{j + 2}",
+                                     key: "key#{i}",
+                                     partition_key: "key#{i}",
+                                     message: "mess#{i}")
+      end
+    end
+    allow(producer).to receive(:produce_messages)
+
+    producer.process_next_messages
+    expect(Deimos::KafkaTopicInfo.count).to eq(4)
+    topics = Deimos::KafkaTopicInfo.select('distinct topic').map(&:topic)
+    expect(topics).to contain_exactly('topic1', 'topic2', 'topic3', 'topic4')
+    expect(Deimos::KafkaMessage.count).to eq(0)
+
+    expect(producer).to have_received(:produce_messages).with([
+      {
+        payload: 'mess1',
+        partition_key: 'key1',
+        key: nil,
+        topic: 'topic1'
+      },
+      {
+        payload: 'mess2',
+        key: nil,
+        partition_key: 'key2',
+        topic: 'topic1'
+      }
+    ])
+    expect(producer).to have_received(:produce_messages).with([
+      {
+        payload: 'mess3',
+        key: nil,
+        partition_key: 'key3',
+        topic: 'topic1'
+      },
+      {
+        payload: 'mess4',
+        key: nil,
+        partition_key: 'key4',
+        topic: 'topic1'
+      }
+    ])
+    expect(producer).to have_received(:produce_messages).with([
+      {
+        payload: 'mess1',
+        key: 'key1',
+        partition_key: 'key1',
+        topic: 'topic3'
+      },
+      {
+        payload: 'mess2',
+        partition_key: 'key2',
+        key: 'key2',
+        topic: 'topic3'
+      }
+    ])
+    expect(producer).to have_received(:produce_messages).with([
+      {
+        payload: 'mess3',
+        key: 'key3',
+        partition_key: 'key3',
+        topic: 'topic3'
+      },
+      {
+        payload: 'mess4',
+        partition_key: 'key4',
+        key: 'key4',
+        topic: 'topic3'
+      }
+    ])
+  end
+
+end
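The `#produce_messages` examples above pin down a specific recovery strategy: publish the whole batch, and on `Kafka::BufferOverflow` shut down the sync producer, divide the slice size by 10, and retry, giving up only when single messages still overflow. A sketch of that loop as the expectations imply it (an illustration, not the gem's verbatim implementation):

# Sketch of the splitting behavior the specs above verify (assumed code).
def produce_messages(batch)
  batch_size = batch.size
  begin
    batch.each_slice(batch_size) do |group|
      producer.publish_list(group)
      Deimos.config.metrics&.increment(
        'publish',
        tags: %W(status:success topic:#{@current_topic}),
        by: group.size
      )
    end
  rescue Kafka::BufferOverflow
    raise if batch_size == 1 # can't split any further: surface the error

    self.class.producer.sync_producer_shutdown # drop the overflowing buffer
    batch_size /= 10 # 1000 -> 100 -> 10 -> 1, matching the expectations
    retry
  end
end

This also explains the expectation counts: a batch of 1,000 that overflows twice ends up as 100 successful `publish_list` calls of 10 messages each, with one metrics increment per slice.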
data/spec/utils/executor_spec.rb
@@ -0,0 +1,42 @@
+# frozen_string_literal: true
+
+require 'spec_helper'
+
+RSpec.describe Deimos::Utils::Executor do
+
+  let(:executor) { described_class.new(runners) }
+  let(:runners) { (1..2).map { |i| TestRunners::TestRunner.new(i) } }
+
+  it 'starts and stops configured runners' do
+    runners.each do |r|
+      expect(r.started).to be_falsey
+      expect(r.stopped).to be_falsey
+    end
+    executor.start
+    wait_for do
+      runners.each do |r|
+        expect(r.started).to be_truthy
+        expect(r.stopped).to be_falsey
+      end
+      executor.stop
+      runners.each do |r|
+        expect(r.started).to be_truthy
+        expect(r.stopped).to be_truthy
+      end
+    end
+  end
+
+  it 'reconnects crashed runners' do
+    allow(executor).to receive(:handle_crashed_runner).and_call_original
+    allow(executor).to receive(:handle_crashed_runner).and_call_original
+    runners.each { |r| r.should_error = true }
+    executor.start
+    wait_for do
+      expect(executor).to have_received(:handle_crashed_runner).with(runners[0], anything, 0).once
+      expect(executor).to have_received(:handle_crashed_runner).with(runners[1], anything, 0).once
+      runners.each { |r| expect(r.started).to be_truthy }
+      executor.stop
+    end
+  end
+
+end
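The second example implies the executor's restart contract: a runner that raises during `start` is handed to `handle_crashed_runner` along with the error and its current retry count, then started again. Conceptually something like the following (a sketch only; the real executor supervises each runner in its own thread):

# Sketch of the crash-restart loop the expectations describe (assumption).
def run_runner(runner, retries = 0)
  runner.start
rescue StandardError => e
  handle_crashed_runner(runner, e, retries) # log / back off before retrying
  retries += 1
  retry
end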
data/spec/utils/lag_reporter_spec.rb
@@ -0,0 +1,69 @@
+# frozen_string_literal: true
+
+describe Deimos::Utils::LagReporter do
+
+  before(:each) do
+    kafka_client = instance_double(Kafka::Client)
+    allow(kafka_client).to receive(:last_offset_for).and_return(100)
+    allow(Phobos).to receive(:create_kafka_client).and_return(kafka_client)
+    Deimos.configure { |c| c.report_lag = true }
+  end
+
+  after(:each) do
+    described_class.reset
+    Deimos.configure { |c| c.report_lag = false }
+  end
+
+  it 'should not report lag before ready' do
+    expect(Deimos.config.metrics).not_to receive(:gauge)
+    ActiveSupport::Notifications.instrument(
+      'heartbeat.consumer.kafka',
+      group_id: 'group1', topic_partitions: { 'my-topic': [1] }
+    )
+
+  end
+
+  it 'should report lag' do
+    expect(Deimos.config.metrics).to receive(:gauge).ordered.twice.
+      with('consumer_lag', 95,
+           tags: %w(
+             consumer_group:group1
+             partition:1
+             topic:my-topic
+           ))
+    expect(Deimos.config.metrics).to receive(:gauge).ordered.once.
+      with('consumer_lag', 80,
+           tags: %w(
+             consumer_group:group1
+             partition:2
+             topic:my-topic
+           ))
+    expect(Deimos.config.metrics).to receive(:gauge).ordered.once.
+      with('consumer_lag', 0,
+           tags: %w(
+             consumer_group:group1
+             partition:2
+             topic:my-topic
+           ))
+    ActiveSupport::Notifications.instrument(
+      'seek.consumer.kafka',
+      offset: 5, topic: 'my-topic', group_id: 'group1', partition: 1
+    )
+    ActiveSupport::Notifications.instrument(
+      'start_process_message.consumer.kafka',
+      offset_lag: 80, topic: 'my-topic', group_id: 'group1', partition: 2
+    )
+    ActiveSupport::Notifications.instrument(
+      'heartbeat.consumer.kafka',
+      group_id: 'group1', topic_partitions: { 'my-topic': [1, 2] }
+    )
+    ActiveSupport::Notifications.instrument(
+      'start_process_batch.consumer.kafka',
+      offset_lag: 0, topic: 'my-topic', group_id: 'group1', partition: 2
+    )
+    ActiveSupport::Notifications.instrument(
+      'heartbeat.consumer.kafka',
+      group_id: 'group1', topic_partitions: { 'my-topic': [1, 2] }
+    )
+  end
+end
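The expected gauge values follow directly from the stubs: `last_offset_for` always returns 100, so once partition 1 has seeked to offset 5, each subsequent heartbeat reports a lag of 100 - 5 = 95 for it, while partition 2's lag is taken straight from the `offset_lag` field of the processing events (80, then 0), with no broker round trip. A sketch of that computation (assumed, not the gem's verbatim code):

# Sketch of the per-partition lag calculation implied by the stubs above.
def compute_lag(topic, partition, offset: nil, offset_lag: nil)
  return offset_lag if offset_lag # reported directly by processing events

  latest = kafka_client.last_offset_for(topic, partition) # stubbed to 100
  latest - offset # e.g. 100 - 5 = 95 for partition 1 after the seek
end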
data/spec/utils/platform_schema_validation_spec.rb (File without changes)
data/spec/utils/signal_handler_spec.rb
@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+RSpec.describe Deimos::Utils::SignalHandler do
+  describe '#run!' do
+
+    it 'starts and stops the runner' do
+      runner = TestRunners::TestRunner.new
+      expect(runner).to receive(:start)
+      expect(runner).to receive(:stop)
+
+      signal_handler = described_class.new(runner)
+      signal_handler.send(:unblock, described_class::SIGNALS.first)
+      signal_handler.run!
+    end
+  end
+end
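The spec drives `run!` by calling the private `unblock` hook with one of the handler's trapped `SIGNALS`, which suggests a self-pipe design: signal traps write a byte to a pipe, and `run!` blocks on that pipe between starting and stopping the runner. A minimal sketch of that pattern (an assumption about the design; `Deimos::Utils::SignalHandler`'s real implementation may differ in detail):

# Self-pipe sketch of the pattern the spec exercises (hypothetical code).
class SignalHandlerSketch
  SIGNALS = %i(INT TERM QUIT).freeze

  def initialize(runner)
    @runner = runner
    @reader, @writer = IO.pipe
    SIGNALS.each { |signal| Signal.trap(signal) { unblock(signal) } }
  end

  def run!
    @runner.start
    IO.select([@reader]) # sleep until a trapped signal wakes the pipe
    @runner.stop
  end

  private

  # Safe to call from a trap context: just wake the select loop.
  def unblock(_signal)
    @writer.write_nonblock('.')
  end
end

Calling `unblock` before `run!`, as the test does, pre-loads the pipe so `run!` starts the runner, returns from `IO.select` immediately, and stops the runner.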
data/support/deimos-solo.png (Binary file)
data/support/deimos-with-name-next.png (Binary file)
data/support/deimos-with-name.png (Binary file)
data/support/flipp-logo.png (Binary file)