deimos-ruby 1.8.0.pre.beta1 → 1.8.1.pre.beta4

Files changed (37)
  1. checksums.yaml +4 -4
  2. data/.rubocop.yml +8 -4
  3. data/CHANGELOG.md +42 -0
  4. data/Gemfile.lock +101 -73
  5. data/README.md +78 -1
  6. data/deimos-ruby.gemspec +2 -2
  7. data/lib/deimos.rb +4 -3
  8. data/lib/deimos/consume/batch_consumption.rb +2 -0
  9. data/lib/deimos/consume/message_consumption.rb +1 -0
  10. data/lib/deimos/instrumentation.rb +10 -5
  11. data/lib/deimos/kafka_topic_info.rb +21 -2
  12. data/lib/deimos/schema_backends/avro_base.rb +33 -1
  13. data/lib/deimos/schema_backends/avro_schema_coercer.rb +30 -9
  14. data/lib/deimos/schema_backends/base.rb +21 -2
  15. data/lib/deimos/utils/db_producer.rb +57 -19
  16. data/lib/deimos/utils/schema_controller_mixin.rb +111 -0
  17. data/lib/deimos/version.rb +1 -1
  18. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  19. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  20. data/lib/generators/deimos/active_record_generator.rb +79 -0
  21. data/lib/generators/deimos/db_backend/templates/migration +1 -0
  22. data/lib/generators/deimos/db_backend/templates/rails3_migration +1 -0
  23. data/spec/batch_consumer_spec.rb +1 -0
  24. data/spec/generators/active_record_generator_spec.rb +56 -0
  25. data/spec/kafka_listener_spec.rb +54 -0
  26. data/spec/kafka_topic_info_spec.rb +39 -16
  27. data/spec/producer_spec.rb +36 -0
  28. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  29. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  30. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  31. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  32. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  33. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  34. data/spec/spec_helper.rb +7 -0
  35. data/spec/utils/db_producer_spec.rb +84 -10
  36. data/spec/utils/schema_controller_mixin_spec.rb +68 -0
  37. metadata +40 -24

data/lib/deimos/version.rb
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module Deimos
-  VERSION = '1.8.0-beta1'
+  VERSION = '1.8.1-beta4'
 end

data/lib/generators/deimos/active_record/templates/migration.rb.tt
@@ -0,0 +1,28 @@
+class <%= migration_class_name %> < ActiveRecord::Migration<%= migration_version %>
+  def up
+    if table_exists?(:<%= table_name %>)
+      warn "<%= table_name %> already exists, exiting"
+      return
+    end
+    create_table :<%= table_name %> do |t|
+      <%- fields.each do |key| -%>
+      <%- next if %w(id message_id timestamp).include?(key.name) -%>
+      <%- sql_type = schema_base.sql_type(key)
+          if %w(record array map).include?(sql_type)
+            conn = ActiveRecord::Base.connection
+            sql_type = conn.respond_to?(:supports_json?) && conn.supports_json? ? :json : :string
+          end
+      -%>
+      t.<%= sql_type %> :<%= key.name %>
+      <%- end -%>
+    end
+
+    # TODO add indexes as necessary
+  end
+
+  def down
+    return unless table_exists?(:<%= table_name %>)
+    drop_table :<%= table_name %>
+  end
+
+end

data/lib/generators/deimos/active_record/templates/model.rb.tt
@@ -0,0 +1,5 @@
+class <%= table_name.classify %> < ApplicationRecord
+  <%- fields.select { |f| f.enum_values.any? }.each do |field| -%>
+  enum <%= field.name %>: {<%= field.enum_values.map { |v| "#{v}: '#{v}'"}.join(', ') %>}
+  <% end -%>
+end

data/lib/generators/deimos/active_record_generator.rb
@@ -0,0 +1,79 @@
+# frozen_string_literal: true
+
+require 'rails/generators'
+require 'rails/generators/active_record/migration'
+require 'rails/version'
+
+# Generates an ActiveRecord model and migration from an existing schema.
+module Deimos
+  module Generators
+    # Generator for ActiveRecord model and migration.
+    class ActiveRecordGenerator < Rails::Generators::Base
+      include Rails::Generators::Migration
+      if Rails.version < '4'
+        extend(ActiveRecord::Generators::Migration)
+      else
+        include ActiveRecord::Generators::Migration
+      end
+      source_root File.expand_path('active_record/templates', __dir__)
+
+      argument :table_name, desc: 'The table to create.', required: true
+      argument :full_schema, desc: 'The fully qualified schema name.', required: true
+
+      no_commands do
+
+        # @return [String]
+        def db_migrate_path
+          if defined?(Rails.application) && Rails.application
+            paths = Rails.application.config.paths['db/migrate']
+            paths.respond_to?(:to_ary) ? paths.to_ary.first : paths.to_a.first
+          else
+            'db/migrate'
+          end
+        end
+
+        # @return [String]
+        def migration_version
+          "[#{ActiveRecord::Migration.current_version}]"
+        rescue StandardError
+          ''
+        end
+
+        # @return [String]
+        def table_class
+          self.table_name.classify
+        end
+
+        # @return [String]
+        def schema
+          last_dot = self.full_schema.rindex('.')
+          self.full_schema[last_dot + 1..-1]
+        end
+
+        # @return [String]
+        def namespace
+          last_dot = self.full_schema.rindex('.')
+          self.full_schema[0...last_dot]
+        end
+
+        # @return [Deimos::SchemaBackends::Base]
+        def schema_base
+          @schema_base ||= Deimos.schema_backend_class.new(schema: schema, namespace: namespace)
+        end
+
+        # @return [Array<SchemaField>]
+        def fields
+          schema_base.schema_fields
+        end
+
+      end
+
+      desc 'Generate migration for a table based on an existing schema.'
+      # :nodoc:
+      def generate
+        migration_template('migration.rb', "db/migrate/create_#{table_name.underscore}.rb")
+        template('model.rb', "app/models/#{table_name.underscore}.rb")
+      end
+    end
+  end
+end
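
For illustration, a rough sketch of driving the new generator programmatically. The table name and schema arguments mirror the generator spec further down; the `rails generate deimos:active_record` task name is assumed from the class name rather than taken from this diff.

  # Hypothetical invocation; equivalent to the spec's described_class.start call below.
  # Assumed CLI form: rails generate deimos:active_record generated_table com.my-namespace.Generated
  require 'generators/deimos/active_record_generator'

  Deimos::Generators::ActiveRecordGenerator.start(
    ['generated_table', 'com.my-namespace.Generated'] # [table_name, full_schema]
  )
  # Writes db/migrate/*_create_generated_table.rb and app/models/generated_table.rb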

data/lib/generators/deimos/db_backend/templates/migration
@@ -16,6 +16,7 @@ class <%= migration_class_name %> < ActiveRecord::Migration<%= migration_version %>
       t.datetime :locked_at
       t.boolean :error, null: false, default: false
       t.integer :retries, null: false, default: 0
+      t.datetime :last_processed_at
     end
     add_index :kafka_topic_info, :topic, unique: true
     add_index :kafka_topic_info, [:locked_by, :error]

data/lib/generators/deimos/db_backend/templates/rails3_migration
@@ -16,6 +16,7 @@ class <%= migration_class_name %> < ActiveRecord::Migration<%= migration_version %>
       t.datetime :locked_at
       t.boolean :error, null: false, default: false
      t.integer :retries, null: false, default: 0
+      t.datetime :last_processed_at
     end
     add_index :kafka_topic_info, :topic, unique: true
     add_index :kafka_topic_info, [:locked_by, :error]
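
The new last_processed_at column supports a liveness check for topics that produced no messages in a given cycle. A minimal sketch, assuming the db_producer calls Deimos::KafkaTopicInfo.ping_empty_topics with the topics it just processed (behaviour matching the kafka_topic_info spec below); the staleness query is illustrative only.

  # Refresh last_processed_at for idle, unlocked topics, skipping the ones
  # that were just processed (those are updated through the normal path).
  topics_just_processed = %w(my-topic) # hypothetical list from a producer run
  Deimos::KafkaTopicInfo.ping_empty_topics(topics_just_processed)

  # Monitoring could then flag topics whose last_processed_at is stale:
  stale = Deimos::KafkaTopicInfo.where('last_processed_at < ?', 5.minutes.ago)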

data/spec/batch_consumer_spec.rb
@@ -111,6 +111,7 @@ module ConsumerTest
       test_consume_batch('my_batch_consume_topic', batch, keys: keys) do |_received, metadata|
         # Mock decode_key extracts the value of the first field as the key
         expect(metadata[:keys]).to eq(%w(foo bar))
+        expect(metadata[:first_offset]).to eq(1)
       end
     end
 

data/spec/generators/active_record_generator_spec.rb
@@ -0,0 +1,56 @@
+# frozen_string_literal: true
+
+require 'generators/deimos/active_record_generator'
+
+RSpec.describe Deimos::Generators::ActiveRecordGenerator do
+
+  after(:each) do
+    FileUtils.rm_rf('db') if File.exist?('db')
+    FileUtils.rm_rf('app') if File.exist?('app')
+  end
+
+  it 'should generate a migration' do
+    expect(Dir['db/migrate/*.rb']).to be_empty
+    expect(Dir['app/models/*.rb']).to be_empty
+    described_class.start(['generated_table', 'com.my-namespace.Generated'])
+    files = Dir['db/migrate/*.rb']
+    expect(files.length).to eq(1)
+    results = <<~MIGRATION
+      class CreateGeneratedTable < ActiveRecord::Migration[6.0]
+        def up
+          if table_exists?(:generated_table)
+            warn "generated_table already exists, exiting"
+            return
+          end
+          create_table :generated_table do |t|
+            t.string :a_string
+            t.integer :a_int
+            t.bigint :a_long
+            t.float :a_float
+            t.float :a_double
+            t.string :an_enum
+            t.json :an_array
+            t.json :a_map
+            t.json :a_record
+          end
+
+          # TODO add indexes as necessary
+        end
+
+        def down
+          return unless table_exists?(:generated_table)
+          drop_table :generated_table
+        end
+
+      end
+    MIGRATION
+    expect(File.read(files[0])).to eq(results)
+    model = <<~MODEL
+      class GeneratedTable < ApplicationRecord
+        enum an_enum: {sym1: 'sym1', sym2: 'sym2'}
+      end
+    MODEL
+    expect(File.read('app/models/generated_table.rb')).to eq(model)
+  end
+
+end

data/spec/kafka_listener_spec.rb
@@ -0,0 +1,54 @@
+# frozen_string_literal: true
+
+describe Deimos::KafkaListener do
+  include_context 'with widgets'
+
+  prepend_before(:each) do
+    producer_class = Class.new(Deimos::Producer) do
+      schema 'MySchema'
+      namespace 'com.my-namespace'
+      topic 'my-topic'
+      key_config none: true
+    end
+    stub_const('MyProducer', producer_class)
+  end
+
+  before(:each) do
+    Deimos.configure do |c|
+      c.producers.backend = :kafka
+      c.schema.backend = :avro_local
+    end
+    allow_any_instance_of(Kafka::Cluster).to receive(:add_target_topics)
+    allow_any_instance_of(Kafka::Cluster).to receive(:partitions_for).
+      and_raise(Kafka::Error)
+  end
+
+  describe '.send_produce_error' do
+    let(:payloads) do
+      [{ 'test_id' => 'foo', 'some_int' => 123 },
+       { 'test_id' => 'bar', 'some_int' => 124 }]
+    end
+
+    it 'should listen to publishing errors and republish as Deimos events' do
+      Deimos.subscribe('produce_error') do |event|
+        expect(event.payload).to include(
+          producer: MyProducer,
+          topic: 'my-topic',
+          payloads: payloads
+        )
+      end
+      expect(Deimos.config.metrics).to receive(:increment).
+        with('publish_error', tags: %w(topic:my-topic), by: 2)
+      expect { MyProducer.publish_list(payloads) }.to raise_error(Kafka::DeliveryFailed)
+    end
+
+    it 'should not send any notifications when producer is not found' do
+      Deimos.subscribe('produce_error') do |_|
+        raise 'OH NOES'
+      end
+      allow(Deimos::Producer).to receive(:descendants).and_return([])
+      expect(Deimos.config.metrics).not_to receive(:increment).with('publish_error', anything)
+      expect { MyProducer.publish_list(payloads) }.to raise_error(Kafka::DeliveryFailed)
+    end
+  end
+end

data/spec/kafka_topic_info_spec.rb
@@ -37,22 +37,45 @@ each_db_config(Deimos::KafkaTopicInfo) do
   end
 
   specify '#clear_lock' do
-    described_class.create!(topic: 'my-topic', locked_by: 'abc',
-                            locked_at: 10.seconds.ago, error: true, retries: 1)
-    described_class.create!(topic: 'my-topic2', locked_by: 'def',
-                            locked_at: 10.seconds.ago, error: true, retries: 1)
-    described_class.clear_lock('my-topic', 'abc')
-    expect(described_class.count).to eq(2)
-    record = described_class.first
-    expect(record.locked_by).to eq(nil)
-    expect(record.locked_at).to eq(nil)
-    expect(record.error).to eq(false)
-    expect(record.retries).to eq(0)
-    record = described_class.last
-    expect(record.locked_by).not_to eq(nil)
-    expect(record.locked_at).not_to eq(nil)
-    expect(record.error).not_to eq(false)
-    expect(record.retries).not_to eq(0)
+    freeze_time do
+      Deimos::KafkaTopicInfo.create!(topic: 'my-topic', locked_by: 'abc',
+                                     locked_at: 10.seconds.ago, error: true, retries: 1,
+                                     last_processed_at: 20.seconds.ago)
+      Deimos::KafkaTopicInfo.create!(topic: 'my-topic2', locked_by: 'def',
+                                     locked_at: 10.seconds.ago, error: true, retries: 1,
+                                     last_processed_at: 20.seconds.ago)
+      Deimos::KafkaTopicInfo.clear_lock('my-topic', 'abc')
+      expect(Deimos::KafkaTopicInfo.count).to eq(2)
+      record = Deimos::KafkaTopicInfo.first
+      expect(record.locked_by).to eq(nil)
+      expect(record.locked_at).to eq(nil)
+      expect(record.error).to eq(false)
+      expect(record.retries).to eq(0)
+      expect(record.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+      record = Deimos::KafkaTopicInfo.last
+      expect(record.locked_by).not_to eq(nil)
+      expect(record.locked_at).not_to eq(nil)
+      expect(record.error).not_to eq(false)
+      expect(record.retries).not_to eq(0)
+      expect(record.last_processed_at.to_s).to eq(20.seconds.ago.to_s)
+    end
+  end
+
+  specify '#ping_empty_topics' do
+    freeze_time do
+      old_time = 1.hour.ago.to_s
+      t1 = Deimos::KafkaTopicInfo.create!(topic: 'topic1', last_processed_at: old_time)
+      t2 = Deimos::KafkaTopicInfo.create!(topic: 'topic2', last_processed_at: old_time)
+      t3 = Deimos::KafkaTopicInfo.create!(topic: 'topic3', last_processed_at: old_time,
+                                          locked_by: 'me', locked_at: 1.minute.ago)
+
+      expect(Deimos::KafkaTopicInfo.count).to eq(3)
+      Deimos::KafkaTopicInfo.all.each { |t| expect(t.last_processed_at.to_s).to eq(old_time) }
+      Deimos::KafkaTopicInfo.ping_empty_topics(%w(topic1))
+      expect(t1.reload.last_processed_at.to_s).to eq(old_time) # was passed as an exception
+      expect(t2.reload.last_processed_at.to_s).to eq(Time.zone.now.to_s)
+      expect(t3.reload.last_processed_at.to_s).to eq(old_time) # is locked
+    end
   end
 
   specify '#register_error' do

data/spec/producer_spec.rb
@@ -41,6 +41,14 @@ module ProducerTest
      end
      stub_const('MyNoKeyProducer', producer_class)
 
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MyNestedSchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic'
+        key_config field: 'test_id'
+      end
+      stub_const('MyNestedSchemaProducer', producer_class)
+
      producer_class = Class.new(Deimos::Producer) do
        schema 'MySchema'
        namespace 'com.my-namespace'
@@ -233,6 +241,34 @@ module ProducerTest
      )
    end
 
+    it 'should properly encode and coerce values with a nested record' do
+      expect(MyNestedSchemaProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'my-topic-key')
+      MyNestedSchemaProducer.publish(
+        'test_id' => 'foo',
+        'test_float' => BigDecimal('123.456'),
+        'test_array' => ['1'],
+        'some_nested_record' => {
+          'some_int' => 123,
+          'some_float' => BigDecimal('456.789'),
+          'some_string' => '123',
+          'some_optional_int' => nil
+        },
+        'some_optional_record' => nil
+      )
+      expect(MyNestedSchemaProducer.topic).to have_sent(
+        'test_id' => 'foo',
+        'test_float' => 123.456,
+        'test_array' => ['1'],
+        'some_nested_record' => {
+          'some_int' => 123,
+          'some_float' => 456.789,
+          'some_string' => '123',
+          'some_optional_int' => nil
+        },
+        'some_optional_record' => nil
+      )
+    end
+
    it 'should error with nothing set' do
      expect {
        MyErrorProducer.publish_list(

data/spec/schemas/com/my-namespace/Generated.avsc
@@ -0,0 +1,71 @@
+{
+  "namespace": "com.my-namespace",
+  "name": "Generated",
+  "type": "record",
+  "doc": "Test schema",
+  "fields": [
+    {
+      "name": "a_string",
+      "type": "string"
+    },
+    {
+      "name": "a_int",
+      "type": "int"
+    },
+    {
+      "name": "a_long",
+      "type": "long"
+    },
+    {
+      "name": "a_float",
+      "type": "float"
+    },
+    {
+      "name": "a_double",
+      "type": "double"
+    },
+    {
+      "name": "an_enum",
+      "type": {
+        "type": "enum",
+        "name": "AnEnum",
+        "symbols": ["sym1", "sym2"]
+      }
+    },
+    {
+      "name": "an_array",
+      "type": {
+        "type": "array",
+        "items": "int"
+      }
+    },
+    {
+      "name": "a_map",
+      "type": {
+        "type": "map",
+        "values": "string"
+      }
+    },
+    {
+      "name": "timestamp",
+      "type": "string"
+    },
+    {
+      "name": "message_id",
+      "type": "string"
+    },
+    {
+      "name": "a_record",
+      "type": {
+        "type": "record",
+        "name": "ARecord",
+        "fields": [
+          {
+            "name": "a_record_field",
+            "type": "string"
+          }
+        ]
+      }
+    }
+  ]
+}