deimos-kafka 1.0.0.pre.beta15

Files changed (100)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +74 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +321 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +9 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +165 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +742 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-kafka.gemspec +42 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/DATABASE_BACKEND.md +147 -0
  22. data/docs/PULL_REQUEST_TEMPLATE.md +34 -0
  23. data/lib/deimos.rb +134 -0
  24. data/lib/deimos/active_record_consumer.rb +81 -0
  25. data/lib/deimos/active_record_producer.rb +64 -0
  26. data/lib/deimos/avro_data_coder.rb +89 -0
  27. data/lib/deimos/avro_data_decoder.rb +36 -0
  28. data/lib/deimos/avro_data_encoder.rb +51 -0
  29. data/lib/deimos/backends/db.rb +27 -0
  30. data/lib/deimos/backends/kafka.rb +27 -0
  31. data/lib/deimos/backends/kafka_async.rb +27 -0
  32. data/lib/deimos/configuration.rb +88 -0
  33. data/lib/deimos/consumer.rb +164 -0
  34. data/lib/deimos/instrumentation.rb +71 -0
  35. data/lib/deimos/kafka_message.rb +27 -0
  36. data/lib/deimos/kafka_source.rb +126 -0
  37. data/lib/deimos/kafka_topic_info.rb +79 -0
  38. data/lib/deimos/message.rb +74 -0
  39. data/lib/deimos/metrics/datadog.rb +47 -0
  40. data/lib/deimos/metrics/mock.rb +39 -0
  41. data/lib/deimos/metrics/provider.rb +38 -0
  42. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  43. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  44. data/lib/deimos/monkey_patches/ruby_kafka_heartbeat.rb +85 -0
  45. data/lib/deimos/monkey_patches/schema_store.rb +19 -0
  46. data/lib/deimos/producer.rb +218 -0
  47. data/lib/deimos/publish_backend.rb +30 -0
  48. data/lib/deimos/railtie.rb +8 -0
  49. data/lib/deimos/schema_coercer.rb +108 -0
  50. data/lib/deimos/shared_config.rb +59 -0
  51. data/lib/deimos/test_helpers.rb +356 -0
  52. data/lib/deimos/tracing/datadog.rb +35 -0
  53. data/lib/deimos/tracing/mock.rb +40 -0
  54. data/lib/deimos/tracing/provider.rb +31 -0
  55. data/lib/deimos/utils/db_producer.rb +95 -0
  56. data/lib/deimos/utils/executor.rb +117 -0
  57. data/lib/deimos/utils/inline_consumer.rb +144 -0
  58. data/lib/deimos/utils/lag_reporter.rb +182 -0
  59. data/lib/deimos/utils/platform_schema_validation.rb +0 -0
  60. data/lib/deimos/utils/signal_handler.rb +68 -0
  61. data/lib/deimos/version.rb +5 -0
  62. data/lib/generators/deimos/db_backend/templates/migration +24 -0
  63. data/lib/generators/deimos/db_backend/templates/rails3_migration +30 -0
  64. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  65. data/lib/tasks/deimos.rake +17 -0
  66. data/spec/active_record_consumer_spec.rb +81 -0
  67. data/spec/active_record_producer_spec.rb +107 -0
  68. data/spec/avro_data_decoder_spec.rb +18 -0
  69. data/spec/avro_data_encoder_spec.rb +37 -0
  70. data/spec/backends/db_spec.rb +35 -0
  71. data/spec/backends/kafka_async_spec.rb +11 -0
  72. data/spec/backends/kafka_spec.rb +11 -0
  73. data/spec/consumer_spec.rb +169 -0
  74. data/spec/deimos_spec.rb +117 -0
  75. data/spec/kafka_source_spec.rb +168 -0
  76. data/spec/kafka_topic_info_spec.rb +88 -0
  77. data/spec/phobos.bad_db.yml +73 -0
  78. data/spec/phobos.yml +73 -0
  79. data/spec/producer_spec.rb +397 -0
  80. data/spec/publish_backend_spec.rb +10 -0
  81. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  82. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  83. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  84. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  85. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  86. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  87. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  88. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  89. data/spec/spec_helper.rb +207 -0
  90. data/spec/updateable_schema_store_spec.rb +36 -0
  91. data/spec/utils/db_producer_spec.rb +208 -0
  92. data/spec/utils/executor_spec.rb +42 -0
  93. data/spec/utils/lag_reporter_spec.rb +69 -0
  94. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  95. data/spec/utils/signal_handler_spec.rb +16 -0
  96. data/support/deimos-solo.png +0 -0
  97. data/support/deimos-with-name-next.png +0 -0
  98. data/support/deimos-with-name.png +0 -0
  99. data/support/flipp-logo.png +0 -0
  100. metadata +452 -0
data/spec/kafka_source_spec.rb
@@ -0,0 +1,168 @@
+# frozen_string_literal: true
+
+require 'activerecord-import'
+
+# Wrap in a module so our classes don't leak out afterwards
+module KafkaSourceSpec
+  RSpec.describe Deimos::KafkaSource do
+    before(:all) do
+      ActiveRecord::Base.connection.create_table(:widgets, force: true) do |t|
+        t.integer(:widget_id)
+        t.string(:description)
+        t.string(:model_id, default: '')
+        t.string(:name)
+        t.timestamps
+      end
+      ActiveRecord::Base.connection.add_index(:widgets, :widget_id)
+
+      # Dummy producer which mimics the behavior of a real producer
+      class WidgetProducer < Deimos::ActiveRecordProducer
+        topic 'my-topic'
+        namespace 'com.my-namespace'
+        schema 'Widget'
+        key_config field: :id
+      end
+
+      # Dummy producer which mimics the behavior of a real producer
+      class WidgetProducerTheSecond < Deimos::ActiveRecordProducer
+        topic 'my-topic-the-second'
+        namespace 'com.my-namespace'
+        schema 'WidgetTheSecond'
+        key_config field: :id
+      end
+
+      # Dummy class we can include the mixin in. Has a backing table created
+      # earlier.
+      class Widget < ActiveRecord::Base
+        include Deimos::KafkaSource
+
+        # :nodoc:
+        def self.kafka_producers
+          [WidgetProducer, WidgetProducerTheSecond]
+        end
+      end
+      Widget.reset_column_information
+
+    end
+
+    after(:all) do
+      ActiveRecord::Base.connection.drop_table(:widgets)
+    end
+
+    before(:each) do
+      Widget.delete_all
+    end
+
+    it 'should send events on creation, update, and deletion' do
+      widget = Widget.create!(widget_id: 1, name: 'widget')
+      expect('my-topic').to have_sent({
+        widget_id: 1,
+        name: 'widget',
+        id: widget.id,
+        created_at: anything,
+        updated_at: anything
+      }, 1)
+      expect('my-topic-the-second').to have_sent({
+        widget_id: 1,
+        model_id: '',
+        id: widget.id,
+        created_at: anything,
+        updated_at: anything
+      }, 1)
+      widget.update_attribute(:name, 'widget 2')
+      expect('my-topic').to have_sent({
+        widget_id: 1,
+        name: 'widget 2',
+        id: widget.id,
+        created_at: anything,
+        updated_at: anything
+      }, 1)
+      expect('my-topic-the-second').to have_sent({
+        widget_id: 1,
+        model_id: '',
+        id: widget.id,
+        created_at: anything,
+        updated_at: anything
+      }, 1)
+      widget.destroy
+      expect('my-topic').to have_sent(nil, 1)
+      expect('my-topic-the-second').to have_sent(nil, 1)
+    end
+
+    it 'should send events on import' do
+      widgets = (1..3).map do |i|
+        Widget.new(widget_id: i, name: "Widget #{i}")
+      end
+      Widget.import(widgets)
+      widgets = Widget.all
+      expect('my-topic').to have_sent({
+        widget_id: 1,
+        name: 'Widget 1',
+        id: widgets[0].id,
+        created_at: anything,
+        updated_at: anything
+      }, widgets[0].id)
+      expect('my-topic').to have_sent({
+        widget_id: 2,
+        name: 'Widget 2',
+        id: widgets[1].id,
+        created_at: anything,
+        updated_at: anything
+      }, widgets[1].id)
+      expect('my-topic').to have_sent({
+        widget_id: 3,
+        name: 'Widget 3',
+        id: widgets[2].id,
+        created_at: anything,
+        updated_at: anything
+      }, widgets[2].id)
+    end
+
+    it 'should send events even if the save fails' do
+      widget = Widget.create!(widget_id: 1, name: 'widget')
+      expect('my-topic').to have_sent({
+        widget_id: 1,
+        name: widget.name,
+        id: widget.id,
+        created_at: anything,
+        updated_at: anything
+      }, widget.id)
+      clear_kafka_messages!
+      Widget.transaction do
+        widget.update_attribute(:name, 'widget 3')
+        raise ActiveRecord::Rollback
+      end
+      expect('my-topic').to have_sent(anything)
+    end
+
+    it 'should not send events if an unrelated field changes' do
+      widget = Widget.create!(widget_id: 1, name: 'widget')
+      clear_kafka_messages!
+      widget.update_attribute(:description, 'some description')
+      expect('my-topic').not_to have_sent(anything)
+    end
+
+    context 'with DB backend' do
+      before(:each) do
+        Deimos.configure do |config|
+          config.publish_backend = :db
+        end
+        setup_db(DB_OPTIONS.last) # sqlite
+        allow(Deimos::Producer).to receive(:produce_batch).and_call_original
+      end
+
+      it 'should save to the DB' do
+        Widget.create!(widget_id: 1, name: 'widget')
+        expect(Deimos::KafkaMessage.count).to eq(2) # 2 producers
+      end
+
+      it 'should not save with a rollback' do
+        Widget.transaction do
+          Widget.create!(widget_id: 1, name: 'widget')
+          raise ActiveRecord::Rollback
+        end
+        expect(Deimos::KafkaMessage.count).to eq(0)
+      end
+    end
+  end
+end
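This spec exercises the KafkaSource mixin end to end: any model that includes it and returns producer classes from `kafka_producers` publishes to Kafka on create, update, bulk import, and destroy (a deletion sends a null payload, hence `have_sent(nil, 1)`), and with the `:db` backend the messages land in the `Deimos::KafkaMessage` outbox table inside the same transaction. A minimal sketch of the application-side pattern, using the spec's fixture names rather than real ones:

# Sketch of the pattern the spec above exercises; WidgetProducer, the
# Widget schema, and 'my-topic' are the spec's fixtures, not real names.
class WidgetProducer < Deimos::ActiveRecordProducer
  topic 'my-topic'
  namespace 'com.my-namespace'
  schema 'Widget'
  key_config field: :id # message key comes from the record's id
end

class Widget < ActiveRecord::Base
  include Deimos::KafkaSource

  # Every producer returned here fires on create/update/destroy,
  # and on activerecord-import bulk inserts.
  def self.kafka_producers
    [WidgetProducer]
  end
end

Widget.create!(widget_id: 1, name: 'widget') # emits one message per producer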
data/spec/kafka_topic_info_spec.rb
@@ -0,0 +1,88 @@
+# frozen_string_literal: true
+
+each_db_config(Deimos::KafkaTopicInfo) do
+
+  it 'should lock the topic' do
+    expect(described_class.lock('my-topic', 'abc')).to be_truthy
+    expect(described_class.lock('my-topic', 'def')).to be_falsey
+    expect(described_class.lock('my-topic2', 'def')).to be_truthy
+    expect(described_class.count).to eq(2)
+    expect(described_class.first.locked_by).to eq('abc')
+    expect(described_class.last.locked_by).to eq('def')
+  end
+
+  it "should lock the topic if it's old" do
+    described_class.create!(topic: 'my-topic', locked_by: 'abc', error: true,
+                            locked_at: 2.minutes.ago)
+    expect(described_class.lock('my-topic', 'abc')).to be_truthy
+    expect(described_class.count).to eq(1)
+    expect(described_class.first.locked_by).to eq('abc')
+
+  end
+
+  it "should lock the topic if it's not currently locked" do
+    described_class.create!(topic: 'my-topic', locked_by: nil,
+                            locked_at: nil)
+    expect(described_class.lock('my-topic', 'abc')).to be_truthy
+    expect(described_class.count).to eq(1)
+    expect(described_class.first.locked_by).to eq('abc')
+  end
+
+  it "should not lock the topic if it's errored" do
+    described_class.create!(topic: 'my-topic', locked_by: nil,
+                            locked_at: nil, error: true)
+    expect(described_class.lock('my-topic', 'abc')).to be_falsey
+    expect(described_class.count).to eq(1)
+    expect(described_class.first.locked_by).to eq(nil)
+  end
+
+  specify '#clear_lock' do
+    described_class.create!(topic: 'my-topic', locked_by: 'abc',
+                            locked_at: 10.seconds.ago, error: true, retries: 1)
+    described_class.create!(topic: 'my-topic2', locked_by: 'def',
+                            locked_at: 10.seconds.ago, error: true, retries: 1)
+    described_class.clear_lock('my-topic', 'abc')
+    expect(described_class.count).to eq(2)
+    record = described_class.first
+    expect(record.locked_by).to eq(nil)
+    expect(record.locked_at).to eq(nil)
+    expect(record.error).to eq(false)
+    expect(record.retries).to eq(0)
+    record = described_class.last
+    expect(record.locked_by).not_to eq(nil)
+    expect(record.locked_at).not_to eq(nil)
+    expect(record.error).not_to eq(false)
+    expect(record.retries).not_to eq(0)
+  end
+
+  specify '#register_error' do
+    freeze_time do
+      described_class.create!(topic: 'my-topic', locked_by: 'abc',
+                              locked_at: 10.seconds.ago)
+      described_class.create!(topic: 'my-topic2', locked_by: 'def',
+                              locked_at: 10.seconds.ago, error: true, retries: 1)
+      described_class.register_error('my-topic', 'abc')
+      record = described_class.first
+      expect(record.locked_by).to be_nil
+      expect(record.locked_at).to eq(Time.zone.now)
+      expect(record.error).to be_truthy
+      expect(record.retries).to eq(1)
+
+      described_class.register_error('my-topic2', 'def')
+      record = described_class.last
+      expect(record.error).to be_truthy
+      expect(record.retries).to eq(2)
+      expect(record.locked_at).to eq(Time.zone.now)
+    end
+  end
+
+  specify '#heartbeat' do
+    freeze_time do
+      described_class.create!(topic: 'my-topic', locked_by: 'abc',
+                              locked_at: 10.seconds.ago)
+      described_class.heartbeat('my-topic', 'abc')
+      expect(described_class.last.locked_at).to eq(Time.zone.now)
+    end
+  end
+
+end
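KafkaTopicInfo is the per-topic lock table the DB backend's producer uses so that only one worker drains a given topic at a time. The spec pins down the life cycle: `lock` succeeds for unlocked or stale rows and fails against a live or errored lock, `heartbeat` refreshes `locked_at`, `clear_lock` resets the row, and `register_error` releases the lock while flagging the row and bumping `retries`. A hedged sketch of how a sending loop might drive this API, using only the calls exercised above (`process_topic_messages` is a hypothetical stand-in for the real send step):

require 'securerandom'

# Hypothetical worker loop over the lock API shown in the spec above.
lock_id = SecureRandom.uuid

if Deimos::KafkaTopicInfo.lock('my-topic', lock_id)
  begin
    loop do
      break if process_topic_messages('my-topic') # hypothetical helper
      Deimos::KafkaTopicInfo.heartbeat('my-topic', lock_id) # keep the lock fresh
    end
    Deimos::KafkaTopicInfo.clear_lock('my-topic', lock_id) # reset error/retries
  rescue StandardError
    # Release the lock, mark the row errored, and increment its retry count.
    Deimos::KafkaTopicInfo.register_error('my-topic', lock_id)
  end
end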
data/spec/phobos.bad_db.yml
@@ -0,0 +1,73 @@
+logger:
+  # Optional log file, set to false or remove to disable it
+  file: log/phobos.log
+  # Optional output format for stdout, default is false (human readable).
+  # Set to true to enable json output.
+  stdout_json: false
+  level: debug
+  # Comment the block to disable ruby-kafka logs
+  ruby_kafka:
+    level: debug
+
+kafka:
+  # identifier for this application
+  client_id: phobos
+  # timeout setting for connecting to brokers
+  connect_timeout: 15
+  # timeout setting for socket connections
+  socket_timeout: 15
+
+producer:
+  # number of seconds a broker can wait for replicas to acknowledge
+  # a write before responding with a timeout
+  ack_timeout: 5
+  # number of replicas that must acknowledge a write, or `:all`
+  # if all in-sync replicas must acknowledge
+  required_acks: 1
+  # number of retries that should be attempted before giving up sending
+  # messages to the cluster. Does not include the original attempt
+  max_retries: 2
+  # number of seconds to wait between retries
+  retry_backoff: 1
+  # number of messages allowed in the buffer before new writes will
+  # raise {BufferOverflow} exceptions
+  max_buffer_size: 10000
+  # maximum size of the buffer in bytes. Attempting to produce messages
+  # when the buffer reaches this size will result in {BufferOverflow} being raised
+  max_buffer_bytesize: 10000000
+  # name of the compression codec to use, or nil if no compression should be performed.
+  # Valid codecs: `:snappy` and `:gzip`
+  compression_codec:
+  # number of messages that need to be in a message set before it should be compressed.
+  # Note that message sets are per-partition rather than per-topic or per-producer
+  compression_threshold: 1
+  # maximum number of messages allowed in the queue. Only used for async_producer
+  max_queue_size: 10000
+  # if greater than zero, the number of buffered messages that will automatically
+  # trigger a delivery. Only used for async_producer
+  delivery_threshold: 0
+  # if greater than zero, the number of seconds between automatic message
+  # deliveries. Only used for async_producer
+  delivery_interval: 0
+
+consumer:
+  # number of seconds after which, if a client hasn't contacted the Kafka cluster,
+  # it will be kicked out of the group
+  session_timeout: 300
+  # interval between offset commits, in seconds
+  offset_commit_interval: 10
+  # number of messages that can be processed before their offsets are committed.
+  # If zero, offset commits are not triggered by message processing
+  offset_commit_threshold: 0
+  # interval between heartbeats; must be less than the session window
+  heartbeat_interval: 10
+
+backoff:
+  min_ms: 1000
+  max_ms: 60000
+
+listeners:
+  - handler: ConsumerTest::MyConsumer
+    topic: my_consume_topic
+    group_id: my_group_id
+    max_bytes_per_partition: 524288 # 512 KB
data/spec/phobos.yml
@@ -0,0 +1,73 @@
+logger:
+  # Optional log file, set to false or remove to disable it
+  file: log/phobos.log
+  # Optional output format for stdout, default is false (human readable).
+  # Set to true to enable json output.
+  stdout_json: false
+  level: debug
+  # Comment the block to disable ruby-kafka logs
+  ruby_kafka:
+    level: debug
+
+kafka:
+  # identifier for this application
+  client_id: phobos
+  # timeout setting for connecting to brokers
+  connect_timeout: 15
+  # timeout setting for socket connections
+  socket_timeout: 15
+
+producer:
+  # number of seconds a broker can wait for replicas to acknowledge
+  # a write before responding with a timeout
+  ack_timeout: 5
+  # number of replicas that must acknowledge a write, or `:all`
+  # if all in-sync replicas must acknowledge
+  required_acks: :all
+  # number of retries that should be attempted before giving up sending
+  # messages to the cluster. Does not include the original attempt
+  max_retries: 2
+  # number of seconds to wait between retries
+  retry_backoff: 1
+  # number of messages allowed in the buffer before new writes will
+  # raise {BufferOverflow} exceptions
+  max_buffer_size: 10000
+  # maximum size of the buffer in bytes. Attempting to produce messages
+  # when the buffer reaches this size will result in {BufferOverflow} being raised
+  max_buffer_bytesize: 10000000
+  # name of the compression codec to use, or nil if no compression should be performed.
+  # Valid codecs: `:snappy` and `:gzip`
+  compression_codec:
+  # number of messages that need to be in a message set before it should be compressed.
+  # Note that message sets are per-partition rather than per-topic or per-producer
+  compression_threshold: 1
+  # maximum number of messages allowed in the queue. Only used for async_producer
+  max_queue_size: 10000
+  # if greater than zero, the number of buffered messages that will automatically
+  # trigger a delivery. Only used for async_producer
+  delivery_threshold: 0
+  # if greater than zero, the number of seconds between automatic message
+  # deliveries. Only used for async_producer
+  delivery_interval: 0
+
+consumer:
+  # number of seconds after which, if a client hasn't contacted the Kafka cluster,
+  # it will be kicked out of the group
+  session_timeout: 300
+  # interval between offset commits, in seconds
+  offset_commit_interval: 10
+  # number of messages that can be processed before their offsets are committed.
+  # If zero, offset commits are not triggered by message processing
+  offset_commit_threshold: 0
+  # interval between heartbeats; must be less than the session window
+  heartbeat_interval: 10
+
+backoff:
+  min_ms: 1000
+  max_ms: 60000
+
+listeners:
+  - handler: ConsumerTest::MyConsumer
+    topic: my_consume_topic
+    group_id: my_group_id
+    max_bytes_per_partition: 524288 # 512 KB
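The two Phobos fixtures are identical except for `required_acks`: `phobos.bad_db.yml` sets it to `1`, while `phobos.yml` uses `:all`. That difference is presumably what makes the first fixture "bad": the DB backend can only safely delete a row from its outbox table once every in-sync replica has acknowledged the write, so its config validation is expected to reject anything weaker than `required_acks: :all`. Wiring one of these files in appears to be a one-liner at this version; `phobos_config_file` is the setting name suggested by lib/deimos/configuration.rb in this changeset, so treat it as an assumption:

# Assumed wiring for this version of the gem; the config key name is
# inferred from lib/deimos/configuration.rb in this changeset.
Deimos.configure do |config|
  config.phobos_config_file = 'spec/phobos.yml' # path to the YAML above
end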
data/spec/producer_spec.rb
@@ -0,0 +1,397 @@
+# frozen_string_literal: true
+
+# :nodoc:
+module ProducerTest
+  describe Deimos::Producer do
+
+    prepend_before(:each) do
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic'
+        key_config field: 'test_id'
+      end
+      stub_const('MyProducer', producer_class)
+
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchemaWithId'
+        namespace 'com.my-namespace'
+        topic 'my-topic'
+        key_config plain: true
+      end
+      stub_const('MyProducerWithID', producer_class)
+
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic'
+        key_config plain: true
+        # :nodoc:
+        def self.partition_key(payload)
+          payload[:payload_key] ? payload[:payload_key] + '1' : nil
+        end
+      end
+      stub_const('MyNonEncodedProducer', producer_class)
+
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic2'
+        key_config none: true
+      end
+      stub_const('MyNoKeyProducer', producer_class)
+
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic2'
+        key_config schema: 'MySchema-key'
+      end
+      stub_const('MySchemaProducer', producer_class)
+
+      producer_class = Class.new(Deimos::Producer) do
+        schema 'MySchema'
+        namespace 'com.my-namespace'
+        topic 'my-topic'
+      end
+      stub_const('MyErrorProducer', producer_class)
+
+    end
+
+    it 'should fail on invalid message with error handler' do
+      subscriber = Deimos.subscribe('produce') do |event|
+        expect(event.payload[:payloads]).to eq([{ 'invalid' => 'key' }])
+      end
+      expect { MyProducer.publish('invalid' => 'key', :payload_key => 'key') }.
+        to raise_error(Avro::SchemaValidator::ValidationError)
+      Deimos.unsubscribe(subscriber)
+    end
+
+    it 'should produce a message' do
+      expect(described_class).to receive(:produce_batch).once.with(
+        Deimos::Backends::KafkaAsync,
+        [
+          Deimos::Message.new({ 'test_id' => 'foo', 'some_int' => 123 },
+                              MyProducer,
+                              topic: 'my-topic',
+                              partition_key: 'foo',
+                              key: 'foo'),
+          Deimos::Message.new({ 'test_id' => 'bar', 'some_int' => 124 },
+                              MyProducer,
+                              topic: 'my-topic',
+                              partition_key: 'bar',
+                              key: 'bar')
+        ]
+      )
+
+      MyProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 124 }]
+      )
+      expect(
+        was_message_sent?({ 'test_id' => 'foo', 'some_int' => 123 }, 'my-topic')
+      ).to be(true)
+      expect(
+        was_message_sent?({ 'test_id' => 'foo', 'some_int' => 123 }, 'your-topic')
+      ).to be(false)
+      expect(
+        was_message_sent?({ 'test_id' => 'foo2', 'some_int' => 123 }, 'my-topic')
+      ).to be(false)
+    end
+
+    it 'should add a message ID' do
+      payload = { 'test_id' => 'foo',
+                  'some_int' => 123,
+                  'message_id' => a_kind_of(String),
+                  'timestamp' => a_kind_of(String) }
+      expect(described_class).to receive(:produce_batch).once do |_, messages|
+        expect(messages.size).to eq(1)
+        expect(messages[0].to_h).
+          to match(
+            payload: payload,
+            topic: 'my-topic',
+            partition_key: 'key',
+            metadata: {
+              producer_name: 'MyProducerWithID',
+              decoded_payload: payload
+            },
+            key: 'key'
+          )
+      end
+      MyProducerWithID.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => 'key' }]
+      )
+    end
+
+    it 'should not publish if publish disabled' do
+      expect(described_class).not_to receive(:produce_batch)
+      Deimos.configure { |c| c.disable_producers = true }
+      MyProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 124 }]
+      )
+      expect(MyProducer.topic).not_to have_sent(anything)
+    end
+
+    it 'should not send messages if inside a disable_producers block' do
+      Deimos.disable_producers do
+        MyProducer.publish_list(
+          [{ 'test_id' => 'foo', 'some_int' => 123 },
+           { 'test_id' => 'bar', 'some_int' => 124 }]
+        )
+      end
+      expect(MyProducer.topic).not_to have_sent(anything)
+      MyProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 124 }]
+      )
+      expect(MyProducer.topic).to have_sent(anything)
+    end
+
+    it 'should produce to a prefixed topic' do
+      Deimos.configure { |c| c.producer_topic_prefix = 'prefix.' }
+      payload = { 'test_id' => 'foo', 'some_int' => 123 }
+      expect(described_class).to receive(:produce_batch).once do |_, messages|
+        expect(messages.size).to eq(1)
+        expect(messages[0].to_h).
+          to eq(
+            payload: payload,
+            topic: 'prefix.my-topic',
+            partition_key: 'foo',
+            metadata: {
+              producer_name: 'MyProducer',
+              decoded_payload: payload
+            },
+            key: 'foo'
+          )
+      end
+
+      MyProducer.publish_list([payload])
+      Deimos.configure { |c| c.producer_topic_prefix = nil }
+      expect(described_class).to receive(:produce_batch).once do |_, messages|
+        expect(messages.size).to eq(1)
+        expect(messages[0].to_h).
+          to eq(
+            payload: payload,
+            topic: 'my-topic',
+            partition_key: 'foo',
+            metadata: {
+              producer_name: 'MyProducer',
+              decoded_payload: payload
+            },
+            key: 'foo'
+          )
+      end
+
+      MyProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123 }]
+      )
+    end
+
+    it 'should encode the key' do
+      encoder = instance_double(Deimos::AvroDataEncoder)
+      allow(Deimos::Message).to receive(:new).and_wrap_original do |m, hash, producer|
+        message = m.call(hash, producer)
+        allow(message).to receive(:add_fields)
+        allow(message).to receive(:coerce_fields)
+        message
+      end
+      allow(MyProducer).to receive(:encoder).and_return(encoder).at_least(:once)
+      allow(encoder).to receive(:avro_schema)
+      expect(encoder).to receive(:encode_key).with('test_id', 'foo', 'my-topic-key')
+      expect(encoder).to receive(:encode_key).with('test_id', 'bar', 'my-topic-key')
+      expect(encoder).to receive(:encode).with({
+        'test_id' => 'foo',
+        'some_int' => 123
+      }, { topic: 'my-topic-value' })
+      expect(encoder).to receive(:encode).with({
+        'test_id' => 'bar',
+        'some_int' => 124
+      }, { topic: 'my-topic-value' })
+
+      MyProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 124 }]
+      )
+    end
+
+    it 'should not encode with plaintext key' do
+      key_encoder = Deimos::AvroDataEncoder.new(
+        schema: 'MySchema',
+        namespace: 'com.my-namespace'
+      )
+      allow(key_encoder).to receive(:encode)
+      allow(MyNonEncodedProducer).to receive(:encoder).and_return(key_encoder)
+      expect(key_encoder).not_to receive(:encode_key)
+
+      MyNonEncodedProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => 'foo_key' },
+         { 'test_id' => 'bar', 'some_int' => 124, :payload_key => 'bar_key' }]
+      )
+    end
+
+    it 'should encode with a schema' do
+
+      encoder = instance_double(Deimos::AvroDataEncoder)
+      expect(MySchemaProducer).to receive(:key_encoder).and_return(encoder).
+        at_least(:once)
+      expect(encoder).to receive(:encode).with({ 'test_id' => 'foo_key' },
+                                               { topic: 'my-topic2-key' })
+      expect(encoder).to receive(:encode).with({ 'test_id' => 'bar_key' },
+                                               { topic: 'my-topic2-key' })
+
+      MySchemaProducer.publish_list(
+        [{ 'test_id' => 'foo', 'some_int' => 123,
+           :payload_key => { 'test_id' => 'foo_key' } },
+         { 'test_id' => 'bar', 'some_int' => 124,
+           :payload_key => { 'test_id' => 'bar_key' } }]
+      )
+    end
+
+    it 'should error with nothing set' do
+      expect {
+        MyErrorProducer.publish_list(
+          [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => '123' }]
+        )
+      }.to raise_error('No key config given - if you are not encoding keys, please use `key_config plain: true`')
+    end
+
+    it 'should error if no key given and none is not the config' do
+      expect {
+        MyNonEncodedProducer.publish_list(
+          [{ 'test_id' => 'foo', 'some_int' => 123 }]
+        )
+      }.to raise_error('No key given but a key is required! Use `key_config none: true` to avoid using keys.')
+    end
+
+    it 'should allow nil keys if none: true is configured' do
+      expect {
+        MyNoKeyProducer.publish_list(
+          [{ 'test_id' => 'foo', 'some_int' => 123 }]
+        )
+      }.not_to raise_error
+    end
+
+    it 'should use a partition key' do
+      MyNonEncodedProducer.publish_list([
+        { 'test_id' => 'foo',
+          'some_int' => 123,
+          :payload_key => '123' },
+        { 'test_id' => 'bar',
+          'some_int' => 456,
+          :payload_key => '456' }
+      ])
+      expect(MyNonEncodedProducer.topic).to have_sent({
+        'test_id' => 'foo',
+        'some_int' => 123
+      }, '123', '1231')
+      expect(MyNonEncodedProducer.topic).to have_sent({
+        'test_id' => 'bar',
+        'some_int' => 456
+      }, '456', '4561')
+    end
+
+    describe 'disabling' do
+      it 'should disable globally' do
+        Deimos.disable_producers do
+          Deimos.disable_producers do # test nested
+            MyProducer.publish(
+              'test_id' => 'foo',
+              'some_int' => 123,
+              :payload_key => '123'
+            )
+            MyProducerWithID.publish(
+              'test_id' => 'foo', 'some_int' => 123
+            )
+            expect('my-topic').not_to have_sent(anything)
+            expect(Deimos).to be_producers_disabled
+            expect(Deimos).to be_producers_disabled([MyProducer])
+          end
+        end
+
+        MyProducerWithID.publish(
+          'test_id' => 'foo', 'some_int' => 123, :payload_key => 123
+        )
+        expect('my-topic').
+          to have_sent('test_id' => 'foo', 'some_int' => 123,
+                       'message_id' => anything, 'timestamp' => anything)
+        expect(Deimos).not_to be_producers_disabled
+        expect(Deimos).not_to be_producers_disabled([MyProducer])
+      end
+
+      it 'should disable a single producer' do
+        Deimos.disable_producers(MyProducer) do # test nested
+          Deimos.disable_producers(MyProducer) do
+            MySchemaProducer.publish(
+              'test_id' => 'foo', 'some_int' => 123,
+              :payload_key => { 'test_id' => 'foo_key' }
+            )
+            MyProducer.publish(
+              'test_id' => 'foo',
+              'some_int' => 123,
+              :payload_key => '123'
+            )
+            expect('my-topic').not_to have_sent(anything)
+            expect('my-topic2').to have_sent('test_id' => 'foo', 'some_int' => 123)
+            expect(Deimos).not_to be_producers_disabled
+            expect(Deimos).to be_producers_disabled(MyProducer)
+            expect(Deimos).not_to be_producers_disabled(MySchemaProducer)
+          end
+        end
+        expect(Deimos).not_to be_producers_disabled
+        expect(Deimos).not_to be_producers_disabled(MyProducer)
+        expect(Deimos).not_to be_producers_disabled(MySchemaProducer)
+        MyProducer.publish(
+          'test_id' => 'foo',
+          'some_int' => 123,
+          :payload_key => '123'
+        )
+        expect('my-topic').
+          to have_sent('test_id' => 'foo', 'some_int' => 123)
+      end
+
+    end
+
+    describe '#determine_backend_class' do
+      it 'should return kafka_async if sync is false' do
+        expect(described_class.determine_backend_class(false, false)).
+          to eq(Deimos::Backends::KafkaAsync)
+        expect(described_class.determine_backend_class(nil, false)).
+          to eq(Deimos::Backends::KafkaAsync)
+      end
+
+      it 'should return kafka if sync is true' do
+        expect(described_class.determine_backend_class(true, false)).
+          to eq(Deimos::Backends::Kafka)
+      end
+
+      it 'should return db if db is set' do
+        allow(Deimos.config).to receive(:publish_backend).and_return(:db)
+        expect(described_class.determine_backend_class(true, false)).
+          to eq(Deimos::Backends::Db)
+        expect(described_class.determine_backend_class(false, false)).
+          to eq(Deimos::Backends::Db)
+      end
+
+      it 'should return kafka if force_send is true' do
+        allow(Deimos.config).to receive(:publish_backend).and_return(:db)
+        expect(described_class.determine_backend_class(true, true)).
+          to eq(Deimos::Backends::Kafka)
+        expect(described_class.determine_backend_class(false, true)).
+          to eq(Deimos::Backends::KafkaAsync)
+      end
+
+      it 'should use the default sync if set' do
+        expect(described_class.determine_backend_class(true, true)).
+          to eq(Deimos::Backends::Kafka)
+        expect(described_class.determine_backend_class(false, true)).
+          to eq(Deimos::Backends::KafkaAsync)
+        expect(described_class.determine_backend_class(nil, true)).
+          to eq(Deimos::Backends::Kafka)
+      end
+    end
+
+  end
+end
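Taken together, these examples pin down the producer DSL: `schema`, `namespace`, `topic`, and `key_config` declare the class; `publish`/`publish_list` send hashes; a reserved `:payload_key` entry supplies the message key (a plain string under `key_config plain: true`, a hash under a schema key config); `partition_key` can be overridden per class; and producers can be suppressed globally or individually via `Deimos.disable_producers`. A minimal sketch built only from what the spec demonstrates, reusing its fixture schema and topic names:

# Sketch assembled from the DSL calls the spec above exercises;
# 'MySchema' and 'my-topic' are the spec's fixtures, not real names.
class MyAppProducer < Deimos::Producer
  schema 'MySchema'
  namespace 'com.my-namespace'
  topic 'my-topic'
  key_config field: 'test_id' # derive the message key from this payload field

  # Optional override: route messages to partitions yourself.
  def self.partition_key(payload)
    payload[:payload_key]
  end
end

MyAppProducer.publish('test_id' => 'foo', 'some_int' => 123)
MyAppProducer.publish_list(
  [{ 'test_id' => 'foo', 'some_int' => 123 },
   { 'test_id' => 'bar', 'some_int' => 124 }]
)

# Suppress a single producer, e.g. during a backfill or in tests:
Deimos.disable_producers(MyAppProducer) do
  MyAppProducer.publish('test_id' => 'foo', 'some_int' => 123) # no-op here
end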