deimos-temp-fork 0.0.1

Files changed (146)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
data/spec/message_spec.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ RSpec.describe(Deimos::Message) do
+   it 'should detect tombstones' do
+     expect(described_class.new(nil, nil, key: 'key1')).
+       to be_tombstone
+     expect(described_class.new({ v: 'val1' }, nil, key: 'key1')).
+       not_to be_tombstone
+     expect(described_class.new({ v: '' }, nil, key: 'key1')).
+       not_to be_tombstone
+     expect(described_class.new({ v: 'val1' }, nil, key: nil)).
+       not_to be_tombstone
+   end
+
+   it 'can support complex keys/values' do
+     expect { described_class.new({ a: 1, b: 2 }, nil, key: { c: 3, d: 4 }) }.
+       not_to raise_exception
+   end
+ end
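The spec above pins down the small surface of Deimos::Message that matters for consumers: a message is built from a payload, a producer class, and a key, and a nil payload combined with a non-nil key marks a tombstone. A minimal sketch of how calling code might branch on that predicate, assuming the `tombstone?` method and the `payload`/`key` readers exercised above (class names and keys are illustrative, not from the gem):

  deleted = Deimos::Message.new(nil, nil, key: 'user-123')              # nil payload => tombstone
  updated = Deimos::Message.new({ 'v' => 'new' }, nil, key: 'user-123')

  [deleted, updated].each do |message|
    if message.tombstone?
      # upstream record was deleted; remove the local copy
      puts "#{message.key}: tombstone, delete local record"
    else
      puts "#{message.key}: upsert with #{message.payload.inspect}"
    end
  end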
data/spec/phobos.bad_db.yml ADDED
@@ -0,0 +1,73 @@
+ logger:
+   # Optional log file, set to false or remove to disable it
+   file: log/phobos.log
+   # Optional output format for stdout, default is false (human readable).
+   # Set to true to enable json output.
+   stdout_json: false
+   level: debug
+   # Comment the block to disable ruby-kafka logs
+   ruby_kafka:
+     level: debug
+
+ kafka:
+   # identifier for this application
+   client_id: phobos
+   # timeout setting for connecting to brokers
+   connect_timeout: 15
+   # timeout setting for socket connections
+   socket_timeout: 15
+
+ producer:
+   # number of seconds a broker can wait for replicas to acknowledge
+   # a write before responding with a timeout
+   ack_timeout: 5
+   # number of replicas that must acknowledge a write, or `:all`
+   # if all in-sync replicas must acknowledge
+   required_acks: 1
+   # number of retries that should be attempted before giving up sending
+   # messages to the cluster. Does not include the original attempt
+   max_retries: 2
+   # number of seconds to wait between retries
+   retry_backoff: 1
+   # number of messages allowed in the buffer before new writes will
+   # raise {BufferOverflow} exceptions
+   max_buffer_size: 10000
+   # maximum size of the buffer in bytes. Attempting to produce messages
+   # when the buffer reaches this size will result in {BufferOverflow} being raised
+   max_buffer_bytesize: 10000000
+   # name of the compression codec to use, or nil if no compression should be performed.
+   # Valid codecs: `:snappy` and `:gzip`
+   compression_codec:
+   # number of messages that needs to be in a message set before it should be compressed.
+   # Note that message sets are per-partition rather than per-topic or per-producer
+   compression_threshold: 1
+   # maximum number of messages allowed in the queue. Only used for async_producer
+   max_queue_size: 10000
+   # if greater than zero, the number of buffered messages that will automatically
+   # trigger a delivery. Only used for async_producer
+   delivery_threshold: 0
+   # if greater than zero, the number of seconds between automatic message
+   # deliveries. Only used for async_producer
+   delivery_interval: 0
+
+ consumer:
+   # number of seconds after which, if a client hasn't contacted the Kafka cluster,
+   # it will be kicked out of the group
+   session_timeout: 300
+   # interval between offset commits, in seconds
+   offset_commit_interval: 10
+   # number of messages that can be processed before their offsets are committed.
+   # If zero, offset commits are not triggered by message processing
+   offset_commit_threshold: 0
+   # interval between heartbeats; must be less than the session window
+   heartbeat_interval: 10
+
+ backoff:
+   min_ms: 1000
+   max_ms: 60000
+
+ listeners:
+   - handler: ConsumerTest::MyConsumer
+     topic: my_consume_topic
+     group_id: my_group_id
+     max_bytes_per_partition: 524288 # 512 KB
data/spec/phobos.yml ADDED
@@ -0,0 +1,77 @@
+ logger:
+   # Optional log file, set to false or remove to disable it
+   file: log/phobos.log
+   # Optional output format for stdout, default is false (human readable).
+   # Set to true to enable json output.
+   stdout_json: false
+   level: debug
+   # Comment the block to disable ruby-kafka logs
+   ruby_kafka:
+     level: debug
+
+ kafka:
+   # identifier for this application
+   client_id: phobos
+   # timeout setting for connecting to brokers
+   connect_timeout: 15
+   # timeout setting for socket connections
+   socket_timeout: 15
+
+ producer:
+   # number of seconds a broker can wait for replicas to acknowledge
+   # a write before responding with a timeout
+   ack_timeout: 5
+   # number of replicas that must acknowledge a write, or `:all`
+   # if all in-sync replicas must acknowledge
+   required_acks: :all
+   # number of retries that should be attempted before giving up sending
+   # messages to the cluster. Does not include the original attempt
+   max_retries: 2
+   # number of seconds to wait between retries
+   retry_backoff: 1
+   # number of messages allowed in the buffer before new writes will
+   # raise {BufferOverflow} exceptions
+   max_buffer_size: 10000
+   # maximum size of the buffer in bytes. Attempting to produce messages
+   # when the buffer reaches this size will result in {BufferOverflow} being raised
+   max_buffer_bytesize: 10000000
+   # name of the compression codec to use, or nil if no compression should be performed.
+   # Valid codecs: `:snappy` and `:gzip`
+   compression_codec:
+   # number of messages that needs to be in a message set before it should be compressed.
+   # Note that message sets are per-partition rather than per-topic or per-producer
+   compression_threshold: 1
+   # maximum number of messages allowed in the queue. Only used for async_producer
+   max_queue_size: 10000
+   # if greater than zero, the number of buffered messages that will automatically
+   # trigger a delivery. Only used for async_producer
+   delivery_threshold: 0
+   # if greater than zero, the number of seconds between automatic message
+   # deliveries. Only used for async_producer
+   delivery_interval: 0
+
+ consumer:
+   # number of seconds after which, if a client hasn't contacted the Kafka cluster,
+   # it will be kicked out of the group
+   session_timeout: 300
+   # interval between offset commits, in seconds
+   offset_commit_interval: 10
+   # number of messages that can be processed before their offsets are committed.
+   # If zero, offset commits are not triggered by message processing
+   offset_commit_threshold: 0
+   # interval between heartbeats; must be less than the session window
+   heartbeat_interval: 10
+
+ backoff:
+   min_ms: 1000
+   max_ms: 60000
+
+ listeners:
+   - handler: ConsumerTest::MyConsumer
+     topic: my_consume_topic
+     group_id: my_group_id
+     max_bytes_per_partition: 524288 # 512 KB
+   - handler: ConsumerTest::MyBatchConsumer
+     topic: my_batch_consume_topic
+     group_id: my_batch_group_id
+     delivery: inline_batch
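Both Phobos fixtures above share the stock configuration layout; they differ only in `required_acks` (`1` in phobos.bad_db.yml versus `:all` in phobos.yml) and in the extra `inline_batch` listener. For reference, a plain Phobos application would load a file like this before starting its listeners; a minimal sketch, assuming the standard `Phobos.configure` entry point rather than anything Deimos-specific (the path is illustrative):

  require 'phobos'

  # Parse the YAML shown above and make it available to listeners and producers.
  Phobos.configure('spec/phobos.yml')
  Phobos.logger.info('Phobos configured')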
data/spec/producer_spec.rb ADDED
@@ -0,0 +1,498 @@
+ # frozen_string_literal: true
+
+ # :nodoc:
+ module ProducerTest
+   describe Deimos::Producer do
+
+     prepend_before(:each) do
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic'
+         key_config field: 'test_id'
+       end
+       stub_const('MyProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchemaWithId'
+         namespace 'com.my-namespace'
+         topic 'my-topic'
+         key_config plain: true
+       end
+       stub_const('MyProducerWithID', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic'
+         key_config plain: true
+         # :nodoc:
+         def self.partition_key(payload)
+           payload[:payload_key] ? payload[:payload_key] + '1' : nil
+         end
+       end
+       stub_const('MyNonEncodedProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic2'
+         key_config none: true
+       end
+       stub_const('MyNoKeyProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MyNestedSchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic'
+         key_config field: 'test_id'
+       end
+       stub_const('MyNestedSchemaProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic2'
+         key_config schema: 'MySchema-key'
+       end
+       stub_const('MySchemaProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic 'my-topic'
+       end
+       stub_const('MyErrorProducer', producer_class)
+
+       producer_class = Class.new(Deimos::Producer) do
+         schema 'MySchema'
+         namespace 'com.my-namespace'
+         topic nil
+         key_config none: true
+       end
+       stub_const('MyNoTopicProducer', producer_class)
+
+     end
+
+     it 'should fail on invalid message with error handler' do
+       subscriber = Deimos.subscribe('produce') do |event|
+         expect(event.payload[:payloads]).to eq([{ 'invalid' => 'key' }])
+       end
+       expect(MyProducer.encoder).to receive(:validate).and_raise('OH NOES')
+       expect { MyProducer.publish('invalid' => 'key', :payload_key => 'key') }.
+         to raise_error('OH NOES')
+       Deimos.unsubscribe(subscriber)
+     end
+
+     it 'should produce a message' do
+       expect(described_class).to receive(:produce_batch).once.with(
+         Deimos::Backends::Test,
+         [
+           Deimos::Message.new({ 'test_id' => 'foo', 'some_int' => 123 },
+             MyProducer,
+             topic: 'my-topic',
+             partition_key: 'foo',
+             key: 'foo'),
+           Deimos::Message.new({ 'test_id' => 'bar', 'some_int' => 124 },
+             MyProducer,
+             topic: 'my-topic',
+             partition_key: 'bar',
+             key: 'bar')
+         ]
+       ).and_call_original
+
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 },
+          { 'test_id' => 'bar', 'some_int' => 124 }]
+       )
+       expect('my-topic').to have_sent('test_id' => 'foo', 'some_int' => 123)
+       expect('your-topic').not_to have_sent('test_id' => 'foo', 'some_int' => 123)
+       expect('my-topic').not_to have_sent('test_id' => 'foo2', 'some_int' => 123)
+     end
+
+     it 'should allow setting the topic from publish_list' do
+       expect(described_class).to receive(:produce_batch).once.with(
+         Deimos::Backends::Test,
+         [
+           Deimos::Message.new({ 'test_id' => 'foo', 'some_int' => 123 },
+             MyProducer,
+             topic: 'a-new-topic',
+             partition_key: 'foo',
+             key: 'foo'),
+           Deimos::Message.new({ 'test_id' => 'bar', 'some_int' => 124 },
+             MyProducer,
+             topic: 'a-new-topic',
+             partition_key: 'bar',
+             key: 'bar')
+         ]
+       ).and_call_original
+
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 },
+          { 'test_id' => 'bar', 'some_int' => 124 }],
+         topic: 'a-new-topic'
+       )
+       expect('a-new-topic').to have_sent('test_id' => 'foo', 'some_int' => 123)
+       expect('my-topic').not_to have_sent('test_id' => 'foo', 'some_int' => 123)
+       expect('my-topic').not_to have_sent('test_id' => 'foo2', 'some_int' => 123)
+     end
+
+     it 'should add a message ID' do
+       payload = { 'test_id' => 'foo',
+         'some_int' => 123,
+         'message_id' => a_kind_of(String),
+         'timestamp' => a_kind_of(String) }
+       expect(described_class).to receive(:produce_batch).once do |_, messages|
+         expect(messages.size).to eq(1)
+         expect(messages[0].to_h).
+           to match(
+             payload: payload,
+             topic: 'my-topic',
+             partition_key: 'key',
+             metadata: {
+               producer_name: 'MyProducerWithID',
+               decoded_payload: payload
+             },
+             key: 'key'
+           )
+       end
+       MyProducerWithID.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => 'key' }]
+       )
+     end
+
+     it 'should not publish if publish disabled' do
+       expect(described_class).not_to receive(:produce_batch)
+       Deimos.configure { |c| c.producers.disabled = true }
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 },
+          { 'test_id' => 'bar', 'some_int' => 124 }]
+       )
+       expect(MyProducer.topic).not_to have_sent(anything)
+     end
+
+     it 'should not send messages if inside a disable_producers block' do
+       Deimos.disable_producers do
+         MyProducer.publish_list(
+           [{ 'test_id' => 'foo', 'some_int' => 123 },
+            { 'test_id' => 'bar', 'some_int' => 124 }]
+         )
+       end
+       expect(MyProducer.topic).not_to have_sent(anything)
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 },
+          { 'test_id' => 'bar', 'some_int' => 124 }]
+       )
+       expect(MyProducer.topic).to have_sent(anything)
+     end
+
+     it 'should send messages after a crash' do
+       expect {
+         Deimos.disable_producers do
+           raise 'OH NOES'
+         end
+       }.to raise_error('OH NOES')
+       expect(Deimos).not_to be_producers_disabled
+     end
+
+     it 'should produce to a prefixed topic' do
+       Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }
+       payload = { 'test_id' => 'foo', 'some_int' => 123 }
+       expect(described_class).to receive(:produce_batch).once do |_, messages|
+         expect(messages.size).to eq(1)
+         expect(messages[0].to_h).
+           to eq(
+             payload: payload,
+             topic: 'prefix.my-topic',
+             partition_key: 'foo',
+             metadata: {
+               producer_name: 'MyProducer',
+               decoded_payload: payload
+             },
+             key: 'foo'
+           )
+       end
+
+       MyProducer.publish_list([payload])
+       Deimos.configure { |c| c.producers.topic_prefix = nil }
+       expect(described_class).to receive(:produce_batch).once do |_, messages|
+         expect(messages.size).to eq(1)
+         expect(messages[0].to_h).
+           to eq(
+             payload: payload,
+             topic: 'my-topic',
+             partition_key: 'foo',
+             metadata: {
+               producer_name: 'MyProducer',
+               decoded_payload: payload
+             },
+             key: 'foo'
+           )
+       end
+
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 }]
+       )
+     end
+
+     it 'should encode the key' do
+       Deimos.configure { |c| c.producers.topic_prefix = nil }
+       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'my-topic-key')
+       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'my-topic-key')
+       expect(MyProducer.encoder).to receive(:encode).with({
+         'test_id' => 'foo',
+         'some_int' => 123
+       }, { topic: 'my-topic-value' })
+       expect(MyProducer.encoder).to receive(:encode).with({
+         'test_id' => 'bar',
+         'some_int' => 124
+       }, { topic: 'my-topic-value' })
+
+       MyProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123 },
+          { 'test_id' => 'bar', 'some_int' => 124 }]
+       )
+     end
+
+     it 'should encode the key with topic prefix' do
+       Deimos.configure { |c| c.producers.topic_prefix = 'prefix.' }
+       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'prefix.my-topic-key')
+       expect(MyProducer.encoder).to receive(:encode_key).with('test_id', 'bar', topic: 'prefix.my-topic-key')
+       expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'foo',
+         'some_int' => 123 },
+         { topic: 'prefix.my-topic-value' })
+       expect(MyProducer.encoder).to receive(:encode).with({ 'test_id' => 'bar',
+         'some_int' => 124 },
+         { topic: 'prefix.my-topic-value' })
+
+       MyProducer.publish_list([{ 'test_id' => 'foo', 'some_int' => 123 },
+         { 'test_id' => 'bar', 'some_int' => 124 }])
+     end
+
+     it 'should not encode with plaintext key' do
+       expect(MyNonEncodedProducer.key_encoder).not_to receive(:encode_key)
+
+       MyNonEncodedProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => 'foo_key' },
+          { 'test_id' => 'bar', 'some_int' => 124, :payload_key => 'bar_key' }]
+       )
+     end
+
+     it 'should encode with a schema' do
+       expect(MySchemaProducer.key_encoder).to receive(:encode).with({ 'test_id' => 'foo_key' },
+         { topic: 'my-topic2-key' })
+       expect(MySchemaProducer.key_encoder).to receive(:encode).with({ 'test_id' => 'bar_key' },
+         { topic: 'my-topic2-key' })
+
+       MySchemaProducer.publish_list(
+         [{ 'test_id' => 'foo', 'some_int' => 123,
+            :payload_key => { 'test_id' => 'foo_key' } },
+          { 'test_id' => 'bar', 'some_int' => 124,
+            :payload_key => { 'test_id' => 'bar_key' } }]
+       )
+     end
+
+     it 'should properly encode and coerce values with a nested record' do
+       expect(MyNestedSchemaProducer.encoder).to receive(:encode_key).with('test_id', 'foo', topic: 'my-topic-key')
+       MyNestedSchemaProducer.publish(
+         'test_id' => 'foo',
+         'test_float' => BigDecimal('123.456'),
+         'test_array' => ['1'],
+         'some_nested_record' => {
+           'some_int' => 123,
+           'some_float' => BigDecimal('456.789'),
+           'some_string' => '123',
+           'some_optional_int' => nil
+         },
+         'some_optional_record' => nil
+       )
+       expect(MyNestedSchemaProducer.topic).to have_sent(
+         'test_id' => 'foo',
+         'test_float' => 123.456,
+         'test_array' => ['1'],
+         'some_nested_record' => {
+           'some_int' => 123,
+           'some_float' => 456.789,
+           'some_string' => '123',
+           'some_optional_int' => nil
+         },
+         'some_optional_record' => nil
+       )
+     end
+
+     it 'should raise error if blank topic is passed in explicitly' do
+       expect {
+         MyProducer.publish_list(
+           [{ 'test_id' => 'foo',
+              'some_int' => 123 },
+            { 'test_id' => 'bar',
+              'some_int' => 124 }],
+           topic: ''
+         )
+       }.to raise_error(RuntimeError,
+         'Topic not specified. Please specify the topic.')
+     end
+
+     it 'should raise error if the producer has not been initialized with a topic' do
+       expect {
+         MyNoTopicProducer.publish_list(
+           [{ 'test_id' => 'foo',
+              'some_int' => 123 },
+            { 'test_id' => 'bar',
+              'some_int' => 124 }]
+         )
+       }.to raise_error(RuntimeError,
+         'Topic not specified. Please specify the topic.')
+     end
+
+     it 'should error with nothing set' do
+       expect {
+         MyErrorProducer.publish_list(
+           [{ 'test_id' => 'foo', 'some_int' => 123, :payload_key => '123' }]
+         )
+       }.to raise_error('No key config given - if you are not encoding keys, please use `key_config plain: true`')
+     end
+
+     it 'should error if no key given and none is not the config' do
+       expect {
+         MyNonEncodedProducer.publish_list(
+           [{ 'test_id' => 'foo', 'some_int' => 123 }]
+         )
+       }.to raise_error('No key given but a key is required! Use `key_config none: true` to avoid using keys.')
+     end
+
+     it 'should allow nil keys if none: true is configured' do
+       expect {
+         MyNoKeyProducer.publish_list(
+           [{ 'test_id' => 'foo', 'some_int' => 123 }]
+         )
+       }.not_to raise_error
+     end
+
+     it 'should use a partition key' do
+       MyNonEncodedProducer.publish_list([{
+         'test_id' => 'foo',
+         'some_int' => 123,
+         :payload_key => '123'
+       },
+       {
+         'test_id' => 'bar',
+         'some_int' => 456,
+         :payload_key => '456'
+       }])
+       expect(MyNonEncodedProducer.topic).to have_sent({
+         'test_id' => 'foo',
+         'some_int' => 123
+       }, '123', '1231')
+       expect(MyNonEncodedProducer.topic).to have_sent({
+         'test_id' => 'bar',
+         'some_int' => 456
+       }, '456', '4561')
+     end
+
+     describe 'disabling' do
+       it 'should disable globally' do
+         Deimos.disable_producers do
+           Deimos.disable_producers do # test nested
+             MyProducer.publish(
+               'test_id' => 'foo',
+               'some_int' => 123,
+               :payload_key => '123'
+             )
+             MyProducerWithID.publish(
+               'test_id' => 'foo', 'some_int' => 123
+             )
+             expect('my-topic').not_to have_sent(anything)
+             expect(Deimos).to be_producers_disabled
+             expect(Deimos).to be_producers_disabled([MyProducer])
+           end
+         end
+
+         MyProducerWithID.publish(
+           'test_id' => 'foo', 'some_int' => 123, :payload_key => 123
+         )
+         expect('my-topic').
+           to have_sent('test_id' => 'foo', 'some_int' => 123,
+             'message_id' => anything, 'timestamp' => anything)
+         expect(Deimos).not_to be_producers_disabled
+         expect(Deimos).not_to be_producers_disabled([MyProducer])
+       end
+
+       it 'should disable a single producer' do
+         Deimos.disable_producers(MyProducer) do # test nested
+           Deimos.disable_producers(MyProducer) do
+             MySchemaProducer.publish(
+               'test_id' => 'foo', 'some_int' => 123,
+               :payload_key => { 'test_id' => 'foo_key' }
+             )
+             MyProducer.publish(
+               'test_id' => 'foo',
+               'some_int' => 123,
+               :payload_key => '123'
+             )
+             expect('my-topic').not_to have_sent(anything)
+             expect('my-topic2').to have_sent('test_id' => 'foo', 'some_int' => 123)
+             expect(Deimos).not_to be_producers_disabled
+             expect(Deimos).to be_producers_disabled(MyProducer)
+             expect(Deimos).not_to be_producers_disabled(MySchemaProducer)
+           end
+         end
+         expect(Deimos).not_to be_producers_disabled
+         expect(Deimos).not_to be_producers_disabled(MyProducer)
+         expect(Deimos).not_to be_producers_disabled(MySchemaProducer)
+         MyProducer.publish(
+           'test_id' => 'foo',
+           'some_int' => 123,
+           :payload_key => '123'
+         )
+         expect('my-topic').
+           to have_sent('test_id' => 'foo', 'some_int' => 123)
+       end
+
+     end
+
+     describe '#determine_backend_class' do
+       before(:each) do
+         Deimos.configure { |c| c.producers.backend = :kafka_async }
+       end
+
+       it 'should return kafka_async if sync is false' do
+         expect(described_class.determine_backend_class(false, false)).
+           to eq(Deimos::Backends::KafkaAsync)
+         expect(described_class.determine_backend_class(nil, false)).
+           to eq(Deimos::Backends::KafkaAsync)
+       end
+
+       it 'should return kafka if sync is true' do
+         expect(described_class.determine_backend_class(true, false)).
+           to eq(Deimos::Backends::Kafka)
+       end
+
+       it 'should return db if db is set' do
+         Deimos.configure { producers.backend = :db }
+         expect(described_class.determine_backend_class(true, false)).
+           to eq(Deimos::Backends::Db)
+         expect(described_class.determine_backend_class(false, false)).
+           to eq(Deimos::Backends::Db)
+       end
+
+       it 'should return kafka if force_send is true' do
+         Deimos.configure { producers.backend = :db }
+         expect(described_class.determine_backend_class(true, true)).
+           to eq(Deimos::Backends::Kafka)
+         expect(described_class.determine_backend_class(false, true)).
+           to eq(Deimos::Backends::KafkaAsync)
+       end
+
+       it 'should use the default sync if set' do
+         expect(described_class.determine_backend_class(true, true)).
+           to eq(Deimos::Backends::Kafka)
+         expect(described_class.determine_backend_class(false, true)).
+           to eq(Deimos::Backends::KafkaAsync)
+         expect(described_class.determine_backend_class(nil, true)).
+           to eq(Deimos::Backends::Kafka)
+       end
+     end
+
+   end
+ end
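The producer spec above exercises the public DSL end to end: `schema`, `namespace`, `topic`, and `key_config` declare a producer class, while `publish` and `publish_list` send one payload or a batch. A minimal sketch of that usage outside the test suite, reusing the fixture schema from `spec/schemas` (the `WidgetProducer` class name is illustrative):

  class WidgetProducer < Deimos::Producer
    schema 'MySchema'
    namespace 'com.my-namespace'
    topic 'my-topic'
    key_config field: 'test_id' # encode the key from the 'test_id' field
  end

  # Keys and partition keys are derived according to key_config.
  WidgetProducer.publish('test_id' => 'foo', 'some_int' => 123)
  WidgetProducer.publish_list(
    [{ 'test_id' => 'foo', 'some_int' => 123 },
     { 'test_id' => 'bar', 'some_int' => 124 }]
  )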