deimos-ruby 1.24.2 → 2.0.0.pre.alpha1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (120)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +0 -17
  3. data/.tool-versions +1 -0
  4. data/CHANGELOG.md +5 -0
  5. data/README.md +287 -498
  6. data/deimos-ruby.gemspec +4 -4
  7. data/docs/CONFIGURATION.md +133 -226
  8. data/docs/UPGRADING.md +237 -0
  9. data/lib/deimos/active_record_consume/batch_consumption.rb +29 -28
  10. data/lib/deimos/active_record_consume/mass_updater.rb +59 -4
  11. data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
  12. data/lib/deimos/active_record_consumer.rb +36 -21
  13. data/lib/deimos/active_record_producer.rb +28 -9
  14. data/lib/deimos/backends/base.rb +4 -35
  15. data/lib/deimos/backends/kafka.rb +6 -22
  16. data/lib/deimos/backends/kafka_async.rb +6 -22
  17. data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
  18. data/lib/deimos/config/configuration.rb +116 -379
  19. data/lib/deimos/consume/batch_consumption.rb +24 -124
  20. data/lib/deimos/consume/message_consumption.rb +36 -63
  21. data/lib/deimos/consumer.rb +16 -75
  22. data/lib/deimos/ext/consumer_route.rb +35 -0
  23. data/lib/deimos/ext/producer_middleware.rb +94 -0
  24. data/lib/deimos/ext/producer_route.rb +22 -0
  25. data/lib/deimos/ext/redraw.rb +29 -0
  26. data/lib/deimos/ext/routing_defaults.rb +72 -0
  27. data/lib/deimos/ext/schema_route.rb +70 -0
  28. data/lib/deimos/kafka_message.rb +2 -2
  29. data/lib/deimos/kafka_source.rb +2 -7
  30. data/lib/deimos/kafka_topic_info.rb +1 -1
  31. data/lib/deimos/logging.rb +71 -0
  32. data/lib/deimos/message.rb +2 -11
  33. data/lib/deimos/metrics/datadog.rb +40 -1
  34. data/lib/deimos/metrics/provider.rb +4 -4
  35. data/lib/deimos/producer.rb +39 -116
  36. data/lib/deimos/railtie.rb +6 -0
  37. data/lib/deimos/schema_backends/avro_base.rb +21 -21
  38. data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
  39. data/lib/deimos/schema_backends/avro_validation.rb +2 -2
  40. data/lib/deimos/schema_backends/base.rb +19 -12
  41. data/lib/deimos/schema_backends/mock.rb +6 -1
  42. data/lib/deimos/schema_backends/plain.rb +47 -0
  43. data/lib/deimos/schema_class/base.rb +2 -2
  44. data/lib/deimos/schema_class/enum.rb +1 -1
  45. data/lib/deimos/schema_class/record.rb +2 -2
  46. data/lib/deimos/test_helpers.rb +95 -320
  47. data/lib/deimos/tracing/provider.rb +6 -6
  48. data/lib/deimos/transcoder.rb +88 -0
  49. data/lib/deimos/utils/db_poller/base.rb +16 -14
  50. data/lib/deimos/utils/db_poller/state_based.rb +3 -3
  51. data/lib/deimos/utils/db_poller/time_based.rb +4 -4
  52. data/lib/deimos/utils/db_poller.rb +1 -1
  53. data/lib/deimos/utils/deadlock_retry.rb +1 -1
  54. data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
  55. data/lib/deimos/utils/schema_class.rb +0 -7
  56. data/lib/deimos/version.rb +1 -1
  57. data/lib/deimos.rb +79 -26
  58. data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
  59. data/lib/generators/deimos/schema_class_generator.rb +0 -1
  60. data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
  61. data/lib/generators/deimos/v2_generator.rb +193 -0
  62. data/lib/tasks/deimos.rake +5 -7
  63. data/spec/active_record_batch_consumer_association_spec.rb +22 -13
  64. data/spec/active_record_batch_consumer_spec.rb +84 -65
  65. data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
  66. data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
  67. data/spec/active_record_consume/mass_updater_spec.rb +137 -0
  68. data/spec/active_record_consumer_spec.rb +29 -13
  69. data/spec/active_record_producer_spec.rb +36 -26
  70. data/spec/backends/base_spec.rb +0 -23
  71. data/spec/backends/kafka_async_spec.rb +1 -3
  72. data/spec/backends/kafka_spec.rb +1 -3
  73. data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
  74. data/spec/batch_consumer_spec.rb +66 -116
  75. data/spec/consumer_spec.rb +53 -147
  76. data/spec/deimos_spec.rb +10 -126
  77. data/spec/kafka_source_spec.rb +19 -52
  78. data/spec/karafka/karafka.rb +69 -0
  79. data/spec/karafka_config/karafka_spec.rb +97 -0
  80. data/spec/logging_spec.rb +25 -0
  81. data/spec/message_spec.rb +9 -9
  82. data/spec/producer_spec.rb +112 -254
  83. data/spec/rake_spec.rb +1 -3
  84. data/spec/schema_backends/avro_validation_spec.rb +1 -1
  85. data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
  86. data/spec/snapshots/consumers-no-nest.snap +49 -0
  87. data/spec/snapshots/consumers.snap +49 -0
  88. data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
  89. data/spec/snapshots/consumers_and_producers.snap +49 -0
  90. data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
  91. data/spec/snapshots/consumers_circular.snap +49 -0
  92. data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
  93. data/spec/snapshots/consumers_complex_types.snap +49 -0
  94. data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
  95. data/spec/snapshots/consumers_nested.snap +49 -0
  96. data/spec/snapshots/namespace_folders.snap +49 -0
  97. data/spec/snapshots/namespace_map.snap +49 -0
  98. data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
  99. data/spec/snapshots/producers_with_key.snap +49 -0
  100. data/spec/spec_helper.rb +61 -29
  101. data/spec/utils/db_poller_spec.rb +49 -39
  102. data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
  103. metadata +58 -67
  104. data/lib/deimos/batch_consumer.rb +0 -7
  105. data/lib/deimos/config/phobos_config.rb +0 -163
  106. data/lib/deimos/instrumentation.rb +0 -95
  107. data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
  108. data/lib/deimos/utils/inline_consumer.rb +0 -158
  109. data/lib/deimos/utils/lag_reporter.rb +0 -186
  110. data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
  111. data/spec/config/configuration_spec.rb +0 -321
  112. data/spec/kafka_listener_spec.rb +0 -55
  113. data/spec/phobos.bad_db.yml +0 -73
  114. data/spec/phobos.yml +0 -77
  115. data/spec/utils/inline_consumer_spec.rb +0 -31
  116. data/spec/utils/lag_reporter_spec.rb +0 -76
  117. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  118. data/spec/utils/schema_controller_mixin_spec.rb +0 -84
  119. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
  120. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/spec/deimos_spec.rb CHANGED
@@ -2,72 +2,12 @@
2
2
 
3
3
  describe Deimos do
4
4
 
5
- let(:phobos_configuration) do
6
- { 'logger' =>
7
- { 'file' => 'log/phobos.log',
8
- 'stdout_json' => false,
9
- 'level' => 'debug',
10
- 'ruby_kafka' =>
11
- { 'level' => 'debug' } },
12
- 'kafka' =>
13
- { 'client_id' => 'phobos',
14
- 'connect_timeout' => 15,
15
- 'socket_timeout' => 15,
16
- 'seed_brokers' => 'my_seed_broker.com',
17
- 'ssl_ca_cert' => 'my_ssl_ca_cert',
18
- 'ssl_client_cert' => 'my_ssl_client_cert',
19
- 'ssl_client_cert_key' => 'my_ssl_client_cert_key' },
20
- 'producer' =>
21
- { 'ack_timeout' => 5,
22
- 'required_acks' => :all,
23
- 'max_retries' => 2,
24
- 'retry_backoff' => 1,
25
- 'max_buffer_size' => 10_000,
26
- 'max_buffer_bytesize' => 10_000_000,
27
- 'compression_codec' => nil,
28
- 'compression_threshold' => 1,
29
- 'max_queue_size' => 10_000,
30
- 'delivery_threshold' => 0,
31
- 'delivery_interval' => 0 },
32
- 'consumer' =>
33
- { 'session_timeout' => 300,
34
- 'offset_commit_interval' => 10,
35
- 'offset_commit_threshold' => 0,
36
- 'heartbeat_interval' => 10 },
37
- 'backoff' =>
38
- { 'min_ms' => 1000,
39
- 'max_ms' => 60_000 },
40
- 'listeners' => [
41
- { 'handler' => 'ConsumerTest::MyConsumer',
42
- 'topic' => 'my_consume_topic',
43
- 'group_id' => 'my_group_id',
44
- 'max_bytes_per_partition' => 524_288 },
45
- { 'handler' => 'ConsumerTest::MyBatchConsumer',
46
- 'topic' => 'my_batch_consume_topic',
47
- 'group_id' => 'my_batch_group_id',
48
- 'delivery' => 'inline_batch' }
49
- ],
50
- 'custom_logger' => nil,
51
- 'custom_kafka_logger' => nil }
52
- end
53
-
54
- let(:config_path) { File.join(File.dirname(__FILE__), 'phobos.yml') }
55
-
56
5
  it 'should have a version number' do
57
6
  expect(Deimos::VERSION).not_to be_nil
58
7
  end
59
8
 
60
- it 'should error if required_acks is not all' do
61
- expect {
62
- described_class.configure do |config|
63
- config.producers.backend = :db
64
- config.phobos_config_file = File.join(File.dirname(__FILE__), 'phobos.bad_db.yml')
65
- end
66
- }.to raise_error('Cannot set producers.backend to :db unless producers.required_acks is set to ":all"!')
67
- end
68
-
69
- describe '#start_db_backend!' do
70
- it 'should start if backend is db and thread_count is > 0' do
9
+ describe '#start_outbox_backend!' do
10
+ it 'should start if backend is outbox and thread_count is > 0' do
71
11
  signal_handler = instance_double(Sigurd::SignalHandler)
72
12
  allow(signal_handler).to receive(:run!)
73
13
  expect(Sigurd::Executor).to receive(:new).
@@ -77,9 +17,9 @@ describe Deimos do
77
17
  signal_handler
78
18
  end
79
19
  described_class.configure do |config|
80
- config.producers.backend = :db
20
+ config.producers.backend = :outbox
81
21
  end
82
- described_class.start_db_backend!(thread_count: 2)
22
+ described_class.start_outbox_backend!(thread_count: 2)
83
23
  end
84
24
 
85
25
  it 'should not start if backend is not db' do
@@ -87,83 +27,27 @@ describe Deimos do
87
27
  described_class.configure do |config|
88
28
  config.producers.backend = :kafka
89
29
  end
90
- expect { described_class.start_db_backend!(thread_count: 2) }.
91
- to raise_error('Publish backend is not set to :db, exiting')
30
+ expect { described_class.start_outbox_backend!(thread_count: 2) }.
31
+ to raise_error('Publish backend is not set to :outbox, exiting')
92
32
  end
93
33
 
94
34
  it 'should not start if thread_count is nil' do
95
35
  expect(Sigurd::SignalHandler).not_to receive(:new)
96
36
  described_class.configure do |config|
97
- config.producers.backend = :db
37
+ config.producers.backend = :outbox
98
38
  end
99
- expect { described_class.start_db_backend!(thread_count: nil) }.
39
+ expect { described_class.start_outbox_backend!(thread_count: nil) }.
100
40
  to raise_error('Thread count is not given or set to zero, exiting')
101
41
  end
102
42
 
103
43
  it 'should not start if thread_count is 0' do
104
44
  expect(Sigurd::SignalHandler).not_to receive(:new)
105
45
  described_class.configure do |config|
106
- config.producers.backend = :db
46
+ config.producers.backend = :outbox
107
47
  end
108
- expect { described_class.start_db_backend!(thread_count: 0) }.
48
+ expect { described_class.start_outbox_backend!(thread_count: 0) }.
109
49
  to raise_error('Thread count is not given or set to zero, exiting')
110
50
  end
111
51
  end
112
52
 
113
- describe 'delivery configuration' do
114
- before(:each) do
115
- allow(YAML).to receive(:load).and_return(phobos_configuration)
116
- end
117
-
118
- it 'should not raise an error with properly configured handlers' do
119
- expect {
120
- described_class.configure do
121
- consumer do
122
- class_name 'ConsumerTest::MyConsumer'
123
- delivery :message
124
- end
125
- consumer do
126
- class_name 'ConsumerTest::MyConsumer'
127
- delivery :batch
128
- end
129
- consumer do
130
- class_name 'ConsumerTest::MyBatchConsumer'
131
- delivery :inline_batch
132
- end
133
- end
134
- }.not_to raise_error
135
- end
136
-
137
- it 'should raise an error if inline_batch listeners do not implement consume_batch' do
138
- expect {
139
- described_class.configure do
140
- consumer do
141
- class_name 'ConsumerTest::MyConsumer'
142
- delivery :inline_batch
143
- end
144
- end
145
- }.to raise_error('BatchConsumer ConsumerTest::MyConsumer does not implement `consume_batch`')
146
- end
147
-
148
- it 'should raise an error if Consumers do not have message or batch delivery' do
149
- expect {
150
- described_class.configure do
151
- consumer do
152
- class_name 'ConsumerTest::MyBatchConsumer'
153
- delivery :message
154
- end
155
- end
156
- }.to raise_error('Non-batch Consumer ConsumerTest::MyBatchConsumer does not implement `consume`')
157
- end
158
-
159
- it 'should treat nil as `batch`' do
160
- expect {
161
- described_class.configure do
162
- consumer do
163
- class_name 'ConsumerTest::MyConsumer'
164
- end
165
- end
166
- }.not_to raise_error
167
- end
168
- end
169
53
  end
@@ -17,18 +17,10 @@ module KafkaSourceSpec
17
17
 
18
18
  # Dummy producer which mimicks the behavior of a real producer
19
19
  class WidgetProducer < Deimos::ActiveRecordProducer
20
- topic 'my-topic'
21
- namespace 'com.my-namespace'
22
- schema 'Widget'
23
- key_config field: :id
24
20
  end
25
21
 
26
22
  # Dummy producer which mimicks the behavior of a real producer
27
23
  class WidgetProducerTheSecond < Deimos::ActiveRecordProducer
28
- topic 'my-topic-the-second'
29
- namespace 'com.my-namespace'
30
- schema 'WidgetTheSecond'
31
- key_config field: :id
32
24
  end
33
25
 
34
26
  # Dummy class we can include the mixin in. Has a backing table created
@@ -51,6 +43,22 @@ module KafkaSourceSpec
51
43
 
52
44
  before(:each) do
53
45
  Widget.delete_all
46
+ Karafka::App.routes.redraw do
47
+ topic 'my-topic' do
48
+ namespace 'com.my-namespace'
49
+ schema 'Widget'
50
+ key_config field: :id
51
+ producer_class WidgetProducer
52
+ end
53
+
54
+ topic 'my-topic-the-second' do
55
+ namespace 'com.my-namespace'
56
+ schema 'WidgetTheSecond'
57
+ key_config field: :id
58
+ producer_class WidgetProducerTheSecond
59
+ end
60
+
61
+ end
54
62
  end
55
63
 
56
64
  it 'should send events on creation, update, and deletion' do
@@ -206,10 +214,9 @@ module KafkaSourceSpec
206
214
  context 'with DB backend' do
207
215
  before(:each) do
208
216
  Deimos.configure do |config|
209
- config.producers.backend = :db
217
+ config.producers.backend = :outbox
210
218
  end
211
219
  setup_db(DB_OPTIONS.last) # sqlite
212
- allow(Deimos::Producer).to receive(:produce_batch).and_call_original
213
220
  end
214
221
 
215
222
  it 'should save to the DB' do
@@ -309,46 +316,6 @@ module KafkaSourceSpec
309
316
  end
310
317
  end
311
318
 
312
- context 'with AR models that implement the kafka_producer interface' do
313
- before(:each) do
314
- # Dummy class we can include the mixin in. Has a backing table created
315
- # earlier and has the import hook disabled
316
- deprecated_class = Class.new(ActiveRecord::Base) do
317
- include Deimos::KafkaSource
318
- self.table_name = 'widgets'
319
-
320
- # :nodoc:
321
- def self.kafka_config
322
- {
323
- update: true,
324
- delete: true,
325
- import: false,
326
- create: true
327
- }
328
- end
329
-
330
- # :nodoc:
331
- def self.kafka_producer
332
- WidgetProducer
333
- end
334
- end
335
- stub_const('WidgetDeprecated', deprecated_class)
336
- WidgetDeprecated.reset_column_information
337
- end
338
-
339
- it 'logs a warning and sends the message as usual' do
340
- expect(Deimos.config.logger).to receive(:warn).with({ message: WidgetDeprecated::DEPRECATION_WARNING })
341
- widget = WidgetDeprecated.create(widget_id: 1, name: 'Widget 1')
342
- expect('my-topic').to have_sent({
343
- widget_id: 1,
344
- name: 'Widget 1',
345
- id: widget.id,
346
- created_at: anything,
347
- updated_at: anything
348
- }, widget.id)
349
- end
350
- end
351
-
352
319
  context 'with AR models that do not implement any producer interface' do
353
320
  before(:each) do
354
321
  # Dummy class we can include the mixin in. Has a backing table created
@@ -371,10 +338,10 @@ module KafkaSourceSpec
371
338
  WidgetBuggy.reset_column_information
372
339
  end
373
340
 
374
- it 'raises a NotImplementedError exception' do
341
+ it 'raises a MissingImplementationError exception' do
375
342
  expect {
376
343
  WidgetBuggy.create(widget_id: 1, name: 'Widget 1')
377
- }.to raise_error(NotImplementedError)
344
+ }.to raise_error(Deimos::MissingImplementationError)
378
345
  end
379
346
  end
380
347
  end
@@ -0,0 +1,69 @@
1
+ # frozen_string_literal: true
2
+ class KarafkaApp < Karafka::App
3
+ setup do |config|
4
+ config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
5
+ config.client_id = 'example_app'
6
+ # Recreate consumers with each batch. This will allow Rails code reload to work in the
7
+ # development mode. Otherwise Karafka process would not be aware of code changes
8
+ config.consumer_persistence = !Rails.env.development?
9
+ end
10
+
11
+ # Comment out this part if you are not using instrumentation and/or you are not
12
+ # interested in logging events for certain environments. Since instrumentation
13
+ # notifications add extra boilerplate, if you want to achieve max performance,
14
+ # listen to only what you really need for given environment.
15
+ Karafka.monitor.subscribe(Karafka::Instrumentation::LoggerListener.new)
16
+ # Karafka.monitor.subscribe(Karafka::Instrumentation::ProctitleListener.new)
17
+
18
+ # This logger prints the producer development info using the Karafka logger.
19
+ # It is similar to the consumer logger listener but producer oriented.
20
+ Karafka.producer.monitor.subscribe(
21
+ WaterDrop::Instrumentation::LoggerListener.new(
22
+ # Log producer operations using the Karafka logger
23
+ Karafka.logger,
24
+ # If you set this to true, logs will contain each message details
25
+ # Please note, that this can be extensive
26
+ log_messages: false
27
+ )
28
+ )
29
+
30
+ # You can subscribe to all consumer related errors and record/track then that way
31
+ #
32
+ # Karafka.monitor.subscribe 'error.occurred' do |event|
33
+ # type = event[:type]
34
+ # error = event[:error]
35
+ # details = (error.backtrace || []).join("\n")
36
+ # ErrorTracker.send_error(error, type, details)
37
+ # end
38
+
39
+ # You can subscribe to all producer related errors and record/track then that way
40
+ # Please note, that producer and consumer have their own notifications pipeline so you need to
41
+ # setup error tracking independently for each of them
42
+ #
43
+ # Karafka.producer.monitor.subscribe('error.occurred') do |event|
44
+ # type = event[:type]
45
+ # error = event[:error]
46
+ # details = (error.backtrace || []).join("\n")
47
+ # ErrorTracker.send_error(error, type, details)
48
+ # end
49
+
50
+ routes.draw do
51
+ # Uncomment this if you use Karafka with ActiveJob
52
+ # You need to define the topic per each queue name you use
53
+ # active_job_topic :default
54
+ # topic :example do
55
+ # Uncomment this if you want Karafka to manage your topics configuration
56
+ # Managing topics configuration via routing will allow you to ensure config consistency
57
+ # across multiple environments
58
+ #
59
+ # config(partitions: 2, 'cleanup.policy': 'compact')
60
+ # consumer ExampleConsumer
61
+ # end
62
+ end
63
+ end
64
+
65
+ # Karafka now features a Web UI!
66
+ # Visit the setup documentation to get started and enhance your experience.
67
+ #
68
+ # https://karafka.io/docs/Web-UI-Getting-Started
69
+ Deimos.setup_karafka
@@ -0,0 +1,97 @@
1
+ RSpec.describe 'Karafka configs' do
2
+ before(:each) do
3
+ KarafkaApp.routes.clear
4
+ $found_stuff = nil
5
+ end
6
+
7
+ let(:consumer_class) do
8
+ Class.new(Deimos::Consumer) do
9
+ def consume_message(message)
10
+ $found_stuff = message.payload
11
+ end
12
+ end
13
+ end
14
+
15
+ let(:producer_class) do
16
+ Class.new(Deimos::Producer) do
17
+ end
18
+ end
19
+
20
+ describe 'producers' do
21
+ before(:each) do
22
+ stub_const('MyProducer', producer_class)
23
+ end
24
+
25
+ it 'should work with key none' do
26
+ KarafkaApp.routes.draw do
27
+ topic 'MyTopic' do
28
+ producer_class MyProducer
29
+ schema 'MySchema'
30
+ namespace 'com.my-namespace'
31
+ key_config(none: true)
32
+ end
33
+ end
34
+ producer_class.publish({test_id: "id1", some_int: 5})
35
+ expect('MyTopic').to have_sent({test_id: "id1", some_int: 5})
36
+ end
37
+
38
+ it 'should work with key plain' do
39
+ KarafkaApp.routes.draw do
40
+ topic 'MyTopic' do
41
+ producer_class MyProducer
42
+ schema 'MySchema'
43
+ namespace 'com.my-namespace'
44
+ key_config({plain: true})
45
+ end
46
+ end
47
+ producer_class.publish({test_id: "id1", some_int: 5, payload_key: 'key'})
48
+ expect('MyTopic').to have_sent({test_id: "id1", some_int: 5}, 'key')
49
+ end
50
+
51
+ it 'should work with key field' do
52
+ KarafkaApp.routes.draw do
53
+ topic 'MyTopic' do
54
+ producer_class MyProducer
55
+ schema 'MySchema'
56
+ namespace 'com.my-namespace'
57
+ key_config({field: :test_id})
58
+ end
59
+ end
60
+ producer_class.publish({test_id: "id1", some_int: 5})
61
+ expect('MyTopic').to have_sent({test_id: "id1", some_int: 5}, 'id1')
62
+ end
63
+
64
+ it 'should work with key schema' do
65
+ KarafkaApp.routes.draw do
66
+ topic 'MyTopic' do
67
+ producer_class MyProducer
68
+ schema 'MySchema'
69
+ namespace 'com.my-namespace'
70
+ key_config({schema: 'MySchema_key'})
71
+ end
72
+ end
73
+ producer_class.publish({test_id: "id1", some_int: 5, payload_key: {test_id: 'id3'}})
74
+ expect('MyTopic').to have_sent({test_id: "id1", some_int: 5}, { test_id: 'id3'})
75
+ end
76
+
77
+ end
78
+
79
+ it 'should be able to pick up a consumer' do
80
+ stub_const('MyConsumer', consumer_class)
81
+ KarafkaApp.routes.draw do
82
+ topic 'MyTopic' do
83
+ consumer MyConsumer
84
+ schema 'MySchema'
85
+ namespace 'com.my-namespace'
86
+ key_config({field: :test_id})
87
+ end
88
+ end
89
+
90
+ test_consume_message('MyTopic', {test_id: "id1", some_int: 5}, key: "id1")
91
+ expect($found_stuff).to eq({'test_id' => "id1", 'some_int' => 5})
92
+ $found_stuff = nil
93
+ test_consume_message(MyConsumer, {test_id: "id1", some_int: 5}, key: "id1")
94
+ expect($found_stuff).to eq({'test_id' => "id1", 'some_int' => 5})
95
+ end
96
+
97
+ end
@@ -0,0 +1,25 @@
1
+ RSpec.describe Deimos::Logging do
2
+ include_context 'with publish_backend'
3
+ describe '#messages_log_text' do
4
+ it 'should return whole payload (default behavior)' do
5
+ log_message = described_class.messages_log_text(:payloads, messages)
6
+ expect(log_message[:payloads].count).to eq(3)
7
+ expect(log_message[:payloads].first[:payload]).to eq({ some_int: 1, test_id: 'foo1' })
8
+ expect(log_message[:payloads].first[:key]).to eq('foo1')
9
+ end
10
+
11
+ it 'should return only keys of messages' do
12
+ log_message = described_class.messages_log_text(:keys, messages)
13
+ expect(log_message[:payload_keys].count).to eq(3)
14
+ expect(log_message[:payload_keys]).to be_a(Array)
15
+ expect(log_message[:payload_keys].first).to eq('foo1')
16
+ end
17
+
18
+ it 'should return only messages count' do
19
+ log_message = described_class.messages_log_text(:count, messages)
20
+ expect(log_message[:payloads_count]).to be_a(Integer)
21
+ expect(log_message[:payloads_count]).to eq(3)
22
+ end
23
+ end
24
+
25
+ end
data/spec/message_spec.rb CHANGED
@@ -2,37 +2,37 @@
2
2
 
3
3
  RSpec.describe(Deimos::Message) do
4
4
  it 'should detect tombstones' do
5
- expect(described_class.new(nil, nil, key: 'key1')).
5
+ expect(described_class.new(nil, key: 'key1')).
6
6
  to be_tombstone
7
- expect(described_class.new({ v: 'val1' }, nil, key: 'key1')).
7
+ expect(described_class.new({ v: 'val1' }, key: 'key1')).
8
8
  not_to be_tombstone
9
- expect(described_class.new({ v: '' }, nil, key: 'key1')).
9
+ expect(described_class.new({ v: '' }, key: 'key1')).
10
10
  not_to be_tombstone
11
- expect(described_class.new({ v: 'val1' }, nil, key: nil)).
11
+ expect(described_class.new({ v: 'val1' }, key: nil)).
12
12
  not_to be_tombstone
13
13
  end
14
14
 
15
15
  it 'can support complex keys/values' do
16
- expect { described_class.new({ a: 1, b: 2 }, nil, key: { c: 3, d: 4 }) }.
16
+ expect { described_class.new({ a: 1, b: 2 }, key: { c: 3, d: 4 }) }.
17
17
  not_to raise_exception
18
18
  end
19
19
 
20
20
  describe 'headers' do
21
21
  it 'returns nil when not set' do
22
- expect(described_class.new({ v: 'val1' }, nil, key: 'key1')).
22
+ expect(described_class.new({ v: 'val1' }, key: 'key1')).
23
23
  to have_attributes(headers: nil)
24
24
  end
25
25
 
26
26
  it 'can set and get headers' do
27
- expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 })).
27
+ expect(described_class.new({ v: 'val1' }, key: 'key1', headers: { a: 1 })).
28
28
  to have_attributes(headers: { a: 1 })
29
29
  end
30
30
 
31
31
  it 'includes headers when converting to Hash' do
32
- expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 }).to_h).
32
+ expect(described_class.new({ v: 'val1' }, key: 'key1', headers: { a: 1 }).to_h).
33
33
  to include(headers: { a: 1 })
34
34
 
35
- expect(described_class.new({ v: 'val1' }, nil, key: 'key1', headers: { a: 1 }).encoded_hash).
35
+ expect(described_class.new({ v: 'val1' }, key: 'key1', headers: { a: 1 }).encoded_hash).
36
36
  to include(headers: { a: 1 })
37
37
  end
38
38
  end