nulogy_message_bus_consumer 0.3.2 → 1.0.0

Files changed (102)
  1. checksums.yaml +4 -4
  2. data/Rakefile +2 -3
  3. data/config/credentials/message-bus-us-east-1.key +1 -0
  4. data/config/credentials/message-bus-us-east-1.yml.enc +1 -0
  5. data/lib/nulogy_message_bus_consumer.rb +27 -6
  6. data/lib/nulogy_message_bus_consumer/clock.rb +13 -0
  7. data/lib/nulogy_message_bus_consumer/config.rb +18 -4
  8. data/lib/nulogy_message_bus_consumer/deployment/ecs.rb +23 -0
  9. data/lib/nulogy_message_bus_consumer/lag_tracker.rb +53 -0
  10. data/lib/nulogy_message_bus_consumer/message.rb +1 -1
  11. data/lib/nulogy_message_bus_consumer/null_logger.rb +6 -3
  12. data/lib/nulogy_message_bus_consumer/pipeline.rb +1 -1
  13. data/lib/nulogy_message_bus_consumer/steps/commit_on_success.rb +2 -1
  14. data/lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb +23 -9
  15. data/lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb +1 -1
  16. data/lib/nulogy_message_bus_consumer/steps/log_messages.rb +4 -11
  17. data/lib/nulogy_message_bus_consumer/steps/stream_messages.rb +2 -2
  18. data/lib/nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left.rb +2 -2
  19. data/lib/nulogy_message_bus_consumer/steps/timed_task.rb +42 -0
  20. data/lib/nulogy_message_bus_consumer/tasks/log_consumer_lag.rb +45 -0
  21. data/lib/nulogy_message_bus_consumer/tasks/prune_processed_messages.rb +37 -0
  22. data/lib/nulogy_message_bus_consumer/tasks/supervise_consumer_lag.rb +65 -0
  23. data/lib/nulogy_message_bus_consumer/version.rb +1 -1
  24. data/lib/tasks/engine/message_bus_consumer.rake +7 -8
  25. data/spec/dummy/Rakefile +6 -0
  26. data/spec/dummy/app/assets/config/manifest.js +3 -0
  27. data/spec/dummy/app/assets/stylesheets/application.css +15 -0
  28. data/spec/dummy/app/channels/application_cable/channel.rb +4 -0
  29. data/spec/dummy/app/channels/application_cable/connection.rb +4 -0
  30. data/spec/dummy/app/controllers/application_controller.rb +2 -0
  31. data/spec/dummy/app/helpers/application_helper.rb +2 -0
  32. data/spec/dummy/app/javascript/packs/application.js +15 -0
  33. data/spec/dummy/app/jobs/application_job.rb +7 -0
  34. data/spec/dummy/app/mailers/application_mailer.rb +4 -0
  35. data/spec/dummy/app/models/application_record.rb +3 -0
  36. data/spec/dummy/app/views/layouts/application.html.erb +14 -0
  37. data/spec/dummy/app/views/layouts/mailer.html.erb +13 -0
  38. data/spec/dummy/app/views/layouts/mailer.text.erb +1 -0
  39. data/spec/dummy/bin/rails +4 -0
  40. data/spec/dummy/bin/rake +4 -0
  41. data/spec/dummy/bin/setup +33 -0
  42. data/spec/dummy/config.ru +5 -0
  43. data/spec/dummy/config/application.rb +29 -0
  44. data/spec/dummy/config/boot.rb +5 -0
  45. data/spec/dummy/config/cable.yml +10 -0
  46. data/spec/dummy/config/credentials/message-bus-us-east-1.key +1 -0
  47. data/spec/dummy/config/credentials/message-bus-us-east-1.yml.enc +1 -0
  48. data/spec/dummy/config/database.yml +27 -0
  49. data/spec/dummy/config/environment.rb +5 -0
  50. data/spec/dummy/config/environments/development.rb +62 -0
  51. data/spec/dummy/config/environments/production.rb +112 -0
  52. data/spec/dummy/config/environments/test.rb +49 -0
  53. data/spec/dummy/config/initializers/application_controller_renderer.rb +8 -0
  54. data/spec/dummy/config/initializers/assets.rb +12 -0
  55. data/spec/dummy/config/initializers/backtrace_silencers.rb +7 -0
  56. data/spec/dummy/config/initializers/content_security_policy.rb +28 -0
  57. data/spec/dummy/config/initializers/cookies_serializer.rb +5 -0
  58. data/spec/dummy/config/initializers/filter_parameter_logging.rb +4 -0
  59. data/spec/dummy/config/initializers/inflections.rb +16 -0
  60. data/spec/dummy/config/initializers/message_bus_consumer.rb +5 -0
  61. data/spec/dummy/config/initializers/mime_types.rb +4 -0
  62. data/spec/dummy/config/initializers/wrap_parameters.rb +14 -0
  63. data/spec/dummy/config/locales/en.yml +33 -0
  64. data/spec/dummy/config/puma.rb +36 -0
  65. data/spec/dummy/config/routes.rb +3 -0
  66. data/spec/dummy/config/spring.rb +6 -0
  67. data/spec/dummy/config/storage.yml +34 -0
  68. data/spec/dummy/db/schema.rb +21 -0
  69. data/spec/dummy/log/development.log +4 -0
  70. data/spec/dummy/log/production.log +18 -0
  71. data/spec/dummy/log/test.log +7949 -0
  72. data/spec/dummy/public/404.html +67 -0
  73. data/spec/dummy/public/422.html +67 -0
  74. data/spec/dummy/public/500.html +66 -0
  75. data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
  76. data/spec/dummy/public/apple-touch-icon.png +0 -0
  77. data/spec/dummy/public/favicon.ico +0 -0
  78. data/spec/dummy/tmp/development_secret.txt +1 -0
  79. data/spec/integration/nulogy_message_bus_consumer/auditor_spec.rb +59 -0
  80. data/spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb +41 -0
  81. data/spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +131 -0
  82. data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb +53 -0
  83. data/spec/integration/nulogy_message_bus_consumer/tasks/prune_processed_messages_spec.rb +32 -0
  84. data/spec/integration/nulogy_message_bus_consumer/tasks/supervise_consumer_lag_spec.rb +33 -0
  85. data/spec/integration/test_topic_spec.rb +39 -0
  86. data/spec/spec_helper.rb +50 -0
  87. data/spec/support/kafka.rb +74 -0
  88. data/spec/support/middleware_tap.rb +12 -0
  89. data/spec/support/skip.rb +9 -0
  90. data/spec/support/test_topic.rb +48 -0
  91. data/spec/unit/nulogy_message_bus_consumer/config_spec.rb +20 -0
  92. data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb +35 -0
  93. data/spec/unit/nulogy_message_bus_consumer/message_spec.rb +84 -0
  94. data/spec/unit/nulogy_message_bus_consumer/pipeline_spec.rb +49 -0
  95. data/spec/unit/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +58 -0
  96. data/spec/unit/nulogy_message_bus_consumer/steps/deduplicate_messages_spec.rb +56 -0
  97. data/spec/unit/nulogy_message_bus_consumer/steps/log_messages_spec.rb +70 -0
  98. data/spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb +35 -0
  99. data/spec/unit/nulogy_message_bus_consumer/tasks/calculator_spec.rb +67 -0
  100. data/spec/unit/nulogy_message_bus_consumer_spec.rb +30 -0
  101. metadata +209 -21
  102. data/lib/nulogy_message_bus_consumer/steps/monitor_replication_lag.rb +0 -51
data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb
@@ -0,0 +1,53 @@
+ RSpec.describe NulogyMessageBusConsumer::Steps::ConnectToMessageBus do
+   subject(:pipeline) do
+     pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: config)
+     pipeline.insert(tap, after: NulogyMessageBusConsumer::Steps::ConnectToMessageBus)
+     pipeline.append(message_handler_spy)
+     pipeline
+   end
+
+   let(:topic) { TestTopic.new }
+   let(:config) { topic.config }
+   let(:tap) { MiddlewareTap.new }
+   let(:message_handler_spy) { double }
+
+   after { topic.close }
+
+   it "receives messages", skip: flakey_in_ci do
+     called = false
+     expect(message_handler_spy).to receive(:call) do |message:, **_kargs|
+       expect(message).to have_attributes(event_data: {data: "Some Payload"})
+       called = true
+       :success
+     end
+
+     pipeline_thread = start(pipeline, tap)
+
+     topic.produce_one_message(
+       key: "Some Key",
+       payload: message_payload(data: "Some Payload")
+     )
+
+     NulogyMessageBusConsumer::KafkaUtils.wait_for { called }
+     Thread.kill(pipeline_thread)
+   end
+
+   def start(pipeline, tap)
+     thr = Thread.new { pipeline.invoke }
+     wait_for_partition_assignment(tap)
+     thr
+   end
+
+   def wait_for_partition_assignment(tap)
+     NulogyMessageBusConsumer::KafkaUtils.wait_for { tap.arguments[:kafka_consumer] }
+     NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(tap.arguments[:kafka_consumer])
+   end
+
+   def message_payload(**payload)
+     JSON.dump(
+       id: SecureRandom.uuid,
+       created_at: 1_000,
+       event_json: JSON.dump(payload)
+     )
+   end
+ end
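The spec above exercises the consumer wiring used throughout this release's test suite: NulogyMessageBusConsumer.recommended_consumer_pipeline, Pipeline#append, and Pipeline#invoke. A minimal wiring sketch built only from the calls shown in that spec; the handler class, group id, servers, and topic name are illustrative placeholders, not values from the gem:

# Sketch only: names and values below are assumptions, not part of the gem.
config = NulogyMessageBusConsumer::Config.new(
  consumer_group_id: "my-app-consumer-group",  # assumed value
  bootstrap_servers: "localhost:9092",         # assumed value
  topic_name: "my-topic"                       # assumed value
)

class MyMessageHandler                         # hypothetical handler
  def call(message:, **_kargs)
    # message.event_data carries the parsed event payload (see message_spec below)
    :success
  end
end

pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: config)
pipeline.append(MyMessageHandler.new)
pipeline.invoke  # the spec above runs this call on a background thread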
data/spec/integration/nulogy_message_bus_consumer/tasks/prune_processed_messages_spec.rb
@@ -0,0 +1,32 @@
+ module NulogyMessageBusConsumer
+   module Tasks
+     RSpec.describe PruneProcessedMessages do
+       let(:logger) { spy }
+       let(:task) { described_class.new(logger, 10, 1.minute) }
+
+       it "prunes old messages" do
+         processed_message(2.minutes.ago)
+         processed_message(1.minute.ago)
+
+         task.call
+
+         expect(ProcessedMessage.count).to be(0)
+         expect(logger).to have_received(:info).with(/Pruned 2 processed messages/)
+       end
+
+       it "does not prune newer messages" do
+         processed_message(2.minutes.ago)
+         processed_message(1.minute.ago)
+         keep = processed_message(1.minute.from_now)
+
+         task.call
+
+         expect(ProcessedMessage.ids).to contain_exactly(keep.id)
+       end
+
+       def processed_message(ago)
+         ProcessedMessage.create!(id: SecureRandom.uuid, created_at: ago)
+       end
+     end
+   end
+ end
data/spec/integration/nulogy_message_bus_consumer/tasks/supervise_consumer_lag_spec.rb
@@ -0,0 +1,33 @@
+ module NulogyMessageBusConsumer
+   module Tasks
+     RSpec.describe SuperviseConsumerLag do
+       let(:killable) { spy }
+       let(:logger) { spy }
+       let(:tracker) { LagTracker.new(failing_checks: 2) }
+       let(:task) { described_class.new(logger, tracker: tracker, killable: killable) }
+
+       it "kills the main thread after lag does not change" do
+         lag = {
+           "partition-1" => 2,
+           "partition-2" => 3
+         }
+         tracker.update({"topic" => lag})
+         tracker.update({"topic" => lag})
+         consumer = instance_double(
+           Rdkafka::Consumer,
+           committed: nil,
+           lag: {"topic" => lag}
+         )
+         # skip waiting for assignment
+         allow(KafkaUtils).to receive(:wait_for_assignment).with(consumer).and_return(nil)
+         allow(Thread.current).to receive(:exit)
+         task.extract_args(kafka_consumer: consumer)
+
+         task.call
+
+         expect(logger).to have_received(:warn).with(/Assigned partition lag has not changed/)
+         expect(killable).to have_received(:kill)
+       end
+     end
+   end
+ end
data/spec/integration/test_topic_spec.rb
@@ -0,0 +1,39 @@
+ RSpec.describe TestTopic do
+   let(:topic) { TestTopic.new }
+
+   after { topic.close }
+
+   context "smoke test for specs" do
+     it "publishes and receives messages" do
+       topic.produce_one_message(
+         key: "Some Key",
+         payload: "Some Payload"
+       )
+
+       message = topic.consume_one_message
+       expect(message).to have_attributes(
+         key: "Some Key",
+         payload: "Some Payload"
+       )
+     end
+
+     it "receives returns nil when no messages are received" do
+       consumer = topic.consumer
+       message = consumer.poll(1)
+       expect(message).to be(nil)
+     end
+   end
+
+   context "spec helpers" do
+     it "creates topics" do
+       create_topic(topic.topic_name)
+       expect(list_topics).to include(topic.topic_name)
+     end
+
+     it "deletes topics" do
+       create_topic(topic.topic_name)
+       delete_topic(topic.topic_name)
+       expect(list_topics).not_to include(topic.topic_name)
+     end
+   end
+ end
data/spec/spec_helper.rb
@@ -0,0 +1,50 @@
+ require "bundler/setup"
+ require "rspec/json_expectations"
+ ENV["RAILS_ENV"] ||= "test"
+ require File.expand_path("dummy/config/environment", __dir__)
+ abort("The Rails environment is running in production mode!") if Rails.env.production?
+ require "rspec/rails"
+ require "dotenv"
+
+ env_file = ENV["CI"] == "true" ? ".env.ci" : ".env.local"
+ raise "Expected #{env_file}" unless File.exist?(env_file)
+
+ Dotenv.load(env_file)
+
+ # Load RSpec helpers.
+ ENGINE_ROOT ||= File.expand_path("..", __dir__)
+ Dir[File.join(ENGINE_ROOT, "spec/support/**/*.rb")].sort.each { |f| require f }
+
+ # Load migrations from the dummy app.
+ # ActiveRecord::Migrator.migrations_paths = File.join(ENGINE_ROOT, 'spec/dummy/db/migrate')
+ begin
+   ActiveRecord::Migration.maintain_test_schema!
+ rescue ActiveRecord::PendingMigrationError => e
+   puts e.to_s.strip
+   exit 1
+ end
+
+ RSpec.configure do |config|
+   config.example_status_persistence_file_path = ".rspec_status"
+
+   # Disable RSpec exposing methods globally on `Module` and `main`
+   config.disable_monkey_patching!
+
+   config.use_transactional_fixtures = true
+
+   config.expect_with :rspec do |c|
+     c.syntax = :expect
+     c.include_chain_clauses_in_custom_matcher_descriptions = true
+   end
+
+   config.mock_with :rspec do |mocks|
+     mocks.verify_partial_doubles = true
+   end
+
+   config.shared_context_metadata_behavior = :apply_to_host_groups
+   config.filter_rails_from_backtrace!
+   config.use_transactional_fixtures = true
+
+   config.include(Kafka)
+   config.extend(Skip)
+ end
data/spec/support/kafka.rb
@@ -0,0 +1,74 @@
+ require "open3"
+
+ module Kafka
+   module_function
+
+   def kafka_config
+     config = {
+       "auto.offset.reset": "beginning",
+       "bootstrap.servers": test_bootstrap_servers,
+       "enable.auto.commit": false,
+       "group.id": random_consumer_group
+     }
+
+     Rdkafka::Config.new(config)
+   end
+
+   def random_topic_name
+     "test-topic-#{SecureRandom.uuid}"
+   end
+
+   def random_consumer_group
+     "ruby-test-consumer-group-#{SecureRandom.uuid}"
+   end
+
+   def test_bootstrap_servers
+     "#{ENV["MBC_KAFKA_HOST"]}:#{ENV["MBC_KAFKA_PORT"]}"
+   end
+
+   def setup_kafka_producer
+     kafka_config.producer
+   end
+
+   def setup_kafka_consumer(topic_name)
+     consumer = kafka_config.consumer
+     consumer.subscribe(topic_name)
+     NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(consumer)
+     consumer
+   end
+
+   def create_topic(topic_name)
+     run("kaf topic create #{topic_name} --brokers #{test_bootstrap_servers} --replicas 1 --partitions 3")
+   end
+
+   def delete_topic(topic_name)
+     run("kaf topic delete #{topic_name} --brokers #{test_bootstrap_servers}")
+   end
+
+   def list_topics
+     topics = run("kaf topics --brokers #{test_bootstrap_servers}")
+     topics.split(" ")
+   end
+
+   def run(command)
+     stdout, stderr, status = Open3.capture3(command)
+     raise <<~OUTPUT if status != 0
+       Command `#{command}` failed with:
+       STDOUT:
+       #{stdout}
+
+       STDERR:
+       #{stderr}
+     OUTPUT
+
+     stdout
+   end
+
+   def test_config(topic_name)
+     NulogyMessageBusConsumer::Config.new(
+       consumer_group_id: random_consumer_group,
+       bootstrap_servers: test_bootstrap_servers,
+       topic_name: topic_name
+     )
+   end
+ end
data/spec/support/middleware_tap.rb
@@ -0,0 +1,12 @@
+ class MiddlewareTap
+   attr_reader :arguments
+
+   def initialize
+     @arguments = {}
+   end
+
+   def call(**arguments)
+     @arguments = arguments
+     yield
+   end
+ end
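MiddlewareTap shows the shape a middleware-style step takes in these specs: accept the pipeline's keyword arguments and yield to continue down the pipeline. A hypothetical step following the same contract; the class and its logging are illustrative and not part of the gem:

# Hypothetical step; assumes the same call-and-yield contract MiddlewareTap relies on.
class LogStartupStep
  def initialize(logger)
    @logger = logger
  end

  def call(**_args)
    @logger.info("Consumer pipeline starting")
    yield  # hand control to the remaining steps
  end
end

# Inserted the same way the spec above inserts its tap:
# pipeline.insert(LogStartupStep.new(logger), after: NulogyMessageBusConsumer::Steps::ConnectToMessageBus)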
data/spec/support/skip.rb
@@ -0,0 +1,9 @@
+ module Skip
+   def flakey_in_ci
+     if ENV.fetch("CI", false)
+       "This spec is terribly flakey in CI, so we only run it locally"
+     else
+       false
+     end
+   end
+ end
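Skip is extended into example groups via config.extend(Skip) in spec_helper.rb above, so flakey_in_ci can be passed straight to RSpec's skip: metadata, as the connect_to_message_bus spec does. The example body below is illustrative:

it "receives messages", skip: flakey_in_ci do
  # runs locally; skipped with a reason string when ENV["CI"] is set
end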
data/spec/support/test_topic.rb
@@ -0,0 +1,48 @@
+ class TestTopic
+   attr_reader :topic_name
+
+   def initialize(topic_name: Kafka.random_topic_name)
+     @topic_name = topic_name
+   end
+
+   def consumer
+     @consumer ||= Kafka.setup_kafka_consumer(topic_name)
+   end
+
+   def producer
+     @producer ||= Kafka.setup_kafka_producer
+   end
+
+   def produce_one_message(key: "TEST KEY", payload: '{ "KEY": "TEST PAYLOAD" }', **kwargs)
+     if kwargs.key?(:topic)
+       raise "Do not specify the topic when producing with a TestTopic. Create a new TestTopic instead."
+     end
+
+     id = SecureRandom.uuid
+     if kwargs.key?(:event_json)
+       kwargs[:payload] = JSON.dump({id: id, event_json: JSON.dump(kwargs.delete(:event_json))})
+     end
+
+     producer.produce(
+       topic: topic_name,
+       key: key,
+       payload: payload,
+       **kwargs
+     ).wait
+
+     id
+   end
+
+   def consume_one_message
+     consumer.poll(250)
+   end
+
+   def config
+     Kafka.test_config(topic_name)
+   end
+
+   def close
+     @producer&.close
+     @consumer&.close
+   end
+ end
data/spec/unit/nulogy_message_bus_consumer/config_spec.rb
@@ -0,0 +1,20 @@
+ module NulogyMessageBusConsumer
+   RSpec.describe Config do
+     describe "defaults" do
+       it "lag_check_interval_seconds to 20" do
+         expect(Config.new.lag_check_interval_seconds).to be(20)
+       end
+
+       it "lag_checks to 6" do
+         expect(Config.new.lag_checks).to be(6)
+       end
+     end
+
+     it "merges defaults" do
+       config = Config.new(lag_checks: 3)
+
+       expect(config.lag_checks).to be(3)
+       expect(config.lag_check_interval_seconds).to be(20)
+     end
+   end
+ end
data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb
@@ -0,0 +1,35 @@
+ module NulogyMessageBusConsumer
+   RSpec.describe LagTracker do
+     it "does not fail when values change" do
+       tracker = described_class.new(failing_checks: 1)
+
+       tracker.update({"one" => {"0" => 1, "1" => 1}})
+       tracker.update({"one" => {"0" => 2, "1" => 0}})
+
+       expect(tracker).not_to be_failing
+     end
+
+     it "tracks failed when value unchanged enough" do
+       tracker = described_class.new(failing_checks: 2)
+
+       tracker.update({"one" => {"0" => 1, "1" => 1}})
+       tracker.update({"one" => {"0" => 1, "1" => 2}})
+       tracker.update({"one" => {"0" => 1, "1" => 3}})
+
+       expect(tracker).to be_failing
+       expect(tracker.failed).to eq({
+         "one" => ["0"]
+       })
+     end
+
+     it "ignores unchanged 0 values" do
+       tracker = described_class.new(failing_checks: 1)
+
+       tracker.update({"one" => {"0" => 0}})
+       tracker.update({"one" => {"0" => 0}})
+       tracker.update({"one" => {"0" => 0}})
+
+       expect(tracker).not_to be_failing
+     end
+   end
+ end
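These examples pin down the tracker's observable behaviour: lag is reported per topic and partition, a partition only counts as stuck once a non-zero lag has gone unchanged for failing_checks consecutive updates, and failed reports the stuck partitions by topic. A minimal illustration of that behaviour, inferred from the spec above and not the gem's actual LagTracker implementation (which lives in lib/nulogy_message_bus_consumer/lag_tracker.rb):

# Illustration only -- behaviour inferred from the spec above, not the gem's code.
class IllustrativeLagTracker
  def initialize(failing_checks:)
    @failing_checks = failing_checks
    @last_seen = {}                                      # {topic => {partition => lag}}
    @unchanged = Hash.new { |h, k| h[k] = Hash.new(0) }  # consecutive unchanged counts
  end

  def update(lag_by_topic)
    lag_by_topic.each do |topic, partitions|
      partitions.each do |partition, lag|
        if lag.positive? && @last_seen.dig(topic, partition) == lag
          @unchanged[topic][partition] += 1
        else
          @unchanged[topic][partition] = 0               # a changed or zero lag resets the count
        end
        (@last_seen[topic] ||= {})[partition] = lag
      end
    end
  end

  def failing?
    failed.any?
  end

  def failed
    @unchanged.each_with_object({}) do |(topic, partitions), result|
      stuck = partitions.select { |_, count| count >= @failing_checks }.keys
      result[topic] = stuck unless stuck.empty?
    end
  end
end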
data/spec/unit/nulogy_message_bus_consumer/message_spec.rb
@@ -0,0 +1,84 @@
+ RSpec.describe NulogyMessageBusConsumer::Message do
+   describe ".from_kafka" do
+     let(:message_id) { SecureRandom.uuid }
+     let(:subscription_id) { SecureRandom.uuid }
+     let(:company_uuid) { SecureRandom.uuid }
+     let(:partition_key) { [company_uuid, subscription_id].join(",") }
+
+     it "builds a message from a kafka message" do
+       kafka_message = build_kafka_message(
+         payload: JSON.dump(
+           id: message_id,
+           subscription_id: subscription_id,
+           company_uuid: company_uuid,
+           event_json: JSON.dump(hello: "world")
+         )
+       )
+
+       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+       expect(built).to have_attributes(
+         id: message_id,
+         subscription_id: subscription_id,
+         company_uuid: company_uuid,
+         event_data: {hello: "world"},
+         event_data_unparsed: include_json(hello: "world")
+       )
+     end
+
+     it "expects the event_json to be valid json" do
+       kafka_message = build_kafka_message(
+         payload: JSON.dump(event_json: "not json")
+       )
+
+       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+       expect(built).to have_attributes(
+         event_data: {},
+         event_data_unparsed: "not json"
+       )
+     end
+
+     it "falls back on some depricated columns" do
+       kafka_message = build_kafka_message(
+         payload: JSON.dump(
+           public_subscription_id: subscription_id,
+           tenant_id: company_uuid
+         )
+       )
+
+       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+       expect(built).to have_attributes(
+         subscription_id: subscription_id,
+         company_uuid: company_uuid
+       )
+     end
+   end
+
+   def build_kafka_message(overrides = {}) # rubocop:disable Metrics/MethodLength
+     attributes = {
+       topic: "some_topic",
+       partition: 3,
+       payload: JSON.dump(
+         id: message_id,
+         subscription_id: subscription_id,
+         company_uuid: company_uuid,
+         event_json: JSON.dump(
+           hello: "world"
+         )
+       ),
+       key: JSON.dump(
+         schema: {},
+         payload: {
+           partition_key: partition_key
+         }
+       ),
+       offset: 13,
+       timestamp: Time.zone.now,
+       headers: {key: "value"}
+     }.merge(overrides)
+
+     instance_double("Rdkafka::Consumer::Message", attributes)
+   end
+ end