nulogy_message_bus_consumer 0.4.0 → 0.5.0
- checksums.yaml +4 -4
- data/lib/nulogy_message_bus_consumer/version.rb +1 -1
- data/spec/dummy/Rakefile +6 -0
- data/spec/dummy/app/assets/config/manifest.js +3 -0
- data/spec/dummy/app/assets/stylesheets/application.css +15 -0
- data/spec/dummy/app/channels/application_cable/channel.rb +4 -0
- data/spec/dummy/app/channels/application_cable/connection.rb +4 -0
- data/spec/dummy/app/controllers/application_controller.rb +2 -0
- data/spec/dummy/app/helpers/application_helper.rb +2 -0
- data/spec/dummy/app/javascript/packs/application.js +15 -0
- data/spec/dummy/app/jobs/application_job.rb +7 -0
- data/spec/dummy/app/mailers/application_mailer.rb +4 -0
- data/spec/dummy/app/models/application_record.rb +3 -0
- data/spec/dummy/app/views/layouts/application.html.erb +14 -0
- data/spec/dummy/app/views/layouts/mailer.html.erb +13 -0
- data/spec/dummy/app/views/layouts/mailer.text.erb +1 -0
- data/spec/dummy/bin/rails +4 -0
- data/spec/dummy/bin/rake +4 -0
- data/spec/dummy/bin/setup +33 -0
- data/spec/dummy/config.ru +5 -0
- data/spec/dummy/config/application.rb +29 -0
- data/spec/dummy/config/boot.rb +5 -0
- data/spec/dummy/config/cable.yml +10 -0
- data/spec/dummy/config/credentials/message-bus-us-east-1.key +1 -0
- data/spec/dummy/config/credentials/message-bus-us-east-1.yml.enc +1 -0
- data/spec/dummy/config/database.yml +27 -0
- data/spec/dummy/config/environment.rb +5 -0
- data/spec/dummy/config/environments/development.rb +62 -0
- data/spec/dummy/config/environments/production.rb +112 -0
- data/spec/dummy/config/environments/test.rb +49 -0
- data/spec/dummy/config/initializers/application_controller_renderer.rb +8 -0
- data/spec/dummy/config/initializers/assets.rb +12 -0
- data/spec/dummy/config/initializers/backtrace_silencers.rb +7 -0
- data/spec/dummy/config/initializers/content_security_policy.rb +28 -0
- data/spec/dummy/config/initializers/cookies_serializer.rb +5 -0
- data/spec/dummy/config/initializers/filter_parameter_logging.rb +4 -0
- data/spec/dummy/config/initializers/inflections.rb +16 -0
- data/spec/dummy/config/initializers/message_bus_consumer.rb +5 -0
- data/spec/dummy/config/initializers/mime_types.rb +4 -0
- data/spec/dummy/config/initializers/wrap_parameters.rb +14 -0
- data/spec/dummy/config/locales/en.yml +33 -0
- data/spec/dummy/config/puma.rb +36 -0
- data/spec/dummy/config/routes.rb +3 -0
- data/spec/dummy/config/spring.rb +6 -0
- data/spec/dummy/config/storage.yml +34 -0
- data/spec/dummy/db/schema.rb +21 -0
- data/spec/dummy/log/development.log +4 -0
- data/spec/dummy/log/production.log +18 -0
- data/spec/dummy/log/test.log +6083 -0
- data/spec/dummy/public/404.html +67 -0
- data/spec/dummy/public/422.html +67 -0
- data/spec/dummy/public/500.html +66 -0
- data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
- data/spec/dummy/public/apple-touch-icon.png +0 -0
- data/spec/dummy/public/favicon.ico +0 -0
- data/spec/dummy/tmp/development_secret.txt +1 -0
- data/spec/integration/nulogy_message_bus_consumer/auditor_spec.rb +59 -0
- data/spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb +41 -0
- data/spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +131 -0
- data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb +54 -0
- data/spec/integration/nulogy_message_bus_consumer/steps/supervise_consumer_lag_spec.rb +54 -0
- data/spec/integration/test_topic_spec.rb +39 -0
- data/spec/spec_helper.rb +49 -0
- data/spec/support/kafka.rb +74 -0
- data/spec/support/middleware_tap.rb +12 -0
- data/spec/support/test_topic.rb +48 -0
- data/spec/unit/nulogy_message_bus_consumer/config_spec.rb +20 -0
- data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb +35 -0
- data/spec/unit/nulogy_message_bus_consumer/message_spec.rb +84 -0
- data/spec/unit/nulogy_message_bus_consumer/pipeline_spec.rb +49 -0
- data/spec/unit/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +58 -0
- data/spec/unit/nulogy_message_bus_consumer/steps/deduplicate_messages_spec.rb +56 -0
- data/spec/unit/nulogy_message_bus_consumer/steps/log_messages_spec.rb +70 -0
- data/spec/unit/nulogy_message_bus_consumer/steps/monitor_replication_lag/calculator_spec.rb +63 -0
- data/spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb +35 -0
- data/spec/unit/nulogy_message_bus_consumer_spec.rb +30 -0
- metadata +159 -11
data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb
ADDED
@@ -0,0 +1,54 @@
+RSpec.describe NulogyMessageBusConsumer::Steps::ConnectToMessageBus do
+  subject(:pipeline) do
+    pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: config)
+    pipeline.insert(tap, after: NulogyMessageBusConsumer::Steps::ConnectToMessageBus)
+    pipeline.append(message_handler_spy)
+    pipeline
+  end
+
+  let(:topic) { TestTopic.new }
+  let(:config) { topic.config }
+  let(:tap) { MiddlewareTap.new }
+  let(:message_handler_spy) { double }
+
+  after { topic.close }
+
+  # TODO: This spec is terribly flakey.
+  xit "receives messages" do
+    called = false
+    expect(message_handler_spy).to receive(:call) do |message:, **_kargs|
+      expect(message).to have_attributes(event_data: {data: "Some Payload"})
+      called = true
+      :success
+    end
+
+    pipeline_thread = start(pipeline, tap)
+
+    topic.produce_one_message(
+      key: "Some Key",
+      payload: message_payload(data: "Some Payload")
+    )
+
+    NulogyMessageBusConsumer::KafkaUtils.wait_for { called }
+    Thread.kill(pipeline_thread)
+  end
+
+  def start(pipeline, tap)
+    thr = Thread.new { pipeline.invoke }
+    wait_for_partition_assignment(tap)
+    thr
+  end
+
+  def wait_for_partition_assignment(tap)
+    NulogyMessageBusConsumer::KafkaUtils.wait_for { tap.arguments[:kafka_consumer] }
+    NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(tap.arguments[:kafka_consumer])
+  end
+
+  def message_payload(**payload)
+    JSON.dump(
+      id: SecureRandom.uuid,
+      created_at: 1_000,
+      event_json: JSON.dump(payload)
+    )
+  end
+end
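Note: the spec above builds its pipeline through NulogyMessageBusConsumer.recommended_consumer_pipeline. The sketch below is an illustrative consumer setup assembled only from calls visible in this diff (recommended_consumer_pipeline, append, invoke, KafkaUtils.wait_for, and the handler contract implied by the spy); the handler lambda and the config values are assumptions, not part of the gem.

# Illustrative sketch only. Assumes an appended step can be any callable,
# as the spec's double (which receives :call and returns :success) suggests.
config = NulogyMessageBusConsumer::Config.new(
  consumer_group_id: "example-consumer-group",  # placeholder value
  bootstrap_servers: "localhost:9092",          # placeholder value
  topic_name: "example-topic"                   # placeholder value
)

handler = lambda do |message:, **|
  # Inspect the parsed event payload, then report success to the pipeline.
  puts message.event_data
  :success
end

pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(config: config)
pipeline.append(handler)

consumer_thread = Thread.new { pipeline.invoke }  # runs until the thread is killed, as in the spec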
data/spec/integration/nulogy_message_bus_consumer/steps/supervise_consumer_lag_spec.rb
ADDED
@@ -0,0 +1,54 @@
+module NulogyMessageBusConsumer
+  module Steps
+    RSpec.describe SuperviseConsumerLag do
+      # RSpec does not like when objects (spies, doubles, etc) are used
+      # outside of its scope, e.g.
+      # The use of doubles or partial doubles from rspec-mocks outside of the per-test lifecycle is not supported. (RSpec::Mocks::OutsideOfExampleError)
+      let(:thread_spy) do
+        Class.new do
+          attr_reader :killed
+
+          def initialize
+            @killed = false
+          end
+
+          def kill
+            @killed = true
+          end
+        end
+      end
+
+      it "kills the main thread after lag does not change" do
+        thread = thread_spy.new
+        logger = spy
+
+        consumer = instance_double(
+          Rdkafka::Consumer,
+          committed: nil,
+          lag: {}
+        )
+        # skip waiting for assignment
+        allow(KafkaUtils).to receive(:wait_for_assignment).with(consumer).and_return(nil)
+        tracker = instance_double(
+          LagTracker,
+          failing_checks: 3,
+          failing?: true,
+          failed: {"topic" => ["partition1", "partition2"]}
+        ).as_null_object
+
+        supervisor = described_class.new(
+          logger,
+          tracker: tracker,
+          killable: thread
+        )
+
+        supervisor.call(kafka_consumer: consumer) {}
+
+        KafkaUtils.wait_for { thread.killed }
+
+        expect(logger).to have_received(:warn).with(/Assigned partition lag has not changed/)
+        expect(thread.killed).to be(true)
+      end
+    end
+  end
+end
data/spec/integration/test_topic_spec.rb
ADDED
@@ -0,0 +1,39 @@
+RSpec.describe TestTopic do
+  let(:topic) { TestTopic.new }
+
+  after { topic.close }
+
+  context "smoke test for specs" do
+    it "publishes and receives messages" do
+      topic.produce_one_message(
+        key: "Some Key",
+        payload: "Some Payload"
+      )
+
+      message = topic.consume_one_message
+      expect(message).to have_attributes(
+        key: "Some Key",
+        payload: "Some Payload"
+      )
+    end
+
+    it "receives returns nil when no messages are received" do
+      consumer = topic.consumer
+      message = consumer.poll(1)
+      expect(message).to be(nil)
+    end
+  end
+
+  context "spec helpers" do
+    it "creates topics" do
+      create_topic(topic.topic_name)
+      expect(list_topics).to include(topic.topic_name)
+    end
+
+    it "deletes topics" do
+      create_topic(topic.topic_name)
+      delete_topic(topic.topic_name)
+      expect(list_topics).not_to include(topic.topic_name)
+    end
+  end
+end
data/spec/spec_helper.rb
ADDED
@@ -0,0 +1,49 @@
+require "bundler/setup"
+require "rspec/json_expectations"
+ENV["RAILS_ENV"] ||= "test"
+require File.expand_path("dummy/config/environment", __dir__)
+abort("The Rails environment is running in production mode!") if Rails.env.production?
+require "rspec/rails"
+require "dotenv"
+
+env_file = ENV["CI"] == "true" ? ".env.ci" : ".env.local"
+raise "Expected #{env_file}" unless File.exist?(env_file)
+
+Dotenv.load(env_file)
+
+# Load RSpec helpers.
+ENGINE_ROOT ||= File.expand_path("..", __dir__)
+Dir[File.join(ENGINE_ROOT, "spec/support/**/*.rb")].sort.each { |f| require f }
+
+# Load migrations from the dummy app.
+# ActiveRecord::Migrator.migrations_paths = File.join(ENGINE_ROOT, 'spec/dummy/db/migrate')
+begin
+  ActiveRecord::Migration.maintain_test_schema!
+rescue ActiveRecord::PendingMigrationError => e
+  puts e.to_s.strip
+  exit 1
+end
+
+RSpec.configure do |config|
+  config.example_status_persistence_file_path = ".rspec_status"
+
+  # Disable RSpec exposing methods globally on `Module` and `main`
+  config.disable_monkey_patching!
+
+  config.use_transactional_fixtures = true
+
+  config.expect_with :rspec do |c|
+    c.syntax = :expect
+    c.include_chain_clauses_in_custom_matcher_descriptions = true
+  end
+
+  config.mock_with :rspec do |mocks|
+    mocks.verify_partial_doubles = true
+  end
+
+  config.shared_context_metadata_behavior = :apply_to_host_groups
+  config.filter_rails_from_backtrace!
+  config.use_transactional_fixtures = true
+
+  config.include(Kafka)
+end
data/spec/support/kafka.rb
ADDED
@@ -0,0 +1,74 @@
+require "open3"
+
+module Kafka
+  module_function
+
+  def kafka_config
+    config = {
+      "auto.offset.reset": "beginning",
+      "bootstrap.servers": test_bootstrap_servers,
+      "enable.auto.commit": false,
+      "group.id": random_consumer_group
+    }
+
+    Rdkafka::Config.new(config)
+  end
+
+  def random_topic_name
+    "test-topic-#{SecureRandom.uuid}"
+  end
+
+  def random_consumer_group
+    "ruby-test-consumer-group-#{SecureRandom.uuid}"
+  end
+
+  def test_bootstrap_servers
+    "#{ENV["MBC_KAFKA_HOST"]}:#{ENV["MBC_KAFKA_PORT"]}"
+  end
+
+  def setup_kafka_producer
+    kafka_config.producer
+  end
+
+  def setup_kafka_consumer(topic_name)
+    consumer = kafka_config.consumer
+    consumer.subscribe(topic_name)
+    NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(consumer)
+    consumer
+  end
+
+  def create_topic(topic_name)
+    run("kaf topic create #{topic_name} --brokers #{test_bootstrap_servers} --replicas 1 --partitions 3")
+  end
+
+  def delete_topic(topic_name)
+    run("kaf topic delete #{topic_name} --brokers #{test_bootstrap_servers}")
+  end
+
+  def list_topics
+    topics = run("kaf topics --brokers #{test_bootstrap_servers}")
+    topics.split(" ")
+  end
+
+  def run(command)
+    stdout, stderr, status = Open3.capture3(command)
+    raise <<~OUTPUT if status != 0
+      Command `#{command}` failed with:
+      STDOUT:
+      #{stdout}
+
+      STDERR:
+      #{stderr}
+    OUTPUT
+
+    stdout
+  end
+
+  def test_config(topic_name)
+    NulogyMessageBusConsumer::Config.new(
+      consumer_group_id: random_consumer_group,
+      bootstrap_servers: test_bootstrap_servers,
+      topic_name: topic_name
+    )
+  end
+end
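Note: taken together, the helpers above support a full produce/consume round trip against a local broker. The following is a rough sketch using only methods defined in this file plus the producer/consumer calls shown in spec/support/test_topic.rb; it assumes MBC_KAFKA_HOST and MBC_KAFKA_PORT point at a reachable broker and that the kaf CLI is on the PATH, since create_topic, delete_topic and list_topics shell out to it.

# Rough sketch of a round trip with the helpers above (assumptions noted in the text).
topic_name = Kafka.random_topic_name
Kafka.create_topic(topic_name)

producer = Kafka.setup_kafka_producer
producer.produce(topic: topic_name, key: "some-key", payload: "some-payload").wait

consumer = Kafka.setup_kafka_consumer(topic_name)  # subscribes and waits for assignment
message = consumer.poll(250)                       # may be nil if delivery is slow

producer.close
consumer.close
Kafka.delete_topic(topic_name)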
data/spec/support/test_topic.rb
ADDED
@@ -0,0 +1,48 @@
+class TestTopic
+  attr_reader :topic_name
+
+  def initialize(topic_name: Kafka.random_topic_name)
+    @topic_name = topic_name
+  end
+
+  def consumer
+    @consumer ||= Kafka.setup_kafka_consumer(topic_name)
+  end
+
+  def producer
+    @producer ||= Kafka.setup_kafka_producer
+  end
+
+  def produce_one_message(key: "TEST KEY", payload: '{ "KEY": "TEST PAYLOAD" }', **kwargs)
+    if kwargs.key?(:topic)
+      raise "Do not specify the topic when producing with a TestTopic. Create a new TestTopic instead."
+    end
+
+    id = SecureRandom.uuid
+    if kwargs.key?(:event_json)
+      kwargs[:payload] = JSON.dump({id: id, event_json: JSON.dump(kwargs.delete(:event_json))})
+    end
+
+    producer.produce(
+      topic: topic_name,
+      key: key,
+      payload: payload,
+      **kwargs
+    ).wait
+
+    id
+  end
+
+  def consume_one_message
+    consumer.poll(250)
+  end
+
+  def config
+    Kafka.test_config(topic_name)
+  end
+
+  def close
+    @producer&.close
+    @consumer&.close
+  end
+end
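Note: only the plain payload: path of produce_one_message is exercised by the specs shown here; the event_json: branch wraps the given hash in an {id:, event_json:} envelope and returns the generated id. A minimal sketch of that branch, using only methods defined on TestTopic (broker settings are assumed to come from the MBC_KAFKA_* variables used in spec/support/kafka.rb):

# Minimal sketch of the event_json: branch; assumes a reachable test broker.
topic = TestTopic.new
message_id = topic.produce_one_message(event_json: {hello: "world"})

raw = topic.consume_one_message  # an rdkafka message, or nil on timeout
puts raw&.payload                # JSON containing the generated id and the nested event_json
topic.close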
data/spec/unit/nulogy_message_bus_consumer/config_spec.rb
ADDED
@@ -0,0 +1,20 @@
+module NulogyMessageBusConsumer
+  RSpec.describe Config do
+    describe "defaults" do
+      it "lag_check_interval_seconds to 20" do
+        expect(Config.new.lag_check_interval_seconds).to be(20)
+      end
+
+      it "lag_checks to 6" do
+        expect(Config.new.lag_checks).to be(6)
+      end
+    end
+
+    it "merges defaults" do
+      config = Config.new(lag_checks: 3)
+
+      expect(config.lag_checks).to be(3)
+      expect(config.lag_check_interval_seconds).to be(20)
+    end
+  end
+end
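Note: across this diff, Config is constructed with consumer_group_id, bootstrap_servers and topic_name (see test_config in spec/support/kafka.rb) and accepts overrides such as lag_checks, falling back to the defaults asserted above. An illustrative construction, with placeholder values:

# Illustrative only: keyword names are taken from this diff; values are placeholders.
config = NulogyMessageBusConsumer::Config.new(
  consumer_group_id: "my-consumer-group",
  bootstrap_servers: "localhost:9092",
  topic_name: "my-topic",
  lag_checks: 3  # overrides the default of 6; lag_check_interval_seconds keeps its default of 20
)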
data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb
ADDED
@@ -0,0 +1,35 @@
+module NulogyMessageBusConsumer
+  RSpec.describe LagTracker do
+    it "does not fail when values change" do
+      tracker = described_class.new(failing_checks: 1)
+
+      tracker.update({"one" => {"0" => 1, "1" => 1}})
+      tracker.update({"one" => {"0" => 2, "1" => 0}})
+
+      expect(tracker).not_to be_failing
+    end
+
+    it "tracks failed when value unchanged enough" do
+      tracker = described_class.new(failing_checks: 2)
+
+      tracker.update({"one" => {"0" => 1, "1" => 1}})
+      tracker.update({"one" => {"0" => 1, "1" => 2}})
+      tracker.update({"one" => {"0" => 1, "1" => 3}})
+
+      expect(tracker).to be_failing
+      expect(tracker.failed).to eq({
+        "one" => ["0"]
+      })
+    end
+
+    it "ignores unchanged 0 values" do
+      tracker = described_class.new(failing_checks: 1)
+
+      tracker.update({"one" => {"0" => 0}})
+      tracker.update({"one" => {"0" => 0}})
+      tracker.update({"one" => {"0" => 0}})
+
+      expect(tracker).not_to be_failing
+    end
+  end
+end
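Note: as these examples suggest, the tracker's contract is per partition: a partition is reported via failed only after its non-zero lag remains unchanged for failing_checks consecutive checks, and a lag of 0 is never counted. A short sketch using the same API:

# Sketch of the LagTracker contract exercised above.
tracker = NulogyMessageBusConsumer::LagTracker.new(failing_checks: 2)

tracker.update({"events" => {"0" => 5, "1" => 0}})
tracker.update({"events" => {"0" => 5, "1" => 0}})
tracker.update({"events" => {"0" => 5, "1" => 0}})

tracker.failing?  # => true, partition "0" has been stuck at a lag of 5
tracker.failed    # => {"events" => ["0"]} ("1" is ignored because its lag is 0)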
data/spec/unit/nulogy_message_bus_consumer/message_spec.rb
ADDED
@@ -0,0 +1,84 @@
+RSpec.describe NulogyMessageBusConsumer::Message do
+  describe ".from_kafka" do
+    let(:message_id) { SecureRandom.uuid }
+    let(:subscription_id) { SecureRandom.uuid }
+    let(:company_uuid) { SecureRandom.uuid }
+    let(:partition_key) { [company_uuid, subscription_id].join(",") }
+
+    it "builds a message from a kafka message" do
+      kafka_message = build_kafka_message(
+        payload: JSON.dump(
+          id: message_id,
+          subscription_id: subscription_id,
+          company_uuid: company_uuid,
+          event_json: JSON.dump(hello: "world")
+        )
+      )
+
+      built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+      expect(built).to have_attributes(
+        id: message_id,
+        subscription_id: subscription_id,
+        company_uuid: company_uuid,
+        event_data: {hello: "world"},
+        event_data_unparsed: include_json(hello: "world")
+      )
+    end
+
+    it "expects the event_json to be valid json" do
+      kafka_message = build_kafka_message(
+        payload: JSON.dump(event_json: "not json")
+      )
+
+      built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+      expect(built).to have_attributes(
+        event_data: {},
+        event_data_unparsed: "not json"
+      )
+    end
+
+    it "falls back on some depricated columns" do
+      kafka_message = build_kafka_message(
+        payload: JSON.dump(
+          public_subscription_id: subscription_id,
+          tenant_id: company_uuid
+        )
+      )
+
+      built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
+
+      expect(built).to have_attributes(
+        subscription_id: subscription_id,
+        company_uuid: company_uuid
+      )
+    end
+  end
+
+  def build_kafka_message(overrides = {}) # rubocop:disable Metrics/MethodLength
+    attributes = {
+      topic: "some_topic",
+      partition: 3,
+      payload: JSON.dump(
+        id: message_id,
+        subscription_id: subscription_id,
+        company_uuid: company_uuid,
+        event_json: JSON.dump(
+          hello: "world"
+        )
+      ),
+      key: JSON.dump(
+        schema: {},
+        payload: {
+          partition_key: partition_key
+        }
+      ),
+      offset: 13,
+      timestamp: Time.zone.now,
+      headers: {key: "value"}
+    }.merge(overrides)
+
+    instance_double("Rdkafka::Consumer::Message", attributes)
+  end
+end