nulogy_message_bus_consumer 0.5.0 → 1.0.0.alpha

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (78)
  1. checksums.yaml +4 -4
  2. data/lib/nulogy_message_bus_consumer/message.rb +13 -11
  3. data/lib/nulogy_message_bus_consumer/version.rb +1 -1
  4. metadata +11 -159
  5. data/spec/dummy/Rakefile +0 -6
  6. data/spec/dummy/app/assets/config/manifest.js +0 -3
  7. data/spec/dummy/app/assets/stylesheets/application.css +0 -15
  8. data/spec/dummy/app/channels/application_cable/channel.rb +0 -4
  9. data/spec/dummy/app/channels/application_cable/connection.rb +0 -4
  10. data/spec/dummy/app/controllers/application_controller.rb +0 -2
  11. data/spec/dummy/app/helpers/application_helper.rb +0 -2
  12. data/spec/dummy/app/javascript/packs/application.js +0 -15
  13. data/spec/dummy/app/jobs/application_job.rb +0 -7
  14. data/spec/dummy/app/mailers/application_mailer.rb +0 -4
  15. data/spec/dummy/app/models/application_record.rb +0 -3
  16. data/spec/dummy/app/views/layouts/application.html.erb +0 -14
  17. data/spec/dummy/app/views/layouts/mailer.html.erb +0 -13
  18. data/spec/dummy/app/views/layouts/mailer.text.erb +0 -1
  19. data/spec/dummy/bin/rails +0 -4
  20. data/spec/dummy/bin/rake +0 -4
  21. data/spec/dummy/bin/setup +0 -33
  22. data/spec/dummy/config/application.rb +0 -29
  23. data/spec/dummy/config/boot.rb +0 -5
  24. data/spec/dummy/config/cable.yml +0 -10
  25. data/spec/dummy/config/credentials/message-bus-us-east-1.key +0 -1
  26. data/spec/dummy/config/credentials/message-bus-us-east-1.yml.enc +0 -1
  27. data/spec/dummy/config/database.yml +0 -27
  28. data/spec/dummy/config/environment.rb +0 -5
  29. data/spec/dummy/config/environments/development.rb +0 -62
  30. data/spec/dummy/config/environments/production.rb +0 -112
  31. data/spec/dummy/config/environments/test.rb +0 -49
  32. data/spec/dummy/config/initializers/application_controller_renderer.rb +0 -8
  33. data/spec/dummy/config/initializers/assets.rb +0 -12
  34. data/spec/dummy/config/initializers/backtrace_silencers.rb +0 -7
  35. data/spec/dummy/config/initializers/content_security_policy.rb +0 -28
  36. data/spec/dummy/config/initializers/cookies_serializer.rb +0 -5
  37. data/spec/dummy/config/initializers/filter_parameter_logging.rb +0 -4
  38. data/spec/dummy/config/initializers/inflections.rb +0 -16
  39. data/spec/dummy/config/initializers/message_bus_consumer.rb +0 -5
  40. data/spec/dummy/config/initializers/mime_types.rb +0 -4
  41. data/spec/dummy/config/initializers/wrap_parameters.rb +0 -14
  42. data/spec/dummy/config/locales/en.yml +0 -33
  43. data/spec/dummy/config/puma.rb +0 -36
  44. data/spec/dummy/config/routes.rb +0 -3
  45. data/spec/dummy/config/spring.rb +0 -6
  46. data/spec/dummy/config/storage.yml +0 -34
  47. data/spec/dummy/config.ru +0 -5
  48. data/spec/dummy/db/schema.rb +0 -21
  49. data/spec/dummy/log/development.log +0 -4
  50. data/spec/dummy/log/production.log +0 -18
  51. data/spec/dummy/log/test.log +0 -6083
  52. data/spec/dummy/public/404.html +0 -67
  53. data/spec/dummy/public/422.html +0 -67
  54. data/spec/dummy/public/500.html +0 -66
  55. data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
  56. data/spec/dummy/public/apple-touch-icon.png +0 -0
  57. data/spec/dummy/public/favicon.ico +0 -0
  58. data/spec/dummy/tmp/development_secret.txt +0 -1
  59. data/spec/integration/nulogy_message_bus_consumer/auditor_spec.rb +0 -59
  60. data/spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb +0 -41
  61. data/spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +0 -131
  62. data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb +0 -54
  63. data/spec/integration/nulogy_message_bus_consumer/steps/supervise_consumer_lag_spec.rb +0 -54
  64. data/spec/integration/test_topic_spec.rb +0 -39
  65. data/spec/spec_helper.rb +0 -49
  66. data/spec/support/kafka.rb +0 -74
  67. data/spec/support/middleware_tap.rb +0 -12
  68. data/spec/support/test_topic.rb +0 -48
  69. data/spec/unit/nulogy_message_bus_consumer/config_spec.rb +0 -20
  70. data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb +0 -35
  71. data/spec/unit/nulogy_message_bus_consumer/message_spec.rb +0 -84
  72. data/spec/unit/nulogy_message_bus_consumer/pipeline_spec.rb +0 -49
  73. data/spec/unit/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +0 -58
  74. data/spec/unit/nulogy_message_bus_consumer/steps/deduplicate_messages_spec.rb +0 -56
  75. data/spec/unit/nulogy_message_bus_consumer/steps/log_messages_spec.rb +0 -70
  76. data/spec/unit/nulogy_message_bus_consumer/steps/monitor_replication_lag/calculator_spec.rb +0 -63
  77. data/spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb +0 -35
  78. data/spec/unit/nulogy_message_bus_consumer_spec.rb +0 -30
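
Aside from checksums.yaml, message.rb, version.rb, and the gem metadata, every entry above is a deletion: the dummy Rails app and the spec suite under data/spec/ are no longer shipped inside the packaged gem. To confirm this against the released artifacts yourself, a small sketch using RubyGems' packaging API (it assumes both .gem files have already been downloaded locally, for example with gem fetch):

# Sketch: compare the file lists of two locally downloaded .gem files.
require "rubygems/package"

old_files = Gem::Package.new("nulogy_message_bus_consumer-0.5.0.gem").contents
new_files = Gem::Package.new("nulogy_message_bus_consumer-1.0.0.alpha.gem").contents

puts "No longer packaged:"
puts(old_files - new_files)
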

data/spec/support/kafka.rb
@@ -1,74 +0,0 @@
- require "open3"
-
- module Kafka
-   module_function
-
-   def kafka_config
-     config = {
-       "auto.offset.reset": "beginning",
-       "bootstrap.servers": test_bootstrap_servers,
-       "enable.auto.commit": false,
-       "group.id": random_consumer_group
-     }
-
-     Rdkafka::Config.new(config)
-   end
-
-   def random_topic_name
-     "test-topic-#{SecureRandom.uuid}"
-   end
-
-   def random_consumer_group
-     "ruby-test-consumer-group-#{SecureRandom.uuid}"
-   end
-
-   def test_bootstrap_servers
-     "#{ENV["MBC_KAFKA_HOST"]}:#{ENV["MBC_KAFKA_PORT"]}"
-   end
-
-   def setup_kafka_producer
-     kafka_config.producer
-   end
-
-   def setup_kafka_consumer(topic_name)
-     consumer = kafka_config.consumer
-     consumer.subscribe(topic_name)
-     NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(consumer)
-     consumer
-   end
-
-   def create_topic(topic_name)
-     run("kaf topic create #{topic_name} --brokers #{test_bootstrap_servers} --replicas 1 --partitions 3")
-   end
-
-   def delete_topic(topic_name)
-     run("kaf topic delete #{topic_name} --brokers #{test_bootstrap_servers}")
-   end
-
-   def list_topics
-     topics = run("kaf topics --brokers #{test_bootstrap_servers}")
-     topics.split(" ")
-   end
-
-   def run(command)
-     stdout, stderr, status = Open3.capture3(command)
-     raise <<~OUTPUT if status != 0
-       Command `#{command}` failed with:
-       STDOUT:
-       #{stdout}
-
-       STDERR:
-       #{stderr}
-     OUTPUT
-
-     stdout
-   end
-
-   def test_config(topic_name)
-     NulogyMessageBusConsumer::Config.new(
-       consumer_group_id: random_consumer_group,
-       bootstrap_servers: test_bootstrap_servers,
-       topic_name: topic_name
-     )
-   end
- end

data/spec/support/middleware_tap.rb
@@ -1,12 +0,0 @@
- class MiddlewareTap
-   attr_reader :arguments
-
-   def initialize
-     @arguments = {}
-   end
-
-   def call(**arguments)
-     @arguments = arguments
-     yield
-   end
- end

data/spec/support/test_topic.rb
@@ -1,48 +0,0 @@
- class TestTopic
-   attr_reader :topic_name
-
-   def initialize(topic_name: Kafka.random_topic_name)
-     @topic_name = topic_name
-   end
-
-   def consumer
-     @consumer ||= Kafka.setup_kafka_consumer(topic_name)
-   end
-
-   def producer
-     @producer ||= Kafka.setup_kafka_producer
-   end
-
-   def produce_one_message(key: "TEST KEY", payload: '{ "KEY": "TEST PAYLOAD" }', **kwargs)
-     if kwargs.key?(:topic)
-       raise "Do not specify the topic when producing with a TestTopic. Create a new TestTopic instead."
-     end
-
-     id = SecureRandom.uuid
-     if kwargs.key?(:event_json)
-       kwargs[:payload] = JSON.dump({id: id, event_json: JSON.dump(kwargs.delete(:event_json))})
-     end
-
-     producer.produce(
-       topic: topic_name,
-       key: key,
-       payload: payload,
-       **kwargs
-     ).wait
-
-     id
-   end
-
-   def consume_one_message
-     consumer.poll(250)
-   end
-
-   def config
-     Kafka.test_config(topic_name)
-   end
-
-   def close
-     @producer&.close
-     @consumer&.close
-   end
- end
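
For context, the TestTopic helper above was the specs' main fixture. A hedged sketch of how it was typically driven, mirroring only the methods defined above (illustrative, not taken from the gem's docs):

# Illustrative use of the removed TestTopic helper.
topic = TestTopic.new
message_id = topic.produce_one_message(event_json: {hello: "world"})
kafka_message = topic.consume_one_message # raw Rdkafka message, or nil if nothing arrives within the 250 ms poll
topic.close
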

data/spec/unit/nulogy_message_bus_consumer/config_spec.rb
@@ -1,20 +0,0 @@
- module NulogyMessageBusConsumer
-   RSpec.describe Config do
-     describe "defaults" do
-       it "lag_check_interval_seconds to 20" do
-         expect(Config.new.lag_check_interval_seconds).to be(20)
-       end
-
-       it "lag_checks to 6" do
-         expect(Config.new.lag_checks).to be(6)
-       end
-     end
-
-     it "merges defaults" do
-       config = Config.new(lag_checks: 3)
-
-       expect(config.lag_checks).to be(3)
-       expect(config.lag_check_interval_seconds).to be(20)
-     end
-   end
- end

data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb
@@ -1,35 +0,0 @@
- module NulogyMessageBusConsumer
-   RSpec.describe LagTracker do
-     it "does not fail when values change" do
-       tracker = described_class.new(failing_checks: 1)
-
-       tracker.update({"one" => {"0" => 1, "1" => 1}})
-       tracker.update({"one" => {"0" => 2, "1" => 0}})
-
-       expect(tracker).not_to be_failing
-     end
-
-     it "tracks failed when value unchanged enough" do
-       tracker = described_class.new(failing_checks: 2)
-
-       tracker.update({"one" => {"0" => 1, "1" => 1}})
-       tracker.update({"one" => {"0" => 1, "1" => 2}})
-       tracker.update({"one" => {"0" => 1, "1" => 3}})
-
-       expect(tracker).to be_failing
-       expect(tracker.failed).to eq({
-         "one" => ["0"]
-       })
-     end
-
-     it "ignores unchanged 0 values" do
-       tracker = described_class.new(failing_checks: 1)
-
-       tracker.update({"one" => {"0" => 0}})
-       tracker.update({"one" => {"0" => 0}})
-       tracker.update({"one" => {"0" => 0}})
-
-       expect(tracker).not_to be_failing
-     end
-   end
- end

data/spec/unit/nulogy_message_bus_consumer/message_spec.rb
@@ -1,84 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Message do
-   describe ".from_kafka" do
-     let(:message_id) { SecureRandom.uuid }
-     let(:subscription_id) { SecureRandom.uuid }
-     let(:company_uuid) { SecureRandom.uuid }
-     let(:partition_key) { [company_uuid, subscription_id].join(",") }
-
-     it "builds a message from a kafka message" do
-       kafka_message = build_kafka_message(
-         payload: JSON.dump(
-           id: message_id,
-           subscription_id: subscription_id,
-           company_uuid: company_uuid,
-           event_json: JSON.dump(hello: "world")
-         )
-       )
-
-       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
-
-       expect(built).to have_attributes(
-         id: message_id,
-         subscription_id: subscription_id,
-         company_uuid: company_uuid,
-         event_data: {hello: "world"},
-         event_data_unparsed: include_json(hello: "world")
-       )
-     end
-
-     it "expects the event_json to be valid json" do
-       kafka_message = build_kafka_message(
-         payload: JSON.dump(event_json: "not json")
-       )
-
-       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
-
-       expect(built).to have_attributes(
-         event_data: {},
-         event_data_unparsed: "not json"
-       )
-     end
-
-     it "falls back on some depricated columns" do
-       kafka_message = build_kafka_message(
-         payload: JSON.dump(
-           public_subscription_id: subscription_id,
-           tenant_id: company_uuid
-         )
-       )
-
-       built = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
-
-       expect(built).to have_attributes(
-         subscription_id: subscription_id,
-         company_uuid: company_uuid
-       )
-     end
-   end
-
-   def build_kafka_message(overrides = {}) # rubocop:disable Metrics/MethodLength
-     attributes = {
-       topic: "some_topic",
-       partition: 3,
-       payload: JSON.dump(
-         id: message_id,
-         subscription_id: subscription_id,
-         company_uuid: company_uuid,
-         event_json: JSON.dump(
-           hello: "world"
-         )
-       ),
-       key: JSON.dump(
-         schema: {},
-         payload: {
-           partition_key: partition_key
-         }
-       ),
-       offset: 13,
-       timestamp: Time.zone.now,
-       headers: {key: "value"}
-     }.merge(overrides)
-
-     instance_double("Rdkafka::Consumer::Message", attributes)
-   end
- end
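
For reference, the spec above pins down the wire format that Message.from_kafka expects: the Kafka payload is a JSON envelope whose event_json field is itself a JSON-encoded string, and public_subscription_id / tenant_id are accepted as deprecated fallbacks for subscription_id / company_uuid. A hedged illustration of a conforming payload (the values are invented):

require "json"
require "securerandom"

# Illustrative envelope only; the structure mirrors the spec above, the values do not.
payload = JSON.dump(
  id: SecureRandom.uuid,
  subscription_id: SecureRandom.uuid,   # deprecated fallback key: public_subscription_id
  company_uuid: SecureRandom.uuid,      # deprecated fallback key: tenant_id
  event_json: JSON.dump(hello: "world") # nested JSON string, parsed into event_data
)
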

data/spec/unit/nulogy_message_bus_consumer/pipeline_spec.rb
@@ -1,49 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Pipeline do
-   let(:handler) { spy }
-
-   describe "#invoke_pipeline" do
-     it "raises if handler is not provided" do
-       expect { invoke_pipeline }.to raise_error("Handlers are the end of the line. Do not use yield.")
-     end
-
-     it "calls the handler with message" do
-       message = NulogyMessageBusConsumer::Message.new
-
-       expect(handler).to receive(:call).with(message: message)
-
-       invoke_pipeline(
-         ->(**_, &block) { block.yield(message: message) },
-         handler
-       )
-     end
-
-     it "passes information along" do
-       message = NulogyMessageBusConsumer::Message.new
-
-       expect(handler).to receive(:call).with(message: message)
-
-       invoke_pipeline(
-         ->(**_, &block) { block.yield(message: message) },
-         ->(**_, &block) { block.yield },
-         handler
-       )
-     end
-
-     it "raises when overwriting an existing key" do
-       message = NulogyMessageBusConsumer::Message.new
-       other_message = NulogyMessageBusConsumer::Message.new
-
-       expect {
-         invoke_pipeline(
-           ->(**_, &block) { block.yield(message: message) },
-           ->(**_, &block) { block.yield(message: other_message) },
-           handler
-         )
-       }.to raise_error("Cannot override existing argument(s): message")
-     end
-   end
-
-   def invoke_pipeline(*steps)
-     NulogyMessageBusConsumer::Pipeline.new(steps).invoke
-   end
- end
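
The pipeline contract exercised above: each step is a callable that takes keyword arguments plus a block, may add new keyword arguments when yielding to the next step (but may not overwrite existing ones), and the terminal handler simply receives the accumulated arguments without yielding. A hedged sketch of a custom step written against that contract (the class is hypothetical, not part of the gem):

# Hypothetical timing step, sketched against the pipeline contract shown above.
class TimingStep
  def call(**_args, &block)
    started_at = Process.clock_gettime(Process::CLOCK_MONOTONIC)
    result = block.yield # invoke the next step (ultimately the handler)
    elapsed = Process.clock_gettime(Process::CLOCK_MONOTONIC) - started_at
    puts format("message handled in %.3fs", elapsed)
    result
  end
end
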

data/spec/unit/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb
@@ -1,58 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Steps::CommitOnSuccess do
-   subject(:step) { NulogyMessageBusConsumer::Steps::CommitOnSuccess.new }
-
-   let(:kafka_consumer) { spy }
-   let(:message) { NulogyMessageBusConsumer::Message.new }
-
-   context "when result is :success" do
-     it "returns the result" do
-       result = step.call(kafka_consumer: kafka_consumer, message: message) { :success }
-
-       expect(result).to be(:success)
-     end
-
-     it "commits the message offset" do
-       expect(kafka_consumer).to receive(:store_offset).with(message)
-       expect(kafka_consumer).to receive(:commit)
-
-       step.call(kafka_consumer: kafka_consumer, message: message) { :success }
-     end
-   end
-
-   context "when result is :failure" do
-     it "returns the result" do
-       result = step.call(kafka_consumer: kafka_consumer, message: message) { :failure }
-
-       expect(result).to be(:failure)
-     end
-
-     it "reconnects to Kafka when failure" do
-       # When subscribing, we subscribe to a topic (i.e. the keys)
-       topic_partition_list = {topic_name: %i[partition_1 partition_2]}
-       allow(kafka_consumer).to receive(:subscription).and_return(topic_partition_list)
-
-       expect(kafka_consumer).to receive(:unsubscribe)
-       expect(kafka_consumer).to receive(:subscribe).with(:topic_name)
-
-       step.call(kafka_consumer: kafka_consumer, message: message) { :failure }
-     end
-   end
-
-   context "when result is something else" do
-     it "raises" do
-       expect {
-         step.call(kafka_consumer: kafka_consumer, message: message) { :bogus_value }
-       }.to raise_error("'bogus_value' is not a valid processing outcome. Must be :success or :failure")
-     end
-   end
-
-   context "when downstream steps raise" do
-     it "does not commit" do
-       expect(kafka_consumer).not_to receive(:commit)
-
-       expect {
-         step.call(kafka_consumer: kafka_consumer, message: message) { raise "oopsie" }
-       }.to raise_error("oopsie")
-     end
-   end
- end
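
Taken together, these examples spell out the contract CommitOnSuccess enforces on whatever runs downstream of it: return :success to have the offset stored and committed, return :failure to skip the commit and trigger an unsubscribe/resubscribe, return anything else and the step raises, and an uncaught exception also prevents the commit. A hedged handler sketch written to that contract (the class and its workload are hypothetical):

# Hypothetical end-of-pipeline handler, sketched against the contract above.
class ExampleHandler
  def call(message:, **)
    do_work(message.event_data) # placeholder for application-specific processing
    :success                    # offset will be stored and committed
  rescue StandardError
    :failure                    # no commit; the consumer resubscribes
  end

  private

  def do_work(event_data)
    # application code goes here
  end
end
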

data/spec/unit/nulogy_message_bus_consumer/steps/deduplicate_messages_spec.rb
@@ -1,56 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Steps::DeduplicateMessages do
-   subject(:deduplicate_messages) { NulogyMessageBusConsumer::Steps::DeduplicateMessages.new(logger) }
-
-   let(:logger) { spy }
-   let(:id) { SecureRandom.uuid }
-   let(:message) { NulogyMessageBusConsumer::Message.new(id: id) }
-
-   context "when a duplicate message is consumed" do
-     it "logs a duplicate was detected" do
-       expect(logger).to receive(:warn).with(include_json(
-         event: "duplicate_message_detected",
-         kafka_message_id: id
-       ))
-
-       deduplicate_messages.call(message: message) { :success }
-       deduplicate_messages.call(message: message) { raise "Not evaluated" }
-     end
-
-     it "returns :success" do
-       deduplicate_messages.call(message: message) { :success }
-
-       result = deduplicate_messages.call(message: message) { raise "Not evaluated" }
-
-       expect(result).to be(:success)
-     end
-   end
-
-   context "when a duplicate message is processed at the same time" do
-     it "logs a duplicate was detected" do
-       expect(logger).to receive(:warn).with(include_json(
-         event: "duplicate_message_detected",
-         kafka_message_id: id
-       ))
-
-       deduplicate_messages.call(message: message) do
-         NulogyMessageBusConsumer::ProcessedMessage.create!(id: id)
-         :success
-       end
-     end
-
-     it "returns :success" do
-       result = deduplicate_messages.call(message: message) {
-         NulogyMessageBusConsumer::ProcessedMessage.create!(id: id)
-         :success
-       }
-
-       expect(result).to be(:success)
-     end
-   end
-
-   it "records message was processed" do
-     deduplicate_messages.call(message: message) { :success }
-
-     expect(NulogyMessageBusConsumer::ProcessedMessage.exists?(id: id)).to be(true)
-   end
- end

data/spec/unit/nulogy_message_bus_consumer/steps/log_messages_spec.rb
@@ -1,70 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Steps::LogMessages do
-   let(:logger) { spy }
-
-   it "logs the message was received" do
-     log_message = NulogyMessageBusConsumer::Steps::LogMessages.new(logger)
-
-     expect(logger).to receive(:info).with(include_json(
-       event: "message_received",
-       kafka_message_id: "id",
-       message: "Received id"
-     ))
-
-     log_message.call(message: message) {}
-   end
-
-   it "logs the message was processed" do
-     log_message = NulogyMessageBusConsumer::Steps::LogMessages.new(logger)
-
-     expect(logger).to receive(:info).with(include_json(
-       event: "message_processed",
-       kafka_message_id: "id",
-       message: match(/^Processed id/)
-     ))
-
-     log_message.call(message: message) {}
-   end
-
-   it "logs the message's time to processed" do
-     # clock always gives milliseconds since epoch
-     clock = instance_double(NulogyMessageBusConsumer::Clock, ms: 1_000_001_234)
-     log_message = NulogyMessageBusConsumer::Steps::LogMessages.new(logger, clock: clock)
-
-     expect(logger).to receive(:info).with(include_json(
-       event: "message_processed",
-       kafka_message_id: "id",
-       time_to_processed: 1234
-     ))
-
-     # debezium converts time with zone to nanoseconds since epoch
-     log_message.call(message: message(created_at: 1_000_000_000_000)) {}
-   end
-
-   it "logs the message's result" do
-     log_message = NulogyMessageBusConsumer::Steps::LogMessages.new(logger)
-
-     expect(logger).to receive(:info).with(include_json(
-       event: "message_processed",
-       result: "success"
-     ))
-
-     log_message.call(message: message) { :success }
-
-     expect(logger).to receive(:info).with(include_json(
-       event: "message_processed",
-       result: "failure"
-     ))
-
-     log_message.call(message: message) { :failure }
-   end
-
-   it "returns a result" do
-     log_message = NulogyMessageBusConsumer::Steps::LogMessages.new(logger)
-
-     expect(log_message.call(message: message) { :return_value }).to eq(:return_value)
-   end
-
-   def message(id: "id", created_at: 1_000, **attrs)
-     NulogyMessageBusConsumer::Message.new(id: id, created_at: created_at, **attrs)
-   end
- end

data/spec/unit/nulogy_message_bus_consumer/steps/monitor_replication_lag/calculator_spec.rb
@@ -1,63 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Steps::LogConsumerLag::Calculator do
-   subject(:calculator) { NulogyMessageBusConsumer::Steps::LogConsumerLag::Calculator }
-
-   describe ".add_max_lag" do
-     it "adds max lag per partition" do
-       lag_per_topic = {
-         topic_1: {
-           partition_1: 100,
-           partition_2: 900,
-           partition_3: 100
-         },
-         topic_2: {
-           partition_1: 100,
-           partition_2: 200,
-           partition_3: 100
-         }
-       }
-
-       lag_with_max = calculator.add_max_lag(lag_per_topic)
-
-       expect(lag_with_max).to match(
-         _max: 900,
-         topic_1: {
-           _max: 900,
-           partition_1: 100,
-           partition_2: 900,
-           partition_3: 100
-         },
-         topic_2: {
-           _max: 200,
-           partition_1: 100,
-           partition_2: 200,
-           partition_3: 100
-         }
-       )
-     end
-
-     it "sets the max to 0 when no partitions are subscribed to" do
-       lag_per_topic = {
-         topic_1: {}
-       }
-
-       lag_with_max = calculator.add_max_lag(lag_per_topic)
-
-       expect(lag_with_max).to match(
-         _max: 0,
-         topic_1: {
-           _max: 0
-         }
-       )
-     end
-
-     it "sets the max to 0 when no topics are subscribed to" do
-       lag_per_topic = {}
-
-       lag_with_max = calculator.add_max_lag(lag_per_topic)
-
-       expect(lag_with_max).to match(
-         _max: 0
-       )
-     end
-   end
- end

data/spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb
@@ -1,35 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer::Steps::StreamMessages do
-   subject(:step) { NulogyMessageBusConsumer::Steps::StreamMessages.new(logger) }
-
-   let(:config) { NulogyMessageBusConsumer::Config.new }
-   let(:logger) { spy }
-
-   it "logs errors while streaming from kafka" do
-     kafka_consumer = instance_double("Rdkafka::Consumer")
-     allow(kafka_consumer).to receive(:subscribe)
-     allow(kafka_consumer).to receive(:unsubscribe)
-     allow(kafka_consumer).to receive(:each).and_raise(StandardError, "streaming failed")
-
-     expect(logger).to receive(:error).with(include_json(
-       event: "message_processing_errored",
-       class: "StandardError",
-       message: "streaming failed"
-     ))
-
-     expect {
-       step.call(kafka_consumer: kafka_consumer)
-     }.to raise_error(StandardError)
-   end
-
-   context "when handler returns failure" do
-     it "does not commit the message in kafka" do
-       kafka_consumer = spy
-       allow(kafka_consumer).to receive(:each).and_yield(anything)
-       allow(NulogyMessageBusConsumer::Message).to receive(:from_kafka).and_return(NulogyMessageBusConsumer::Message.new)
-
-       expect(kafka_consumer).not_to receive(:commit)
-
-       step.call(kafka_consumer: kafka_consumer) { :failure }
-     end
-   end
- end

data/spec/unit/nulogy_message_bus_consumer_spec.rb
@@ -1,30 +0,0 @@
- RSpec.describe NulogyMessageBusConsumer do
-   describe "#configure" do
-     before do
-       config = NulogyMessageBusConsumer.config
-       config.instance_variable_names.each do |variable_name|
-         config.remove_instance_variable(variable_name)
-       end
-     end
-
-     it "allows configuration to be provided as arguments" do
-       NulogyMessageBusConsumer.configure(
-         bootstrap_servers: "some servers"
-       )
-
-       expect(NulogyMessageBusConsumer.config).to have_attributes(
-         bootstrap_servers: "some servers"
-       )
-     end
-
-     it "allows configuration to be provided as a block" do
-       NulogyMessageBusConsumer.configure do |config|
-         config.bootstrap_servers = "some servers"
-       end
-
-       expect(NulogyMessageBusConsumer.config).to have_attributes(
-         bootstrap_servers: "some servers"
-       )
-     end
-   end
- end
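
Finally, the spec above shows the two supported ways to configure the gem: keyword arguments or a block. A minimal initializer sketch using setting names that appear elsewhere in this diff (bootstrap_servers, topic_name, consumer_group_id); the values are placeholders:

# config/initializers/message_bus_consumer.rb -- illustrative values only.
NulogyMessageBusConsumer.configure do |config|
  config.bootstrap_servers = "localhost:9092"
  config.topic_name = "example-topic"
  config.consumer_group_id = "example-consumer-group"
end

# Equivalent keyword-argument form, as exercised in the spec above:
# NulogyMessageBusConsumer.configure(bootstrap_servers: "localhost:9092")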