nulogy_message_bus_consumer 0.3.0 → 0.5.0

Files changed (99)
  1. checksums.yaml +4 -4
  2. data/Rakefile +5 -4
  3. data/config/credentials/message-bus-us-east-1.key +1 -0
  4. data/config/credentials/message-bus-us-east-1.yml.enc +1 -0
  5. data/lib/nulogy_message_bus_consumer.rb +18 -6
  6. data/lib/nulogy_message_bus_consumer/clock.rb +13 -0
  7. data/lib/nulogy_message_bus_consumer/config.rb +12 -4
  8. data/lib/nulogy_message_bus_consumer/deployment/ecs.rb +23 -0
  9. data/lib/nulogy_message_bus_consumer/handlers/log_unprocessed_messages.rb +2 -1
  10. data/lib/nulogy_message_bus_consumer/kafka_utils.rb +2 -1
  11. data/lib/nulogy_message_bus_consumer/lag_tracker.rb +53 -0
  12. data/lib/nulogy_message_bus_consumer/message.rb +12 -5
  13. data/lib/nulogy_message_bus_consumer/null_logger.rb +6 -3
  14. data/lib/nulogy_message_bus_consumer/pipeline.rb +6 -3
  15. data/lib/nulogy_message_bus_consumer/steps/commit_on_success.rb +1 -0
  16. data/lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb +27 -8
  17. data/lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb +1 -1
  18. data/lib/nulogy_message_bus_consumer/steps/{monitor_replication_lag.rb → log_consumer_lag.rb} +3 -3
  19. data/lib/nulogy_message_bus_consumer/steps/log_messages.rb +14 -3
  20. data/lib/nulogy_message_bus_consumer/steps/stream_messages.rb +2 -2
  21. data/lib/nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left.rb +2 -2
  22. data/lib/nulogy_message_bus_consumer/steps/supervise_consumer_lag.rb +76 -0
  23. data/lib/nulogy_message_bus_consumer/version.rb +1 -1
  24. data/lib/tasks/engine/message_bus_consumer.rake +9 -10
  25. data/spec/dummy/Rakefile +6 -0
  26. data/spec/dummy/app/assets/config/manifest.js +3 -0
  27. data/spec/dummy/app/assets/stylesheets/application.css +15 -0
  28. data/spec/dummy/app/channels/application_cable/channel.rb +4 -0
  29. data/spec/dummy/app/channels/application_cable/connection.rb +4 -0
  30. data/spec/dummy/app/controllers/application_controller.rb +2 -0
  31. data/spec/dummy/app/helpers/application_helper.rb +2 -0
  32. data/spec/dummy/app/javascript/packs/application.js +15 -0
  33. data/spec/dummy/app/jobs/application_job.rb +7 -0
  34. data/spec/dummy/app/mailers/application_mailer.rb +4 -0
  35. data/spec/dummy/app/models/application_record.rb +3 -0
  36. data/spec/dummy/app/views/layouts/application.html.erb +14 -0
  37. data/spec/dummy/app/views/layouts/mailer.html.erb +13 -0
  38. data/spec/dummy/app/views/layouts/mailer.text.erb +1 -0
  39. data/spec/dummy/bin/rails +4 -0
  40. data/spec/dummy/bin/rake +4 -0
  41. data/spec/dummy/bin/setup +33 -0
  42. data/spec/dummy/config.ru +5 -0
  43. data/spec/dummy/config/application.rb +29 -0
  44. data/spec/dummy/config/boot.rb +5 -0
  45. data/spec/dummy/config/cable.yml +10 -0
  46. data/spec/dummy/config/credentials/message-bus-us-east-1.key +1 -0
  47. data/spec/dummy/config/credentials/message-bus-us-east-1.yml.enc +1 -0
  48. data/spec/dummy/config/database.yml +27 -0
  49. data/spec/dummy/config/environment.rb +5 -0
  50. data/spec/dummy/config/environments/development.rb +62 -0
  51. data/spec/dummy/config/environments/production.rb +112 -0
  52. data/spec/dummy/config/environments/test.rb +49 -0
  53. data/spec/dummy/config/initializers/application_controller_renderer.rb +8 -0
  54. data/spec/dummy/config/initializers/assets.rb +12 -0
  55. data/spec/dummy/config/initializers/backtrace_silencers.rb +7 -0
  56. data/spec/dummy/config/initializers/content_security_policy.rb +28 -0
  57. data/spec/dummy/config/initializers/cookies_serializer.rb +5 -0
  58. data/spec/dummy/config/initializers/filter_parameter_logging.rb +4 -0
  59. data/spec/dummy/config/initializers/inflections.rb +16 -0
  60. data/spec/dummy/config/initializers/message_bus_consumer.rb +5 -0
  61. data/spec/dummy/config/initializers/mime_types.rb +4 -0
  62. data/spec/dummy/config/initializers/wrap_parameters.rb +14 -0
  63. data/spec/dummy/config/locales/en.yml +33 -0
  64. data/spec/dummy/config/puma.rb +36 -0
  65. data/spec/dummy/config/routes.rb +3 -0
  66. data/spec/dummy/config/spring.rb +6 -0
  67. data/spec/dummy/config/storage.yml +34 -0
  68. data/spec/dummy/db/schema.rb +21 -0
  69. data/spec/dummy/log/development.log +4 -0
  70. data/spec/dummy/log/production.log +18 -0
  71. data/spec/dummy/log/test.log +6083 -0
  72. data/spec/dummy/public/404.html +67 -0
  73. data/spec/dummy/public/422.html +67 -0
  74. data/spec/dummy/public/500.html +66 -0
  75. data/spec/dummy/public/apple-touch-icon-precomposed.png +0 -0
  76. data/spec/dummy/public/apple-touch-icon.png +0 -0
  77. data/spec/dummy/public/favicon.ico +0 -0
  78. data/spec/dummy/tmp/development_secret.txt +1 -0
  79. data/spec/integration/nulogy_message_bus_consumer/auditor_spec.rb +59 -0
  80. data/spec/integration/nulogy_message_bus_consumer/kafka_utils_spec.rb +41 -0
  81. data/spec/integration/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +131 -0
  82. data/spec/integration/nulogy_message_bus_consumer/steps/connect_to_message_bus_spec.rb +54 -0
  83. data/spec/integration/nulogy_message_bus_consumer/steps/supervise_consumer_lag_spec.rb +54 -0
  84. data/spec/integration/test_topic_spec.rb +39 -0
  85. data/spec/spec_helper.rb +49 -0
  86. data/spec/support/kafka.rb +74 -0
  87. data/spec/support/middleware_tap.rb +12 -0
  88. data/spec/support/test_topic.rb +48 -0
  89. data/spec/unit/nulogy_message_bus_consumer/config_spec.rb +20 -0
  90. data/spec/unit/nulogy_message_bus_consumer/lag_tracker.rb +35 -0
  91. data/spec/unit/nulogy_message_bus_consumer/message_spec.rb +84 -0
  92. data/spec/unit/nulogy_message_bus_consumer/pipeline_spec.rb +49 -0
  93. data/spec/unit/nulogy_message_bus_consumer/steps/commit_on_success_spec.rb +58 -0
  94. data/spec/unit/nulogy_message_bus_consumer/steps/deduplicate_messages_spec.rb +56 -0
  95. data/spec/unit/nulogy_message_bus_consumer/steps/log_messages_spec.rb +70 -0
  96. data/spec/unit/nulogy_message_bus_consumer/steps/monitor_replication_lag/calculator_spec.rb +63 -0
  97. data/spec/unit/nulogy_message_bus_consumer/steps/stream_messages_spec.rb +35 -0
  98. data/spec/unit/nulogy_message_bus_consumer_spec.rb +30 -0
  99. metadata +251 -27
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 2faa5c725b717d217b37a0c9019ecd3f3ae9d40885a2e3b1ce898e626bbb2d3c
-  data.tar.gz: 865137198c57cfd60bdea31c482a66240a284c34d4f1149033160c28fb2a6cef
+  metadata.gz: 1dc5baf6066f0cf264ecf41623c1269580c8c000bd456505b3114918048b8598
+  data.tar.gz: 1a2e52f75e766213995846d7214d8f4e5248f011071f7d8a3fed7fb6c0fe2f03
 SHA512:
-  metadata.gz: 82c5cf164680d27183b17b341b362c5f1ec70a26fecf79d4f8d982ea3506e107ac8e216604c5182469c627892f3c423336053a2502c8e10c574c85af5dd82402
-  data.tar.gz: d2109e0aefef9eecebfb57425316cf36dd7cdb92a4356253efb73271f2fed5e4c1ed59e56a46b44a2dc869dd3acb5285b88ae11d6d72b15f50a7506ed85058a2
+  metadata.gz: 02effdb3a64405b47af2b6b63d1f4be505db7dcb8e632c568645af473e307030e7e0ac6615c48dc097fefaa32987f76b539ad28cfc65f5a7de98727b09d97052
+  data.tar.gz: 93dde5b9d7a0c12c973f6b2516d2867cc376298417c0e3ae8529f63e52974d8e489bd833494951db8cbb418eea7df124c2df02481a85eee8b7c9a575eefc8ceb
data/Rakefile CHANGED
@@ -18,13 +18,14 @@ APP_RAKEFILE = File.expand_path("spec/dummy/Rakefile", __dir__)
 load "rails/tasks/engine.rake"
 load "rails/tasks/statistics.rake"
 
+require "rspec/core"
+require "rspec/core/rake_task"
 RSpec::Core::RakeTask.new(:spec)
-require "rubocop/rake_task"
-RuboCop::RakeTask.new
-task default: [:spec, :rubocop]
+require "standard/rake"
+task default: %i[spec standard]
 
 require "rake/release"
 
 Rake::Release::Task.load_all do |spec|
-  spec.version_tag = "nulogy_message_bus_producer-v#{spec.version}"
+  spec.version_tag = "nulogy_message_bus_consumer-v#{spec.version}"
 end
data/config/credentials/message-bus-us-east-1.key ADDED
@@ -0,0 +1 @@
+dfa19863b2709390893da4c2fb85579a
data/config/credentials/message-bus-us-east-1.yml.enc ADDED
@@ -0,0 +1 @@
+/WXdqUYePaHqAq3P0iTEsrLiMfRKzp2qYYh7K+q6LUNgi4eHNv0+SoLdI1bUNb9UaGvDPGNT3fCwAdkurk5Iud16ok3b4wD6yZ7UkfqbXqZaKH/dciQ5s63p9Hiuq1rbfcqoZ3KR1SXYAwvy8vNqbdwbAzz2N66B1wE5fNibZZlrWzXJjLReiTcNyxNbCPz6vwEwFF52RntuYlIJo4Nkm8vEk3No+HWrOkM8xptr5qApbd+RowLCLZ4/kcAMDB/XPiGobf0AOFv1NUR/9ChEy20usa8Fqd6HtEn4A25HnAC0uaN0K8ZRXjxhMpnXtfMBItn7yxyJ1ubjRZK5a1xVBRU7L/CVV9ZuIsqAHL1++gH5FBrEe83ZIUhN7AzngMDlOPKGiCLiZLrm18I1AEQrD7tJLyXos15AeAzj--cER8cN0iMLwu8Le+--FL7dhMTgr6xL6SkMnYKmeg==
data/lib/nulogy_message_bus_consumer.rb CHANGED
@@ -1,10 +1,14 @@
+require "active_record/railtie"
+require "active_support/core_ext/time/zones"
 require "rdkafka"
 
 require "nulogy_message_bus_consumer/engine"
-
+require "nulogy_message_bus_consumer/clock"
 require "nulogy_message_bus_consumer/config"
+require "nulogy_message_bus_consumer/deployment/ecs"
 require "nulogy_message_bus_consumer/handlers/log_unprocessed_messages"
 require "nulogy_message_bus_consumer/kafka_utils"
+require "nulogy_message_bus_consumer/lag_tracker"
 require "nulogy_message_bus_consumer/message"
 require "nulogy_message_bus_consumer/null_logger"
 require "nulogy_message_bus_consumer/pipeline"
@@ -12,11 +16,12 @@ require "nulogy_message_bus_consumer/processed_message"
 require "nulogy_message_bus_consumer/steps/commit_on_success"
 require "nulogy_message_bus_consumer/steps/connect_to_message_bus"
 require "nulogy_message_bus_consumer/steps/deduplicate_messages"
+require "nulogy_message_bus_consumer/steps/log_consumer_lag"
 require "nulogy_message_bus_consumer/steps/log_messages"
-require "nulogy_message_bus_consumer/steps/monitor_replication_lag"
 require "nulogy_message_bus_consumer/steps/seek_beginning_of_topic"
 require "nulogy_message_bus_consumer/steps/stream_messages"
 require "nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left"
+require "nulogy_message_bus_consumer/steps/supervise_consumer_lag"
 
 module NulogyMessageBusConsumer
   module_function
@@ -31,7 +36,7 @@ module NulogyMessageBusConsumer
   end
 
   def logger
-    @@logger ||= NullLogger.new
+    @logger ||= NullLogger.new
   end
 
   def invoke_pipeline(*steps)
@@ -40,14 +45,21 @@ module NulogyMessageBusConsumer
 
   def recommended_consumer_pipeline(config: self.config, logger: self.logger)
     Pipeline.new([
-      # The first three are really system processing steps
+      # System processing/health steps.
+      # Note: that since they are before `StreamMessages`, they will only
+      # be called once, without any messages.
       Steps::ConnectToMessageBus.new(config, logger),
-      Steps::MonitorReplicationLag.new(logger),
+      Steps::LogConsumerLag.new(logger),
+      Steps::SuperviseConsumerLag.new(
+        logger,
+        check_interval_seconds: config.lag_check_interval_seconds,
+        tracker: LagTracker.new(failing_checks: config.lag_checks)
+      ),
      Steps::StreamMessages.new(logger),
      # Message processing steps start here.
      Steps::LogMessages.new(logger),
      Steps::CommitOnSuccess.new,
-      Steps::DeduplicateMessages.new(logger),
+      Steps::DeduplicateMessages.new(logger)
     ])
   end
 
data/lib/nulogy_message_bus_consumer/clock.rb ADDED
@@ -0,0 +1,13 @@
+module NulogyMessageBusConsumer
+  # Note: Since this calls Time.zone, it is NOt thread-safe
+  class Clock
+    def now
+      Time.zone.now.to_datetime
+    end
+
+    # milliseconds since epoch
+    def ms
+      now.strftime("%Q").to_i
+    end
+  end
+end
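A quick illustration of the new Clock (it relies on Rails' Time.zone being set, per the comment above; the sample value is invented):

  clock = NulogyMessageBusConsumer::Clock.new
  clock.now # => DateTime in the configured zone
  clock.ms  # => milliseconds since epoch as an Integer, e.g. 1589212345678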
data/lib/nulogy_message_bus_consumer/config.rb CHANGED
@@ -1,11 +1,19 @@
 module NulogyMessageBusConsumer
   class Config
-    attr_accessor :consumer_group_id
-    attr_accessor :bootstrap_servers
-    attr_accessor :topic_name
+    attr_accessor :bootstrap_servers,
+      :client_id,
+      :consumer_group_id,
+      :lag_check_interval_seconds,
+      :lag_checks,
+      :topic_name
 
     def initialize(options = {})
-      update(options)
+      defaults = {
+        lag_check_interval_seconds: 20,
+        lag_checks: 6
+      }
+
+      update(defaults.merge(options))
     end
 
     def update(options = {})
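The new lag settings default to a check every 20 seconds and 6 failing checks before a partition is reported as failing; a short sketch (option values are examples only) of how the defaults merge with overrides:

  config = NulogyMessageBusConsumer::Config.new(topic_name: "example-topic")
  config.lag_check_interval_seconds # => 20 (default)
  config.lag_checks                 # => 6 (default)

  config.update(lag_checks: 10)     # explicit options win over the defaults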
data/lib/nulogy_message_bus_consumer/deployment/ecs.rb ADDED
@@ -0,0 +1,23 @@
+module NulogyMessageBusConsumer
+  module Deployment
+    module ECS
+      module_function
+
+      # Try to get the TaskID from metadata server:
+      # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-metadata-endpoint-v4.html
+      # Otherwise, return nil
+      def task_id
+        data = `curl --silent "$ECS_CONTAINER_METADATA_URI_V4/task"`
+
+        return if data.empty?
+
+        json = JSON.parse(data)
+        arn = json["TaskARN"]
+
+        return unless arn
+
+        arn.split("/").last
+      end
+    end
+  end
+end
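One plausible way to combine this helper with the new client_id option (an assumption, not shown in this diff): tag the Kafka client.id with the ECS task so logs can be traced back to a container. task_id returns nil outside ECS, and ConnectToMessageBus below only sets client.id when it is present.

  NulogyMessageBusConsumer.config.update(
    client_id: NulogyMessageBusConsumer::Deployment::ECS.task_id
  )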
data/lib/nulogy_message_bus_consumer/handlers/log_unprocessed_messages.rb CHANGED
@@ -7,9 +7,10 @@ module NulogyMessageBusConsumer
 
       def call(message:, **_)
         return if ProcessedMessage.exists?(id: message.id)
+
         @logger.warn(JSON.dump(
           event: "unprocessed_message",
-          kafka_message: message.to_h,
+          kafka_message: message.to_h
         ))
       end
     end
data/lib/nulogy_message_bus_consumer/kafka_utils.rb CHANGED
@@ -13,13 +13,14 @@ module NulogyMessageBusConsumer
     def wait_for(attempts: 100, interval: 0.1)
       attempts.times do
         break if yield
+
         sleep interval
       end
     end
 
     def every_message_until_none_are_left(consumer)
       Enumerator.new do |yielder|
-        while message = consumer.poll(250)
+        while (message = consumer.poll(250))
           yielder.yield(message)
         end
       end
data/lib/nulogy_message_bus_consumer/lag_tracker.rb ADDED
@@ -0,0 +1,53 @@
+require "set"
+
+module NulogyMessageBusConsumer
+  # Keeps track of how many times a topic's partition has not changed (non-zero) lag between update calls.
+  class LagTracker
+    attr_reader :failing_checks
+
+    def initialize(failing_checks: 3)
+      @failing_checks = failing_checks
+      @tracked = Hash.new { |h, topic| h[topic] = {} }
+      @failed = Hash.new { |h, topic| h[topic] = Set.new }
+    end
+
+    def update(topic_partitions)
+      topic_partitions.each_pair do |topic, partitions|
+        partitions.each_pair do |partition, value|
+          update_topic_partition(topic, partition, value)
+        end
+      end
+    end
+
+    def failing?
+      @failed.any?
+    end
+
+    def failed
+      @failed.transform_values { |v| v.to_a.sort }
+    end
+
+    private
+
+    def update_topic_partition(topic, partition, value)
+      current_value, count = @tracked.dig(topic, partition)
+
+      new_value, new_count =
+        if current_value == value && !value.zero?
+          [current_value, count + 1]
+        else
+          [value, 0]
+        end
+
+      @tracked[topic][partition] = [new_value, new_count]
+
+      if new_count >= @failing_checks
+        @failed[topic] << partition
+      end
+    end
+
+    def exists?(topic, partition)
+      @tracked.dig(topic, partition)
+    end
+  end
+end
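To make the failure rule concrete, a small usage sketch (topic name and lag values are invented): a partition is marked failed once the same non-zero lag has been seen on failing_checks consecutive re-checks.

  tracker = NulogyMessageBusConsumer::LagTracker.new(failing_checks: 2)

  3.times { tracker.update("example-topic" => {0 => 5, 1 => 0}) }

  tracker.failing? # => true, partition 0 held a lag of 5 across two re-checks
  tracker.failed   # => {"example-topic" => [0]}; partition 1 never fails because its lag is 0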
data/lib/nulogy_message_bus_consumer/message.rb CHANGED
@@ -1,9 +1,5 @@
 module NulogyMessageBusConsumer
   class Message
-    def initialize(attrs = {})
-      attrs.each { |key, value| instance_variable_set("@#{key}", value) }
-    end
-
     attr_reader :event_data
     attr_reader :event_data_unparsed
     attr_reader :id
@@ -14,10 +10,20 @@ module NulogyMessageBusConsumer
     attr_reader :company_uuid
     attr_reader :timestamp
     attr_reader :topic
+    attr_reader :created_at
+
+    def initialize(attrs = {})
+      attrs.each { |key, value| instance_variable_set("@#{key}", value) }
+    end
 
     def self.from_kafka(kafka_message)
       envelope_data = JSON.parse(kafka_message.payload, symbolize_names: true)
-      event_data = JSON.parse(envelope_data[:event_json], symbolize_names: true) rescue {}
+      event_data =
+        begin
+          JSON.parse(envelope_data[:event_json], symbolize_names: true)
+        rescue
+          {}
+        end
 
       new(
         event_data: event_data,
@@ -30,6 +36,7 @@ module NulogyMessageBusConsumer
         company_uuid: envelope_data[:company_uuid] || envelope_data[:tenant_id],
         timestamp: kafka_message.timestamp,
         topic: kafka_message.topic,
+        created_at: envelope_data[:created_at]
       )
     end
 
data/lib/nulogy_message_bus_consumer/null_logger.rb CHANGED
@@ -1,9 +1,12 @@
 module NulogyMessageBusConsumer
   class NullLogger
-    def info(*_) end
+    def info(*_)
+    end
 
-    def error(*_) end
+    def error(*_)
+    end
 
-    def warn(*_) end
+    def warn(*_)
+    end
   end
 end
data/lib/nulogy_message_bus_consumer/pipeline.rb CHANGED
@@ -9,7 +9,7 @@ module NulogyMessageBusConsumer
     end
 
     def insert(step, after:)
-      index = @steps.find_index { |step| step.is_a?(after) }
+      index = @steps.find_index { |s| s.is_a?(after) }
       @steps.insert(index + 1, step)
     end
 
@@ -25,7 +25,7 @@ module NulogyMessageBusConsumer
       @steps.reverse.reduce(last_step) do |composed_steps, previous_step|
         lambda do |**args|
           invoke_next = compose_with_merged_args(args, composed_steps)
-          previous_step.call(**args, &invoke_next)
+          previous_step.call(**args, &invoke_next)
         end
       end
     end
@@ -33,7 +33,10 @@ module NulogyMessageBusConsumer
     def compose_with_merged_args(existing_args, func)
       lambda do |**yielded_args|
         args_to_be_overridden = existing_args.keys & yielded_args.keys
-        raise "Cannot override existing argument(s): #{args_to_be_overridden.join(', ')}" if args_to_be_overridden.any?
+        if args_to_be_overridden.any?
+          raise "Cannot override existing argument(s): #{args_to_be_overridden.join(", ")}"
+        end
+
         func.call(**existing_args.merge(yielded_args))
       end
     end
data/lib/nulogy_message_bus_consumer/steps/commit_on_success.rb CHANGED
@@ -7,6 +7,7 @@ module NulogyMessageBusConsumer
         raise_if_invalid(result)
 
         if result == :success
+          kafka_consumer.store_offset(message)
           kafka_consumer.commit
         else
           reconnect_to_reprocess_same_message(kafka_consumer)
data/lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb CHANGED
@@ -1,33 +1,52 @@
 module NulogyMessageBusConsumer
   module Steps
     class ConnectToMessageBus
-      def initialize(config, logger)
+      def initialize(config, logger, kafka_consumer: nil)
         @config = config
         @logger = logger
+        @kafka_consumer = kafka_consumer
       end
 
       def call(**_)
         @logger.info("Connecting to the MessageBus")
-        consumer = Rdkafka::Config.new(consumer_config).consumer
         @logger.info("Using consumer group id: #{@config.consumer_group_id}")
 
-        consumer.subscribe(@config.topic_name)
-        @logger.info("Listening for kafka messages on topic #{@config.topic_name}")
+        subscribe
+
+        trap("TERM") { kafka_consumer.close }
 
-        trap("TERM") { consumer.close }
+        wait_for_assignment
 
-        KafkaUtils.wait_for_assignment(consumer)
-        yield(kafka_consumer: consumer)
+        yield(kafka_consumer: kafka_consumer)
       end
 
       private
 
+      def kafka_consumer
+        @kafka_consumer ||= Rdkafka::Config.new(consumer_config).consumer
+      end
+
      def consumer_config
-        {
+        config = {
           "bootstrap.servers": @config.bootstrap_servers,
           "enable.auto.commit": false,
           "group.id": @config.consumer_group_id,
+          "enable.auto.offset.store": false
         }
+
+        config["client.id"] = @config.client_id if @config.client_id
+
+        config
+      end
+
+      def subscribe
+        kafka_consumer.subscribe(@config.topic_name)
+        @logger.info("Listening for kafka messages on topic #{@config.topic_name}")
+      end
+
+      def wait_for_assignment
+        KafkaUtils.wait_for_assignment(kafka_consumer)
+        @logger.info("Connected as client: #{kafka_consumer.member_id}")
       end
     end
   end
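The new kafka_consumer: keyword makes the consumer injectable, e.g. in tests. A sketch with hypothetical connection values; Rdkafka::Config#consumer is the same call the step otherwise makes lazily:

  config = NulogyMessageBusConsumer.config
  logger = NulogyMessageBusConsumer.logger

  consumer = Rdkafka::Config.new(
    "bootstrap.servers": "localhost:9092",
    "group.id": "test-group"
  ).consumer

  step = NulogyMessageBusConsumer::Steps::ConnectToMessageBus.new(config, logger, kafka_consumer: consumer)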
data/lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb CHANGED
@@ -41,7 +41,7 @@ module NulogyMessageBusConsumer
       def log_duplicate(message)
         @logger.warn(JSON.dump({
           event: "duplicate_message_detected",
-          kafka_message_id: message.id,
+          kafka_message_id: message.id
         }))
       end
     end
data/lib/nulogy_message_bus_consumer/steps/{monitor_replication_lag.rb → log_consumer_lag.rb} RENAMED
@@ -1,6 +1,6 @@
 module NulogyMessageBusConsumer
   module Steps
-    class MonitorReplicationLag
+    class LogConsumerLag
       def initialize(logger)
         @logger = logger
       end
@@ -22,9 +22,9 @@ module NulogyMessageBusConsumer
 
         @logger.info(JSON.dump({
           event: "consumer_lag",
-          topics: Calculator.add_max_lag(lag_per_topic),
+          topics: Calculator.add_max_lag(lag_per_topic)
         }))
-        STDOUT.flush
+        $stdout.flush
 
         sleep 60
       end
data/lib/nulogy_message_bus_consumer/steps/log_messages.rb CHANGED
@@ -1,28 +1,39 @@
 module NulogyMessageBusConsumer
   module Steps
     class LogMessages
-      def initialize(logger)
+      def initialize(logger, clock: Clock.new)
         @logger = logger
+        @clock = clock
       end
 
       def call(message:, **_)
         @logger.info(JSON.dump({
           event: "message_received",
           kafka_message_id: message.id,
-          message: "Received #{message.id}",
+          message: "Received #{message.id}"
         }))
 
         result = yield
 
+        millis = diff_millis(message.created_at, @clock.ms)
         @logger.info(JSON.dump({
           event: "message_processed",
           kafka_message_id: message.id,
-          message: "Processed #{message.id}",
+          message: "Processed #{message.id} (#{message.topic}##{message.partition}@#{message.offset})",
           result: result,
+          time_to_processed: millis
         }))
 
         result
       end
+
+      # Debezium appears to be giving us nanos since epoch
+      # https://github.com/debezium/debezium/blob/5a115e902cdc1dc399ec02758dd1039a33e99bc2/debezium-core/src/main/java/io/debezium/jdbc/JdbcValueConverters.java#L237
+      def diff_millis(oldest_nanos, newest_millis)
+        old_millis = oldest_nanos / 1000
+
+        newest_millis - old_millis
+      end
     end
   end
 end
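A worked example of the new time_to_processed field (sample values are invented; units follow the comment and the division above):

  created_at = 1_589_212_345_678_000 # from the message envelope
  clock_ms = 1_589_212_346_500       # Clock#ms when processing finished

  old_millis = created_at / 1000     # => 1589212345678
  clock_ms - old_millis              # => 822, logged as time_to_processed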