nulogy_message_bus_consumer 0.3.0 → 1.0.0.alpha

Files changed (25)
  1. checksums.yaml +4 -4
  2. data/Rakefile +5 -4
  3. data/config/credentials/message-bus-us-east-1.key +1 -0
  4. data/config/credentials/message-bus-us-east-1.yml.enc +1 -0
  5. data/lib/nulogy_message_bus_consumer.rb +18 -6
  6. data/lib/nulogy_message_bus_consumer/clock.rb +13 -0
  7. data/lib/nulogy_message_bus_consumer/config.rb +12 -4
  8. data/lib/nulogy_message_bus_consumer/deployment/ecs.rb +23 -0
  9. data/lib/nulogy_message_bus_consumer/handlers/log_unprocessed_messages.rb +2 -1
  10. data/lib/nulogy_message_bus_consumer/kafka_utils.rb +2 -1
  11. data/lib/nulogy_message_bus_consumer/lag_tracker.rb +53 -0
  12. data/lib/nulogy_message_bus_consumer/message.rb +21 -12
  13. data/lib/nulogy_message_bus_consumer/null_logger.rb +6 -3
  14. data/lib/nulogy_message_bus_consumer/pipeline.rb +6 -3
  15. data/lib/nulogy_message_bus_consumer/steps/commit_on_success.rb +1 -0
  16. data/lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb +27 -8
  17. data/lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb +1 -1
  18. data/lib/nulogy_message_bus_consumer/steps/{monitor_replication_lag.rb → log_consumer_lag.rb} +3 -3
  19. data/lib/nulogy_message_bus_consumer/steps/log_messages.rb +14 -3
  20. data/lib/nulogy_message_bus_consumer/steps/stream_messages.rb +2 -2
  21. data/lib/nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left.rb +2 -2
  22. data/lib/nulogy_message_bus_consumer/steps/supervise_consumer_lag.rb +76 -0
  23. data/lib/nulogy_message_bus_consumer/version.rb +1 -1
  24. data/lib/tasks/engine/message_bus_consumer.rake +9 -10
  25. metadata +100 -24
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 2faa5c725b717d217b37a0c9019ecd3f3ae9d40885a2e3b1ce898e626bbb2d3c
- data.tar.gz: 865137198c57cfd60bdea31c482a66240a284c34d4f1149033160c28fb2a6cef
+ metadata.gz: 1d17557a978d8762a83afcb1a8027296854d516cde57540243d48c80cc46c012
+ data.tar.gz: 02c45bfab242f35a0143d090a08387f80db0030854a28b161792cb09efded53c
  SHA512:
- metadata.gz: 82c5cf164680d27183b17b341b362c5f1ec70a26fecf79d4f8d982ea3506e107ac8e216604c5182469c627892f3c423336053a2502c8e10c574c85af5dd82402
- data.tar.gz: d2109e0aefef9eecebfb57425316cf36dd7cdb92a4356253efb73271f2fed5e4c1ed59e56a46b44a2dc869dd3acb5285b88ae11d6d72b15f50a7506ed85058a2
+ metadata.gz: 8c7ef5e49a3aff7e09e963592fc79f7435a29d3567764d0cec1e98d52fea31d18f1ee2ea4b3d429c2e550307f0fab539733d700716b89b45c05cfee269e87955
+ data.tar.gz: 0be71a225c18e458d440f80eefadfb5b1a841ad57d619af434e12832c48ff31c1ce2614aaf5786330e2282e418838a223f91c82ebae74cedda3801446620fbb2
data/Rakefile CHANGED
@@ -18,13 +18,14 @@ APP_RAKEFILE = File.expand_path("spec/dummy/Rakefile", __dir__)
  load "rails/tasks/engine.rake"
  load "rails/tasks/statistics.rake"

+ require "rspec/core"
+ require "rspec/core/rake_task"
  RSpec::Core::RakeTask.new(:spec)
- require "rubocop/rake_task"
- RuboCop::RakeTask.new
- task default: [:spec, :rubocop]
+ require "standard/rake"
+ task default: %i[spec standard]

  require "rake/release"

  Rake::Release::Task.load_all do |spec|
-   spec.version_tag = "nulogy_message_bus_producer-v#{spec.version}"
+   spec.version_tag = "nulogy_message_bus_consumer-v#{spec.version}"
  end
data/config/credentials/message-bus-us-east-1.key ADDED
@@ -0,0 +1 @@
+ dfa19863b2709390893da4c2fb85579a
data/config/credentials/message-bus-us-east-1.yml.enc ADDED
@@ -0,0 +1 @@
+ /WXdqUYePaHqAq3P0iTEsrLiMfRKzp2qYYh7K+q6LUNgi4eHNv0+SoLdI1bUNb9UaGvDPGNT3fCwAdkurk5Iud16ok3b4wD6yZ7UkfqbXqZaKH/dciQ5s63p9Hiuq1rbfcqoZ3KR1SXYAwvy8vNqbdwbAzz2N66B1wE5fNibZZlrWzXJjLReiTcNyxNbCPz6vwEwFF52RntuYlIJo4Nkm8vEk3No+HWrOkM8xptr5qApbd+RowLCLZ4/kcAMDB/XPiGobf0AOFv1NUR/9ChEy20usa8Fqd6HtEn4A25HnAC0uaN0K8ZRXjxhMpnXtfMBItn7yxyJ1ubjRZK5a1xVBRU7L/CVV9ZuIsqAHL1++gH5FBrEe83ZIUhN7AzngMDlOPKGiCLiZLrm18I1AEQrD7tJLyXos15AeAzj--cER8cN0iMLwu8Le+--FL7dhMTgr6xL6SkMnYKmeg==
data/lib/nulogy_message_bus_consumer.rb CHANGED
@@ -1,10 +1,14 @@
+ require "active_record/railtie"
+ require "active_support/core_ext/time/zones"
  require "rdkafka"

  require "nulogy_message_bus_consumer/engine"
-
+ require "nulogy_message_bus_consumer/clock"
  require "nulogy_message_bus_consumer/config"
+ require "nulogy_message_bus_consumer/deployment/ecs"
  require "nulogy_message_bus_consumer/handlers/log_unprocessed_messages"
  require "nulogy_message_bus_consumer/kafka_utils"
+ require "nulogy_message_bus_consumer/lag_tracker"
  require "nulogy_message_bus_consumer/message"
  require "nulogy_message_bus_consumer/null_logger"
  require "nulogy_message_bus_consumer/pipeline"
@@ -12,11 +16,12 @@ require "nulogy_message_bus_consumer/processed_message"
  require "nulogy_message_bus_consumer/steps/commit_on_success"
  require "nulogy_message_bus_consumer/steps/connect_to_message_bus"
  require "nulogy_message_bus_consumer/steps/deduplicate_messages"
+ require "nulogy_message_bus_consumer/steps/log_consumer_lag"
  require "nulogy_message_bus_consumer/steps/log_messages"
- require "nulogy_message_bus_consumer/steps/monitor_replication_lag"
  require "nulogy_message_bus_consumer/steps/seek_beginning_of_topic"
  require "nulogy_message_bus_consumer/steps/stream_messages"
  require "nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left"
+ require "nulogy_message_bus_consumer/steps/supervise_consumer_lag"

  module NulogyMessageBusConsumer
    module_function
@@ -31,7 +36,7 @@ module NulogyMessageBusConsumer
    end

    def logger
-     @@logger ||= NullLogger.new
+     @logger ||= NullLogger.new
    end

    def invoke_pipeline(*steps)
@@ -40,14 +45,21 @@ module NulogyMessageBusConsumer

    def recommended_consumer_pipeline(config: self.config, logger: self.logger)
      Pipeline.new([
-       # The first three are really system processing steps
+       # System processing/health steps.
+       # Note: that since they are before `StreamMessages`, they will only
+       # be called once, without any messages.
        Steps::ConnectToMessageBus.new(config, logger),
-       Steps::MonitorReplicationLag.new(logger),
+       Steps::LogConsumerLag.new(logger),
+       Steps::SuperviseConsumerLag.new(
+         logger,
+         check_interval_seconds: config.lag_check_interval_seconds,
+         tracker: LagTracker.new(failing_checks: config.lag_checks)
+       ),
        Steps::StreamMessages.new(logger),
        # Message processing steps start here.
        Steps::LogMessages.new(logger),
        Steps::CommitOnSuccess.new,
-       Steps::DeduplicateMessages.new(logger),
+       Steps::DeduplicateMessages.new(logger)
      ])
    end

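For orientation, here is a minimal sketch of the new wiring (assumed host-app usage, not taken from the gem's documentation; the broker, group, and topic values are placeholders):

    NulogyMessageBusConsumer.config.update(
      bootstrap_servers: "localhost:9092",   # placeholder
      consumer_group_id: "my-service",       # placeholder
      topic_name: "my-topic",                # placeholder
      lag_check_interval_seconds: 20,        # new default
      lag_checks: 6                          # new default
    )

    # Build and run the recommended pipeline, including the new lag supervision step.
    NulogyMessageBusConsumer.recommended_consumer_pipeline(logger: Rails.logger).invoke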
data/lib/nulogy_message_bus_consumer/clock.rb ADDED
@@ -0,0 +1,13 @@
+ module NulogyMessageBusConsumer
+   # Note: Since this calls Time.zone, it is NOt thread-safe
+   class Clock
+     def now
+       Time.zone.now.to_datetime
+     end
+
+     # milliseconds since epoch
+     def ms
+       now.strftime("%Q").to_i
+     end
+   end
+ end
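Clock#ms relies on DateTime#strftime("%Q"), which formats the time as milliseconds since the Unix epoch. A quick standalone sketch (assumes Time.zone has been set, which a host Rails app normally does):

    require "active_support"
    require "active_support/core_ext/time"

    Time.zone = "UTC"

    clock = NulogyMessageBusConsumer::Clock.new
    clock.now # => a DateTime in the configured zone
    clock.ms  # => e.g. 1618272000000, roughly (Time.now.to_f * 1000).to_i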
data/lib/nulogy_message_bus_consumer/config.rb CHANGED
@@ -1,11 +1,19 @@
  module NulogyMessageBusConsumer
    class Config
-     attr_accessor :consumer_group_id
-     attr_accessor :bootstrap_servers
-     attr_accessor :topic_name
+     attr_accessor :bootstrap_servers,
+       :client_id,
+       :consumer_group_id,
+       :lag_check_interval_seconds,
+       :lag_checks,
+       :topic_name

      def initialize(options = {})
-       update(options)
+       defaults = {
+         lag_check_interval_seconds: 20,
+         lag_checks: 6
+       }
+
+       update(defaults.merge(options))
      end

      def update(options = {})
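A minimal sketch of the new defaults in use (values other than the lag settings are placeholders):

    config = NulogyMessageBusConsumer::Config.new(
      bootstrap_servers: "localhost:9092", # placeholder
      consumer_group_id: "my-service",     # placeholder
      topic_name: "my-topic",              # placeholder
      lag_checks: 9                        # override one default
    )

    config.lag_check_interval_seconds # => 20 (default)
    config.lag_checks                 # => 9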
data/lib/nulogy_message_bus_consumer/deployment/ecs.rb ADDED
@@ -0,0 +1,23 @@
+ module NulogyMessageBusConsumer
+   module Deployment
+     module ECS
+       module_function
+
+       # Try to get the TaskID from metadata server:
+       # https://docs.aws.amazon.com/AmazonECS/latest/developerguide/task-metadata-endpoint-v4.html
+       # Otherwise, return nil
+       def task_id
+         data = `curl --silent "$ECS_CONTAINER_METADATA_URI_V4/task"`
+
+         return if data.empty?
+
+         json = JSON.parse(data)
+         arn = json["TaskARN"]
+
+         return unless arn
+
+         arn.split("/").last
+       end
+     end
+   end
+ end
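For illustration only: the v4 task metadata endpoint returns JSON whose TaskARN ends in the task ID, so the split("/").last above yields that final segment (the ARN below is made up):

    arn = "arn:aws:ecs:us-east-1:123456789012:task/my-cluster/0123456789abcdef0"
    arn.split("/").last # => "0123456789abcdef0"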
data/lib/nulogy_message_bus_consumer/handlers/log_unprocessed_messages.rb CHANGED
@@ -7,9 +7,10 @@ module NulogyMessageBusConsumer

      def call(message:, **_)
        return if ProcessedMessage.exists?(id: message.id)
+
        @logger.warn(JSON.dump(
          event: "unprocessed_message",
-         kafka_message: message.to_h,
+         kafka_message: message.to_h
        ))
      end
    end
data/lib/nulogy_message_bus_consumer/kafka_utils.rb CHANGED
@@ -13,13 +13,14 @@ module NulogyMessageBusConsumer
    def wait_for(attempts: 100, interval: 0.1)
      attempts.times do
        break if yield
+
        sleep interval
      end
    end

    def every_message_until_none_are_left(consumer)
      Enumerator.new do |yielder|
-       while message = consumer.poll(250)
+       while (message = consumer.poll(250))
          yielder.yield(message)
        end
      end
data/lib/nulogy_message_bus_consumer/lag_tracker.rb ADDED
@@ -0,0 +1,53 @@
+ require "set"
+
+ module NulogyMessageBusConsumer
+   # Keeps track of how many times a topic's partition has not changed (non-zero) lag between update calls.
+   class LagTracker
+     attr_reader :failing_checks
+
+     def initialize(failing_checks: 3)
+       @failing_checks = failing_checks
+       @tracked = Hash.new { |h, topic| h[topic] = {} }
+       @failed = Hash.new { |h, topic| h[topic] = Set.new }
+     end
+
+     def update(topic_partitions)
+       topic_partitions.each_pair do |topic, partitions|
+         partitions.each_pair do |partition, value|
+           update_topic_partition(topic, partition, value)
+         end
+       end
+     end
+
+     def failing?
+       @failed.any?
+     end
+
+     def failed
+       @failed.transform_values { |v| v.to_a.sort }
+     end
+
+     private
+
+     def update_topic_partition(topic, partition, value)
+       current_value, count = @tracked.dig(topic, partition)
+
+       new_value, new_count =
+         if current_value == value && !value.zero?
+           [current_value, count + 1]
+         else
+           [value, 0]
+         end
+
+       @tracked[topic][partition] = [new_value, new_count]
+
+       if new_count >= @failing_checks
+         @failed[topic] << partition
+       end
+     end
+
+     def exists?(topic, partition)
+       @tracked.dig(topic, partition)
+     end
+   end
+ end
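A small sketch of how the tracker behaves: update takes a hash of topic => { partition => lag } (the shape SuperviseConsumerLag feeds it from consumer.lag(consumer.committed)), and a partition is flagged once its non-zero lag is unchanged for failing_checks consecutive re-checks:

    tracker = NulogyMessageBusConsumer::LagTracker.new(failing_checks: 2)

    tracker.update("orders" => {0 => 5, 1 => 0})
    tracker.failing? # => false -- first observation of lag 5 on partition 0

    tracker.update("orders" => {0 => 5, 1 => 0})
    tracker.failing? # => false -- lag unchanged once

    tracker.update("orders" => {0 => 5, 1 => 0})
    tracker.failing? # => true  -- lag unchanged twice (failing_checks)
    tracker.failed   # => {"orders" => [0]} -- partition 1 never fails because its lag is zero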
data/lib/nulogy_message_bus_consumer/message.rb CHANGED
@@ -1,23 +1,30 @@
  module NulogyMessageBusConsumer
    class Message
+     attr_reader :company_uuid,
+       :created_at,
+       :event_data,
+       :event_data_unparsed,
+       :id,
+       :key,
+       :offset,
+       :partition,
+       :subscription_id,
+       :timestamp,
+       :topic,
+       :type
+
      def initialize(attrs = {})
        attrs.each { |key, value| instance_variable_set("@#{key}", value) }
      end

-     attr_reader :event_data
-     attr_reader :event_data_unparsed
-     attr_reader :id
-     attr_reader :key
-     attr_reader :offset
-     attr_reader :partition
-     attr_reader :subscription_id
-     attr_reader :company_uuid
-     attr_reader :timestamp
-     attr_reader :topic
-
      def self.from_kafka(kafka_message)
        envelope_data = JSON.parse(kafka_message.payload, symbolize_names: true)
-       event_data = JSON.parse(envelope_data[:event_json], symbolize_names: true) rescue {}
+       event_data =
+         begin
+           JSON.parse(envelope_data[:event_json], symbolize_names: true)
+         rescue
+           {}
+         end

        new(
          event_data: event_data,
@@ -30,6 +37,8 @@ module NulogyMessageBusConsumer
          company_uuid: envelope_data[:company_uuid] || envelope_data[:tenant_id],
          timestamp: kafka_message.timestamp,
          topic: kafka_message.topic,
+         type: envelope_data[:type],
+         created_at: envelope_data[:created_at]
        )
      end

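As the begin/rescue above implies, a malformed event_json no longer raises; event_data simply comes back empty. A sketch using a stand-in for the Rdkafka message (an OpenStruct, not the real class):

    require "json"
    require "ostruct"

    kafka_message = OpenStruct.new(
      payload: {event_json: "not valid json", type: "order_created"}.to_json,
      topic: "orders", partition: 0, offset: 42, timestamp: Time.now
    )

    message = NulogyMessageBusConsumer::Message.from_kafka(kafka_message)
    message.event_data # => {}
    message.type       # => "order_created"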
data/lib/nulogy_message_bus_consumer/null_logger.rb CHANGED
@@ -1,9 +1,12 @@
  module NulogyMessageBusConsumer
    class NullLogger
-     def info(*_) end
+     def info(*_)
+     end

-     def error(*_) end
+     def error(*_)
+     end

-     def warn(*_) end
+     def warn(*_)
+     end
    end
  end
data/lib/nulogy_message_bus_consumer/pipeline.rb CHANGED
@@ -9,7 +9,7 @@ module NulogyMessageBusConsumer
    end

    def insert(step, after:)
-     index = @steps.find_index { |step| step.is_a?(after) }
+     index = @steps.find_index { |s| s.is_a?(after) }
      @steps.insert(index + 1, step)
    end

@@ -25,7 +25,7 @@ module NulogyMessageBusConsumer
      @steps.reverse.reduce(last_step) do |composed_steps, previous_step|
        lambda do |**args|
          invoke_next = compose_with_merged_args(args, composed_steps)
-         previous_step.call(**args, &invoke_next)
+         previous_step.call(**args, &invoke_next)
        end
      end
    end
@@ -33,7 +33,10 @@ module NulogyMessageBusConsumer
    def compose_with_merged_args(existing_args, func)
      lambda do |**yielded_args|
        args_to_be_overridden = existing_args.keys & yielded_args.keys
-       raise "Cannot override existing argument(s): #{args_to_be_overridden.join(', ')}" if args_to_be_overridden.any?
+       if args_to_be_overridden.any?
+         raise "Cannot override existing argument(s): #{args_to_be_overridden.join(", ")}"
+       end
+
        func.call(**existing_args.merge(yielded_args))
      end
    end
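Pipeline#insert places a step immediately after the first step of the given class, which is how an app can splice its own step into the recommended pipeline. A brief sketch (MyHandler is hypothetical):

    pipeline = NulogyMessageBusConsumer.recommended_consumer_pipeline(logger: Rails.logger)
    pipeline.insert(MyHandler.new, after: NulogyMessageBusConsumer::Steps::DeduplicateMessages)
    pipeline.invoke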
data/lib/nulogy_message_bus_consumer/steps/commit_on_success.rb CHANGED
@@ -7,6 +7,7 @@ module NulogyMessageBusConsumer
        raise_if_invalid(result)

        if result == :success
+         kafka_consumer.store_offset(message)
          kafka_consumer.commit
        else
          reconnect_to_reprocess_same_message(kafka_consumer)
data/lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb CHANGED
@@ -1,33 +1,52 @@
  module NulogyMessageBusConsumer
    module Steps
      class ConnectToMessageBus
-       def initialize(config, logger)
+       def initialize(config, logger, kafka_consumer: nil)
          @config = config
          @logger = logger
+         @kafka_consumer = kafka_consumer
        end

        def call(**_)
          @logger.info("Connecting to the MessageBus")
-         consumer = Rdkafka::Config.new(consumer_config).consumer
          @logger.info("Using consumer group id: #{@config.consumer_group_id}")

-         consumer.subscribe(@config.topic_name)
-         @logger.info("Listening for kafka messages on topic #{@config.topic_name}")
+         subscribe
+
+         trap("TERM") { kafka_consumer.close }

-         trap("TERM") { consumer.close }
+         wait_for_assignment

-         KafkaUtils.wait_for_assignment(consumer)
-         yield(kafka_consumer: consumer)
+         yield(kafka_consumer: kafka_consumer)
        end

        private

+       def kafka_consumer
+         @kafka_consumer ||= Rdkafka::Config.new(consumer_config).consumer
+       end
+
        def consumer_config
-         {
+         config = {
            "bootstrap.servers": @config.bootstrap_servers,
            "enable.auto.commit": false,
            "group.id": @config.consumer_group_id,
+           "enable.auto.offset.store": false
          }
+
+         config["client.id"] = @config.client_id if @config.client_id
+
+         config
+       end
+
+       def subscribe
+         kafka_consumer.subscribe(@config.topic_name)
+         @logger.info("Listening for kafka messages on topic #{@config.topic_name}")
+       end
+
+       def wait_for_assignment
+         KafkaUtils.wait_for_assignment(kafka_consumer)
+         @logger.info("Connected as client: #{kafka_consumer.member_id}")
        end
      end
    end
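Worth noting: with "enable.auto.commit" already off and "enable.auto.offset.store" now disabled too, a bare kafka_consumer.commit only commits offsets that were explicitly stored, which is what CommitOnSuccess (above) does after a successful handler run. A sketch of the resulting flow (handler is hypothetical):

    result = handler.call(message)           # hypothetical message handler
    if result == :success
      kafka_consumer.store_offset(message)   # mark this offset as safe to commit
      kafka_consumer.commit                  # commits only explicitly stored offsets
    end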
data/lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb CHANGED
@@ -41,7 +41,7 @@ module NulogyMessageBusConsumer
      def log_duplicate(message)
        @logger.warn(JSON.dump({
          event: "duplicate_message_detected",
-         kafka_message_id: message.id,
+         kafka_message_id: message.id
        }))
      end
    end
data/lib/nulogy_message_bus_consumer/steps/{monitor_replication_lag.rb → log_consumer_lag.rb} RENAMED
@@ -1,6 +1,6 @@
  module NulogyMessageBusConsumer
    module Steps
-     class MonitorReplicationLag
+     class LogConsumerLag
        def initialize(logger)
          @logger = logger
        end
@@ -22,9 +22,9 @@ module NulogyMessageBusConsumer

          @logger.info(JSON.dump({
            event: "consumer_lag",
-           topics: Calculator.add_max_lag(lag_per_topic),
+           topics: Calculator.add_max_lag(lag_per_topic)
          }))
-         STDOUT.flush
+         $stdout.flush

          sleep 60
        end
data/lib/nulogy_message_bus_consumer/steps/log_messages.rb CHANGED
@@ -1,28 +1,39 @@
  module NulogyMessageBusConsumer
    module Steps
      class LogMessages
-       def initialize(logger)
+       def initialize(logger, clock: Clock.new)
          @logger = logger
+         @clock = clock
        end

        def call(message:, **_)
          @logger.info(JSON.dump({
            event: "message_received",
            kafka_message_id: message.id,
-           message: "Received #{message.id}",
+           message: "Received #{message.id}"
          }))

          result = yield

+         millis = diff_millis(message.created_at, @clock.ms)
          @logger.info(JSON.dump({
            event: "message_processed",
            kafka_message_id: message.id,
-           message: "Processed #{message.id}",
+           message: "Processed #{message.id} (#{message.topic}##{message.partition}@#{message.offset})",
            result: result,
+           time_to_processed: millis
          }))

          result
        end
+
+       # Debezium appears to be giving us nanos since epoch
+       # https://github.com/debezium/debezium/blob/5a115e902cdc1dc399ec02758dd1039a33e99bc2/debezium-core/src/main/java/io/debezium/jdbc/JdbcValueConverters.java#L237
+       def diff_millis(oldest_nanos, newest_millis)
+         old_millis = oldest_nanos / 1000
+
+         newest_millis - old_millis
+       end
      end
    end
  end
data/lib/nulogy_message_bus_consumer/steps/stream_messages.rb CHANGED
@@ -12,11 +12,11 @@ module NulogyMessageBusConsumer
          kafka_message: kafka_message
        )
      end
-   rescue StandardError => e
+   rescue => e
      @logger.error(JSON.dump({
        event: "message_processing_errored",
        class: e.class,
-       message: e.message,
+       message: e.message
      }))

      raise
data/lib/nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left.rb CHANGED
@@ -12,11 +12,11 @@ module NulogyMessageBusConsumer
          kafka_message: kafka_message
        )
      end
-   rescue StandardError => e
+   rescue => e
      @logger.error(JSON.dump({
        event: "message_processing_errored",
        class: e.class,
-       message: e.message,
+       message: e.message
      }))

      raise
data/lib/nulogy_message_bus_consumer/steps/supervise_consumer_lag.rb ADDED
@@ -0,0 +1,76 @@
+ module NulogyMessageBusConsumer
+   module Steps
+     # Supervises the consumer's lag.
+     #
+     # If a partition's lag is non-zero and does not change for an extended period
+     # of time, then kill the main thread.
+     #
+     # That period of time is check_interval_seconds * LagTracker#failing_checks
+     # With the defaults, that would be 20 * 6 ~ 120 seconds = 2 minutes.
+     #
+     # Note that this strategy may not work for a busy integration.
+     # Consumer lag monitoring should alert in that case.
+     # However, this strategy may help alleviate alerts for low traffic or off-peak
+     # environments.
+     #
+     # We've come across cases where the consumer lag is still being logged,
+     # messages are being processed, but the consumer is not consuming messages
+     # in particular topics.
+     #
+     # Killing the main thread causes ECS to restart the task.
+     class SuperviseConsumerLag
+       def initialize(logger, tracker: NulogyMessageBusConsumer::LagTracker.new(failing_checks: 6), killable: nil, check_interval_seconds: 20)
+         @logger = logger
+         @tracker = tracker
+         @killable = killable
+         @check_interval_seconds = check_interval_seconds
+       end
+
+       def call(kafka_consumer:, **_)
+         @consumer = kafka_consumer
+         @killable ||= Thread.current
+
+         run
+
+         yield
+       end
+
+       private
+
+       def run
+         Thread.abort_on_exception = true
+
+         Thread.new do
+           NulogyMessageBusConsumer::KafkaUtils.wait_for_assignment(@consumer)
+
+           loop do
+             @tracker.update(@consumer.lag(@consumer.committed))
+
+             if @tracker.failing?
+               log_failed_partitions
+
+               @killable.kill
+               Thread.current.exit
+             end
+
+             sleep @check_interval_seconds
+           end
+         end
+       end
+
+       def log_failed_partitions
+         seconds = @check_interval_seconds * @tracker.failing_checks
+         failed = @tracker
+           .failed
+           .map { |topic, partitions| "#{topic}: #{partitions.join(",")}" }
+           .join(", ")
+
+         @logger.warn(JSON.dump({
+           event: "message_processing_warning",
+           message: "Assigned partition lag has not changed in #{seconds} seconds: #{failed}"
+         }))
+         $stdout.flush
+       end
+     end
+   end
+ end
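The supervision window is check_interval_seconds * failing_checks (the comment's 20 * 6, about 120 seconds). A sketch of widening it to roughly five minutes when building the step by hand, using the constructor shown above:

    step = NulogyMessageBusConsumer::Steps::SuperviseConsumerLag.new(
      Rails.logger,
      check_interval_seconds: 30,
      tracker: NulogyMessageBusConsumer::LagTracker.new(failing_checks: 10)
    )
    # 30 seconds per check * 10 unchanged checks = ~300 seconds before the task is restarted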
data/lib/nulogy_message_bus_consumer/version.rb CHANGED
@@ -1,3 +1,3 @@
  module NulogyMessageBusConsumer
-   VERSION = "0.3.0"
+   VERSION = "1.0.0.alpha".freeze
  end
data/lib/tasks/engine/message_bus_consumer.rake CHANGED
@@ -1,16 +1,15 @@
  namespace :message_bus_consumer do
    desc "Verifies that the messages in the message bus have been processed"
-   task :audit => :environment do
-     config = build_audit_config
+   task audit: :environment do
+     logger = Rails.logger
+     config = NulogyMessageBusConsumer::Config.new(
+       consumer_group_id: ENV.fetch("MB_AUDIT_GROUP"),
+       bootstrap_servers: ENV.fetch("MB_BOOTSTRAP_SERVERS"),
+       topic_name: ENV.fetch("MB_CONSUMER_TOPIC")
+     )

      NulogyMessageBusConsumer
-       .consumer_audit_pipeline(config: config)
+       .consumer_audit_pipeline(config: config, logger: logger)
        .invoke
    end
-
-   def build_audit_config
-     audit_config = NulogyMessageBusConsumer.config.dup
-     audit_config.consumer_group_id = "#{audit_config.consumer_group_id}_consumer_audit"
-     NulogyMessageBusConsumer.config
-   end
- end
+ end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: nulogy_message_bus_consumer
  version: !ruby/object:Gem::Version
-   version: 0.3.0
+   version: 1.0.0.alpha
  platform: ruby
  authors:
  - Nulogy
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2020-10-05 00:00:00.000000000 Z
+ date: 2021-04-13 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
    name: activerecord
@@ -52,34 +52,104 @@ dependencies:
      - - ">="
        - !ruby/object:Gem::Version
          version: '0'
+ - !ruby/object:Gem::Dependency
+   name: bundler-audit
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 0.7.0.1
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 0.7.0.1
+ - !ruby/object:Gem::Dependency
+   name: dotenv
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.7.6
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 2.7.6
+ - !ruby/object:Gem::Dependency
+   name: pg
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.2.3
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - '='
+       - !ruby/object:Gem::Version
+         version: 1.2.3
+ - !ruby/object:Gem::Dependency
+   name: pry
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+ - !ruby/object:Gem::Dependency
+   name: pry-byebug
+   requirement: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
+   type: :development
+   prerelease: false
+   version_requirements: !ruby/object:Gem::Requirement
+     requirements:
+     - - ">="
+       - !ruby/object:Gem::Version
+         version: '0'
  - !ruby/object:Gem::Dependency
    name: rails
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 6.0.3
+         version: 6.0.3.5
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 6.0.3
+         version: 6.0.3.5
  - !ruby/object:Gem::Dependency
    name: rake-release
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.2.1
+         version: 1.3.0
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.2.1
+         version: 1.3.0
  - !ruby/object:Gem::Dependency
    name: rspec
    requirement: !ruby/object:Gem::Requirement
@@ -95,75 +165,75 @@ dependencies:
        - !ruby/object:Gem::Version
          version: 3.9.0
  - !ruby/object:Gem::Dependency
-   name: rspec-rails
+   name: rspec-json_expectations
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 4.0.1
+         version: 2.2.0
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 4.0.1
+         version: 2.2.0
  - !ruby/object:Gem::Dependency
-   name: rspec-json_expectations
+   name: rspec-rails
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 2.2.0
+         version: 4.0.1
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 2.2.0
+         version: 4.0.1
  - !ruby/object:Gem::Dependency
-   name: rubocop
+   name: standard
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 0.81.0
+         version: 0.11.0
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 0.81.0
+         version: 0.11.0
  - !ruby/object:Gem::Dependency
-   name: rubocop-rspec
+   name: rubocop-rails
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.38.1
+         version: 2.5.2
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.38.1
+         version: 2.5.2
  - !ruby/object:Gem::Dependency
-   name: pg
+   name: rubocop-rspec
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.2.3
+         version: 1.38.1
    type: :development
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - '='
        - !ruby/object:Gem::Version
-         version: 1.2.3
+         version: 1.38.1
  description:
  email:
  - tass@nulogy.com
@@ -172,13 +242,18 @@ extensions: []
  extra_rdoc_files: []
  files:
  - Rakefile
+ - config/credentials/message-bus-us-east-1.key
+ - config/credentials/message-bus-us-east-1.yml.enc
  - config/routes.rb
  - db/migrate/20200509095105_create_message_bus_processed_messages.rb
  - lib/nulogy_message_bus_consumer.rb
+ - lib/nulogy_message_bus_consumer/clock.rb
  - lib/nulogy_message_bus_consumer/config.rb
+ - lib/nulogy_message_bus_consumer/deployment/ecs.rb
  - lib/nulogy_message_bus_consumer/engine.rb
  - lib/nulogy_message_bus_consumer/handlers/log_unprocessed_messages.rb
  - lib/nulogy_message_bus_consumer/kafka_utils.rb
+ - lib/nulogy_message_bus_consumer/lag_tracker.rb
  - lib/nulogy_message_bus_consumer/message.rb
  - lib/nulogy_message_bus_consumer/null_logger.rb
  - lib/nulogy_message_bus_consumer/pipeline.rb
@@ -186,11 +261,12 @@ files:
  - lib/nulogy_message_bus_consumer/steps/commit_on_success.rb
  - lib/nulogy_message_bus_consumer/steps/connect_to_message_bus.rb
  - lib/nulogy_message_bus_consumer/steps/deduplicate_messages.rb
+ - lib/nulogy_message_bus_consumer/steps/log_consumer_lag.rb
  - lib/nulogy_message_bus_consumer/steps/log_messages.rb
- - lib/nulogy_message_bus_consumer/steps/monitor_replication_lag.rb
  - lib/nulogy_message_bus_consumer/steps/seek_beginning_of_topic.rb
  - lib/nulogy_message_bus_consumer/steps/stream_messages.rb
  - lib/nulogy_message_bus_consumer/steps/stream_messages_until_none_are_left.rb
+ - lib/nulogy_message_bus_consumer/steps/supervise_consumer_lag.rb
  - lib/nulogy_message_bus_consumer/version.rb
  - lib/tasks/engine/message_bus_consumer.rake
  homepage: https://github.com/nulogy/message-bus/tree/master/gems/nulogy_message_bus_consumer
@@ -208,9 +284,9 @@ required_ruby_version: !ruby/object:Gem::Requirement
        version: '0'
  required_rubygems_version: !ruby/object:Gem::Requirement
    requirements:
-   - - ">="
+   - - ">"
      - !ruby/object:Gem::Version
-       version: '0'
+       version: 1.3.1
  requirements: []
  rubygems_version: 3.0.3
  signing_key: