sbmt-kafka_consumer 2.0.0

Files changed (76)
  1. checksums.yaml +7 -0
  2. data/.rspec +3 -0
  3. data/.rubocop.yml +34 -0
  4. data/Appraisals +23 -0
  5. data/CHANGELOG.md +292 -0
  6. data/Gemfile +5 -0
  7. data/LICENSE +21 -0
  8. data/README.md +296 -0
  9. data/Rakefile +12 -0
  10. data/config.ru +9 -0
  11. data/dip.yml +84 -0
  12. data/docker-compose.yml +68 -0
  13. data/exe/kafka_consumer +16 -0
  14. data/lefthook-local.dip_example.yml +4 -0
  15. data/lefthook.yml +6 -0
  16. data/lib/generators/kafka_consumer/concerns/configuration.rb +30 -0
  17. data/lib/generators/kafka_consumer/consumer/USAGE +24 -0
  18. data/lib/generators/kafka_consumer/consumer/consumer_generator.rb +41 -0
  19. data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb +9 -0
  20. data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb +13 -0
  21. data/lib/generators/kafka_consumer/inbox_consumer/USAGE +22 -0
  22. data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb +48 -0
  23. data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb +22 -0
  24. data/lib/generators/kafka_consumer/install/USAGE +9 -0
  25. data/lib/generators/kafka_consumer/install/install_generator.rb +22 -0
  26. data/lib/generators/kafka_consumer/install/templates/Kafkafile +3 -0
  27. data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml +59 -0
  28. data/lib/sbmt/kafka_consumer/app_initializer.rb +13 -0
  29. data/lib/sbmt/kafka_consumer/base_consumer.rb +104 -0
  30. data/lib/sbmt/kafka_consumer/cli.rb +55 -0
  31. data/lib/sbmt/kafka_consumer/client_configurer.rb +73 -0
  32. data/lib/sbmt/kafka_consumer/config/auth.rb +56 -0
  33. data/lib/sbmt/kafka_consumer/config/consumer.rb +16 -0
  34. data/lib/sbmt/kafka_consumer/config/consumer_group.rb +9 -0
  35. data/lib/sbmt/kafka_consumer/config/deserializer.rb +15 -0
  36. data/lib/sbmt/kafka_consumer/config/kafka.rb +32 -0
  37. data/lib/sbmt/kafka_consumer/config/metrics.rb +10 -0
  38. data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb +13 -0
  39. data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +11 -0
  40. data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb +10 -0
  41. data/lib/sbmt/kafka_consumer/config/probes.rb +8 -0
  42. data/lib/sbmt/kafka_consumer/config/topic.rb +14 -0
  43. data/lib/sbmt/kafka_consumer/config.rb +76 -0
  44. data/lib/sbmt/kafka_consumer/inbox_consumer.rb +129 -0
  45. data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +25 -0
  46. data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb +31 -0
  47. data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb +47 -0
  48. data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +71 -0
  49. data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +44 -0
  50. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb +23 -0
  51. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +106 -0
  52. data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb +38 -0
  53. data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +103 -0
  54. data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb +18 -0
  55. data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb +17 -0
  56. data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb +186 -0
  57. data/lib/sbmt/kafka_consumer/probes/host.rb +75 -0
  58. data/lib/sbmt/kafka_consumer/probes/probe.rb +33 -0
  59. data/lib/sbmt/kafka_consumer/railtie.rb +31 -0
  60. data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb +12 -0
  61. data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb +9 -0
  62. data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb +19 -0
  63. data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb +18 -0
  64. data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb +13 -0
  65. data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb +27 -0
  66. data/lib/sbmt/kafka_consumer/server.rb +35 -0
  67. data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb +11 -0
  68. data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +61 -0
  69. data/lib/sbmt/kafka_consumer/testing.rb +5 -0
  70. data/lib/sbmt/kafka_consumer/types.rb +15 -0
  71. data/lib/sbmt/kafka_consumer/version.rb +7 -0
  72. data/lib/sbmt/kafka_consumer/yabeda_configurer.rb +91 -0
  73. data/lib/sbmt/kafka_consumer.rb +59 -0
  74. data/rubocop/rspec.yml +29 -0
  75. data/sbmt-kafka_consumer.gemspec +70 -0
  76. metadata +571 -0
data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb ADDED
@@ -0,0 +1,9 @@
+ module Sbmt
+   module KafkaConsumer
+     module Routing
+       # uses the default Karafka v2 mapper;
+       # exists only for naming consistency with KarafkaV1ConsumerMapper
+       class KarafkaV2ConsumerMapper < Karafka::Routing::ConsumerMapper; end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb ADDED
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Serialization
+       class BaseDeserializer
+         attr_reader :skip_decoding_error
+
+         def initialize(skip_decoding_error: false)
+           @skip_decoding_error = skip_decoding_error
+         end
+
+         def call(_message)
+           raise NotImplementedError, "Implement this in a subclass"
+         end
+       end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Serialization
+       class JsonDeserializer < BaseDeserializer
+         def call(message)
+           # a nil payload can be present, e.g. for tombstone messages
+           message.raw_payload.nil? ? nil : ::JSON.parse(message.raw_payload)
+         rescue JSON::ParserError => e
+           raise Sbmt::KafkaConsumer::SkipUndeserializableMessage, "cannot decode message: #{e.message}, payload: #{message.raw_payload}" if skip_decoding_error
+
+           raise
+         end
+       end
+     end
+   end
+ end
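Not part of the diff: a minimal usage sketch of JsonDeserializer, assuming the gem is loaded and using a stand-in object that responds to #raw_payload the way Karafka messages do.

    require "json"

    # stand-in for Karafka::Messages::Message (hypothetical, for illustration only)
    StubMessage = Struct.new(:raw_payload)

    deserializer = Sbmt::KafkaConsumer::Serialization::JsonDeserializer.new(skip_decoding_error: true)

    deserializer.call(StubMessage.new('{"id":1}')) # => {"id" => 1}
    deserializer.call(StubMessage.new(nil))        # => nil (tombstone message)

    # invalid JSON raises SkipUndeserializableMessage because skip_decoding_error
    # is true; with the default (false), the original JSON::ParserError is re-raised
    deserializer.call(StubMessage.new("{oops"))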
data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb ADDED
@@ -0,0 +1,13 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Serialization
+       class NullDeserializer < BaseDeserializer
+         def call(message)
+           message.raw_payload
+         end
+       end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb ADDED
@@ -0,0 +1,27 @@
+ # frozen_string_literal: true
+
+ require "google/protobuf"
+
+ module Sbmt
+   module KafkaConsumer
+     module Serialization
+       class ProtobufDeserializer < BaseDeserializer
+         attr_reader :message_decoder
+
+         def initialize(message_decoder_klass:, skip_decoding_error: false)
+           super(skip_decoding_error: skip_decoding_error)
+
+           @message_decoder = message_decoder_klass.constantize
+         end
+
+         def call(message)
+           message_decoder.decode(message.raw_payload)
+         rescue Google::Protobuf::ParseError, ArgumentError => e
+           raise Sbmt::KafkaConsumer::SkipUndeserializableMessage, "cannot decode message: #{e.message}, payload: #{message.raw_payload}" if skip_decoding_error
+
+           raise
+         end
+       end
+     end
+   end
+ end
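Not part of the diff: because message_decoder_klass is constantized, the decoder class is passed by name. A sketch with a hypothetical protobuf-generated class:

    # "MyApp::OrderCreated" is a hypothetical protobuf-generated message class
    deserializer = Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer.new(
      message_decoder_klass: "MyApp::OrderCreated",
      skip_decoding_error: true
    )
    # #call then delegates to MyApp::OrderCreated.decode(message.raw_payload)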
data/lib/sbmt/kafka_consumer/server.rb ADDED
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     class Server < Karafka::Server
+       class << self
+         # the original class validates Karafka-specific server CLI options, which we override;
+         # see Karafka::Server for details
+         def run
+           Karafka::Server.listeners = []
+           Karafka::Server.workers = []
+
+           process.on_sigint { Karafka::Server.stop }
+           process.on_sigquit { Karafka::Server.stop }
+           process.on_sigterm { Karafka::Server.stop }
+           process.on_sigtstp { Karafka::Server.quiet }
+           process.supervise
+
+           $stdout.puts "Starting server"
+           Karafka::Server.start
+
+           sleep(0.1) until Karafka::App.terminated?
+         # rubocop:disable Lint/RescueException
+         rescue Exception => e
+           $stdout.puts "Cannot start server: #{e.message}"
+
+           # rubocop:enable Lint/RescueException
+           Karafka::Server.stop
+
+           raise e
+         end
+       end
+     end
+   end
+ end
data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb ADDED
@@ -0,0 +1,11 @@
+ # frozen_string_literal: true
+
+ class Sbmt::KafkaConsumer::SimpleLoggingConsumer < Sbmt::KafkaConsumer::BaseConsumer
+   private
+
+   def log_payload?
+     true
+   end
+
+   def process_message(_message); end
+ end
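Not part of the diff: a consumer that does real work follows the same pattern as SimpleLoggingConsumer, overriding the private process_message hook. All names below are hypothetical.

    # OrdersConsumer and Order.upsert_from_event! are illustrative, not part of the gem
    class OrdersConsumer < Sbmt::KafkaConsumer::BaseConsumer
      private

      def process_message(message)
        # message.payload is assumed to be the output of the configured deserializer
        Order.upsert_from_event!(message.payload)
      end
    end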
data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb ADDED
@@ -0,0 +1,61 @@
+ # frozen_string_literal: true
+
+ RSpec.shared_context "with sbmt karafka consumer" do
+   subject(:consume_with_sbmt_karafka) do
+     coordinator.increment
+     consumer.on_consume
+   end
+
+   let(:coordinator) {
+     instance = Karafka::Processing::Coordinator.new(test_topic, 0, instance_double(Karafka::TimeTrackers::Pause))
+     instance.instance_variable_set(:@seek_offset, -1)
+     instance
+   }
+   let(:test_consumer_group) { Karafka::Routing::ConsumerGroup.new(:test_group) }
+   let(:test_topic) { Karafka::Routing::Topic.new(:test_topic, test_consumer_group) }
+   let(:kafka_client) { instance_double(Karafka::Connection::Client) }
+   let(:null_deserializer) { Sbmt::KafkaConsumer::Serialization::NullDeserializer.new }
+
+   let(:consumer) {
+     build_consumer(described_class.new)
+   }
+
+   before {
+     Sbmt::KafkaConsumer::ClientConfigurer.configure!
+     allow(kafka_client).to receive(:assignment_lost?).and_return(false)
+     allow(kafka_client).to receive(:mark_as_consumed!).and_return(true)
+   }
+
+   def publish_to_sbmt_karafka(raw_payload, opts = {})
+     message = Karafka::Messages::Message.new(raw_payload, Karafka::Messages::Metadata.new(metadata_defaults.merge(opts)))
+     consumer.messages = Karafka::Messages::Messages.new(
+       [message],
+       Karafka::Messages::BatchMetadata.new(
+         topic: test_topic.name,
+         partition: 0,
+         processed_at: Time.zone.now,
+         created_at: Time.zone.now
+       )
+     )
+   end
+
+   # @return [Hash] message default options
+   def metadata_defaults
+     {
+       deserializer: null_deserializer,
+       headers: {},
+       key: nil,
+       offset: 0,
+       partition: 0,
+       received_at: Time.current,
+       topic: test_topic.name
+     }
+   end
+
+   def build_consumer(instance)
+     instance.coordinator = coordinator
+     instance.client = kafka_client
+     instance.singleton_class.include Karafka::Processing::Strategies::Default
+     instance
+   end
+ end
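Not part of the diff: a hypothetical spec built on the shared context above. described_class becomes the consumer under test, publish_to_sbmt_karafka feeds it a single message, and the consume_with_sbmt_karafka subject drives consumption.

    # OrdersConsumer is hypothetical; the helpers come from the
    # "with sbmt karafka consumer" shared context defined above
    RSpec.describe OrdersConsumer do
      include_context "with sbmt karafka consumer"

      it "consumes a published message" do
        publish_to_sbmt_karafka('{"id":1}', key: "1")

        expect { consume_with_sbmt_karafka }.not_to raise_error
      end
    end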
data/lib/sbmt/kafka_consumer/testing.rb ADDED
@@ -0,0 +1,5 @@
+ # frozen_string_literal: true
+
+ require "rspec"
+
+ Dir["#{__dir__}/testing/shared_contexts/*.rb"].sort.each { |f| require f }
data/lib/sbmt/kafka_consumer/types.rb ADDED
@@ -0,0 +1,15 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     module Types
+       include Dry.Types
+
+       ConfigAttrs = Dry::Types["hash"].constructor { |hsh| hsh.deep_symbolize_keys }
+
+       ConfigConsumer = Types.Constructor(Config::Consumer)
+       ConfigDeserializer = Types.Constructor(Config::Deserializer)
+       ConfigTopic = Types.Constructor(Config::Topic)
+     end
+   end
+ end
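Not part of the diff: a quick sketch of what the ConfigAttrs constructor does. It deep-symbolizes string keys (deep_symbolize_keys comes from ActiveSupport, pulled in via the rails dependency) before hashes reach the dry-struct config classes.

    Sbmt::KafkaConsumer::Types::ConfigAttrs.call("kafka" => {"servers" => "kafka:9092"})
    # => {kafka: {servers: "kafka:9092"}}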
data/lib/sbmt/kafka_consumer/version.rb ADDED
@@ -0,0 +1,7 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     VERSION = "2.0.0"
+   end
+ end
data/lib/sbmt/kafka_consumer/yabeda_configurer.rb ADDED
@@ -0,0 +1,91 @@
+ # frozen_string_literal: true
+
+ module Sbmt
+   module KafkaConsumer
+     class YabedaConfigurer
+       SIZE_BUCKETS = [1, 10, 100, 1000, 10_000, 100_000, 1_000_000].freeze
+       LATENCY_BUCKETS = [0.0001, 0.001, 0.01, 0.1, 1.0, 10, 100, 1000].freeze
+       DELAY_BUCKETS = [1, 3, 10, 30, 100, 300, 1000, 3000, 10_000, 30_000].freeze
+       def self.configure
+         Yabeda.configure do
+           group :kafka_api do
+             counter :calls,
+               tags: %i[client broker api],
+               comment: "API calls"
+             histogram :latency,
+               tags: %i[client broker api],
+               buckets: LATENCY_BUCKETS,
+               comment: "API latency"
+             histogram :request_size,
+               tags: %i[client broker api],
+               buckets: SIZE_BUCKETS,
+               comment: "API request size"
+             histogram :response_size,
+               tags: %i[client broker api],
+               buckets: SIZE_BUCKETS,
+               comment: "API response size"
+             counter :errors,
+               tags: %i[client broker api],
+               comment: "API errors"
+           end
+
+           group :kafka_consumer do
+             counter :consumer_group_rebalances,
+               tags: %i[client group_id state],
+               comment: "Consumer group rebalances"
+
+             counter :process_messages,
+               tags: %i[client group_id topic partition],
+               comment: "Messages consumed"
+
+             counter :process_message_errors,
+               tags: %i[client group_id topic partition],
+               comment: "Messages failed to process"
+
+             histogram :process_message_latency,
+               tags: %i[client group_id topic partition],
+               buckets: LATENCY_BUCKETS,
+               comment: "Consumer latency"
+
+             gauge :offset_lag,
+               tags: %i[client group_id topic partition],
+               comment: "Consumer offset lag"
+
+             gauge :time_lag,
+               tags: %i[client group_id topic partition],
+               comment: "Consumer time lag"
+
+             counter :process_batch_errors,
+               tags: %i[client group_id topic partition],
+               comment: "Batches failed to process"
+
+             histogram :process_batch_latency,
+               tags: %i[client group_id topic partition],
+               buckets: LATENCY_BUCKETS,
+               comment: "Consumer batch latency"
+
+             histogram :batch_size,
+               tags: %i[client group_id topic partition],
+               buckets: SIZE_BUCKETS,
+               comment: "Consumer batch size"
+
+             counter :leave_group_errors,
+               tags: %i[client group_id],
+               comment: "Consumer group leave errors"
+
+             gauge :pause_duration,
+               tags: %i[client group_id topic partition],
+               comment: "Consumer pause duration"
+
+             counter :inbox_consumes,
+               tags: %i[
+                 client group_id topic partition
+                 inbox_name event_name status
+               ],
+               comment: "Inbox item consumes"
+           end
+         end
+       end
+     end
+   end
+ end
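Not part of the diff: once YabedaConfigurer.configure has run, the declared metrics are updated through Yabeda's standard counter/histogram API, roughly as sketched below (tag values are illustrative).

    tags = {client: "my-app", group_id: "orders", topic: "orders", partition: 0}

    Yabeda.kafka_consumer.process_messages.increment(tags)
    Yabeda.kafka_consumer.process_message_latency.measure(tags, 0.05) # seconds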
data/lib/sbmt/kafka_consumer.rb ADDED
@@ -0,0 +1,59 @@
+ # frozen_string_literal: true
+
+ require "zeitwerk"
+ require "karafka"
+ require "active_record"
+ require "yabeda"
+ require "anyway_config"
+ require "thor"
+ require "dry/types"
+ require "dry-struct"
+
+ begin
+   require "sbmt/outbox"
+ rescue LoadError
+   # sbmt-outbox is an optional dependency
+ end
+
+ require "anyway/rails" if defined?(Rails)
+ require_relative "kafka_consumer/railtie" if defined?(Rails::Railtie)
+
+ module Sbmt
+   module KafkaConsumer
+     class << self
+       delegate :monitor, to: Karafka
+
+       def logger
+         @logger ||= Rails.logger
+       end
+     end
+
+     class Error < StandardError; end
+
+     class SkipUndeserializableMessage < Error; end
+   end
+ end
+
+ loader = Zeitwerk::Loader.new
+ # we need to set the parent dir as the gem autoloading root;
+ # see https://github.com/fxn/zeitwerk/issues/138#issuecomment-709640940 for details
+ loader.push_dir(File.join(__dir__, ".."))
+ loader.tag = "sbmt-kafka_consumer"
+
+ # optional dependencies (protobuf, opentelemetry, sentry) should not be eager-loaded
+ loader.do_not_eager_load("#{__dir__}/kafka_consumer/serialization/protobuf_deserializer.rb")
+ loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/open_telemetry_loader.rb")
+ loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/open_telemetry_tracer.rb")
+ loader.do_not_eager_load("#{__dir__}/kafka_consumer/instrumentation/sentry_tracer.rb")
+
+ # ignore testing helpers entirely:
+ # testing.rb just requires files and defines no constant (e.g. Testing) that Zeitwerk expects
+ loader.ignore("#{__dir__}/kafka_consumer/testing.rb")
+ loader.ignore("#{__dir__}/kafka_consumer/testing")
+ loader.ignore("#{File.expand_path("../", __dir__)}/generators")
+
+ loader.inflector.inflect("cli" => "CLI")
+ loader.inflector.inflect("version" => "VERSION")
+
+ loader.setup
+ loader.eager_load
data/rubocop/rspec.yml ADDED
@@ -0,0 +1,29 @@
+ RSpec/AnyInstance:
+   Enabled: false
+
+ RSpec/MultipleExpectations:
+   Enabled: false
+
+ RSpec/LetSetup:
+   Enabled: false
+
+ RSpec/StubbedMock:
+   Enabled: false
+
+ RSpec/MessageSpies:
+   Enabled: false
+
+ RSpec/NestedGroups:
+   Enabled: false
+
+ RSpec/EmptyExampleGroup:
+   Enabled: false
+
+ RSpec/ExampleLength:
+   Enabled: false
+
+ RSpec/MultipleMemoizedHelpers:
+   Enabled: false
+
+ RSpec/VariableName:
+   Enabled: false
data/sbmt-kafka_consumer.gemspec ADDED
@@ -0,0 +1,70 @@
+ # frozen_string_literal: true
+
+ require_relative "lib/sbmt/kafka_consumer/version"
+
+ Gem::Specification.new do |spec|
+   spec.name = "sbmt-kafka_consumer"
+   spec.license = "MIT"
+   spec.version = Sbmt::KafkaConsumer::VERSION
+   spec.authors = ["Sbermarket Ruby-Platform Team"]
+
+   spec.summary = "Ruby gem for consuming Kafka messages"
+   spec.description = "This gem is used for consuming Kafka messages. It is a wrapper over the Karafka gem and is recommended for use as a transport with sbmt-outbox"
+   spec.homepage = "https://github.com/SberMarket-Tech/sbmt-kafka_consumer"
+   spec.required_ruby_version = ">= 2.7.0"
+
+   spec.metadata["allowed_push_host"] = "https://rubygems.org"
+
+   spec.metadata["homepage_uri"] = spec.homepage
+   spec.metadata["source_code_uri"] = spec.homepage
+   spec.metadata["changelog_uri"] = "#{spec.homepage}/blob/master/CHANGELOG.md"
+   spec.metadata["rubygems_mfa_required"] = "false" # rubocop:disable Gemspec/RequireMFA
+
+   # Specify which files should be added to the gem when it is released.
+   # `git ls-files -z` lists the files tracked by git that will be shipped in the gem.
+   spec.files = Dir.chdir(__dir__) do
+     `git ls-files -z`.split("\x0").reject do |f|
+       (f == __FILE__) || f.match(%r{\A(?:(?:bin|test|spec|features)/|\.(?:git|travis|circleci)|appveyor)})
+     end
+   end
+   spec.bindir = "exe"
+   spec.executables = spec.files.grep(%r{\Aexe/}) { |f| File.basename(f) }
+   spec.require_paths = ["lib"]
+
+   spec.add_dependency "rails", ">= 5.2"
+   spec.add_dependency "zeitwerk", "~> 2.3"
+   spec.add_dependency "karafka", "~> 2.2"
+   spec.add_dependency "yabeda", ">= 0.11"
+   spec.add_dependency "anyway_config", ">= 2.4.0"
+   spec.add_dependency "thor"
+   spec.add_dependency "dry-struct"
+
+   spec.add_development_dependency "appraisal", ">= 2.4"
+   spec.add_development_dependency "bundler", ">= 2.1"
+   spec.add_development_dependency "combustion", ">= 1.3"
+   spec.add_development_dependency "rake", ">= 13.0"
+   spec.add_development_dependency "dry-monads", ">= 1.3"
+   spec.add_development_dependency "factory_bot_rails"
+   spec.add_development_dependency "pg"
+   spec.add_development_dependency "google-protobuf"
+   spec.add_development_dependency "sentry-rails", ">= 5.2"
+   spec.add_development_dependency "opentelemetry-sdk"
+   spec.add_development_dependency "opentelemetry-api", ">= 0.17"
+   spec.add_development_dependency "opentelemetry-common", ">= 0.17"
+   spec.add_development_dependency "opentelemetry-instrumentation-base", ">= 0.17"
+   spec.add_development_dependency "rspec", ">= 3.0"
+   spec.add_development_dependency "rspec_junit_formatter", ">= 0.6"
+   spec.add_development_dependency "rspec-rails", ">= 4.0"
+   spec.add_development_dependency "rubocop-rails", ">= 2.5"
+   spec.add_development_dependency "rubocop-rspec", ">= 2.11"
+   spec.add_development_dependency "sbmt-outbox", ">= 5.0"
+   spec.add_development_dependency "simplecov", ">= 0.16"
+   spec.add_development_dependency "standard", ">= 1.12"
+
+   # let metrics and probes work in dev mode with combustion,
+   # e.g. RAILS_ENV=development bundle exec kafka_consumer
+   spec.add_development_dependency "yabeda-prometheus-mmap"
+   spec.add_development_dependency "webrick"
+   spec.add_development_dependency "rack"
+   spec.add_development_dependency "http_health_check"
+ end
+ end