sbmt-kafka_consumer 2.0.0

Files changed (76)
  1. checksums.yaml +7 -0
  2. data/.rspec +3 -0
  3. data/.rubocop.yml +34 -0
  4. data/Appraisals +23 -0
  5. data/CHANGELOG.md +292 -0
  6. data/Gemfile +5 -0
  7. data/LICENSE +21 -0
  8. data/README.md +296 -0
  9. data/Rakefile +12 -0
  10. data/config.ru +9 -0
  11. data/dip.yml +84 -0
  12. data/docker-compose.yml +68 -0
  13. data/exe/kafka_consumer +16 -0
  14. data/lefthook-local.dip_example.yml +4 -0
  15. data/lefthook.yml +6 -0
  16. data/lib/generators/kafka_consumer/concerns/configuration.rb +30 -0
  17. data/lib/generators/kafka_consumer/consumer/USAGE +24 -0
  18. data/lib/generators/kafka_consumer/consumer/consumer_generator.rb +41 -0
  19. data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb +9 -0
  20. data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb +13 -0
  21. data/lib/generators/kafka_consumer/inbox_consumer/USAGE +22 -0
  22. data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb +48 -0
  23. data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb +22 -0
  24. data/lib/generators/kafka_consumer/install/USAGE +9 -0
  25. data/lib/generators/kafka_consumer/install/install_generator.rb +22 -0
  26. data/lib/generators/kafka_consumer/install/templates/Kafkafile +3 -0
  27. data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml +59 -0
  28. data/lib/sbmt/kafka_consumer/app_initializer.rb +13 -0
  29. data/lib/sbmt/kafka_consumer/base_consumer.rb +104 -0
  30. data/lib/sbmt/kafka_consumer/cli.rb +55 -0
  31. data/lib/sbmt/kafka_consumer/client_configurer.rb +73 -0
  32. data/lib/sbmt/kafka_consumer/config/auth.rb +56 -0
  33. data/lib/sbmt/kafka_consumer/config/consumer.rb +16 -0
  34. data/lib/sbmt/kafka_consumer/config/consumer_group.rb +9 -0
  35. data/lib/sbmt/kafka_consumer/config/deserializer.rb +15 -0
  36. data/lib/sbmt/kafka_consumer/config/kafka.rb +32 -0
  37. data/lib/sbmt/kafka_consumer/config/metrics.rb +10 -0
  38. data/lib/sbmt/kafka_consumer/config/probes/endpoints.rb +13 -0
  39. data/lib/sbmt/kafka_consumer/config/probes/liveness_probe.rb +11 -0
  40. data/lib/sbmt/kafka_consumer/config/probes/readiness_probe.rb +10 -0
  41. data/lib/sbmt/kafka_consumer/config/probes.rb +8 -0
  42. data/lib/sbmt/kafka_consumer/config/topic.rb +14 -0
  43. data/lib/sbmt/kafka_consumer/config.rb +76 -0
  44. data/lib/sbmt/kafka_consumer/inbox_consumer.rb +129 -0
  45. data/lib/sbmt/kafka_consumer/instrumentation/base_monitor.rb +25 -0
  46. data/lib/sbmt/kafka_consumer/instrumentation/chainable_monitor.rb +31 -0
  47. data/lib/sbmt/kafka_consumer/instrumentation/listener_helper.rb +47 -0
  48. data/lib/sbmt/kafka_consumer/instrumentation/liveness_listener.rb +71 -0
  49. data/lib/sbmt/kafka_consumer/instrumentation/logger_listener.rb +44 -0
  50. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_loader.rb +23 -0
  51. data/lib/sbmt/kafka_consumer/instrumentation/open_telemetry_tracer.rb +106 -0
  52. data/lib/sbmt/kafka_consumer/instrumentation/readiness_listener.rb +38 -0
  53. data/lib/sbmt/kafka_consumer/instrumentation/sentry_tracer.rb +103 -0
  54. data/lib/sbmt/kafka_consumer/instrumentation/tracer.rb +18 -0
  55. data/lib/sbmt/kafka_consumer/instrumentation/tracing_monitor.rb +17 -0
  56. data/lib/sbmt/kafka_consumer/instrumentation/yabeda_metrics_listener.rb +186 -0
  57. data/lib/sbmt/kafka_consumer/probes/host.rb +75 -0
  58. data/lib/sbmt/kafka_consumer/probes/probe.rb +33 -0
  59. data/lib/sbmt/kafka_consumer/railtie.rb +31 -0
  60. data/lib/sbmt/kafka_consumer/routing/karafka_v1_consumer_mapper.rb +12 -0
  61. data/lib/sbmt/kafka_consumer/routing/karafka_v2_consumer_mapper.rb +9 -0
  62. data/lib/sbmt/kafka_consumer/serialization/base_deserializer.rb +19 -0
  63. data/lib/sbmt/kafka_consumer/serialization/json_deserializer.rb +18 -0
  64. data/lib/sbmt/kafka_consumer/serialization/null_deserializer.rb +13 -0
  65. data/lib/sbmt/kafka_consumer/serialization/protobuf_deserializer.rb +27 -0
  66. data/lib/sbmt/kafka_consumer/server.rb +35 -0
  67. data/lib/sbmt/kafka_consumer/simple_logging_consumer.rb +11 -0
  68. data/lib/sbmt/kafka_consumer/testing/shared_contexts/with_sbmt_karafka_consumer.rb +61 -0
  69. data/lib/sbmt/kafka_consumer/testing.rb +5 -0
  70. data/lib/sbmt/kafka_consumer/types.rb +15 -0
  71. data/lib/sbmt/kafka_consumer/version.rb +7 -0
  72. data/lib/sbmt/kafka_consumer/yabeda_configurer.rb +91 -0
  73. data/lib/sbmt/kafka_consumer.rb +59 -0
  74. data/rubocop/rspec.yml +29 -0
  75. data/sbmt-kafka_consumer.gemspec +70 -0
  76. metadata +571 -0
data/dip.yml ADDED
@@ -0,0 +1,84 @@
+version: '7'
+
+environment:
+  RUBY_VERSION: '3.2'
+
+compose:
+  files:
+    - docker-compose.yml
+
+interaction:
+  bash:
+    description: Open the Bash shell in app's container
+    service: ruby
+    command: /bin/bash
+
+  bundle:
+    description: Run Bundler commands
+    service: ruby
+    command: bundle
+
+  rails:
+    description: Run RoR commands
+    service: ruby
+    command: bundle exec rails
+
+  appraisal:
+    description: Run Appraisal commands
+    service: ruby
+    command: bundle exec appraisal
+
+  rspec:
+    description: Run Rspec commands
+    service: ruby
+    command: bundle exec rspec
+    subcommands:
+      all:
+        command: bundle exec appraisal rspec
+      rails-6.0:
+        command: bundle exec appraisal rails-6.0 rspec
+      rails-6.1:
+        command: bundle exec appraisal rails-6.1 rspec
+      rails-7.0:
+        command: bundle exec appraisal rails-7.0 rspec
+      rails-7.1:
+        command: bundle exec appraisal rails-7.1 rspec
+
+  rubocop:
+    description: Run Ruby linter
+    service: ruby
+    command: bundle exec rubocop
+
+  setup:
+    description: Install deps
+    service: ruby
+    command: bin/setup
+
+  test:
+    description: Run linters, run all tests
+    service: ruby
+    command: bin/test
+
+  kafka-consumer:
+    description: Run kafka consumer
+    service: ruby
+    command: bundle exec kafka_consumer
+
+  kafka-producer:
+    description: Run kafka producer commands
+    service: kafka
+    command: kafka-console-producer.sh --bootstrap-server kafka:9092
+    subcommands:
+      inbox:
+        command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_inbox_items
+      json:
+        command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_json_data
+      protobuf:
+        command: kafka-console-producer.sh --bootstrap-server kafka:9092 --topic topic_with_protobuf_data
+
+provision:
+  - dip compose down --volumes
+  - cp -f lefthook-local.dip_example.yml lefthook-local.yml
+  - rm -f Gemfile.lock
+  - rm -f gemfiles/*gemfile*
+  - dip setup
data/docker-compose.yml ADDED
@@ -0,0 +1,68 @@
+services:
+  ruby:
+    image: ruby:${RUBY_VERSION:-3.2}
+    environment:
+      HISTFILE: /app/tmp/.bash_history
+      BUNDLE_PATH: /usr/local/bundle
+      BUNDLE_CONFIG: /app/.bundle/config
+      DATABASE_URL: postgres://postgres:@postgres:5432
+      KAFKAFILE: spec/internal/Kafkafile
+    depends_on:
+      kafka:
+        condition: service_started
+      postgres:
+        condition: service_started
+    command: bash
+    working_dir: /app
+    volumes:
+      - .:/app:cached
+      - bundler_data:/usr/local/bundle
+
+  postgres:
+    image: postgres:13
+    environment:
+      POSTGRES_HOST_AUTH_METHOD: trust
+    ports:
+      - 5432
+    healthcheck:
+      test: pg_isready -U postgres -h 127.0.0.1
+      interval: 10s
+
+  kafka:
+    image: bitnami/kafka:2.7.0
+    ports:
+      - '9092:9092'
+    environment:
+      - KAFKA_ZOOKEEPER_CONNECT=zookeeper:2181
+      - ALLOW_PLAINTEXT_LISTENER=yes
+      - KAFKA_CFG_AUTO_CREATE_TOPICS_ENABLE=true
+      - KAFKA_CFG_LISTENER_SECURITY_PROTOCOL_MAP=CLIENT:PLAINTEXT,INTERNAL:PLAINTEXT
+      - KAFKA_CFG_LISTENERS=CLIENT://:9092,INTERNAL://:9091
+      - KAFKA_CFG_ADVERTISED_LISTENERS=CLIENT://kafka:9092,INTERNAL://kafka:9091
+      - KAFKA_INTER_BROKER_LISTENER_NAME=INTERNAL
+    depends_on:
+      - zookeeper
+    healthcheck:
+      # we don't have `nc` installed in kafka image :(
+      test:
+        - CMD-SHELL
+        - echo 'exit' | curl --silent -f telnet://0.0.0.0:9092
+      interval: 15s
+      timeout: 5s
+      retries: 15
+
+  zookeeper:
+    image: bitnami/zookeeper:3.5
+    ports:
+      - '2181:2181'
+    environment:
+      - ALLOW_ANONYMOUS_LOGIN=yes
+    healthcheck:
+      test: ["CMD-SHELL", "echo ruok | nc localhost 2181"]
+      interval: 2s
+      timeout: 2s
+      retries: 15
+
+volumes:
+  bundler_data:
+  kafka:
data/exe/kafka_consumer ADDED
@@ -0,0 +1,16 @@
+#!/usr/local/bin/ruby
+# frozen_string_literal: true
+
+require "bundler/setup"
+require "sbmt/kafka_consumer"
+
+# rubocop:disable Lint/RescueException
+begin
+  Sbmt::KafkaConsumer::CLI.start(ARGV)
+rescue Exception => e
+  warn "KafkaConsumer exited with error"
+  warn(e.message) if e.respond_to?(:message)
+  warn(e.backtrace.join("\n")) if e.respond_to?(:backtrace) && e.backtrace.respond_to?(:join)
+  exit 1
+end
+# rubocop:enable Lint/RescueException
data/lefthook-local.dip_example.yml ADDED
@@ -0,0 +1,4 @@
+pre-commit:
+  commands:
+    rubocop:
+      run: dip {cmd}
data/lefthook.yml ADDED
@@ -0,0 +1,6 @@
+pre-commit:
+  commands:
+    rubocop:
+      tags: backend
+      glob: "{*.rb,**/*.rb,Gemfile,Rakefile}"
+      run: bundle exec rubocop -A --force-exclusion {staged_files} && git add {staged_files}
data/lib/generators/kafka_consumer/concerns/configuration.rb ADDED
@@ -0,0 +1,30 @@
+# frozen_string_literal: true
+
+module KafkaConsumer
+  module Generators
+    module Concerns
+      module Configuration
+        extend ActiveSupport::Concern
+
+        CONFIG_PATH = "config/kafka_consumer.yml"
+
+        def check_config_file!
+          config_path = File.expand_path(CONFIG_PATH)
+          return if File.exist?(config_path)
+
+          generate = ask "The file #{config_path} does not appear to exist. " \
+                         "Would you like to generate it? [Yn]"
+
+          generator_name = "kafka_consumer:install"
+          if (generate.presence || "y").casecmp("y").zero?
+            generate generator_name
+          else
+            raise Rails::Generators::Error, "Please generate #{config_path} " \
+                                            "by running `bin/rails g #{generator_name}` " \
+                                            "or add this file manually."
+          end
+        end
+      end
+    end
+  end
+end
data/lib/generators/kafka_consumer/consumer/USAGE ADDED
@@ -0,0 +1,24 @@
+Description:
+    Stubs out a new non-inbox consumer. Pass the consumer name, either
+    CamelCased or under_scored.
+
+Example:
+    bin/rails generate kafka_consumer:consumer Test
+
+    This will create:
+        app/consumers/test_consumer.rb
+
+    This will optionally insert:
+        'group_key':
+          name: <%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ 'group.name' } %><%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ '' } %>
+          topics:
+            - name: 'topic.name'
+              consumer:
+                klass: "TestConsumer"
+                # init_attrs:
+                #   skip_on_error: false # This is the default value
+              deserializer:
+                klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer"
+                init_attrs:
+                  message_decoder_klass: "YourMessageDecoderClassName"
+                  # skip_decoding_error: false # This is the default value
data/lib/generators/kafka_consumer/consumer/consumer_generator.rb ADDED
@@ -0,0 +1,41 @@
+# frozen_string_literal: true
+
+require "rails/generators/named_base"
+require "generators/kafka_consumer/concerns/configuration"
+
+module KafkaConsumer
+  module Generators
+    class ConsumerGenerator < Rails::Generators::NamedBase
+      include Concerns::Configuration
+
+      source_root File.expand_path("templates", __dir__)
+
+      def insert_consumer_class
+        @consumer_name = "#{name.classify}Consumer"
+        template "consumer.rb.erb", "app/consumers/#{file_path}_consumer.rb"
+      end
+
+      def configure_consumer_group
+        @group_key = ask "Would you also configure a consumer group?" \
+          " Type the group's key (e.g. my_consumer_group) or press Enter to skip this action"
+        return if @group_key.blank?
+
+        check_config_file!
+
+        @group_name = ask "Type the group's name (e.g. my.consumer.group)"
+        @topic = ask "Type the group topic's name"
+        insert_into_file CONFIG_PATH, group_template.result(binding), after: "consumer_groups:\n"
+      end
+
+      private
+
+      def group_template_path
+        File.join(ConsumerGenerator.source_root, "consumer_group.yml.erb")
+      end
+
+      def group_template
+        ERB.new(File.read(group_template_path), trim_mode: "%-")
+      end
+    end
+  end
+end
data/lib/generators/kafka_consumer/consumer/templates/consumer.rb.erb ADDED
@@ -0,0 +1,9 @@
+# frozen_string_literal: true
+
+<%- module_namespacing do -%>
+class <%= @consumer_name %> < Sbmt::KafkaConsumer::BaseConsumer
+  def process_message(_message)
+    # Add message processing here
+  end
+end
+<%- end -%>
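For a concrete sense of the output, this template is what `bin/rails generate kafka_consumer:consumer Test` (from the USAGE example above) renders. Assuming no extra namespace, the generated file should look roughly like this sketch:

# app/consumers/test_consumer.rb
# frozen_string_literal: true

class TestConsumer < Sbmt::KafkaConsumer::BaseConsumer
  def process_message(_message)
    # Add message processing here
  end
end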
data/lib/generators/kafka_consumer/consumer/templates/consumer_group.yml.erb ADDED
@@ -0,0 +1,13 @@
+'<%= @group_key %>':
+  name: <%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ "<%= @group_name %>" } %><%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ "" } %>
+  topics:
+    - name: "<%= @topic.presence || "insert-your-topic-name-here" %>"
+      consumer:
+        klass: "<%= @consumer_name %>"
+        # init_attrs:
+        #   skip_on_error: false # This is the default value
+      deserializer:
+        klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer"
+        init_attrs:
+          message_decoder_klass: "YourMessageDecoderClassName"
+          # skip_decoding_error: false # This is the default value
data/lib/generators/kafka_consumer/inbox_consumer/USAGE ADDED
@@ -0,0 +1,22 @@
+Description:
+    Inserts a consumer group's default configuration.
+    It accepts a group key, a group name and an optional array of topics as arguments.
+
+
+Example:
+    bin/rails generate kafka_consumer:inbox_consumer group_key group.name topic.name
+
+    This will insert:
+        'group_key':
+          name: <%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ 'group.name' } %><%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_SUFFIX'){ '' } %>
+          topics:
+            - name: 'topic.name'
+              consumer:
+                # Change the line below to the desired consumer
+                # if InboxConsumer doesn't suit your needs
+                klass: "Sbmt::KafkaConsumer::InboxConsumer"
+                init_attrs:
+                  name: "test_items"
+                  inbox_item: "SomeModelInboxItem" # Change this to your item class name
+              # deserializer: # This deserializer is used by default
+              #   klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer"
data/lib/generators/kafka_consumer/inbox_consumer/inbox_consumer_generator.rb ADDED
@@ -0,0 +1,48 @@
+# frozen_string_literal: true
+
+require "rails/generators/named_base"
+require "generators/kafka_consumer/concerns/configuration"
+
+module KafkaConsumer
+  module Generators
+    class InboxConsumerGenerator < Rails::Generators::NamedBase
+      include Concerns::Configuration
+
+      source_root File.expand_path("templates", __dir__)
+
+      argument :group_name, type: :string, banner: "group.name"
+      argument :topics, type: :array, default: [], banner: "topic topic"
+
+      def process_topics
+        check_config_file!
+
+        @items = {}
+        topics.each do |topic|
+          inbox_item = ask "Would you also add an InboxItem class for topic '#{topic}'?" \
+            " Type item's name in the form of SomeModel::InboxItem or press Enter" \
+            " to skip creating item's class"
+          @items[topic] = if inbox_item.blank?
+            nil
+          else
+            generate "outbox:item", inbox_item, "--kind inbox"
+            inbox_item.classify
+          end
+        end
+      end
+
+      def insert_consumer_group
+        insert_into_file CONFIG_PATH, group_template.result(binding), after: "consumer_groups:\n"
+      end
+
+      private
+
+      def group_template_path
+        File.join(InboxConsumerGenerator.source_root, "consumer_group.yml.erb")
+      end
+
+      def group_template
+        ERB.new(File.read(group_template_path), trim_mode: "%-")
+      end
+    end
+  end
+end
data/lib/generators/kafka_consumer/inbox_consumer/templates/consumer_group.yml.erb ADDED
@@ -0,0 +1,22 @@
+'<%= file_name %>':
+  name: <%%= ENV.fetch('ENV_VARIABLE_WITH_GROUP_NAME'){ '<%= group_name %>' } %><%%= ENV.fetch('CONSUMER_GROUP_SUFFIX'){ '' } %>
+<%- if @items.empty? -%>
+  topics: []
+<%- else -%>
+  topics:
+<%- @items.each do |topic, item_name| -%>
+<%- next if topic.blank? -%>
+<%- inbox_item = item_name.presence || "YourModelName::InboxItem" -%>
+<%- consumer_name = inbox_item.split('::').first.presence || "#{topic}_item" -%>
+    - name: "<%= topic %>"
+      consumer:
+        # Change the line below to the desired consumer
+        # if InboxConsumer doesn't suit your needs
+        klass: "Sbmt::KafkaConsumer::InboxConsumer"
+        init_attrs:
+          name: "<%= consumer_name.underscore.pluralize %>"
+          inbox_item: "<%= inbox_item %>"
+      # deserializer: # This deserializer is used by default
+      #   klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer"
+<%- end -%>
+<%- end -%>
data/lib/generators/kafka_consumer/install/USAGE ADDED
@@ -0,0 +1,9 @@
+Description:
+    Generates the Kafka consumer's initial setup
+
+Example:
+    bin/rails generate kafka_consumer:install
+
+    This will create:
+        Kafkafile
+        config/kafka_consumer.yml
data/lib/generators/kafka_consumer/install/install_generator.rb ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+require "rails/generators/base"
+require "generators/kafka_consumer/concerns/configuration"
+
+module KafkaConsumer
+  module Generators
+    class InstallGenerator < Rails::Generators::Base
+      include Concerns::Configuration
+
+      source_root File.expand_path("templates", __dir__)
+
+      def create_kafkafile
+        copy_file "Kafkafile", "./Kafkafile"
+      end
+
+      def create_kafka_consumer_yml
+        copy_file "kafka_consumer.yml", CONFIG_PATH
+      end
+    end
+  end
+end
data/lib/generators/kafka_consumer/install/templates/Kafkafile ADDED
@@ -0,0 +1,3 @@
+# frozen_string_literal: true
+
+require_relative "config/environment"
data/lib/generators/kafka_consumer/install/templates/kafka_consumer.yml ADDED
@@ -0,0 +1,59 @@
+default: &default
+  client_id: 'some-name'
+  max_wait_time: 1
+  shutdown_timeout: 60
+  concurrency: 4
+  pause_timeout: 1
+  pause_max_timeout: 30
+  pause_with_exponential_backoff: true
+  auth:
+    kind: plaintext
+  kafka:
+    servers: "kafka:9092"
+    heartbeat_timeout: 5
+    session_timeout: 30
+    reconnect_timeout: 3
+    connect_timeout: 5
+    socket_timeout: 30
+    kafka_options:
+      allow.auto.create.topics: true
+  consumer_groups:
+  # group_ref_id_1:
+  #   name: cg_with_single_topic
+  #   topics:
+  #     - name: topic_with_inbox_items
+  #       consumer:
+  #         klass: "Sbmt::KafkaConsumer::InboxConsumer"
+  #         init_attrs:
+  #           name: "test_items"
+  #           inbox_item: "TestInboxItem"
+  #       deserializer:
+  #         klass: "Sbmt::KafkaConsumer::Serialization::NullDeserializer"
+  # group_ref_id_2:
+  #   name: cg_with_multiple_topics
+  #   topics:
+  #     - name: topic_with_json_data
+  #       consumer:
+  #         klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer"
+  #       deserializer:
+  #         klass: "Sbmt::KafkaConsumer::Serialization::JsonDeserializer"
+  #     - name: topic_with_protobuf_data
+  #       consumer:
+  #         klass: "Sbmt::KafkaConsumer::SimpleLoggingConsumer"
+  #       deserializer:
+  #         klass: "Sbmt::KafkaConsumer::Serialization::ProtobufDeserializer"
+  #         init_attrs:
+  #           message_decoder_klass: "Sso::UserRegistration"
+  #           skip_decoding_error: true
+  probes:
+    port: 9394
+
+development:
+  <<: *default
+test:
+  <<: *default
+  deliver: false
+staging: &staging
+  <<: *default
+production:
+  <<: *staging
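The environment sections of this template rely on YAML merge keys: `development`, `test`, `staging` and `production` all start from the `default` anchor and override only what differs. A minimal Ruby sketch to inspect the merged result (assuming Psych 3.3+, where documents with anchors need `aliases: true`):

require "yaml"

# Load the template and check one environment's effective settings.
config = YAML.load_file("config/kafka_consumer.yml", aliases: true)

config["test"]["deliver"]      # => false (set only for test)
config["test"]["concurrency"]  # => 4 (inherited from the default anchor)
config["production"]["probes"] # => {"port"=>9394} (inherited via staging)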
data/lib/sbmt/kafka_consumer/app_initializer.rb ADDED
@@ -0,0 +1,13 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaConsumer
+    module AppInitializer
+      extend self
+
+      def initialize!
+        ClientConfigurer.configure!
+      end
+    end
+  end
+end
data/lib/sbmt/kafka_consumer/base_consumer.rb ADDED
@@ -0,0 +1,104 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaConsumer
+    class BaseConsumer < Karafka::BaseConsumer
+      attr_reader :trace_id
+
+      def self.consumer_klass(skip_on_error: false)
+        Class.new(self) do
+          const_set(:SKIP_ON_ERROR, skip_on_error)
+
+          def self.name
+            superclass.name
+          end
+        end
+      end
+
+      def consume
+        ::Rails.application.executor.wrap do
+          messages.each do |message|
+            with_instrumentation(message) { do_consume(message) }
+          end
+        end
+      end
+
+      private
+
+      def with_instrumentation(message)
+        @trace_id = SecureRandom.base58
+
+        logger.tagged(
+          trace_id: trace_id,
+          topic: message.metadata.topic, partition: message.metadata.partition,
+          key: message.metadata.key, offset: message.metadata.offset
+        ) do
+          ::Sbmt::KafkaConsumer.monitor.instrument(
+            "consumer.consumed_one",
+            caller: self, message: message, trace_id: trace_id
+          ) do
+            do_consume(message)
+          rescue SkipUndeserializableMessage => ex
+            instrument_error(ex, message)
+            logger.warn("skipping undeserializable message: #{ex.message}")
+          rescue => ex
+            instrument_error(ex, message)
+
+            if skip_on_error
+              logger.warn("skipping unprocessable message: #{ex.message}, message: #{message_payload(message).inspect}")
+            else
+              raise ex
+            end
+          end
+        end
+      end
+
+      def do_consume(message)
+        log_message(message) if log_payload?
+
+        # deserialization process is lazy (and cached)
+        # so we trigger it explicitly to catch undeserializable message early
+        message.payload
+
+        process_message(message)
+
+        mark_as_consumed!(message)
+      end
+
+      def skip_on_error
+        self.class::SKIP_ON_ERROR
+      end
+
+      # can be overridden in consumer to enable message logging
+      def log_payload?
+        false
+      end
+
+      def logger
+        ::Sbmt::KafkaConsumer.logger
+      end
+
+      def process_message(_message)
+        raise NotImplementedError, "Implement this in a subclass"
+      end
+
+      def log_message(message)
+        logger.info("#{message_payload(message).inspect}, message_key: #{message.metadata.key}, message_headers: #{message.metadata.headers}")
+      end
+
+      def instrument_error(error, message)
+        ::Sbmt::KafkaConsumer.monitor.instrument(
+          "error.occurred",
+          error: error,
+          caller: self,
+          message: message,
+          type: "consumer.base.consume_one"
+        )
+      end
+
+      def message_payload(message)
+        message.payload || message.raw_payload
+      end
+    end
+  end
+end
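A note on the `consumer_klass` factory above: it bakes per-topic settings such as `skip_on_error` into an anonymous subclass instead of requiring a new named class, while the `self.name` override keeps the parent's name for logs and metrics. A minimal sketch of the observable behaviour (`MyConsumer` is a hypothetical subclass, not part of the gem):

class MyConsumer < Sbmt::KafkaConsumer::BaseConsumer
  def process_message(message)
    # handle message.payload here
  end
end

klass = MyConsumer.consumer_klass(skip_on_error: true)
klass::SKIP_ON_ERROR # => true, read later by BaseConsumer#skip_on_error
klass.name           # => "MyConsumer", via the superclass.name override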
data/lib/sbmt/kafka_consumer/cli.rb ADDED
@@ -0,0 +1,55 @@
+# frozen_string_literal: true
+
+module Sbmt
+  module KafkaConsumer
+    class CLI < Thor
+      def self.exit_on_failure?
+        true
+      end
+
+      default_command :start
+
+      desc "start", "Start kafka_consumer worker"
+      option :consumer_group_id,
+        aliases: "-g",
+        desc: "Consumer group id to start",
+        repeatable: true
+      option :concurrency,
+        aliases: "-c",
+        type: :numeric,
+        default: 5,
+        desc: "Number of threads, overrides global kafka.concurrency config"
+      def start
+        $stdout.puts "Initializing KafkaConsumer"
+        $stdout.puts "Version: #{VERSION}"
+
+        load_environment
+
+        $stdout.sync = true
+
+        $stdout.puts "Configuring client"
+        ClientConfigurer.configure!(
+          consumer_groups: options[:consumer_group_id],
+          concurrency: options[:concurrency]
+        )
+        $stdout.puts "Client configured routes: #{ClientConfigurer.routes.inspect}"
+
+        $stdout.puts "Starting probes/metrics http-server"
+        Sbmt::KafkaConsumer::Probes::Host.run_async
+
+        Sbmt::KafkaConsumer::Server.run
+      end
+
+      private
+
+      def load_environment
+        env_file_path = ENV["KAFKAFILE"] || "#{Dir.pwd}/Kafkafile"
+
+        if File.exist?(env_file_path)
+          $stdout.puts "Loading env from Kafkafile: #{env_file_path}"
+          load(env_file_path)
+        end
+      end
+    end
+  end
+end
+ end