karafka 1.0.0 → 1.0.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 49921e358054ab6470899c0e122cc9c93f5398ea
- data.tar.gz: 183d5ff52e6f4ce09110287147d735929ff2a6e7
+ metadata.gz: c7d3b30943ab27b3ae2d8609e645590ca918ff2b
+ data.tar.gz: 0b6b31b10d070c07d45ab6bb8bc177e94d4d9c3c
  SHA512:
- metadata.gz: cd8ad63256d3133dbc1c4a4676b54c64815353047a8cb42c87e6ef343ff6af12099ff07d03d01b4a3b61fc648d79dc6b13c1f969a48f3ab04f1dc3a91799e99b
- data.tar.gz: 345addc070e2b13fc7d11f07faefccbcfbffeedad3ce106d98a335c0e54f5c29731fc9288c46b114aee4df62f7578c2e120e8b61b7a378c517881221b2254e4e
+ metadata.gz: b8a3f16977e6af10f6cda783c326be7bb8540b483cacfe225fc7c422cad091d9014950c6a6de4a9f5cd033b974a41de73bc445e7813063d12013684f056d35df
+ data.tar.gz: 2e425da4e9e420ad092b3b563e5fa796f3f73159bc9c5d6cdf77bbf8463842f0a9379fe156b54328a3f65144f08ec657fc6f65aa01295a3efc40181e4bd0d858
data/.ruby-version CHANGED
@@ -1 +1 @@
- 2.4.1
+ 2.4.2
data/.travis.yml CHANGED
@@ -8,7 +8,8 @@ rvm:
  - 2.3.4
  - 2.4.0
  - 2.4.1
- - jruby-9.1.12.0
+ - 2.4.2
+ - jruby-head
  script: bundle exec rspec spec/
  env:
    global:
data/CHANGELOG.md CHANGED
@@ -1,5 +1,24 @@
  # Karafka framework changelog
  
+ ## 1.0.1
+ - #210 - LoadError: cannot load such file -- [...]/karafka.rb
+ - Ruby 2.4.2 as a default (+ Travis integration)
+ - JRuby upgrade
+ - Expanded persistence layer (moved to a namespace for easier future development)
+ - #213 - Misleading error when non-existing dependency is required
+ - #212 - Make params react to #topic, #partition, #offset
+ - #215 - Consumer group route dynamic options are ignored
+ - #217 - Check RUBY_ENGINE constant if RUBY_VERSION is missing
+ - #218 - Add configuration setting to control Celluloid's shutdown timeout
+ - Renamed Karafka::Routing::Mapper to Karafka::Routing::TopicMapper to match naming conventions
+ - #219 - Allow explicit consumer group names, without prefixes
+ - Fix too early removed pid upon shutdown of daemonized process
+ - max_wait_time updated to match https://github.com/zendesk/ruby-kafka/issues/433
+ - #230 - Better URI validation for seed brokers (incompatibility, as the kafka:// or kafka+ssl:// scheme is now required)
+ - Small internal docs fixes
+ - Dry::Validation::MissingMessageError: message for broker_schema? was not found
+ - #238 - warning: already initialized constant Karafka::Schemas::URI_SCHEMES
+
  ## 1.0.0
  
  ### Closed issues:
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
  PATH
    remote: .
    specs:
-     karafka (1.0.0)
+     karafka (1.0.1)
        activesupport (>= 5.0)
        celluloid
        dry-configurable (~> 0.7)
@@ -12,12 +12,12 @@ PATH
        require_all (>= 1.4)
        ruby-kafka (>= 0.4)
        thor (~> 0.19)
-       waterdrop (>= 0.4)
+       waterdrop (~> 0.4)
  
  GEM
    remote: https://rubygems.org/
    specs:
-     activesupport (5.1.3)
+     activesupport (5.1.4)
        concurrent-ruby (~> 1.0, >= 1.0.2)
        i18n (~> 0.7)
        minitest (~> 5.1)
@@ -48,54 +48,55 @@ GEM
      dry-container (0.6.0)
        concurrent-ruby (~> 1.0)
        dry-configurable (~> 0.1, >= 0.1.3)
-     dry-core (0.3.3)
+     dry-core (0.3.4)
        concurrent-ruby (~> 1.0)
      dry-equalizer (0.2.0)
-     dry-logic (0.4.1)
+     dry-logic (0.4.2)
        dry-container (~> 0.2, >= 0.2.6)
        dry-core (~> 0.2)
        dry-equalizer (~> 0.2)
-     dry-types (0.11.1)
+     dry-types (0.12.1)
        concurrent-ruby (~> 1.0)
        dry-configurable (~> 0.1)
        dry-container (~> 0.3)
        dry-core (~> 0.2, >= 0.2.1)
        dry-equalizer (~> 0.2)
-       dry-logic (~> 0.4, >= 0.4.0)
+       dry-logic (~> 0.4, >= 0.4.2)
        inflecto (~> 0.0.0, >= 0.0.2)
-     dry-validation (0.11.0)
+     dry-validation (0.11.1)
        concurrent-ruby (~> 1.0)
        dry-configurable (~> 0.1, >= 0.1.3)
        dry-core (~> 0.2, >= 0.2.1)
        dry-equalizer (~> 0.2)
        dry-logic (~> 0.4, >= 0.4.0)
-       dry-types (~> 0.11.0)
+       dry-types (~> 0.12.0)
      envlogic (1.0.4)
        activesupport
      hitimes (1.2.6)
-     i18n (0.8.6)
+     i18n (0.9.0)
+       concurrent-ruby (~> 1.0)
      inflecto (0.0.2)
      json (2.1.0)
      minitest (5.10.3)
      multi_json (1.12.2)
      null-logger (0.1.4)
-     rake (12.0.0)
+     rake (12.2.1)
      require_all (1.4.0)
-     rspec (3.6.0)
-       rspec-core (~> 3.6.0)
-       rspec-expectations (~> 3.6.0)
-       rspec-mocks (~> 3.6.0)
-     rspec-core (3.6.0)
-       rspec-support (~> 3.6.0)
-     rspec-expectations (3.6.0)
+     rspec (3.7.0)
+       rspec-core (~> 3.7.0)
+       rspec-expectations (~> 3.7.0)
+       rspec-mocks (~> 3.7.0)
+     rspec-core (3.7.0)
+       rspec-support (~> 3.7.0)
+     rspec-expectations (3.7.0)
        diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.6.0)
-     rspec-mocks (3.6.0)
+       rspec-support (~> 3.7.0)
+     rspec-mocks (3.7.0)
        diff-lcs (>= 1.2.0, < 2.0)
-       rspec-support (~> 3.6.0)
-     rspec-support (3.6.0)
-     ruby-kafka (0.4.1)
-     simplecov (0.15.0)
+       rspec-support (~> 3.7.0)
+     rspec-support (3.7.0)
+     ruby-kafka (0.4.3)
+     simplecov (0.15.1)
        docile (~> 1.1.0)
        json (>= 1.8, < 3)
        simplecov-html (~> 0.10.0)
@@ -105,7 +106,7 @@ GEM
      timecop (0.9.1)
      timers (4.1.2)
        hitimes
-     tzinfo (1.2.3)
+     tzinfo (1.2.4)
        thread_safe (~> 0.1)
      waterdrop (0.4.0)
        bundler
@@ -125,4 +126,4 @@ DEPENDENCIES
    timecop
  
  BUNDLED WITH
-    1.14.6
+    1.15.4
data/README.md CHANGED
@@ -1,6 +1,6 @@
  ![karafka logo](https://raw.githubusercontent.com/karafka/misc/master/logo/karafka_logotype_transparent2.png)
  
- [![Build Status](https://travis-ci.org/karafka/karafka.png)](https://travis-ci.org/karafka/karafka)
+ [![Build Status](https://travis-ci.org/karafka/karafka.svg?branch=master)](https://travis-ci.org/karafka/karafka)
  [![Backers on Open Collective](https://opencollective.com/karafka/backers/badge.svg)](#backers) [![Sponsors on Open Collective](https://opencollective.com/karafka/sponsors/badge.svg)](#sponsors) [![Join the chat at https://gitter.im/karafka/karafka](https://badges.gitter.im/karafka/karafka.svg)](https://gitter.im/karafka/karafka?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge)
  
  Framework used to simplify Apache Kafka based Ruby applications development.
@@ -11,7 +11,7 @@ Karafka not only handles incoming messages but also provides tools for building
  
  ## How does it work
  
- Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstration layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similarly to how Rails applications are being built.
+ Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstraction layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similarly to how Rails applications are being built.
  
  Karafka based applications can be easily deployed to any type of infrastructure, including those based on:
  
@@ -73,7 +73,7 @@ Commit, do not mess with Rakefile, version, or history. (if you want to have you
  
  [![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/repositories/karafka)
  
- Each pull request must pass our quality requirements. To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combinse multiple linters and code analyzers for both code and documentation.
+ Each pull request must pass our quality requirements. To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation.
  
  Unfortunately, it does not yet support independent forks, however you should be fine by looking at what we require.
  
data/bin/karafka CHANGED
@@ -1,7 +1,19 @@
  #!/usr/bin/env ruby
  
  require 'karafka'
- require Karafka.boot_file.to_s
+
+ # If there is a boot file, we need to require it as we expect it to contain
+ # Karafka app setup, routes, etc
+ if File.exist?(Karafka.boot_file)
+   require Karafka.boot_file.to_s
+ else
+   # However when it is unavailable, we still want to be able to run the help
+   # and install commands, as they don't require a configured app to run
+   raise(
+     Karafka::Errors::MissingBootFile,
+     Karafka.boot_file
+   ) unless %w[-h install].include?(ARGV[0])
+ end
  
  Karafka::Cli.prepare
  Karafka::Cli.start
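
Note: with this guard in place, only `karafka -h` and `karafka install` run without a boot file; every other command fails fast with a descriptive error instead of the bare LoadError from #210. Illustrative behavior, assuming no karafka.rb boot file exists yet:

    # `karafka install`  -> works and generates the boot file
    # `karafka -h`       -> works and prints help
    # `karafka server`   -> raises Karafka::Errors::MissingBootFile with the expected path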
data/config/errors.yml ADDED
@@ -0,0 +1,6 @@
+ en:
+   errors:
+     broker_schema?: >
+       has an invalid format.
+       Expected schema, host and port number.
+       Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
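
Note: this message backs the broker_schema? predicate added in schemas/consumer_group.rb below, and shipping it in config/errors.yml is what resolves the Dry::Validation::MissingMessageError listed in the changelog. A hedged sketch of a conforming setup (the config.kafka.seed_brokers path follows the existing setup config; values are illustrative):

    class App < Karafka::App
      setup do |config|
        config.client_id = 'example_app'
        config.kafka.seed_brokers = ['kafka://127.0.0.1:9092']  # valid under #230
        # ['127.0.0.1:9092'] would now be rejected: scheme and port are required
      end
    end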
data/karafka.gemspec CHANGED
@@ -16,17 +16,17 @@ Gem::Specification.new do |spec|
    spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
    spec.license = 'MIT'
  
-   spec.add_dependency 'ruby-kafka', '>= 0.4'
-   spec.add_dependency 'celluloid'
-   spec.add_dependency 'envlogic', '~> 1.0'
-   spec.add_dependency 'waterdrop', '>= 0.4'
-   spec.add_dependency 'rake', '>= 11.3'
-   spec.add_dependency 'thor', '~> 0.19'
    spec.add_dependency 'activesupport', '>= 5.0'
-   spec.add_dependency 'dry-validation', '~> 0.11'
+   spec.add_dependency 'celluloid'
    spec.add_dependency 'dry-configurable', '~> 0.7'
+   spec.add_dependency 'dry-validation', '~> 0.11'
+   spec.add_dependency 'envlogic', '~> 1.0'
    spec.add_dependency 'multi_json', '>= 1.12'
+   spec.add_dependency 'rake', '>= 11.3'
    spec.add_dependency 'require_all', '>= 1.4'
+   spec.add_dependency 'ruby-kafka', '>= 0.4'
+   spec.add_dependency 'thor', '~> 0.19'
+   spec.add_dependency 'waterdrop', '~> 0.4'
  
    spec.required_ruby_version = '>= 2.3.0'
  
data/lib/karafka/base_controller.rb CHANGED
@@ -53,7 +53,7 @@ module Karafka
    # @see http://api.rubyonrails.org/classes/ActiveSupport/Callbacks/ClassMethods.html#method-i-get_callbacks
    define_callbacks :after_received
  
-   attr_accessor :params_batch
+   attr_reader :params_batch
  
    class << self
      attr_reader :topic
@@ -65,7 +65,6 @@ module Karafka
    def topic=(topic)
      @topic = topic
      Controllers::Includer.call(self)
-     @topic
    end
  
    # Creates a callback that will be executed after receiving message but before executing the
data/lib/karafka/cli/base.rb CHANGED
@@ -18,6 +18,8 @@ module Karafka
    #   end
    # end
    class Base
+     include Thor::Shell
+
      # We can use it to call other cli methods via this object
      attr_reader :cli
  
data/lib/karafka/cli/info.rb CHANGED
@@ -15,7 +15,7 @@ module Karafka
    if any_topics
      puts "#{topic.name} =>"
  
-     topic.responder.topics.each do |_name, responder_topic|
+     topic.responder.topics.each_value do |responder_topic|
        features = []
        features << (responder_topic.required? ? 'always' : 'conditionally')
        features << (responder_topic.multiple_usage? ? 'one or more' : 'exactly once')
data/lib/karafka/cli/server.rb CHANGED
@@ -30,13 +30,13 @@ module Karafka
      Celluloid.boot
    end
  
-   # Remove pidfile on shutdown
-   ObjectSpace.define_finalizer(String.new, proc { send(:clean) })
-
    # We assign active topics on a server level, as only server is expected to listen on
    # part of the topics
    Karafka::Server.consumer_groups = cli.options[:consumer_groups]
  
+   # Remove pidfile on shutdown, just before the server instance is going to be GCed
+   ObjectSpace.define_finalizer(self, proc { send(:clean) })
+
    # After we fork, we can boot celluloid again
    Karafka::Server.run
  end
@@ -62,7 +62,7 @@ module Karafka
  
    # Removes a pidfile (if exist)
    def clean
-     FileUtils.rm_f(cli.options[:pid])
+     FileUtils.rm_f(cli.options[:pid]) if cli.options[:pid]
    end
  end
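
Note: the old finalizer was anchored to a throwaway String.new, so the pidfile could be removed as soon as that string was garbage collected; anchoring it to self ties the cleanup to the command object's lifetime. A standalone sketch of the same pattern (names are illustrative, not from the gem):

    # Finalizers fire when their anchor object is garbage collected, so the
    # anchor must live as long as the resource it cleans up.
    class PidfileOwner
      def initialize(path)
        ObjectSpace.define_finalizer(self, self.class.cleaner(path))
      end

      # Built in a class method so the proc does not capture `self`;
      # a finalizer that references its own object can never be collected.
      def self.cleaner(path)
        proc { File.delete(path) if File.exist?(path) }
      end
    end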
data/lib/karafka/connection/config_adapter.rb CHANGED
@@ -41,25 +41,24 @@ module Karafka
    # @return [Hash] hash with all the settings required by Kafka#consumer method
    def consumer(consumer_group)
      settings = { group_id: consumer_group.id }
-     settings = fetch_for(:consumer, settings)
+     settings = fetch_for(:consumer, consumer_group, settings)
      sanitize(settings)
    end
  
    # Builds all the configuration settings for kafka consumer consume_each_batch and
    # consume_each_message methods
-   # @param _consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
+   # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
    # @return [Hash] hash with all the settings required by
    #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
-   def consuming(_consumer_group)
-     sanitize(fetch_for(:consuming))
+   def consuming(consumer_group)
+     sanitize(fetch_for(:consuming, consumer_group))
    end
  
    # Builds all the configuration settings for kafka consumer#subscribe method
    # @param topic [Karafka::Routing::Topic] topic that holds details for a given subscription
    # @return [Hash] hash with all the settings required by kafka consumer#subscribe method
    def subscription(topic)
-     settings = { start_from_beginning: topic.start_from_beginning }
-     settings = fetch_for(:subscription, settings)
+     settings = fetch_for(:subscription, topic)
      [Karafka::App.config.topic_mapper.outgoing(topic.name), sanitize(settings)]
    end
  
@@ -74,13 +73,19 @@ module Karafka
  
    # Fetches proper settings for a given map namespace
    # @param namespace_key [Symbol] namespace from attributes map config adapter hash
+   # @param route_layer [Object] route topic or consumer group
    # @param preexisting_settings [Hash] hash with some preexisting settings that might have
    #   been loaded in a different way
-   def fetch_for(namespace_key, preexisting_settings = {})
-     kafka_configs.each do |setting_name, setting_value|
+   def fetch_for(namespace_key, route_layer, preexisting_settings = {})
+     kafka_configs.each_key do |setting_name|
+       # Ignore settings that are not related to our namespace
        next unless AttributesMap.config_adapter[namespace_key].include?(setting_name)
+       # Ignore settings that are already initialized
+       # In case they are in preexisting settings fetched differently
        next if preexisting_settings.keys.include?(setting_name)
-       preexisting_settings[setting_name] = setting_value
+       # Fetch all the settings from a given layer object. Objects can handle the fallback
+       # to the kafka settings, so we do not have to handle it here
+       preexisting_settings[setting_name] = route_layer.send(setting_name)
      end
  
      preexisting_settings
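
Note: the adapter now asks the route layer (a topic or a consumer group) for each whitelisted setting instead of copying global kafka values, which is what fixes the ignored dynamic route options from #215. A hedged sketch of the resulting lookup order (route objects handle the fallback to the global defaults themselves):

    # For every setting whitelisted for the :subscription namespace, the
    # route-level value wins; falling back to Karafka::App.config.kafka
    # happens inside the routing object, not here.
    AttributesMap.config_adapter[:subscription].each do |setting_name|
      settings[setting_name] ||= topic.send(setting_name)
    end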
data/lib/karafka/connection/messages_processor.rb CHANGED
@@ -20,15 +20,17 @@ module Karafka
        # @see topic_mapper internal docs
        mapped_topic_name = Karafka::App.config.topic_mapper.incoming(kafka_messages[0].topic)
        topic = Routing::Router.find("#{group_id}_#{mapped_topic_name}")
-
-       # Depending on a case (persisted or not) we might use new controller instance per each
-       # batch, or use the same instance for all of them (for implementing buffering, etc)
-       controller = Persistence.fetch(topic, kafka_messages[0].partition, :controller) do
+       controller = Persistence::Controller.fetch(topic, kafka_messages[0].partition) do
          topic.controller.new
        end
  
-       handler = topic.batch_processing ? :process_batch : :process_each
-       send(handler, controller, kafka_messages)
+       # Depending on a case (persisted or not) we might use new controller instance per each
+       # batch, or use the same instance for all of them (for implementing buffering, etc)
+       send(
+         topic.batch_processing ? :process_batch : :process_each,
+         controller,
+         kafka_messages
+       )
      end
  
      private
data/lib/karafka/errors.rb CHANGED
@@ -29,5 +29,8 @@ module Karafka
  
      # Raised when configuration doesn't match with validation schema
      InvalidConfiguration = Class.new(BaseError)
+
+     # Raised when we try to use Karafka CLI commands (except install) without a bootfile
+     MissingBootFile = Class.new(BaseError)
    end
  end
data/lib/karafka/params/params.rb CHANGED
@@ -16,6 +16,18 @@ module Karafka
      key
    ].freeze
  
+   # Params attributes that should be available via a method call invocation for Kafka
+   # client compatibility.
+   # Kafka passes internally a Kafka::FetchedMessage object and the ruby-kafka consumer
+   # uses those fields via method calls, so in order to be able to pass our params
+   # objects there, they have to have the same api.
+   PARAMS_METHOD_ATTRIBUTES = %i[
+     topic
+     partition
+     offset
+     key
+   ].freeze
+
    class << self
      # We allow building instances only via the #build method
  
@@ -51,6 +63,18 @@ module Karafka
        end
      end
    end
+
+   # Defines a method call accessor to a particular hash field.
+   # @note Won't work for complex key names that contain spaces, etc
+   # @param key [Symbol] name of a field that we want to retrieve with a method call
+   # @example
+   #   key_attr_reader :example
+   #   params.example #=> 'my example value'
+   def key_attr_reader(key)
+     define_method key do
+       self[key]
+     end
+   end
  end
  
  # @return [Karafka::Params::Params] this will trigger parser execution. If we decide to
@@ -63,6 +87,7 @@ module Karafka
    merge!(parse(delete(:value)))
  end
  
+ PARAMS_METHOD_ATTRIBUTES.each(&method(:key_attr_reader))
  
  private
  
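Note: with these readers a Params hash can be handed to ruby-kafka code paths that expect a Kafka::FetchedMessage-like object (#212). A minimal usage sketch (values are illustrative; #build per the existing Params API):

    params = Karafka::Params::Params.build(kafka_message, parser)
    params.topic      #=> 'orders'
    params.partition  #=> 0
    params.offset     #=> 214
    params.key        #=> nil
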
data/lib/karafka/patches/dry_configurable.rb CHANGED
@@ -13,9 +13,7 @@ module Karafka
    def initialize(*args)
      super
  
-     @config.each do |key, _value|
-       rebuild(key)
-     end
+     @config.each_key(&method(:rebuild))
    end
  
    private
data/lib/karafka/persistence/controller.rb ADDED
@@ -0,0 +1,23 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   # Module used to provide a persistent cache layer for Karafka components that need to be
+   # shared inside of a same thread
+   module Persistence
+     # Class used to provide a persistent cache across batch requests for a given
+     # topic and partition to store some additional details when the persistent mode
+     # for a given topic is turned on
+     class Controller
+       # Used to build (if block given) and/or fetch a current controller instance that will
+       # be used to process messages from a given topic and partition
+       # @return [Karafka::BaseController] base controller descendant
+       # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
+       # @param partition [Integer] number of partition for which we want to cache
+       def self.fetch(topic, partition)
+         return yield unless topic.persistent
+         Thread.current[topic.id] ||= {}
+         Thread.current[topic.id][partition] ||= yield
+       end
+     end
+   end
+ end
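
Note: the cache is scoped per thread, then per topic id and partition, so each consumer group thread reuses its own controller instances across batches while non-persistent topics keep getting fresh ones. A standalone sketch of the same memoization pattern (names are illustrative):

    Thread.current[:orders_topic] ||= {}
    Thread.current[:orders_topic][0] ||= OrdersController.new  # built once per thread/partition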
data/lib/karafka/routing/builder.rb CHANGED
@@ -28,7 +28,7 @@ module Karafka
      hashed_group = consumer_group.to_h
      validation_result = Karafka::Schemas::ConsumerGroup.call(hashed_group)
      return if validation_result.success?
-     raise Errors::InvalidConfiguration, [validation_result.errors, hashed_group]
+     raise Errors::InvalidConfiguration, validation_result.errors
    end
  end
  
data/lib/karafka/routing/consumer_group.rb CHANGED
@@ -18,7 +18,7 @@ module Karafka
    #   kafka and don't understand the concept of consumer groups.
    def initialize(name)
      @name = name
-     @id = "#{Karafka::App.config.client_id.to_s.underscore}_#{@name}"
+     @id = Karafka::App.config.consumer_mapper.call(name)
      @topics = []
    end
  
data/lib/karafka/routing/consumer_mapper.rb ADDED
@@ -0,0 +1,33 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Routing
+     # Default consumer mapper that builds consumer ids based on app id and consumer group name
+     # A different mapper can be used in case of preexisting consumer names or for applying
+     # other naming conventions not compatible with Karafka's client_id + consumer name concept
+     #
+     # @example Mapper for using consumer groups without a client_id prefix
+     #   module MyMapper
+     #     def self.call(raw_consumer_group_name)
+     #       raw_consumer_group_name
+     #     end
+     #   end
+     #
+     # @example Mapper for replacing "_" with "." in consumer group names
+     #   module MyMapper
+     #     def self.call(raw_consumer_group_name)
+     #       [
+     #         Karafka::App.config.client_id.to_s.underscore,
+     #         raw_consumer_group_name
+     #       ].join('_').gsub('_', '.')
+     #     end
+     #   end
+     module ConsumerMapper
+       # @param raw_consumer_group_name [String, Symbol] string or symbolized consumer group name
+       # @return [String] remapped final consumer group name
+       def self.call(raw_consumer_group_name)
+         "#{Karafka::App.config.client_id.to_s.underscore}_#{raw_consumer_group_name}"
+       end
+     end
+   end
+ end
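
Note: together with the new consumer_mapper setting (see setup/config.rb below), the explicit group names from #219 can be wired up like this. A hedged sketch assuming the standard Karafka::App.setup block:

    # Use consumer group names exactly as written in the routes, without the client_id prefix
    module ExplicitMapper
      def self.call(raw_consumer_group_name)
        raw_consumer_group_name.to_s
      end
    end

    class App < Karafka::App
      setup do |config|
        config.consumer_mapper = ExplicitMapper
      end
    end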
data/lib/karafka/routing/mapper.rb → data/lib/karafka/routing/topic_mapper.rb RENAMED
@@ -2,7 +2,7 @@
  
  module Karafka
    module Routing
-     # Default routes mapper that does not remap things
+     # Default topic mapper that does not remap things
      # Mapper can be used for Kafka providers that require namespaced topic names. Instead of being
      # provider dependent, we can then define mapper and use internally "pure" topic names in
      # routes and responders
@@ -32,7 +32,7 @@ module Karafka
      #       topic.to_s.gsub('_', '.')
      #     end
      #   end
-     module Mapper
+     module TopicMapper
        class << self
          # @param topic [String, Symbol] topic
          # @return [String, Symbol] same topic as on input
data/lib/karafka/schemas/config.rb CHANGED
@@ -13,6 +13,12 @@ module Karafka
    # so we validate all of that once all the routes are defined and ready
    Config = Dry::Validation.Schema do
      required(:client_id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+     required(:consumer_mapper)
+     required(:topic_mapper)
+
+     required(:celluloid).schema do
+       required(:shutdown_timeout).filled(:int?, gteq?: 0)
+     end
  
      optional(:backend).filled
  
data/lib/karafka/schemas/consumer_group.rb CHANGED
@@ -2,23 +2,31 @@
  
  module Karafka
    module Schemas
-     # Consumer group topic validation rules
-     ConsumerGroupTopic = Dry::Validation.Schema do
-       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:backend).filled(included_in?: %i[inline sidekiq])
-       required(:controller).filled
-       required(:parser).filled
-       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
-       required(:start_from_beginning).filled(:bool?)
-       required(:batch_processing).filled(:bool?)
-       required(:persistent).filled(:bool?)
-     end
-
      # Schema for single full route (consumer group + topics) validation.
      ConsumerGroup = Dry::Validation.Schema do
+       # Valid uri schemas of Kafka broker url
+       # The ||= is due to the behavior of require_all that resolves dependencies
+       # but sometimes loads things twice
+       URI_SCHEMES ||= %w[kafka kafka+ssl].freeze
+
+       configure do
+         config.messages_file = File.join(
+           Karafka.gem_root, 'config', 'errors.yml'
+         )
+
+         # Uri validator to check if uri is in a Karafka acceptable format
+         # @param uri [String] uri we want to validate
+         # @return [Boolean] true if it is a valid uri, otherwise false
+         def broker_schema?(uri)
+           uri = URI.parse(uri)
+           URI_SCHEMES.include?(uri.scheme) && uri.port
+         rescue URI::InvalidURIError
+           return false
+         end
+       end
+
        required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
-       required(:seed_brokers).filled(:array?)
+       required(:seed_brokers).filled { each(:broker_schema?) }
        required(:session_timeout).filled(:int?)
        required(:pause_timeout).filled(:int?, gteq?: 0)
        required(:offset_commit_interval).filled(:int?)
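
Note: a quick illustration of what the stricter seed_brokers rule accepts, mirroring the predicate body above (Ruby 2.4-compatible; outcomes for the failing case described rather than asserted):

    uri = URI.parse('kafka://127.0.0.1:9092')
    %w[kafka kafka+ssl].include?(uri.scheme) && uri.port  #=> truthy, so it passes
    # Plain 'host:port' strings either parse with a non-kafka scheme or raise
    # URI::InvalidURIError; both paths make broker_schema? return false and
    # surface the message from config/errors.yml above.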
data/lib/karafka/schemas/consumer_group_topic.rb ADDED
@@ -0,0 +1,18 @@
+ # frozen_string_literal: true
+
+ module Karafka
+   module Schemas
+     # Consumer group topic validation rules
+     ConsumerGroupTopic = Dry::Validation.Schema do
+       required(:id).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:name).filled(:str?, format?: Karafka::Schemas::TOPIC_REGEXP)
+       required(:backend).filled(included_in?: %i[inline sidekiq])
+       required(:controller).filled
+       required(:parser).filled
+       required(:max_bytes_per_partition).filled(:int?, gteq?: 0)
+       required(:start_from_beginning).filled(:bool?)
+       required(:batch_processing).filled(:bool?)
+       required(:persistent).filled(:bool?)
+     end
+   end
+ end
data/lib/karafka/setup/config.rb CHANGED
@@ -24,12 +24,16 @@ module Karafka
      setting :logger, -> { ::Karafka::Logger.instance }
      # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
      setting :monitor, -> { ::Karafka::Monitor.instance }
+     # Mapper used to remap consumer groups ids, so in case users migrate from other tools
+     # or they need to maintain their own internal consumer group naming conventions, they
+     # can easily do it, replacing the default client_id + consumer name pattern concept
+     setting :consumer_mapper, -> { Routing::ConsumerMapper }
      # Mapper used to remap names of topics, so we can have a clean internal topic namings
      # despite using any Kafka provider that uses namespacing, etc
      # It needs to implement two methods:
      # - #incoming - for remapping from the incoming message to our internal format
      # - #outgoing - for remapping from internal topic name into outgoing message
-     setting :topic_mapper, -> { Routing::Mapper }
+     setting :topic_mapper, -> { Routing::TopicMapper }
      # If batch_consuming is true, we will consume kafka messages in batches instead of 1 by 1
      # @note Consuming does not equal processing, see batch_processing description for details
      setting :batch_consuming, true
@@ -43,6 +47,18 @@ module Karafka
      # incoming batch. It's disabled by default, not to create more objects that needed on
      # each batch
      setting :persistent, true
+     # This is configured automatically, don't overwrite it!
+     # Each consumer group requires a separate thread, so the number of threads should be
+     # equal to the number of consumer groups
+     setting :concurrency, -> { ::Karafka::App.consumer_groups.count }
+
+     # option celluloid [Hash] - optional - celluloid configuration options
+     setting :celluloid do
+       # option shutdown_timeout [Integer] How many seconds should we wait for actors (listeners)
+       # before forcefully shutting them down
+       setting :shutdown_timeout, 30
+     end
+
      # Connection pool options are used for producer (Waterdrop) - by default it will adapt to
      # number of active actors
      setting :connection_pool do
@@ -86,9 +102,9 @@ module Karafka
        # returning messages from the server; if `max_wait_time` is reached, this
        # is ignored.
        setting :min_bytes, 1
-       # option max_wait_time [Integer, Float] the maximum duration of time to wait before
-       # returning messages from the server, in seconds.
-       setting :max_wait_time, 5
+       # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
+       # wait before returning data from a single message fetch. By setting this high you also
+       # increase the processing throughput – and by setting it low you set a bound on latency.
+       # This configuration overrides `min_bytes`, so you'll _always_ get data back within the
+       # time specified. The default value is one second. If you want to have at most five
+       # seconds of latency, set `max_wait_time` to 5. You should make sure
+       # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
+       setting :max_wait_time, 1
        # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
        # Kafka cluster that went down (in seconds)
        setting :reconnect_timeout, 5
@@ -109,9 +130,9 @@ module Karafka
        setting :ssl_ca_cert, nil
        # option ssl_ca_cert_file_path [String] SSL CA certificate file path
        setting :ssl_ca_cert_file_path, nil
-       # option client_cert [String] SSL client certificate
+       # option ssl_client_cert [String] SSL client certificate
        setting :ssl_client_cert, nil
-       # option client_cert_key [String] SSL client certificate password
+       # option ssl_client_cert_key [String] SSL client certificate password
        setting :ssl_client_cert_key, nil
        # option sasl_gssapi_principal [String] sasl principal
        setting :sasl_gssapi_principal, nil
@@ -125,11 +146,6 @@ module Karafka
        setting :sasl_plain_password, nil
      end
  
-     # This is configured automatically, don't overwrite it!
-     # Each consumer group requires separate thread, so number of threads should be equal to
-     # number of consumer groups
-     setting :concurrency, -> { ::Karafka::App.consumer_groups.count }
-
      class << self
        # Configurating method
        # @yield Runs a block of code providing a config singleton instance to it
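
Note: both new knobs are ordinary settings, so they can be tuned from the app setup block; the lower max_wait_time default of 1s follows the ruby-kafka discussion linked in the changelog. A hedged sketch assuming the standard setup API:

    class App < Karafka::App
      setup do |config|
        config.kafka.max_wait_time = 5          # trade latency for larger fetches
        config.celluloid.shutdown_timeout = 60  # give listeners more time to stop (#218)
      end
    end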
data/lib/karafka/setup/configurators/celluloid.rb CHANGED
@@ -5,16 +5,13 @@ module Karafka
    class Configurators
      # Class responsible for setting up Celluloid settings
      class Celluloid < Base
-       # How many seconds should we wait for actors (listeners) before forcefully shutting them
-       SHUTDOWN_TIME = 30
-
        # Sets up a Karafka logger as celluloid logger
        def setup
          ::Celluloid.logger = ::Karafka.logger
          # This is just a precaution - it should automatically close the current
          # connection and shutdown actor - but in case it didn't (hanged, etc)
          # we will kill it after waiting for some time
-         ::Celluloid.shutdown_timeout = SHUTDOWN_TIME
+         ::Celluloid.shutdown_timeout = config.celluloid.shutdown_timeout
        end
      end
    end
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
    # Current Karafka version
-   VERSION = '1.0.0'
+   VERSION = '1.0.1'
  end
metadata CHANGED
@@ -1,7 +1,7 @@
  --- !ruby/object:Gem::Specification
  name: karafka
  version: !ruby/object:Gem::Version
-   version: 1.0.0
+   version: 1.0.1
  platform: ruby
  authors:
  - Maciej Mensfeld
@@ -10,22 +10,22 @@ authors:
  autorequire:
  bindir: bin
  cert_chain: []
- date: 2017-09-06 00:00:00.000000000 Z
+ date: 2017-10-27 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
-   name: ruby-kafka
+   name: activesupport
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '0.4'
+         version: '5.0'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '0.4'
+         version: '5.0'
  - !ruby/object:Gem::Dependency
    name: celluloid
    requirement: !ruby/object:Gem::Requirement
@@ -41,131 +41,131 @@ dependencies:
      - !ruby/object:Gem::Version
        version: '0'
  - !ruby/object:Gem::Dependency
-   name: envlogic
+   name: dry-configurable
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.0'
+         version: '0.7'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.0'
+         version: '0.7'
  - !ruby/object:Gem::Dependency
-   name: waterdrop
+   name: dry-validation
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.4'
+         version: '0.11'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '0.4'
+         version: '0.11'
  - !ruby/object:Gem::Dependency
-   name: rake
+   name: envlogic
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '11.3'
+         version: '1.0'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '11.3'
+         version: '1.0'
  - !ruby/object:Gem::Dependency
-   name: thor
+   name: multi_json
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.19'
+         version: '1.12'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.19'
+         version: '1.12'
  - !ruby/object:Gem::Dependency
-   name: activesupport
+   name: rake
    requirement: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '5.0'
+         version: '11.3'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
      - - ">="
        - !ruby/object:Gem::Version
-         version: '5.0'
+         version: '11.3'
  - !ruby/object:Gem::Dependency
-   name: dry-validation
+   name: require_all
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.11'
+         version: '1.4'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.11'
+         version: '1.4'
  - !ruby/object:Gem::Dependency
-   name: dry-configurable
+   name: ruby-kafka
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.4'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - "~>"
+     - - ">="
        - !ruby/object:Gem::Version
-         version: '0.7'
+         version: '0.4'
  - !ruby/object:Gem::Dependency
-   name: multi_json
+   name: thor
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.12'
+         version: '0.19'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.12'
+         version: '0.19'
  - !ruby/object:Gem::Dependency
-   name: require_all
+   name: waterdrop
    requirement: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.4'
+         version: '0.4'
    type: :runtime
    prerelease: false
    version_requirements: !ruby/object:Gem::Requirement
      requirements:
-     - - ">="
+     - - "~>"
        - !ruby/object:Gem::Version
-         version: '1.4'
+         version: '0.4'
  description: Framework used to simplify Apache Kafka based Ruby applications development
  email:
  - maciej@coditsu.io
@@ -192,6 +192,7 @@ files:
  - README.md
  - Rakefile
  - bin/karafka
+ - config/errors.yml
  - karafka.gemspec
  - lib/karafka.rb
  - lib/karafka/app.rb
@@ -225,18 +226,20 @@ files:
  - lib/karafka/params/params_batch.rb
  - lib/karafka/parsers/json.rb
  - lib/karafka/patches/dry_configurable.rb
- - lib/karafka/persistence.rb
+ - lib/karafka/persistence/controller.rb
  - lib/karafka/process.rb
  - lib/karafka/responders/builder.rb
  - lib/karafka/responders/topic.rb
  - lib/karafka/routing/builder.rb
  - lib/karafka/routing/consumer_group.rb
- - lib/karafka/routing/mapper.rb
+ - lib/karafka/routing/consumer_mapper.rb
  - lib/karafka/routing/proxy.rb
  - lib/karafka/routing/router.rb
  - lib/karafka/routing/topic.rb
+ - lib/karafka/routing/topic_mapper.rb
  - lib/karafka/schemas/config.rb
  - lib/karafka/schemas/consumer_group.rb
+ - lib/karafka/schemas/consumer_group_topic.rb
  - lib/karafka/schemas/responder_usage.rb
  - lib/karafka/schemas/server_cli_options.rb
  - lib/karafka/server.rb
@@ -1,18 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Module used to provide a persistent cache across batch requests for a given
5
- # topic and partition to store some additional details when the persistent mode
6
- # for a given topic is turned on
7
- module Persistence
8
- # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
9
- # @param partition [Integer] number of partition for which we want to cache
10
- # @param resource [Symbol] name of the resource that we want to store
11
- def self.fetch(topic, partition, resource)
12
- return yield unless topic.persistent
13
- Thread.current[topic.id] ||= {}
14
- Thread.current[topic.id][partition] ||= {}
15
- Thread.current[topic.id][partition][resource] ||= yield
16
- end
17
- end
18
- end