karafka 1.0.0 → 1.2.0

Files changed (83)
  1. checksums.yaml +5 -5
  2. data/.ruby-version +1 -1
  3. data/.travis.yml +3 -1
  4. data/CHANGELOG.md +90 -3
  5. data/CONTRIBUTING.md +5 -6
  6. data/Gemfile +1 -1
  7. data/Gemfile.lock +59 -64
  8. data/README.md +28 -57
  9. data/bin/karafka +13 -1
  10. data/config/errors.yml +6 -0
  11. data/karafka.gemspec +10 -9
  12. data/lib/karafka.rb +19 -10
  13. data/lib/karafka/app.rb +8 -15
  14. data/lib/karafka/attributes_map.rb +4 -4
  15. data/lib/karafka/backends/inline.rb +2 -3
  16. data/lib/karafka/base_consumer.rb +68 -0
  17. data/lib/karafka/base_responder.rb +41 -17
  18. data/lib/karafka/callbacks.rb +30 -0
  19. data/lib/karafka/callbacks/config.rb +22 -0
  20. data/lib/karafka/callbacks/dsl.rb +16 -0
  21. data/lib/karafka/cli/base.rb +2 -0
  22. data/lib/karafka/cli/flow.rb +1 -1
  23. data/lib/karafka/cli/info.rb +1 -2
  24. data/lib/karafka/cli/install.rb +2 -3
  25. data/lib/karafka/cli/server.rb +9 -12
  26. data/lib/karafka/connection/client.rb +117 -0
  27. data/lib/karafka/connection/config_adapter.rb +30 -14
  28. data/lib/karafka/connection/delegator.rb +46 -0
  29. data/lib/karafka/connection/listener.rb +22 -20
  30. data/lib/karafka/consumers/callbacks.rb +54 -0
  31. data/lib/karafka/consumers/includer.rb +51 -0
  32. data/lib/karafka/consumers/responders.rb +24 -0
  33. data/lib/karafka/{controllers → consumers}/single_params.rb +3 -3
  34. data/lib/karafka/errors.rb +19 -2
  35. data/lib/karafka/fetcher.rb +30 -28
  36. data/lib/karafka/helpers/class_matcher.rb +8 -8
  37. data/lib/karafka/helpers/config_retriever.rb +2 -2
  38. data/lib/karafka/instrumentation/listener.rb +112 -0
  39. data/lib/karafka/instrumentation/logger.rb +55 -0
  40. data/lib/karafka/instrumentation/monitor.rb +64 -0
  41. data/lib/karafka/loader.rb +0 -1
  42. data/lib/karafka/params/dsl.rb +156 -0
  43. data/lib/karafka/params/params_batch.rb +7 -2
  44. data/lib/karafka/patches/dry_configurable.rb +7 -7
  45. data/lib/karafka/patches/ruby_kafka.rb +34 -0
  46. data/lib/karafka/persistence/client.rb +25 -0
  47. data/lib/karafka/persistence/consumer.rb +38 -0
  48. data/lib/karafka/persistence/topic.rb +29 -0
  49. data/lib/karafka/process.rb +6 -5
  50. data/lib/karafka/responders/builder.rb +15 -14
  51. data/lib/karafka/responders/topic.rb +8 -1
  52. data/lib/karafka/routing/builder.rb +2 -2
  53. data/lib/karafka/routing/consumer_group.rb +1 -1
  54. data/lib/karafka/routing/consumer_mapper.rb +34 -0
  55. data/lib/karafka/routing/router.rb +1 -1
  56. data/lib/karafka/routing/topic.rb +5 -11
  57. data/lib/karafka/routing/{mapper.rb → topic_mapper.rb} +2 -2
  58. data/lib/karafka/schemas/config.rb +4 -5
  59. data/lib/karafka/schemas/consumer_group.rb +45 -24
  60. data/lib/karafka/schemas/consumer_group_topic.rb +18 -0
  61. data/lib/karafka/schemas/responder_usage.rb +1 -0
  62. data/lib/karafka/server.rb +39 -20
  63. data/lib/karafka/setup/config.rb +74 -51
  64. data/lib/karafka/setup/configurators/base.rb +6 -12
  65. data/lib/karafka/setup/configurators/params.rb +25 -0
  66. data/lib/karafka/setup/configurators/water_drop.rb +15 -14
  67. data/lib/karafka/setup/dsl.rb +22 -0
  68. data/lib/karafka/templates/{application_controller.rb.example → application_consumer.rb.example} +2 -3
  69. data/lib/karafka/templates/karafka.rb.example +18 -5
  70. data/lib/karafka/version.rb +1 -1
  71. metadata +87 -63
  72. data/.github/ISSUE_TEMPLATE.md +0 -2
  73. data/Rakefile +0 -7
  74. data/lib/karafka/base_controller.rb +0 -118
  75. data/lib/karafka/connection/messages_consumer.rb +0 -106
  76. data/lib/karafka/connection/messages_processor.rb +0 -59
  77. data/lib/karafka/controllers/includer.rb +0 -51
  78. data/lib/karafka/controllers/responders.rb +0 -19
  79. data/lib/karafka/logger.rb +0 -53
  80. data/lib/karafka/monitor.rb +0 -98
  81. data/lib/karafka/params/params.rb +0 -101
  82. data/lib/karafka/persistence.rb +0 -18
  83. data/lib/karafka/setup/configurators/celluloid.rb +0 -22
@@ -1,101 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Params namespace encapsulating all the logic that is directly related to params handling
-   module Params
-     # Class-wrapper for a hash with indifferent access with an additional lazy loading feature.
-     # It provides lazy loading not only until the first usage, but also allows us to skip
-     # using the parser until we execute our logic. That way we can operate on
-     # heavy-parsing data without slowing down the whole application.
-     class Params < HashWithIndifferentAccess
-       # Kafka::FetchedMessage attributes that we want to use inside of params
-       KAFKA_MESSAGE_ATTRIBUTES = %i[
-         value
-         partition
-         offset
-         key
-       ].freeze
-
-       class << self
-         # We allow building instances only via the #build method
-
-         # @param message [Kafka::FetchedMessage, Hash] message that we get out of Kafka
-         #   when building params inside the main Karafka process in
-         #   Karafka::Connection::Consumer, or a hash when we retrieve data that is already parsed
-         # @param parser [Class] parser class that we will use to parse the data
-         # @return [Karafka::Params::Params] Karafka params object that has not yet run its
-         #   parser on the data that we've got from Kafka
-         # @example Build params instance from a hash
-         #   Karafka::Params::Params.build({ key: 'value' }, parser) #=> params object
-         # @example Build params instance from a Kafka::FetchedMessage object
-         #   Karafka::Params::Params.build(message, parser) #=> params object
-         def build(message, parser)
-           # Hash case happens inside backends that interchange data
-           if message.is_a?(Hash)
-             new(parser: parser).send(:merge!, message)
-           else
-             # This happens inside Kafka::FetchedMessagesProcessor
-             new(
-               parser: parser,
-               parsed: false,
-               received_at: Time.now
-             ).tap do |instance|
-               KAFKA_MESSAGE_ATTRIBUTES.each do |attribute|
-                 instance[attribute] = message.send(attribute)
-               end
-
-               # When we get raw messages, they might have a topic that was modified by a
-               # topic mapper. We need to "reverse" this change and map back to the non-modified
-               # format, so our internal flow is not corrupted by the mapping
-               instance[:topic] = Karafka::App.config.topic_mapper.incoming(message.topic)
-             end
-           end
-         end
-       end
-
-       # @return [Karafka::Params::Params] this will trigger parser execution. When we decide
-       #   to retrieve the data, the parser will be run and its output merged into the current
-       #   object. The object will also be marked as already parsed, so we won't
-       #   parse it again.
-       def retrieve!
-         return self if self[:parsed]
-
-         merge!(parse(delete(:value)))
-       end
-
-       private
-
-       # Overwritten merge! method - it behaves differently for keys that exist in both our
-       # hash and the other_hash: it will keep our value and not replace it with the other one
-       # @param other_hash [Hash, HashWithIndifferentAccess] hash that we want to merge into
-       #   the current one
-       # @return [Karafka::Params::Params] our parameters hash with merged values
-       # @example Merge with a hash without shared keys
-       #   new(a: 1, b: 2).merge!(c: 3) #=> { a: 1, b: 2, c: 3 }
-       # @example Merge with a hash with shared keys (symbol based)
-       #   new(a: 1).merge!(a: 2) #=> { a: 1 }
-       # @example Merge with a hash with shared keys (string based)
-       #   new(a: 1).merge!('a' => 2) #=> { a: 1 }
-       # @example Merge with a hash with shared keys (current string based)
-       #   new('a' => 1).merge!(a: 2) #=> { a: 1 }
-       def merge!(other_hash)
-         super(other_hash) { |_key, base_value, _new_value| base_value }
-       end
-
-       # @param value [String] raw data that we want to parse using the controller's parser
-       # @note If parsing fails, a Karafka::Errors::ParserError is re-raised after being
-       #   reported to the monitor
-       # @return [Hash] parsed data
-       def parse(value)
-         self[:parser].parse(value)
-       # Parsers wrap their failures in ParserError, which is what we catch here - for the
-       # default JSON case we use the JSON parser directly
-       rescue ::Karafka::Errors::ParserError => e
-         Karafka.monitor.notice_error(self.class, e)
-         raise e
-       ensure
-         self[:parsed] = true
-       end
-     end
-   end
- end
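For context, a minimal sketch of how this removed lazy-parsing API was exercised. MyParser below is a hypothetical parser used only for illustration - anything responding to .parse fits the build signature above, and the error wrapping mirrors how karafka 1.0 parsers reported failures:

require 'json'

# Hypothetical parser; wraps failures in Karafka::Errors::ParserError,
# which is what Params#parse rescues.
class MyParser
  def self.parse(value)
    JSON.parse(value)
  rescue JSON::ParserError => e
    raise ::Karafka::Errors::ParserError, e
  end
end

# Building from a hash (the backend-interchange case) stores the raw value
# without invoking the parser...
params = Karafka::Params::Params.build({ value: '{"id":1}' }, MyParser)

# ...so the potentially expensive parsing runs only when data is needed:
params.retrieve!
params['id'] #=> 1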
@@ -1,18 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   # Module used to provide a persistent cache across batch requests for a given
-   # topic and partition, to store some additional details when the persistent mode
-   # for a given topic is turned on
-   module Persistence
-     # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
-     # @param partition [Integer] partition number for which we want to cache
-     # @param resource [Symbol] name of the resource that we want to store
-     def self.fetch(topic, partition, resource)
-       return yield unless topic.persistent
-       Thread.current[topic.id] ||= {}
-       Thread.current[topic.id][partition] ||= {}
-       Thread.current[topic.id][partition][resource] ||= yield
-     end
-   end
- end
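A usage sketch of the removed helper (topic and partition stand in for a real Karafka::Routing::Topic instance and a partition number; caching a controller per partition is illustrative of how the 1.0 code path used it):

# Evaluates the block once per thread/topic/partition and memoizes the
# result, but only when the topic has persistent mode turned on;
# otherwise the block runs on every call.
controller = Karafka::Persistence.fetch(topic, partition, :controller) do
  topic.controller.new
end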
@@ -1,22 +0,0 @@
- # frozen_string_literal: true
-
- module Karafka
-   module Setup
-     class Configurators
-       # Class responsible for setting up Celluloid settings
-       class Celluloid < Base
-         # How many seconds we should wait for actors (listeners) before forcefully shutting them down
-         SHUTDOWN_TIME = 30
-
-         # Sets up the Karafka logger as the Celluloid logger
-         def setup
-           ::Celluloid.logger = ::Karafka.logger
-           # This is just a precaution - it should automatically close the current
-           # connection and shut down the actor - but in case it didn't (hanged, etc.)
-           # we will kill it after waiting for some time
-           ::Celluloid.shutdown_timeout = SHUTDOWN_TIME
-         end
-       end
-     end
-   end
- end
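Stripped of the configurator plumbing, the removed setup amounted to two Celluloid calls, sketched standalone below. As the file deletion above suggests, newer karafka versions no longer run their listeners on Celluloid actors, so this applies to 1.0 only:

require 'celluloid/current'

# Route all actor logging through the karafka logger
Celluloid.logger = Karafka.logger
# Safety net: force-kill actors that have not shut down cleanly within 30s
Celluloid.shutdown_timeout = 30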