karafka 2.4.9 → 2.4.10

Files changed (46)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.ruby-version +1 -1
  4. data/CHANGELOG.md +9 -0
  5. data/Gemfile.lock +3 -3
  6. data/config/locales/errors.yml +1 -0
  7. data/config/locales/pro_errors.yml +17 -0
  8. data/lib/karafka/base_consumer.rb +23 -0
  9. data/lib/karafka/contracts/consumer_group.rb +17 -0
  10. data/lib/karafka/instrumentation/logger_listener.rb +3 -0
  11. data/lib/karafka/instrumentation/notifications.rb +3 -0
  12. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +32 -11
  13. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +1 -1
  14. data/lib/karafka/messages/message.rb +6 -0
  15. data/lib/karafka/pro/loader.rb +2 -1
  16. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +9 -8
  17. data/lib/karafka/pro/routing/features/scheduled_messages/builder.rb +131 -0
  18. data/lib/karafka/pro/routing/features/scheduled_messages/config.rb +28 -0
  19. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +40 -0
  20. data/lib/karafka/pro/routing/features/scheduled_messages/proxy.rb +27 -0
  21. data/lib/karafka/pro/routing/features/scheduled_messages/topic.rb +44 -0
  22. data/lib/karafka/pro/routing/features/scheduled_messages.rb +24 -0
  23. data/lib/karafka/pro/scheduled_messages/consumer.rb +185 -0
  24. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +56 -0
  25. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +61 -0
  26. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +79 -0
  27. data/lib/karafka/pro/scheduled_messages/day.rb +45 -0
  28. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +46 -0
  29. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +35 -0
  30. data/lib/karafka/pro/scheduled_messages/dispatcher.rb +122 -0
  31. data/lib/karafka/pro/scheduled_messages/errors.rb +28 -0
  32. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +41 -0
  33. data/lib/karafka/pro/scheduled_messages/proxy.rb +176 -0
  34. data/lib/karafka/pro/scheduled_messages/schema_validator.rb +37 -0
  35. data/lib/karafka/pro/scheduled_messages/serializer.rb +55 -0
  36. data/lib/karafka/pro/scheduled_messages/setup/config.rb +60 -0
  37. data/lib/karafka/pro/scheduled_messages/state.rb +62 -0
  38. data/lib/karafka/pro/scheduled_messages/tracker.rb +64 -0
  39. data/lib/karafka/pro/scheduled_messages.rb +67 -0
  40. data/lib/karafka/processing/executor.rb +6 -0
  41. data/lib/karafka/processing/strategies/default.rb +10 -0
  42. data/lib/karafka/railtie.rb +0 -20
  43. data/lib/karafka/version.rb +1 -1
  44. data.tar.gz.sig +0 -0
  45. metadata +26 -3
  46. metadata.gz.sig +3 -2
data/lib/karafka/pro/scheduled_messages/max_epoch.rb ADDED
@@ -0,0 +1,41 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Simple max value accumulator. When we dispatch messages we can store the max timestamp
+       # until which messages were dispatched by us. This allows us to quickly skip those
+       # messages during recovery, because we know they were already dispatched.
+       class MaxEpoch
+         def initialize
+           @max = -1
+         end
+
+         # Updates the epoch if bigger than the current max
+         # @param new_max [Integer] potential new max epoch
+         def update(new_max)
+           return unless new_max
+           return unless new_max > @max
+
+           @max = new_max
+         end
+
+         # @return [Integer] max epoch recorded
+         def to_i
+           @max
+         end
+       end
+     end
+   end
+ end
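For context, a minimal usage sketch of this accumulator (values are illustrative):

    max = Karafka::Pro::ScheduledMessages::MaxEpoch.new
    max.to_i                   # => -1 until anything is recorded
    max.update(1_700_000_100)  # records the new max
    max.update(nil)            # ignored
    max.update(1_700_000_000)  # ignored, not bigger than the current max
    max.to_i                   # => 1700000100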
data/lib/karafka/pro/scheduled_messages/proxy.rb ADDED
@@ -0,0 +1,176 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Proxy used to wrap the scheduled messages with the correct dispatch envelope.
+       # Each message that goes to the scheduler topic needs to have specific headers and other
+       # details that are required by the system so we know how and when to dispatch it.
+       #
+       # Each message that goes to the proxy topic needs to have a unique key. We inject one
+       # automatically unless the user provides a key in the envelope. Since we want to make
+       # sure that messages dispatched by the user with the same key all go to the same
+       # partition, we inject a partition_key based on the user key or other details if
+       # present. That ensures they will always go to the same partition on our side.
+       #
+       # This wrapper validates the initial message that the user wants to send in the future,
+       # as well as the envelope and the specific requirements for a message to be sent in
+       # the future.
+       module Proxy
+         # General WaterDrop message contract. Before we envelop a message, we need to be
+         # certain it is correct, hence we use this contract.
+         MSG_CONTRACT = ::WaterDrop::Contracts::Message.new(
+           # Payload size is subject to the target producer dispatch validation, so we set it
+           # to 100MB here, effectively ignoring it.
+           max_payload_size: 104_857_600
+         )
+
+         # Post-rebind contract to ensure that the user provided all the details that allow
+         # the system to operate correctly
+         POST_CONTRACT = Contracts::Message.new
+
+         # Attributes used to build a partition key for the schedules topic dispatch of a
+         # given message. This order describes the priority of usage.
+         PARTITION_KEY_BASE_ATTRIBUTES = %i[
+           partition
+           partition_key
+         ].freeze
+
+         private_constant :MSG_CONTRACT, :POST_CONTRACT, :PARTITION_KEY_BASE_ATTRIBUTES
+
+         class << self
+           # Generates a schedule message envelope wrapping the original dispatch
+           #
+           # @param message [Hash] message hash of a message that would originally go to the
+           #   WaterDrop producer directly.
+           # @param epoch [Integer] time in the future (or now) at which to dispatch this
+           #   message, as a Unix epoch timestamp
+           # @param envelope [Hash] special details that the envelope needs to have, like a
+           #   unique key. If a unique key is not provided we build a random unique one and
+           #   use a partition_key based on the original message key (if present) to ensure
+           #   that all relevant messages are dispatched to the same topic partition.
+           # @return [Hash] dispatched message wrapped with an envelope
+           #
+           # @note This proxy does **not** inject the dispatched messages topic unless
+           #   provided in the envelope. That's because the user can have multiple scheduled
+           #   messages topics to group outgoing messages, etc.
+           def schedule(message:, epoch:, envelope: {})
+             # We need to ensure that the message we want to proxy is fully valid. Otherwise,
+             # since we envelop details like the target topic, we could end up having
+             # incorrect data to schedule
+             MSG_CONTRACT.validate!(message, WaterDrop::Errors::MessageInvalidError)
+
+             headers = (message[:headers] || {}).merge(
+               'schedule_schema_version' => ScheduledMessages::SCHEMA_VERSION,
+               'schedule_target_epoch' => epoch.to_i.to_s,
+               'schedule_source_type' => 'schedule'
+             )
+
+             export(headers, message, :topic)
+             export(headers, message, :partition)
+             export(headers, message, :key)
+             export(headers, message, :partition_key)
+
+             proxy_message = {
+               payload: message[:payload],
+               headers: headers
+             }.merge(envelope)
+
+             enrich(proxy_message, message)
+
+             # Final validation to make sure all the user provided extra data and what we
+             # have built complies with our requirements
+             POST_CONTRACT.validate!(proxy_message)
+             # After proxy specific validations we also ensure that the final form is correct
+             MSG_CONTRACT.validate!(proxy_message, WaterDrop::Errors::MessageInvalidError)
+
+             proxy_message
+           end
+
+           # Generates a tombstone message to cancel an already scheduled message dispatch
+           # @param key [String] key used by the original message as a unique identifier
+           # @param envelope [Hash] special details that can identify the message location,
+           #   like topic and partition (if used), so the cancellation goes to the correct
+           #   location.
+           # @return [Hash] cancellation message
+           #
+           # @note Technically it is a tombstone but we differentiate it for debugging
+           #   purposes
+           def cancel(key:, envelope: {})
+             {
+               key: key,
+               payload: nil,
+               headers: {
+                 'schedule_schema_version' => ScheduledMessages::SCHEMA_VERSION,
+                 'schedule_source_type' => 'cancel'
+               }
+             }.merge(envelope)
+           end
+
+           # Builds a tombstone with the dispatched message details. Those details can be
+           # used in the Web UI, etc. when analyzing dispatches.
+           # @param message [Karafka::Messages::Message] message we want to tombstone
+           def tombstone(message:)
+             {
+               key: message.key,
+               payload: nil,
+               topic: message.topic,
+               partition: message.partition,
+               headers: message.raw_headers.merge(
+                 'schedule_schema_version' => ScheduledMessages::SCHEMA_VERSION,
+                 'schedule_source_type' => 'tombstone',
+                 'schedule_source_offset' => message.offset.to_s
+               )
+             }
+           end
+
+           private
+
+           # Transfers the message key attributes into headers. Since we need to have our own
+           # envelope key and other details, we transfer the original message details into
+           # headers so we can re-use them when we dispatch the scheduled messages at the
+           # appropriate time
+           #
+           # @param headers [Hash] envelope headers to which we will add the appropriate
+           #   attribute
+           # @param message [Hash] original user message
+           # @param attribute [Symbol] attribute we're interested in exporting to headers
+           # @note Modifies headers in place
+           def export(headers, message, attribute)
+             return unless message.key?(attribute)
+
+             headers["schedule_target_#{attribute}"] = message.fetch(attribute).to_s
+           end
+
+           # Adds the key and (if applicable) partition key to ensure that related messages
+           # the user wants to dispatch in the future all end up in the same topic partition.
+           # @param proxy_message [Hash] our message envelope
+           # @param message [Hash] user original message
+           # @note Modifies `proxy_message` in place
+           def enrich(proxy_message, message)
+             # If there is an envelope message key already, nothing needed
+             return if proxy_message.key?(:key)
+
+             proxy_message[:key] = "#{message[:topic]}-#{SecureRandom.uuid}"
+
+             PARTITION_KEY_BASE_ATTRIBUTES.each do |attribute|
+               next unless message.key?(attribute)
+               # Do not overwrite if explicitly set by the user
+               next if proxy_message.key?(attribute)
+
+               proxy_message[:partition_key] = message.fetch(attribute).to_s
+             end
+           end
+         end
+       end
+     end
+   end
+ end
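For a sense of what the envelope looks like, a sketch (the 'scheduled_messages' topic name and the values are illustrative; the proxy itself never injects the scheduler topic):

    envelope = Karafka::Pro::ScheduledMessages::Proxy.schedule(
      message: { topic: 'orders', payload: '{"id":1}' },
      epoch: 1_726_000_000,
      envelope: { topic: 'scheduled_messages' } # assumed scheduler topic name
    )

    envelope[:key]     # => e.g. 'orders-<uuid>' (auto-injected unique key)
    envelope[:headers] # => includes 'schedule_target_epoch' => '1726000000',
                       #    'schedule_target_topic' => 'orders',
                       #    'schedule_source_type' => 'schedule'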
data/lib/karafka/pro/scheduled_messages/schema_validator.rb ADDED
@@ -0,0 +1,37 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Validator that checks if we can process a given scheduled message.
+       # If we encounter a message with a schema version higher than our process is aware of,
+       # we raise an error and do not process it. This makes sure we do not deal with
+       # incompatible messages in case of schema changes.
+       module SchemaValidator
+         class << self
+           # Checks if we can work with this message's schema and raises an error if not.
+           #
+           # @param message [Karafka::Messages::Message]
+           def call(message)
+             message_version = message.headers['schedule_schema_version']
+
+             return if message_version <= ScheduledMessages::SCHEMA_VERSION
+
+             raise Errors::IncompatibleSchemaError, message_version
+           end
+         end
+       end
+     end
+   end
+ end
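Note that the guard compares the version strings lexicographically; a sketch of the resulting behavior, assuming SCHEMA_VERSION is '1.0.0' as above:

    # '1.0.0' <= '1.0.0' # => true, current schema is accepted
    # '0.9.0' <= '1.0.0' # => true, older schemas are accepted
    # '1.1.0' <= '1.0.0' # => false, raises Errors::IncompatibleSchemaError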
data/lib/karafka/pro/scheduled_messages/serializer.rb ADDED
@@ -0,0 +1,55 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Serializer used to build payloads (if applicable) for dispatch
+       # @note We only deal with the states payload. Other payloads are not ours but the end
+       #   user's.
+       class Serializer
+         include ::Karafka::Core::Helpers::Time
+
+         # @param tracker [Tracker] tracker based on which we build the state
+         # @return [String] compressed payload with the state details
+         def state(tracker)
+           data = {
+             schema_version: ScheduledMessages::STATES_SCHEMA_VERSION,
+             dispatched_at: float_now,
+             state: tracker.state,
+             daily: tracker.daily
+           }
+
+           compress(
+             serialize(data)
+           )
+         end
+
+         private
+
+         # @param hash [Hash] hash to cast to JSON
+         # @return [String] JSON string
+         def serialize(hash)
+           hash.to_json
+         end
+
+         # Compresses the provided data
+         #
+         # @param data [String] data to compress
+         # @return [String] compressed data
+         def compress(data)
+           Zlib::Deflate.deflate(data)
+         end
+       end
+     end
+   end
+ end
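The inverse direction lives in the separate Deserializers::Payload file added in this release, but as a minimal sketch, reading such a state payload back could look like this (`raw_payload` stands for the deflated string produced by Serializer#state):

    require 'json'
    require 'zlib'

    state = JSON.parse(Zlib::Inflate.inflate(raw_payload))
    state['schema_version'] # => '1.0.0'
    state['state']          # => e.g. 'loaded'
    state['daily']          # => e.g. { '2024-09-01' => 12 }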
data/lib/karafka/pro/scheduled_messages/setup/config.rb ADDED
@@ -0,0 +1,60 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Setup and config related scheduled messages components
+       module Setup
+         # Config for scheduled messages
+         class Config
+           extend ::Karafka::Core::Configurable
+
+           setting(:consumer_class, default: Consumer)
+           setting(:group_id, default: 'karafka_scheduled_messages')
+
+           # By default we run the scheduling every 15 seconds, since we provide minute-based
+           # precision. Lower values mean more frequent execution on low-throughput topics
+           # and thus higher precision. The interval can be increased when dedicated
+           # processes run this.
+           setting(:interval, default: 15_000)
+
+           # How many messages at most should be flushed in one go from the dispatcher. If
+           # we have more messages to dispatch, they will be chunked.
+           setting(:flush_batch_size, default: 1_000)
+
+           # Producer to use. By default uses the default Karafka producer.
+           setting(
+             :producer,
+             constructor: -> { ::Karafka.producer },
+             lazy: true
+           )
+
+           # Class we use to dispatch messages
+           setting(:dispatcher_class, default: Dispatcher)
+
+           # Postfix appended to the group name to build the states topic name
+           setting(:states_postfix, default: '_states')
+
+           setting(:deserializers) do
+             # Deserializer for schedule messages' headers, used to convert epochs
+             setting(:headers, default: Deserializers::Headers.new)
+             # Only applicable to states
+             setting(:payload, default: Deserializers::Payload.new)
+           end
+
+           configure
+         end
+       end
+     end
+   end
+ end
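Since `pre_setup` (in scheduled_messages.rb later in this diff) mounts this config under `scheduled_messages` on the root node, a tuning sketch, assuming the standard `Karafka::App.setup` flow, could look like:

    class KarafkaApp < Karafka::App
      setup do |config|
        # Illustrative overrides of the defaults shown above
        config.scheduled_messages.interval = 5_000        # check every 5 seconds
        config.scheduled_messages.flush_batch_size = 500  # smaller dispatch chunks
      end
    end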
data/lib/karafka/pro/scheduled_messages/state.rb ADDED
@@ -0,0 +1,62 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Represents the loading/bootstrapping state of the given topic partition
+       #
+       # Bootstrapping can be in the following states:
+       # - fresh - when we got an assignment but we did not load the schedule yet
+       # - loading - when we are in the process of bootstrapping the daily state and we
+       #   consume historical messages to build the needed schedules.
+       # - loaded - state in which we finished loading all the schedules and we can dispatch
+       #   messages when the time comes, as well as process real-time incoming schedules and
+       #   changes to schedules as they appear in the stream.
+       class State
+         # @param loaded [nil, false, true] is the state loaded or not yet. `nil` indicates
+         #   a fresh, pre-seek state.
+         def initialize(loaded = nil)
+           @loaded = loaded
+         end
+
+         # @return [Boolean] are we in a fresh, pre-bootstrap state
+         def fresh?
+           @loaded.nil?
+         end
+
+         # Marks the current state as fully loaded
+         def loaded!
+           @loaded = true
+         end
+
+         # @return [Boolean] are we in a loaded state
+         def loaded?
+           @loaded == true
+         end
+
+         # @return [String] current state string representation
+         def to_s
+           case @loaded
+           when nil
+             'fresh'
+           when false
+             'loading'
+           when true
+             'loaded'
+           end
+         end
+       end
+     end
+   end
+ end
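The three states map directly onto the constructor argument; a usage sketch:

    state = Karafka::Pro::ScheduledMessages::State.new # nil => fresh, pre-seek
    state.fresh?  # => true
    state.to_s    # => 'fresh'

    state = Karafka::Pro::ScheduledMessages::State.new(false)
    state.to_s    # => 'loading'

    state.loaded!
    state.loaded? # => true
    state.to_s    # => 'loaded'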
data/lib/karafka/pro/scheduled_messages/tracker.rb ADDED
@@ -0,0 +1,64 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     module ScheduledMessages
+       # Tracks basic state and metrics about schedules to be dispatched
+       #
+       # It provides an accurate count of today's dispatches (taken from the daily buffer)
+       # and estimates for future days
+       class Tracker
+         # @return [Hash<String, Integer>]
+         attr_reader :daily
+
+         # @return [String] current state
+         attr_accessor :state
+
+         def initialize
+           @daily = Hash.new { |h, k| h[k] = 0 }
+           @created_at = Time.now.to_i
+         end
+
+         # Accurate (because coming from the daily buffer) number of messages to schedule
+         #
+         # @param sum [Integer]
+         def today=(sum)
+           @daily[epoch_to_date(@created_at)] = sum
+         end
+
+         # Tracks message dispatch
+         #
+         # It is only relevant for future days, as for today we use the accurate metrics
+         # from the daily buffer
+         #
+         # @param message [Karafka::Messages::Message] schedule message. Should **not** be a
+         #   tombstone message. Tombstone message cancellations are not tracked because that
+         #   would drastically increase complexity. For the current day we use the accurate
+         #   counter and for future days we use estimates.
+         def track(message)
+           epoch = message.headers['schedule_target_epoch']
+
+           @daily[epoch_to_date(epoch)] += 1
+         end
+
+         private
+
+         # @param epoch [Integer] epoch time
+         # @return [String] epoch matching date
+         def epoch_to_date(epoch)
+           Time.at(epoch).utc.to_date.to_s
+         end
+       end
+     end
+   end
+ end
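A sketch of how the consuming side might feed this tracker (dates and counts illustrative; `message` stands for a schedule message whose 'schedule_target_epoch' header has already been deserialized to an Integer by the headers deserializer):

    tracker = Karafka::Pro::ScheduledMessages::Tracker.new
    tracker.state = 'loading'
    tracker.today = 42     # accurate count taken from the daily buffer
    tracker.track(message) # bumps the estimate for the message's target day
    tracker.daily          # => e.g. { '2024-09-01' => 42, '2024-09-02' => 1 }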
data/lib/karafka/pro/scheduled_messages.rb ADDED
@@ -0,0 +1,67 @@
+ # frozen_string_literal: true
+
+ # This Karafka component is a Pro component under a commercial license.
+ # This Karafka component is NOT licensed under LGPL.
+ #
+ # All of the commercial components are present in the lib/karafka/pro directory of this
+ # repository and their usage requires commercial license agreement.
+ #
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
+ #
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+ # your code to Maciej Mensfeld.
+
+ module Karafka
+   module Pro
+     # This feature allows for proxying messages via a special topic that can dispatch them
+     # at a later time, hence scheduled messages. Such messages need to have a special format
+     # but aside from that they are regular Kafka messages.
+     #
+     # This work was conceptually inspired by the Go scheduler:
+     # https://github.com/etf1/kafka-message-scheduler though I did not look at the
+     # implementation itself. Just the concept of daily in-memory scheduling.
+     module ScheduledMessages
+       # Version of the schema we use for envelopes in scheduled messages.
+       # We use it to detect any potential upgrades, similar to other components of Karafka,
+       # and to stop processing of incompatible versions
+       SCHEMA_VERSION = '1.0.0'
+
+       # Version of the states schema. Used to publish per partition simple aggregated
+       # metrics that can be used for schedules reporting
+       STATES_SCHEMA_VERSION = '1.0.0'
+
+       class << self
+         # Runs the `Proxy.schedule`
+         # @param kwargs [Hash] things requested by the proxy
+         # @return [Hash] message wrapped with the scheduled message envelope
+         def schedule(**kwargs)
+           Proxy.schedule(**kwargs)
+         end
+
+         # Generates a tombstone message to cancel a given dispatch (if it has not yet
+         # happened)
+         # @param kwargs [Hash] things requested by the proxy
+         # @return [Hash] tombstone cancelling message
+         def cancel(**kwargs)
+           Proxy.cancel(**kwargs)
+         end
+
+         # Below are private APIs
+
+         # Sets up additional config scope, validations and other things
+         #
+         # @param config [Karafka::Core::Configurable::Node] root node config
+         def pre_setup(config)
+           # Expand the config with this feature specific stuff
+           config.instance_eval do
+             setting(:scheduled_messages, default: Setup::Config.config)
+           end
+         end
+
+         # @param config [Karafka::Core::Configurable::Node] root node config
+         def post_setup(config)
+           RecurringTasks::Contracts::Config.new.validate!(config.to_h)
+         end
+       end
+     end
+   end
+ end
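Putting the public API together, an end-to-end sketch (the 'scheduled_messages' topic name is an assumption; as noted above, the envelope never gets a topic injected automatically):

    # Wrap a regular WaterDrop message so it is dispatched in one hour
    envelope = Karafka::Pro::ScheduledMessages.schedule(
      message: { topic: 'orders', payload: '{"id":1}', key: 'order-1' },
      epoch: Time.now.to_i + 3_600,
      envelope: { topic: 'scheduled_messages' }
    )

    Karafka.producer.produce_async(envelope)

    # To cancel before the dispatch time, reuse the envelope key
    Karafka.producer.produce_async(
      Karafka::Pro::ScheduledMessages.cancel(
        key: envelope[:key],
        envelope: { topic: 'scheduled_messages' }
      )
    )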
data/lib/karafka/processing/executor.rb CHANGED
@@ -181,6 +181,12 @@ module Karafka
    # overhead as this will happen only once per consumer lifetime
    consumer.messages = empty_messages

+   # Run the post-initialization hook for users that need to run some actions when the
+   # consumer is built and ready (all basic state and info assigned).
+   # Users should **not** override `#initialize` because it won't have all the needed
+   # data assigned yet.
+   consumer.on_initialized
+
    consumer
  end
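On the user side this enables a post-initialization hook, which (judging by `handle_initialized` in the next hunk) is a plain `#initialized` method; a sketch with an illustrative consumer class:

    class OrdersConsumer < Karafka::BaseConsumer
      # Runs after topic, partition and other state have been assigned,
      # which is why overriding #initialize is discouraged
      def initialized
        @processed = 0
      end

      def consume
        @processed += messages.count
      end
    end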
data/lib/karafka/processing/strategies/default.rb CHANGED
@@ -31,6 +31,16 @@ module Karafka
    RUBY
  end

+ # Runs the post-creation, post-assignment code
+ # @note It runs in the listener loop. Should **not** be used for anything heavy or
+ #   with any potential errors. Mostly for initialization of states, etc.
+ def handle_initialized
+   Karafka.monitor.instrument('consumer.initialize', caller: self)
+   Karafka.monitor.instrument('consumer.initialized', caller: self) do
+     initialized
+   end
+ end
+
  # Marks message as consumed in an async way.
  #
  # @param message [Messages::Message] last successfully processed message.
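This wrapper also emits the two new instrumentation events registered in notifications.rb in this release ('consumer.initialize' and 'consumer.initialized'); subscribing to them is straightforward, as a sketch:

    Karafka.monitor.subscribe('consumer.initialized') do |event|
      consumer = event[:caller]
      Karafka.logger.info("#{consumer.class} ready for #{consumer.topic.name}")
    end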
data/lib/karafka/railtie.rb CHANGED
@@ -65,26 +65,6 @@ if Karafka.rails?
    app.config.autoload_paths += %w[app/consumers]
  end

- initializer 'karafka.release_active_record_connections' do
-   rails7plus = Rails.gem_version >= Gem::Version.new('7.0.0')
-
-   ActiveSupport.on_load(:active_record) do
-     ::Karafka::App.monitor.subscribe('worker.completed') do
-       # Always release the connection after processing is done. Otherwise thread may hang
-       # blocking the reload and further processing
-       # @see https://github.com/rails/rails/issues/44183
-       #
-       # The change technically happens in 7.1 but 7.0 already supports this so we can make
-       # a proper change for 7.0+
-       if rails7plus
-         ActiveRecord::Base.connection_handler.clear_active_connections!
-       else
-         ActiveRecord::Base.clear_active_connections!
-       end
-     end
-   end
- end
-
  initializer 'karafka.require_karafka_boot_file' do |app|
    rails6plus = Rails.gem_version >= Gem::Version.new('6.0.0')
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
  # Main module namespace
  module Karafka
    # Current Karafka version
-   VERSION = '2.4.9'
+   VERSION = '2.4.10'
  end
data.tar.gz.sig CHANGED
Binary file