karafka 2.5.0.beta1 → 2.5.0.rc1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/ci.yml +5 -5
- data/.github/workflows/push.yml +35 -0
- data/CHANGELOG.md +17 -2
- data/Gemfile +3 -3
- data/Gemfile.lock +37 -15
- data/README.md +1 -1
- data/Rakefile +4 -0
- data/bin/integrations +2 -1
- data/examples/payloads/avro/.gitkeep +0 -0
- data/karafka.gemspec +1 -6
- data/lib/karafka/admin/configs.rb +5 -1
- data/lib/karafka/admin.rb +18 -15
- data/lib/karafka/cli/topics/align.rb +7 -4
- data/lib/karafka/cli/topics/base.rb +17 -0
- data/lib/karafka/cli/topics/create.rb +9 -7
- data/lib/karafka/cli/topics/delete.rb +4 -2
- data/lib/karafka/cli/topics/help.rb +39 -0
- data/lib/karafka/cli/topics/repartition.rb +4 -2
- data/lib/karafka/cli/topics.rb +10 -3
- data/lib/karafka/cli.rb +2 -0
- data/lib/karafka/connection/client.rb +19 -2
- data/lib/karafka/connection/proxy.rb +1 -1
- data/lib/karafka/constraints.rb +3 -3
- data/lib/karafka/errors.rb +35 -2
- data/lib/karafka/helpers/interval_runner.rb +8 -0
- data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +5 -0
- data/lib/karafka/pro/processing/strategies/dlq/default.rb +4 -3
- data/lib/karafka/pro/scheduled_messages/consumer.rb +50 -14
- data/lib/karafka/pro/scheduled_messages/dispatcher.rb +2 -1
- data/lib/karafka/pro/scheduled_messages/serializer.rb +2 -4
- data/lib/karafka/pro/scheduled_messages/state.rb +20 -23
- data/lib/karafka/pro/scheduled_messages/tracker.rb +34 -8
- data/lib/karafka/server.rb +14 -19
- data/lib/karafka/version.rb +1 -1
- metadata +13 -37
- checksums.yaml.gz.sig +0 -0
- data/certs/cert.pem +0 -26
- data.tar.gz.sig +0 -1
- metadata.gz.sig +0 -0
- /data/examples/payloads/json/{enrollment_event.json → sample_set_01/enrollment_event.json} +0 -0
- /data/examples/payloads/json/{ingestion_event.json → sample_set_01/ingestion_event.json} +0 -0
- /data/examples/payloads/json/{transaction_event.json → sample_set_01/transaction_event.json} +0 -0
- /data/examples/payloads/json/{user_event.json → sample_set_01/user_event.json} +0 -0
data/lib/karafka/connection/proxy.rb
CHANGED
@@ -44,7 +44,7 @@ module Karafka
       # clusters can handle our requests.
       #
       # @param topic [String] topic name
-      # @param partition [
+      # @param partition [Integer] partition number
       # @return [Array<Integer, Integer>] watermark offsets
       def query_watermark_offsets(topic, partition)
         l_config = @config.query_watermark_offsets
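
Note: watermark offsets are the lowest and highest offsets currently available in a topic partition. The method documented above proxies librdkafka's query; from application code the same information is reachable through the Admin API. A usage sketch (topic name and partition are illustrative):

    # Returns [low, high] watermark offsets for partition 0 of 'events'
    low, high = Karafka::Admin.read_watermark_offsets('events', 0)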
data/lib/karafka/constraints.rb
CHANGED
@@ -15,13 +15,13 @@ module Karafka
       # Skip verification if web is not used at all
       return unless require_version('karafka/web')

-      # All good if version higher than 0.
-      return if version(Karafka::Web::VERSION) >= version('0.
+      # All good if version higher than 0.10.0 because we expect 0.10.0 or higher
+      return if version(Karafka::Web::VERSION) >= version('0.10.0')

       # If older web-ui used, we cannot allow it
       raise(
         Errors::DependencyConstraintsError,
-        'karafka-web < 0.
+        'karafka-web < 0.10.0 is not compatible with this karafka version'
       )
     end
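
Note: the constraint check above depends on semantic version comparison, not string comparison. A minimal sketch of the idea, assuming the `version` helper wraps `Gem::Version` (the wrapper below is illustrative, not Karafka's exact code):

    require 'rubygems'

    # Illustrative stand-in for the version helper used in the constraint check
    def version(string)
      Gem::Version.new(string)
    end

    version('0.10.1') >= version('0.10.0') # => true
    version('0.9.9')  >= version('0.10.0') # => false (naive string comparison would say true)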
data/lib/karafka/errors.rb
CHANGED
@@ -22,7 +22,34 @@ module Karafka
     InvalidConfigurationError = Class.new(BaseError)

     # Raised when we try to use Karafka CLI commands (except install) without a boot file
-    MissingBootFileError = Class.new(BaseError)
+    MissingBootFileError = Class.new(BaseError) do
+      # @param boot_file_path [Pathname] path where the boot file should be
+      def initialize(boot_file_path)
+        message = <<~MSG
+
+          \e[31mKarafka Boot File Missing:\e[0m #{boot_file_path}
+
+          Cannot find Karafka boot file - this file configures your Karafka application.
+
+          \e[33mQuick fixes:\e[0m
+          \e[32m1.\e[0m Navigate to your Karafka app directory
+          \e[32m2.\e[0m Check if following file exists: \e[36m#{boot_file_path}\e[0m
+          \e[32m3.\e[0m Install Karafka if needed: \e[36mkarafka install\e[0m
+
+          \e[33mCommon causes:\e[0m
+          \e[31m•\e[0m Wrong directory (not in Karafka app root)
+          \e[31m•\e[0m File was accidentally moved or deleted
+          \e[31m•\e[0m New project needing initialization
+
+          For setup help: \e[34mhttps://karafka.io/docs/Getting-Started\e[0m
+        MSG
+
+        super(message)
+        # In case of this error backtrace is irrelevant and we want to print comprehensive error
+        # message without backtrace, this is why nullified.
+        set_backtrace([])
+      end
+    end

     # Raised when we've waited enough for shutting down a non-responsive process
     ForcefulShutdownError = Class.new(BaseError)
@@ -65,7 +92,13 @@ module Karafka
     ResultNotVisibleError = Class.new(BaseError)

     # Raised when there is an attempt to run an unrecognized CLI command
-    UnrecognizedCommandError = Class.new(BaseError)
+    UnrecognizedCommandError = Class.new(BaseError) do
+      # Overwritten not to print backtrace for unknown CLI command
+      def initialize(*args)
+        super
+        set_backtrace([])
+      end
+    end

     # Raised when you were executing a command and it could not finish successfully because of
     # a setup state or parameters configuration
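
Note: both reworked error classes rely on the same trick: `set_backtrace([])` inside `initialize`. Because `raise` only assigns a backtrace to an exception that does not already carry one, the pre-set empty backtrace survives, and an unhandled raise prints just the formatted message. A standalone demonstration of the pattern:

    # Standalone demonstration of the backtrace-nullifying pattern
    class QuietError < StandardError
      def initialize(message)
        super
        # A pre-set (even empty) backtrace is kept by `raise`, keeping CLI output clean
        set_backtrace([])
      end
    end

    begin
      raise QuietError.new('something went wrong')
    rescue QuietError => e
      puts e.message # => something went wrong
      p e.backtrace  # => []
    end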
data/lib/karafka/helpers/interval_runner.rb
CHANGED
@@ -30,6 +30,14 @@ module Karafka
        @block.call
      end

+      # Runs the requested code bypassing any time frequencies
+      # Useful when we have certain actions that usually need to run periodically but in some
+      # cases need to run asap
+      def call!
+        @last_called_at = monotonic_now
+        @block.call
+      end
+
      # Resets the runner, so next `#call` will run the underlying code
      def reset
        @last_called_at = monotonic_now - @interval
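
Note: `call!` complements the throttled `call` for run-now situations such as shutdown, where waiting out the reporting interval would drop the final state update. A simplified, standalone sketch of the interval-runner idea (the real class reads its interval from configuration and uses Karafka's monotonic clock helper):

    # Simplified interval runner: the block runs at most once per interval via #call,
    # while #call! bypasses the throttle entirely
    class IntervalRunner
      def initialize(interval_ms, &block)
        @interval = interval_ms
        @block = block
        @last_called_at = monotonic_now - @interval
      end

      def call
        return if monotonic_now - @last_called_at < @interval

        @last_called_at = monotonic_now
        @block.call
      end

      # Immediate execution regardless of the elapsed time
      def call!
        @last_called_at = monotonic_now
        @block.call
      end

      private

      def monotonic_now
        Process.clock_gettime(Process::CLOCK_MONOTONIC) * 1_000
      end
    end

    reporter = IntervalRunner.new(5_000) { puts 'state published' }
    reporter.call  # runs (due immediately after construction)
    reporter.call  # skipped - the 5s interval has not elapsed
    reporter.call! # runs anyway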
data/lib/karafka/pro/processing/coordinators/errors_tracker.rb
CHANGED
@@ -22,6 +22,9 @@ module Karafka
        # @return [Hash]
        attr_reader :counts

+        # @return [String]
+        attr_reader :trace_id
+
        # Max errors we keep in memory.
        # We do not want to keep more because for DLQ-less this would cause memory-leaks.
        # We do however count per class for granular error counting
@@ -41,6 +44,7 @@ module Karafka
          @topic = topic
          @partition = partition
          @limit = limit
+          @trace_id = SecureRandom.uuid
        end

        # Clears all the errors
@@ -54,6 +58,7 @@ module Karafka
          @errors.shift if @errors.size >= @limit
          @errors << error
          @counts[error.class] += 1
+          @trace_id = SecureRandom.uuid
        end

        # @return [Boolean] is the error tracker empty
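
Note: the tracker now exposes a `trace_id` that is rotated on every tracked error, so a reader (such as the DLQ dispatch below) always sees an identifier tied to the most recent failure. A toy stand-in illustrating the rotation:

    require 'securerandom'

    # Toy stand-in showing the rotating trace id behavior
    class ErrorsTracker
      attr_reader :trace_id

      def initialize
        @trace_id = SecureRandom.uuid
      end

      def <<(error)
        # ...error storage omitted...
        @trace_id = SecureRandom.uuid
      end
    end

    tracker = ErrorsTracker.new
    before = tracker.trace_id
    tracker << RuntimeError.new('boom')
    tracker.trace_id == before # => false: a fresh id per tracked error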
data/lib/karafka/pro/processing/strategies/dlq/default.rb
CHANGED
@@ -149,15 +149,16 @@ module Karafka

          dlq_message = {
            topic: @_dispatch_to_dlq_topic || topic.dead_letter_queue.topic,
-            key:
+            key: skippable_message.raw_key,
+            partition_key: source_partition,
            payload: skippable_message.raw_payload,
            headers: skippable_message.raw_headers.merge(
              'source_topic' => topic.name,
              'source_partition' => source_partition,
              'source_offset' => skippable_message.offset.to_s,
              'source_consumer_group' => topic.consumer_group.id,
-              '
-              '
+              'source_attempts' => attempt.to_s,
+              'source_trace_id' => errors_tracker.trace_id
            )
          }
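
Note: the dispatch now preserves the original raw key and routes with the source partition as the partition key. In WaterDrop (Karafka's producer) terms, `key` travels with the record while `partition_key` only influences partition selection, so all failures from one source partition land together in the DLQ topic. An illustrative payload with placeholder values:

    raw_payload = '{"id":123}'

    # Illustrative DLQ dispatch payload (all values are placeholders)
    dlq_message = {
      topic: 'orders_dlq',
      key: 'order-123',   # original raw key, preserved
      partition_key: '4', # groups failures from source partition 4 together
      payload: raw_payload,
      headers: {
        'source_topic' => 'orders',
        'source_partition' => '4',
        'source_offset' => '1024',
        'source_consumer_group' => 'app_group',
        'source_attempts' => '3',
        'source_trace_id' => '4a6e...'
      }
    }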
data/lib/karafka/pro/scheduled_messages/consumer.rb
CHANGED
@@ -8,13 +8,27 @@ module Karafka
    module ScheduledMessages
      # Consumer that coordinates scheduling of messages when the time comes
      class Consumer < ::Karafka::BaseConsumer
+        include Helpers::ConfigImporter.new(
+          dispatcher_class: %i[scheduled_messages dispatcher_class]
+        )
+
+        # In case there is an extremely high turnover of messages, EOF may never kick in,
+        # effectively not changing status from loading to loaded. We use the time consumer instance
+        # was created + a buffer time to detect such a case (loading + messages from the time it
+        # was already running) to switch the state despite no EOF
+        # This is in seconds
+        GRACE_PERIOD = 15
+
+        private_constant :GRACE_PERIOD
+
        # Prepares the initial state of all stateful components
        def initialized
          clear!
          # Max epoch is always moving forward with the time. Never backwards, hence we do not
          # reset it at all.
          @max_epoch = MaxEpoch.new
-          @state = State.new
+          @state = State.new
+          @reloads = 0
        end

        # Processes messages and runs dispatch (via tick) if needed
@@ -23,11 +37,25 @@ module Karafka

          messages.each do |message|
            SchemaValidator.call(message)
+
+            # We always track offsets of messages, even if they would be later on skipped or
+            # ignored for any reason. That way we have debug info that is useful once in a while.
+            @tracker.offsets(message)
+
            process_message(message)
          end

          @states_reporter.call

+          recent_timestamp = messages.last.timestamp.to_i
+          post_started_timestamp = @tracker.started_at + GRACE_PERIOD
+
+          # If we started getting messages that are beyond the current time, it means we have
+          # loaded enough to start scheduling. The upcoming messages are from the future looking
+          # from perspective of the current consumer start. We add a bit of grace period not to
+          # deal with edge cases
+          loaded! if @state.loading? && recent_timestamp > post_started_timestamp
+
          eofed if eofed?

          # Unless given day data is fully loaded we should not dispatch any notifications nor
@@ -51,11 +79,7 @@ module Karafka
          return if reload!

          # If end of the partition is reached, it always means all data is loaded
-
-
-          tags.add(:state, @state.to_s)
-
-          @states_reporter.call
+          loaded!
        end

        # Performs periodic operations when no new data is provided to the topic partition
@@ -86,6 +110,12 @@ module Karafka
          @states_reporter.call
        end

+        # Move the state to shutdown and publish immediately
+        def shutdown
+          @state.stopped!
+          @states_reporter.call!
+        end
+
        private

        # Takes each message and adds it to the daily accumulator if needed or performs other
@@ -100,7 +130,7 @@ module Karafka
          time = message.headers['schedule_target_epoch']

          # Do not track historical below today as those will be reflected in the daily buffer
-          @tracker.
+          @tracker.future(message) if time >= @today.starts_at

          if time > @today.ends_at || time < @max_epoch.to_i
            # Clean the message immediately when not needed (won't be scheduled) to preserve
@@ -128,6 +158,7 @@ module Karafka
          # If this is a new assignment we always need to seek from beginning to load the data
          if @state.fresh?
            clear!
+            @reloads += 1
            seek(:earliest)

            return true
@@ -139,6 +170,7 @@ module Karafka
          # If day has ended we reload and start new day with new schedules
          if @today.ended?
            clear!
+            @reloads += 1
            seek(:earliest)

            return true
@@ -147,6 +179,13 @@ module Karafka
          false
        end

+        # Moves the state to loaded and publishes the state update
+        def loaded!
+          @state.loaded!
+          tags.add(:state, @state.to_s)
+          @states_reporter.call!
+        end
+
        # Resets all buffers and states so we can start a new day with a clean slate
        # We can fully recreate the dispatcher because any undispatched messages will be dispatched
        # with the new day dispatcher after it is reloaded.
@@ -154,22 +193,19 @@ module Karafka
          @daily_buffer = DailyBuffer.new
          @today = Day.new
          @tracker = Tracker.new
-          @state = State.new
-          @
+          @state = State.new
+          @state.loading!
+          @dispatcher = dispatcher_class.new(topic.name, partition)
          @states_reporter = Helpers::IntervalRunner.new do
            @tracker.today = @daily_buffer.size
            @tracker.state = @state.to_s
+            @tracker.reloads = @reloads

            @dispatcher.state(@tracker)
          end

          tags.add(:state, @state.to_s)
        end
-
-        # @return [Karafka::Core::Configurable::Node] Schedules config node
-        def config
-          @config ||= Karafka::App.config.scheduled_messages
-        end
      end
    end
  end
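
Note: the grace-period check gives the consumer a second signal (besides partition EOF) that backlog loading is done: once the newest message in a batch was produced after consumer start plus `GRACE_PERIOD`, everything older must already have been consumed. A toy illustration of the arithmetic (values are examples):

    grace_period = 15                # seconds, mirrors GRACE_PERIOD
    started_at = Time.now.to_i - 60  # consumer started a minute ago
    recent_timestamp = Time.now.to_i # newest message was produced "now"

    loading_done = recent_timestamp > started_at + grace_period
    # => true: traffic caught up with real time, so the state may flip to loaded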
data/lib/karafka/pro/scheduled_messages/dispatcher.rb
CHANGED
@@ -70,7 +70,8 @@ module Karafka
        config.producer.produce_async(
          topic: "#{@topic}#{config.states_postfix}",
          payload: @serializer.state(tracker),
-          key
+          # We use the state as a key, so we always have one state transition data available
+          key: "#{tracker.state}_state",
          partition: @partition,
          headers: { 'zlib' => 'true' }
        )
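
Note on the keying change: if the states topic is log-compacted, keying each record by `"#{tracker.state}_state"` means Kafka eventually retains the latest record per state, so the most recent transition into every state stays readable. An illustrative per-key view of such a compacted topic (placeholder values):

    # Latest surviving record per key on a compacted states topic
    {
      'loading_state' => '<zlib-compressed payload of the last loading report>',
      'loaded_state'  => '<zlib-compressed payload of the last loaded report>'
    }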
data/lib/karafka/pro/scheduled_messages/serializer.rb
CHANGED
@@ -16,10 +16,8 @@ module Karafka
        def state(tracker)
          data = {
            schema_version: ScheduledMessages::STATES_SCHEMA_VERSION,
-            dispatched_at: float_now
-
-            daily: tracker.daily
-          }
+            dispatched_at: float_now
+          }.merge(tracker.to_h)

          compress(
            serialize(data)
data/lib/karafka/pro/scheduled_messages/state.rb
CHANGED
@@ -15,38 +15,35 @@ module Karafka
      # - loaded - state in which we finished loading all the schedules and we can dispatch
      #   messages when the time comes and we can process real-time incoming schedules and
      #   changes to schedules as they appear in the stream.
+      # - shutdown - the states are no longer available as the consumer has shut down
      class State
-        #
-
-
-
-
+        # Available states scheduling of messages may be in
+        STATES = %w[
+          fresh
+          loading
+          loaded
+          stopped
+        ].freeze

-
-        def fresh?
-          @loaded.nil?
-        end
+        private_constant :STATES

-
-
-          @loaded = true
+        def initialize
+          @state = 'fresh'
        end

-
-
-
+        STATES.each do |state|
+          define_method :"#{state}!" do
+            @state = state
+          end
+
+          define_method :"#{state}?" do
+            @state == state
+          end
        end

        # @return [String] current state string representation
        def to_s
-
-          when nil
-            'fresh'
-          when false
-            'loading'
-          when true
-            'loaded'
-          end
+          @state
        end
      end
    end
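
Note: the boolean tri-state (`@loaded` as `nil`/`false`/`true`) is replaced with named states plus metaprogrammed bang/predicate pairs. A condensed, standalone version of the pattern:

    # One `state!` mutator and one `state?` predicate generated per state name
    class State
      STATES = %w[fresh loading loaded stopped].freeze

      def initialize
        @state = 'fresh'
      end

      STATES.each do |state|
        define_method(:"#{state}!") { @state = state }
        define_method(:"#{state}?") { @state == state }
      end

      def to_s
        @state
      end
    end

    state = State.new
    state.fresh?   # => true
    state.loading!
    state.loading? # => true
    state.to_s     # => "loading"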
data/lib/karafka/pro/scheduled_messages/tracker.rb
CHANGED
@@ -10,25 +10,40 @@ module Karafka
      #
      # It provides accurate today dispatch taken from daily buffer and estimates for future days
      class Tracker
-        # @return [Hash<String, Integer>]
-        attr_reader :daily
-
        # @return [String] current state
        attr_accessor :state

+        attr_writer :reloads
+
+        # @return [Integer] time epoch when this tracker was started
+        attr_reader :started_at
+
        def initialize
          @daily = Hash.new { |h, k| h[k] = 0 }
-          @
+          @started_at = Time.now.to_i
+          @offsets = { low: -1, high: -1 }
+          @state = 'fresh'
+          @reloads = 0
        end

-        #
+        # Tracks offsets of visited messages
+        #
+        # @param message [Karafka::Messages::Message]
+        def offsets(message)
+          message_offset = message.offset
+
+          @offsets[:low] = message_offset if @offsets[:low].negative?
+          @offsets[:high] = message.offset
+        end
+
+        # Accurate (because coming from daily buffer) number of things to schedule daily
        #
        # @param sum [Integer]
        def today=(sum)
-          @daily[epoch_to_date(@
+          @daily[epoch_to_date(@started_at)] = sum
        end

-        # Tracks message dispatch
+        # Tracks future message dispatch
        #
        # It is only relevant for future days as for today we use accurate metrics from the daily
        # buffer
@@ -37,12 +52,23 @@ module Karafka
        # tombstone message. Tombstone messages cancellations are not tracked because it would
        # drastically increase complexity. For given day we use the accurate counter and for
        # future days we use estimates.
-        def
+        def future(message)
          epoch = message.headers['schedule_target_epoch']

          @daily[epoch_to_date(epoch)] += 1
        end

+        # @return [Hash] hash with details that we want to expose
+        def to_h
+          {
+            state: @state,
+            offsets: @offsets,
+            daily: @daily,
+            started_at: @started_at,
+            reloads: @reloads
+          }.freeze
+        end
+
        private

        # @param epoch [Integer] epoch time
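
Note: `Tracker#to_h` is what the serializer change above merges into the published state document, so state reports now carry offsets, reloads and start time next to the daily estimates. Illustrative shape of one published document (all values are placeholders; the real schema version comes from `STATES_SCHEMA_VERSION`):

    {
      schema_version: '1.0.0',
      dispatched_at: 1_700_000_000.123,
      state: 'loaded',
      offsets: { low: 0, high: 1024 },
      daily: { '2024-01-15' => 42 },
      started_at: 1_700_000_000,
      reloads: 0
    }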
data/lib/karafka/server.rb
CHANGED
@@ -9,6 +9,15 @@ module Karafka

    private_constant :FORCEFUL_SHUTDOWN_WAIT

+    extend Helpers::ConfigImporter.new(
+      cli_contract: %i[internal cli contract],
+      activity_manager: %i[internal routing activity_manager],
+      supervision_sleep: %i[internal supervision_sleep],
+      shutdown_timeout: %i[shutdown_timeout],
+      forceful_exit_code: %i[internal forceful_exit_code],
+      process: %i[internal process]
+    )
+
    class << self
      # Set of consuming threads. Each consumer thread contains a single consumer
      attr_accessor :listeners
@@ -42,9 +51,7 @@ module Karafka
      # embedded
      # We cannot validate this during the start because config needs to be populated and routes
      # need to be defined.
-
-        config.internal.routing.activity_manager.to_h
-      )
+      cli_contract.validate!(activity_manager.to_h)

      # We clear as we do not want parent handlers in case of working from fork
      process.clear
@@ -99,18 +106,18 @@ module Karafka

      Karafka::App.stop!

-      timeout =
+      timeout = shutdown_timeout

      # We check from time to time (for the timeout period) if all the threads finished
      # their work and if so, we can just return and normal shutdown process will take place
      # We divide it by 1000 because we use time in ms.
-      ((timeout / 1_000) * (1 /
+      ((timeout / 1_000) * (1 / supervision_sleep)).to_i.times do
        all_listeners_stopped = listeners.all?(&:stopped?)
        all_workers_stopped = workers.none?(&:alive?)

        return if all_listeners_stopped && all_workers_stopped

-        sleep(
+        sleep(supervision_sleep)
      end

      raise Errors::ForcefulShutdownError
@@ -148,7 +155,7 @@ module Karafka
      return unless process.supervised?

      # exit! is not within the instrumentation as it would not trigger due to exit
-      Kernel.exit!(
+      Kernel.exit!(forceful_exit_code)
    ensure
      # We need to check if it wasn't an early exit to make sure that only on stop invocation
      # can change the status after everything is closed
@@ -172,18 +179,6 @@ module Karafka
      # in one direction
      Karafka::App.quiet!
    end
-
-    private
-
-    # @return [Karafka::Core::Configurable::Node] root config node
-    def config
-      Karafka::App.config
-    end
-
-    # @return [Karafka::Process] process wrapper instance used to catch system signal calls
-    def process
-      config.internal.process
-    end
  end

  # Always start with standalone so there always is a value for the execution mode.
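
Note: the recurring refactor in this release replaces private `config`/`process` accessor methods with `Helpers::ConfigImporter`, which generates one reader per nested configuration path. A simplified, standalone sketch of how such an importer can be built (not Karafka's actual implementation; `APP_CONFIG` stands in for the real config tree):

    require 'ostruct'

    # Stand-in for the application's nested config tree
    APP_CONFIG = OpenStruct.new(
      shutdown_timeout: 60_000,
      internal: OpenStruct.new(supervision_sleep: 0.1)
    )

    # A Module subclass defining one reader per requested config path,
    # each resolved by walking the nested config tree key by key
    class ConfigImporter < Module
      def initialize(**mappings)
        super()
        mappings.each do |name, path|
          define_method(name) do
            path.reduce(APP_CONFIG) { |node, key| node.public_send(key) }
          end
        end
      end
    end

    class Server
      extend ConfigImporter.new(
        shutdown_timeout: %i[shutdown_timeout],
        supervision_sleep: %i[internal supervision_sleep]
      )
    end

    Server.shutdown_timeout  # => 60000
    Server.supervision_sleep # => 0.1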
data/lib/karafka/version.rb
CHANGED
metadata
CHANGED
@@ -1,39 +1,12 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.5.0.beta1
+  version: 2.5.0.rc1
 platform: ruby
 authors:
 - Maciej Mensfeld
 bindir: bin
-cert_chain:
-- |
-  -----BEGIN CERTIFICATE-----
-  MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
-  YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
-  MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
-  dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
-  bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
-  PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
-  ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
-  Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
-  JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
-  T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
-  flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
-  vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
-  iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
-  HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
-  sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
-  bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
-  eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
-  Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
-  agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
-  TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
-  Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
-  oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
-  i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
-  ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
-  -----END CERTIFICATE-----
+cert_chain: []
 date: 1980-01-02 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
@@ -76,14 +49,14 @@ dependencies:
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 0.19.
+      version: 0.19.5
   type: :runtime
   prerelease: false
   version_requirements: !ruby/object:Gem::Requirement
     requirements:
     - - ">="
       - !ruby/object:Gem::Version
-        version: 0.19.
+        version: 0.19.5
 - !ruby/object:Gem::Dependency
   name: waterdrop
   requirement: !ruby/object:Gem::Requirement
@@ -138,6 +111,7 @@ files:
 - ".github/ISSUE_TEMPLATE/bug_report.md"
 - ".github/ISSUE_TEMPLATE/feature_request.md"
 - ".github/workflows/ci.yml"
+- ".github/workflows/push.yml"
 - ".github/workflows/verify-action-pins.yml"
 - ".gitignore"
 - ".rspec"
@@ -152,6 +126,7 @@ files:
 - LICENSE-COMM
 - LICENSE-LGPL
 - README.md
+- Rakefile
 - SECURITY.md
 - bin/benchmarks
 - bin/clean_kafka
@@ -167,15 +142,15 @@ files:
 - bin/verify_license_integrity
 - bin/verify_topics_naming
 - bin/wait_for_kafka
-- certs/cert.pem
 - certs/karafka-pro.pem
 - config/locales/errors.yml
 - config/locales/pro_errors.yml
 - docker-compose.yml
-- examples/payloads/json/enrollment_event.json
-- examples/payloads/json/ingestion_event.json
-- examples/payloads/json/transaction_event.json
-- examples/payloads/json/user_event.json
+- examples/payloads/avro/.gitkeep
+- examples/payloads/json/sample_set_01/enrollment_event.json
+- examples/payloads/json/sample_set_01/ingestion_event.json
+- examples/payloads/json/sample_set_01/transaction_event.json
+- examples/payloads/json/sample_set_01/user_event.json
 - karafka.gemspec
 - lib/active_job/karafka.rb
 - lib/active_job/queue_adapters/karafka_adapter.rb
@@ -207,6 +182,7 @@ files:
 - lib/karafka/cli/topics/base.rb
 - lib/karafka/cli/topics/create.rb
 - lib/karafka/cli/topics/delete.rb
+- lib/karafka/cli/topics/help.rb
 - lib/karafka/cli/topics/migrate.rb
 - lib/karafka/cli/topics/plan.rb
 - lib/karafka/cli/topics/repartition.rb
@@ -644,7 +620,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
 - !ruby/object:Gem::Version
   version: '0'
 requirements: []
-rubygems_version: 3.6.
+rubygems_version: 3.6.7
 specification_version: 4
 summary: Karafka is Ruby and Rails efficient Kafka processing framework.
 test_files: []
checksums.yaml.gz.sig
DELETED
Binary file
data/certs/cert.pem
DELETED
@@ -1,26 +0,0 @@
- -----BEGIN CERTIFICATE-----
- MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
- YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
- MB4XDTI0MDgyMzEwMTkyMFoXDTQ5MDgxNzEwMTkyMFowPzEQMA4GA1UEAwwHY29u
- dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
- bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAKjLhLjQqUlNayxkXnO+
- PsmCDs/KFIzhrsYMfLZRZNaWmzV3ujljMOdDjd4snM2X06C41iVdQPWjpe3j8vVe
- ZXEWR/twSbOP6Eeg8WVH2wCOo0x5i7yhVn4UBLH4JpfEMCbemVcWQ9ry9OMg4WpH
- Uu4dRwxFV7hzCz3p0QfNLRI4miAxnGWcnlD98IJRjBAksTuR1Llj0vbOrDGsL9ZT
- JeXP2gdRLd8SqzAFJEWrbeTBCBU7gfSh3oMg5SVDLjaqf7Kz5wC/8bDZydzanOxB
- T6CDXPsCnllmvTNx2ei2T5rGYJOzJeNTmJLLK6hJWUlAvaQSvCwZRvFJ0tVGLEoS
- flqSr6uGyyl1eMUsNmsH4BqPEYcAV6P2PKTv2vUR8AP0raDvZ3xL1TKvfRb8xRpo
- vPopCGlY5XBWEc6QERHfVLTIVsjnls2/Ujj4h8/TSfqqYnaHKefIMLbuD/tquMjD
- iWQsW2qStBV0T+U7FijKxVfrfqZP7GxQmDAc9o1iiyAa3QIDAQABo3cwdTAJBgNV
- HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQU3O4dTXmvE7YpAkszGzR9DdL9
- sbEwHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
- bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAVKTfoLXn7mqdSxIR
- eqxcR6Huudg1jes81s1+X0uiRTR3hxxKZ3Y82cPsee9zYWyBrN8TA4KA0WILTru7
- Ygxvzha0SRPsSiaKLmgOJ+61ebI4+bOORzIJLpD6GxCxu1r7MI4+0r1u1xe0EWi8
- agkVo1k4Vi8cKMLm6Gl9b3wG9zQBw6fcgKwmpjKiNnOLP+OytzUANrIUJjoq6oal
- TC+f/Uc0TLaRqUaW/bejxzDWWHoM3SU6aoLPuerglzp9zZVzihXwx3jPLUVKDFpF
- Rl2lcBDxlpYGueGo0/oNzGJAAy6js8jhtHC9+19PD53vk7wHtFTZ/0ugDQYnwQ+x
- oml2fAAuVWpTBCgOVFe6XCQpMKopzoxQ1PjKztW2KYxgJdIBX87SnL3aWuBQmhRd
- i9zWxov0mr44TWegTVeypcWGd/0nxu1+QHVNHJrpqlPBRvwQsUm7fwmRInGpcaB8
- ap8wNYvryYzrzvzUxIVFBVM5PacgkFqRmolCa8I7tdKQN+R1
- -----END CERTIFICATE-----
data.tar.gz.sig
DELETED
Binary file
metadata.gz.sig
DELETED
Binary file