karafka 2.0.0.beta5 → 2.0.0.rc3

Files changed (53)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/CHANGELOG.md +28 -0
  4. data/CONTRIBUTING.md +0 -5
  5. data/Gemfile.lock +12 -42
  6. data/LICENSE-COMM +1 -1
  7. data/README.md +44 -16
  8. data/bin/stress_many +1 -1
  9. data/bin/stress_one +1 -1
  10. data/config/errors.yml +52 -5
  11. data/docker-compose.yml +7 -0
  12. data/karafka.gemspec +2 -4
  13. data/lib/karafka/active_job/consumer.rb +2 -0
  14. data/lib/karafka/active_job/job_options_contract.rb +8 -2
  15. data/lib/karafka/base_consumer.rb +4 -6
  16. data/lib/karafka/cli/install.rb +15 -2
  17. data/lib/karafka/cli/server.rb +4 -2
  18. data/lib/karafka/connection/client.rb +20 -17
  19. data/lib/karafka/connection/listener.rb +12 -24
  20. data/lib/karafka/connection/pauses_manager.rb +0 -8
  21. data/lib/karafka/contracts/base.rb +2 -8
  22. data/lib/karafka/contracts/config.rb +71 -51
  23. data/lib/karafka/contracts/consumer_group.rb +25 -18
  24. data/lib/karafka/contracts/consumer_group_topic.rb +30 -16
  25. data/lib/karafka/contracts/server_cli_options.rb +18 -7
  26. data/lib/karafka/helpers/colorize.rb +20 -0
  27. data/lib/karafka/instrumentation/logger_listener.rb +8 -2
  28. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  29. data/lib/karafka/instrumentation/vendors/datadog/listener.rb +232 -0
  30. data/lib/karafka/pro/active_job/dispatcher.rb +5 -2
  31. data/lib/karafka/pro/active_job/job_options_contract.rb +11 -6
  32. data/lib/karafka/pro/base_consumer.rb +21 -12
  33. data/lib/karafka/pro/contracts/base.rb +21 -0
  34. data/lib/karafka/pro/contracts/consumer_group.rb +34 -0
  35. data/lib/karafka/pro/contracts/consumer_group_topic.rb +33 -0
  36. data/lib/karafka/pro/loader.rb +23 -3
  37. data/lib/karafka/pro/processing/coordinator.rb +51 -0
  38. data/lib/karafka/pro/processing/partitioner.rb +60 -0
  39. data/lib/karafka/pro/routing/builder_extensions.rb +30 -0
  40. data/lib/karafka/pro/routing/{extensions.rb → topic_extensions.rb} +7 -1
  41. data/lib/karafka/processing/coordinator.rb +6 -2
  42. data/lib/karafka/processing/coordinators_buffer.rb +3 -7
  43. data/lib/karafka/processing/executor.rb +1 -1
  44. data/lib/karafka/processing/jobs_queue.rb +11 -0
  45. data/lib/karafka/processing/partitioner.rb +22 -0
  46. data/lib/karafka/processing/worker.rb +4 -2
  47. data/lib/karafka/setup/config.rb +9 -3
  48. data/lib/karafka/templates/example_consumer.rb.erb +2 -2
  49. data/lib/karafka/version.rb +1 -1
  50. data/lib/karafka.rb +2 -2
  51. data.tar.gz.sig +0 -0
  52. metadata +15 -34
  53. metadata.gz.sig +0 -0
data/lib/karafka/processing/coordinators_buffer.rb CHANGED
@@ -2,7 +2,7 @@

 module Karafka
   module Processing
-    # Buffer used to build and store coordinators per topic partition
+    # Coordinators builder used to build coordinators per topic partition
     #
     # It provides direct pauses access for revocation
     #
@@ -34,17 +34,13 @@ module Karafka
       # @param topic [String] topic name
       # @param partition [Integer] partition number
       def revoke(topic, partition)
-        @pauses_manager.revoke(topic, partition)
+        return unless @coordinators[topic].key?(partition)

         # The fact that we delete here does not change the fact that the executor still holds the
         # reference to this coordinator. We delete it here, as we will no longer process any
         # new stuff with it and we may need a new coordinator if we regain this partition, but the
         # coordinator may still be in use
-        coordinator = @coordinators[topic].delete(partition)
-
-        return unless coordinator
-
-        coordinator.revoke
+        @coordinators[topic].delete(partition).revoke
       end

       # Clears coordinators and re-created the pauses manager
data/lib/karafka/processing/executor.rb CHANGED
@@ -71,7 +71,7 @@ module Karafka

       # Runs consumer after consumption code
       def after_consume
-        consumer.on_after_consume if @consumer
+        consumer.on_after_consume
       end

       # Runs the controller `#revoked` method that should be triggered when a given consumer is
data/lib/karafka/processing/jobs_queue.rb CHANGED
@@ -119,6 +119,17 @@ module Karafka
         @semaphores[group_id].pop while wait?(group_id)
       end

+      # - `processing` - number of jobs that are currently being processed (active work)
+      # - `enqueued` - number of jobs in the queue that are waiting to be picked up by a worker
+      #
+      # @return [Hash] hash with basic usage statistics of this queue.
+      def statistics
+        {
+          processing: size - @queue.size,
+          enqueued: @queue.size
+        }.freeze
+      end
+
       private

       # @param group_id [String] id of the group in which jobs we're interested.
data/lib/karafka/processing/partitioner.rb ADDED
@@ -0,0 +1,22 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Processing
+    # Basic partitioner for work division
+    # It does not divide any work.
+    class Partitioner
+      # @param subscription_group [Karafka::Routing::SubscriptionGroup] subscription group
+      def initialize(subscription_group)
+        @subscription_group = subscription_group
+      end
+
+      # @param _topic [String] topic name
+      # @param messages [Array<Karafka::Messages::Message>] karafka messages
+      # @yieldparam [Integer] group id
+      # @yieldparam [Array<Karafka::Messages::Message>] karafka messages
+      def call(_topic, messages)
+        yield(0, messages)
+      end
+    end
+  end
+end
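For context, the partitioner contract above takes a subscription group on initialization and yields one or more (group id, messages) pairs from #call; the default simply yields the whole batch under group 0. A minimal sketch of a custom implementation that splits a batch by message key could look like the following (the class name and grouping strategy are illustrative only, not part of this release, and it assumes messages expose their Kafka key via #metadata.key):

# Hypothetical partitioner following the same call/yield contract as above
class KeyBasedPartitioner
  # @param subscription_group [Karafka::Routing::SubscriptionGroup]
  def initialize(subscription_group)
    @subscription_group = subscription_group
  end

  # Yields one group of messages per distinct Kafka message key
  def call(_topic, messages)
    messages
      .group_by { |message| message.metadata.key }
      .each_with_index { |(_key, group), index| yield(index, group) }
  end
end

Such a class would presumably be wired in through the new partitioner_class setting introduced in data/lib/karafka/setup/config.rb further down in this diff.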
data/lib/karafka/processing/worker.rb CHANGED
@@ -47,9 +47,11 @@ module Karafka
         job = @jobs_queue.pop

         if job
-          Karafka.monitor.instrument('worker.process', caller: self, job: job)
+          instrument_details = { caller: self, job: job, jobs_queue: @jobs_queue }

-          Karafka.monitor.instrument('worker.processed', caller: self, job: job) do
+          Karafka.monitor.instrument('worker.process', instrument_details)
+
+          Karafka.monitor.instrument('worker.processed', instrument_details) do
             job.before_call

             # If a job is marked as non blocking, we can run a tick in the job queue and if there
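Because the worker now passes the jobs queue in the instrumentation payload, a monitor subscriber can combine this with the #statistics method added to jobs_queue.rb above. A minimal sketch, assuming the 'worker.process' payload carries :jobs_queue exactly as shown in this hunk:

# Report queue saturation every time a worker picks up a job
Karafka.monitor.subscribe('worker.process') do |event|
  stats = event[:jobs_queue].statistics
  puts "jobs processing: #{stats[:processing]}, enqueued: #{stats[:enqueued]}"
end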
data/lib/karafka/setup/config.rb CHANGED
@@ -12,7 +12,7 @@ module Karafka
     #   enough and will still keep the code simple
     # @see Karafka::Setup::Configurators::Base for more details about configurators api
     class Config
-      extend Dry::Configurable
+      extend ::WaterDrop::Configurable

       # Defaults for kafka settings, that will be overwritten only if not present already
       KAFKA_DEFAULTS = {
@@ -60,9 +60,9 @@ module Karafka
       # option [Boolean] should we leave offset management to the user
       setting :manual_offset_management, default: false
       # options max_messages [Integer] how many messages do we want to fetch from Kafka in one go
-      setting :max_messages, default: 1_000
+      setting :max_messages, default: 100
       # option [Integer] number of milliseconds we can wait while fetching data
-      setting :max_wait_time, default: 10_000
+      setting :max_wait_time, default: 1_000
       # option shutdown_timeout [Integer] the number of milliseconds after which Karafka no
       #   longer waits for the consumers to stop gracefully but instead we force terminate
       #   everything.
@@ -107,6 +107,8 @@ module Karafka
         setting :jobs_builder, default: Processing::JobsBuilder.new
         # option coordinator [Class] work coordinator we want to user for processing coordination
         setting :coordinator_class, default: Processing::Coordinator
+        # option partitioner_class [Class] partitioner we use against a batch of data
+        setting :partitioner_class, default: Processing::Partitioner
       end

       # Karafka components for ActiveJob
@@ -121,6 +123,10 @@ module Karafka
         end
       end

+      # This will load all the defaults that can be later overwritten.
+      # Thanks to that we have an initial state out of the box.
+      configure
+
       class << self
         # Configuring method
         # @param block [Proc] block we want to execute with the config instance
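Since the polling defaults drop from 1_000 messages / 10_000 ms to 100 messages / 1_000 ms in this release, applications that depend on the old behavior would set the values explicitly. A minimal sketch using the standard Karafka setup block, restoring the beta5 values:

# Pins max_messages and max_wait_time to their pre-rc defaults
class KarafkaApp < Karafka::App
  setup do |config|
    config.max_messages = 1_000
    config.max_wait_time = 10_000
  end
end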
data/lib/karafka/templates/example_consumer.rb.erb CHANGED
@@ -7,10 +7,10 @@ class ExampleConsumer < ApplicationConsumer
   end

   # Run anything upon partition being revoked
-  # def on_revoked
+  # def revoked
   # end

   # Define here any teardown things you want when Karafka server stops
-  # def on_shutdown
+  # def shutdown
   # end
 end
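The template now names the hooks revoked and shutdown, dropping the on_ prefix used in beta5. A short sketch of a consumer relying on the renamed hooks (the class name and method bodies are illustrative):

# Consumer following the updated template conventions
class EventsConsumer < ApplicationConsumer
  def consume
    messages.each { |message| puts message.raw_payload }
  end

  # Runs when the partition is revoked (previously on_revoked)
  def revoked
    # release any partition-local resources here
  end

  # Runs when the Karafka server stops (previously on_shutdown)
  def shutdown
    # close connections, flush buffers, etc.
  end
end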
data/lib/karafka/version.rb CHANGED
@@ -3,5 +3,5 @@
 # Main module namespace
 module Karafka
   # Current Karafka version
-  VERSION = '2.0.0.beta5'
+  VERSION = '2.0.0.rc3'
 end
data/lib/karafka.rb CHANGED
@@ -12,8 +12,6 @@
     openssl
     base64
     date
-    dry-configurable
-    dry-validation
     dry/events/publisher
     dry/monitor/notifications
     zeitwerk
@@ -90,5 +88,7 @@ loader = Zeitwerk::Loader.for_gem
 loader.ignore(Karafka.gem_root.join('lib/active_job'))
 # Do not load pro components, this will be handled by license manager
 loader.ignore(Karafka.gem_root.join('lib/karafka/pro'))
+# Do not load vendors instrumentation components. Those need to be required manually if needed
+loader.ignore(Karafka.gem_root.join('lib/karafka/instrumentation/vendors'))
 loader.setup
 loader.eager_load
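With the vendors directory excluded from Zeitwerk, the Datadog listener shipped in this release has to be required explicitly. A hedged sketch of that wiring; the listener class name is inferred from the file path, and the block-based configuration plus the Datadog::Statsd client (from the dogstatsd-ruby gem) are assumptions, not something this diff documents:

# Manual opt-in to the vendored Datadog instrumentation
require 'karafka/instrumentation/vendors/datadog/listener'

# Assumed configuration API for the listener
listener = ::Karafka::Instrumentation::Vendors::Datadog::Listener.new do |config|
  config.client = Datadog::Statsd.new('localhost', 8125)
end

Karafka.monitor.subscribe(listener)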
data.tar.gz.sig CHANGED
Binary file
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: karafka
 version: !ruby/object:Gem::Version
-  version: 2.0.0.beta5
+  version: 2.0.0.rc3
 platform: ruby
 authors:
 - Maciej Mensfeld
@@ -34,22 +34,8 @@ cert_chain:
   R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
   pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
   -----END CERTIFICATE-----
-date: 2022-07-05 00:00:00.000000000 Z
+date: 2022-07-26 00:00:00.000000000 Z
 dependencies:
-- !ruby/object:Gem::Dependency
-  name: dry-configurable
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '0.13'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '0.13'
 - !ruby/object:Gem::Dependency
   name: dry-monitor
   requirement: !ruby/object:Gem::Requirement
@@ -64,20 +50,6 @@ dependencies:
     - - "~>"
       - !ruby/object:Gem::Version
         version: '0.5'
-- !ruby/object:Gem::Dependency
-  name: dry-validation
-  requirement: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
-  type: :runtime
-  prerelease: false
-  version_requirements: !ruby/object:Gem::Requirement
-    requirements:
-    - - "~>"
-      - !ruby/object:Gem::Version
-        version: '1.7'
 - !ruby/object:Gem::Dependency
   name: rdkafka
   requirement: !ruby/object:Gem::Requirement
@@ -112,7 +84,7 @@ dependencies:
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.3.1
+      version: 2.3.3
   - - "<"
     - !ruby/object:Gem::Version
       version: 3.0.0
@@ -122,7 +94,7 @@ dependencies:
   requirements:
   - - ">="
     - !ruby/object:Gem::Version
-      version: 2.3.1
+      version: 2.3.3
   - - "<"
     - !ruby/object:Gem::Version
       version: 3.0.0
@@ -212,6 +184,7 @@ files:
 - lib/karafka/env.rb
 - lib/karafka/errors.rb
 - lib/karafka/helpers/async.rb
+- lib/karafka/helpers/colorize.rb
 - lib/karafka/helpers/multi_delegator.rb
 - lib/karafka/instrumentation.rb
 - lib/karafka/instrumentation/callbacks/error.rb
@@ -220,6 +193,8 @@ files:
 - lib/karafka/instrumentation/logger_listener.rb
 - lib/karafka/instrumentation/monitor.rb
 - lib/karafka/instrumentation/proctitle_listener.rb
+- lib/karafka/instrumentation/vendors/datadog/dashboard.json
+- lib/karafka/instrumentation/vendors/datadog/listener.rb
 - lib/karafka/licenser.rb
 - lib/karafka/messages/batch_metadata.rb
 - lib/karafka/messages/builders/batch_metadata.rb
@@ -235,13 +210,18 @@ files:
 - lib/karafka/pro/active_job/dispatcher.rb
 - lib/karafka/pro/active_job/job_options_contract.rb
 - lib/karafka/pro/base_consumer.rb
+- lib/karafka/pro/contracts/base.rb
+- lib/karafka/pro/contracts/consumer_group.rb
+- lib/karafka/pro/contracts/consumer_group_topic.rb
 - lib/karafka/pro/loader.rb
 - lib/karafka/pro/performance_tracker.rb
 - lib/karafka/pro/processing/coordinator.rb
 - lib/karafka/pro/processing/jobs/consume_non_blocking.rb
 - lib/karafka/pro/processing/jobs_builder.rb
+- lib/karafka/pro/processing/partitioner.rb
 - lib/karafka/pro/processing/scheduler.rb
-- lib/karafka/pro/routing/extensions.rb
+- lib/karafka/pro/routing/builder_extensions.rb
+- lib/karafka/pro/routing/topic_extensions.rb
 - lib/karafka/process.rb
 - lib/karafka/processing/coordinator.rb
 - lib/karafka/processing/coordinators_buffer.rb
@@ -253,6 +233,7 @@ files:
 - lib/karafka/processing/jobs/shutdown.rb
 - lib/karafka/processing/jobs_builder.rb
 - lib/karafka/processing/jobs_queue.rb
+- lib/karafka/processing/partitioner.rb
 - lib/karafka/processing/result.rb
 - lib/karafka/processing/scheduler.rb
 - lib/karafka/processing/worker.rb
@@ -306,5 +287,5 @@ requirements: []
 rubygems_version: 3.3.7
 signing_key:
 specification_version: 4
-summary: Ruby based framework for working with Apache Kafka
+summary: Efficient Kafka processing framework for Ruby and Rails
 test_files: []
metadata.gz.sig CHANGED
Binary file