karafka 2.0.0.beta1 → 2.0.0.beta4

Sign up to get free protection for your applications and to get access to all the features.
Files changed (62) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/workflows/ci.yml +9 -23
  4. data/CHANGELOG.md +47 -0
  5. data/Gemfile.lock +8 -8
  6. data/bin/integrations +36 -14
  7. data/bin/scenario +29 -0
  8. data/bin/wait_for_kafka +20 -0
  9. data/config/errors.yml +1 -0
  10. data/docker-compose.yml +12 -0
  11. data/karafka.gemspec +2 -2
  12. data/lib/active_job/karafka.rb +2 -2
  13. data/lib/karafka/active_job/routing/extensions.rb +31 -0
  14. data/lib/karafka/base_consumer.rb +65 -42
  15. data/lib/karafka/connection/client.rb +65 -19
  16. data/lib/karafka/connection/listener.rb +99 -34
  17. data/lib/karafka/connection/listeners_batch.rb +24 -0
  18. data/lib/karafka/connection/messages_buffer.rb +50 -54
  19. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  20. data/lib/karafka/contracts/config.rb +9 -1
  21. data/lib/karafka/helpers/async.rb +33 -0
  22. data/lib/karafka/instrumentation/logger_listener.rb +34 -10
  23. data/lib/karafka/instrumentation/monitor.rb +3 -1
  24. data/lib/karafka/licenser.rb +26 -7
  25. data/lib/karafka/messages/batch_metadata.rb +26 -3
  26. data/lib/karafka/messages/builders/batch_metadata.rb +17 -29
  27. data/lib/karafka/messages/builders/message.rb +1 -0
  28. data/lib/karafka/messages/builders/messages.rb +4 -12
  29. data/lib/karafka/pro/active_job/consumer.rb +49 -0
  30. data/lib/karafka/pro/active_job/dispatcher.rb +10 -10
  31. data/lib/karafka/pro/active_job/job_options_contract.rb +9 -9
  32. data/lib/karafka/pro/base_consumer.rb +76 -0
  33. data/lib/karafka/pro/loader.rb +30 -13
  34. data/lib/karafka/pro/performance_tracker.rb +9 -9
  35. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +37 -0
  36. data/lib/karafka/pro/processing/jobs_builder.rb +31 -0
  37. data/lib/karafka/pro/routing/extensions.rb +32 -0
  38. data/lib/karafka/pro/scheduler.rb +54 -0
  39. data/lib/karafka/processing/executor.rb +34 -7
  40. data/lib/karafka/processing/executors_buffer.rb +15 -7
  41. data/lib/karafka/processing/jobs/base.rb +21 -4
  42. data/lib/karafka/processing/jobs/consume.rb +12 -5
  43. data/lib/karafka/processing/jobs_builder.rb +28 -0
  44. data/lib/karafka/processing/jobs_queue.rb +15 -12
  45. data/lib/karafka/processing/result.rb +34 -0
  46. data/lib/karafka/processing/worker.rb +23 -17
  47. data/lib/karafka/processing/workers_batch.rb +5 -0
  48. data/lib/karafka/routing/consumer_group.rb +1 -1
  49. data/lib/karafka/routing/subscription_group.rb +2 -2
  50. data/lib/karafka/routing/subscription_groups_builder.rb +3 -2
  51. data/lib/karafka/routing/topic.rb +5 -0
  52. data/lib/karafka/routing/topics.rb +38 -0
  53. data/lib/karafka/runner.rb +19 -27
  54. data/lib/karafka/scheduler.rb +10 -11
  55. data/lib/karafka/server.rb +24 -23
  56. data/lib/karafka/setup/config.rb +4 -1
  57. data/lib/karafka/status.rb +1 -3
  58. data/lib/karafka/version.rb +1 -1
  59. data.tar.gz.sig +0 -0
  60. metadata +20 -5
  61. metadata.gz.sig +0 -0
  62. data/lib/karafka/active_job/routing_extensions.rb +0 -18
@@ -0,0 +1,33 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Helpers
5
+ # Allows a given class to run async in a separate thread. Also provides a few methods we may
6
+ # want to use to control the underlying thread
7
+ #
8
+ # @note Thread running code needs to manage its own exceptions. If they leak out, they will
9
+ # abort thread on exception.
10
+ module Async
11
+ class << self
12
+ # Adds forwardable to redirect thread-based control methods to the underlying thread that
13
+ # runs the async operations
14
+ #
15
+ # @param base [Class] class we're including this module in
16
+ def included(base)
17
+ base.extend ::Forwardable
18
+
19
+ base.def_delegators :@thread, :join, :terminate, :alive?
20
+ end
21
+ end
22
+
23
+ # Runs the `#call` method in a new thread
24
+ def async_call
25
+ @thread = Thread.new do
26
+ Thread.current.abort_on_exception = true
27
+
28
+ call
29
+ end
30
+ end
31
+ end
32
+ end
33
+ end
@@ -15,16 +15,43 @@ module Karafka
15
15
 
16
16
  # Logs each messages fetching attempt
17
17
  #
18
- # @param _event [Dry::Events::Event] event details including payload
19
- def on_connection_listener_fetch_loop(_event)
20
- info 'Receiving new messages from Kafka...'
18
+ # @param event [Dry::Events::Event] event details including payload
19
+ def on_connection_listener_fetch_loop(event)
20
+ listener = event[:caller]
21
+ info "[#{listener.id}] Polling messages..."
21
22
  end
22
23
 
23
24
  # Logs about messages that we've received from Kafka
24
25
  #
25
26
  # @param event [Dry::Events::Event] event details including payload
26
27
  def on_connection_listener_fetch_loop_received(event)
27
- info "Received #{event[:messages_buffer].size} new messages from Kafka"
28
+ listener = event[:caller]
29
+ time = event[:time]
30
+ messages_count = event[:messages_buffer].size
31
+ info "[#{listener.id}] Polled #{messages_count} messages in #{time}ms"
32
+ end
33
+
34
+ # Prints info about the fact that a given job has started
35
+ #
36
+ # @param event [Dry::Events::Event] event details including payload
37
+ def on_worker_process(event)
38
+ job = event[:job]
39
+ job_type = job.class.to_s.split('::').last
40
+ consumer = job.executor.topic.consumer
41
+ topic = job.executor.topic.name
42
+ info "[#{job.id}] #{job_type} job for #{consumer} on #{topic} started"
43
+ end
44
+
45
+ # Prints info about the fact that a given job has finished
46
+ #
47
+ # @param event [Dry::Events::Event] event details including payload
48
+ def on_worker_processed(event)
49
+ job = event[:job]
50
+ time = event[:time]
51
+ job_type = job.class.to_s.split('::').last
52
+ consumer = job.executor.topic.consumer
53
+ topic = job.executor.topic.name
54
+ info "[#{job.id}] #{job_type} job for #{consumer} on #{topic} finished in #{time}ms"
28
55
  end
29
56
 
30
57
  # Logs info about system signals that Karafka received.
@@ -52,16 +79,14 @@ module Karafka
52
79
  #
53
80
  # @param _event [Dry::Events::Event] event details including payload
54
81
  def on_app_stopping(_event)
55
- # We use a separate thread as logging can't be called from trap context
56
- Thread.new { info 'Stopping Karafka server' }
82
+ info 'Stopping Karafka server'
57
83
  end
58
84
 
59
85
  # Logs info that we stopped the Karafka server.
60
86
  #
61
87
  # @param _event [Dry::Events::Event] event details including payload
62
88
  def on_app_stopped(_event)
63
- # We use a separate thread as logging can't be called from trap context
64
- Thread.new { info 'Stopped Karafka server' }
89
+ info 'Stopped Karafka server'
65
90
  end
66
91
 
67
92
  # There are many types of errors that can occur in many places, but we provide a single
@@ -95,8 +120,7 @@ module Karafka
95
120
  fatal "Runner crashed due to an error: #{error}"
96
121
  fatal details
97
122
  when 'app.stopping.error'
98
- # We use a separate thread as logging can't be called from trap context
99
- Thread.new { error 'Forceful Karafka server stop' }
123
+ error 'Forceful Karafka server stop'
100
124
  when 'librdkafka.error'
101
125
  error "librdkafka internal error occurred: #{error}"
102
126
  error details
@@ -22,7 +22,6 @@ module Karafka
22
22
  app.stopping
23
23
  app.stopped
24
24
 
25
- consumer.prepared
26
25
  consumer.consumed
27
26
  consumer.revoked
28
27
  consumer.shutdown
@@ -33,6 +32,9 @@ module Karafka
33
32
  connection.listener.fetch_loop
34
33
  connection.listener.fetch_loop.received
35
34
 
35
+ worker.process
36
+ worker.processed
37
+
36
38
  statistics.emitted
37
39
 
38
40
  error.occurred
@@ -33,6 +33,8 @@ module Karafka
33
33
 
34
34
  return if license_config.expires_on > Date.today
35
35
 
36
+ raise_expired_license_token_in_dev(license_config.expires_on)
37
+
36
38
  notify_if_license_expired(license_config.expires_on)
37
39
  end
38
40
 
@@ -53,24 +55,41 @@ module Karafka
53
55
  )
54
56
  end
55
57
 
58
+ # Raises an error for test and dev environments if running pro with expired license
59
+ # We never want to cause any non-dev problems and we should never crash anything other than
60
+ # tests and development envs.
61
+ #
62
+ # @param expires_on [Date] when the license expires
63
+ def raise_expired_license_token_in_dev(expires_on)
64
+ env = Karafka::App.env
65
+
66
+ return unless env.development? || env.test?
67
+
68
+ raise Errors::ExpiredLicenseTokenError.new, expired_message(expires_on)
69
+ end
70
+
56
71
  # We do not raise an error here as we don't want to cause any problems to someone that runs
57
72
  # Karafka on production. Error message is enough.
58
73
  #
59
74
  # @param expires_on [Date] when the license expires
60
75
  def notify_if_license_expired(expires_on)
61
- message = <<~MSG.tr("\n", ' ')
62
- Your license expired on #{expires_on}.
63
- Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
64
- MSG
65
-
66
- Karafka.logger.error(message)
76
+ Karafka.logger.error(expired_message(expires_on))
67
77
 
68
78
  Karafka.monitor.instrument(
69
79
  'error.occurred',
70
80
  caller: self,
71
- error: Errors::ExpiredLicenseTokenError.new(message),
81
+ error: Errors::ExpiredLicenseTokenError.new(expired_message(expires_on)),
72
82
  type: 'licenser.expired'
73
83
  )
74
84
  end
85
+
86
+ # @param expires_on [Date] when the license expires
87
+ # @return [String] expired message
88
+ def expired_message(expires_on)
89
+ <<~MSG.tr("\n", ' ')
90
+ Your license expired on #{expires_on}.
91
+ Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
92
+ MSG
93
+ end
75
94
  end
76
95
  end
@@ -13,10 +13,33 @@ module Karafka
13
13
  :deserializer,
14
14
  :partition,
15
15
  :topic,
16
+ :created_at,
16
17
  :scheduled_at,
17
- :consumption_lag,
18
- :processing_lag,
18
+ :processed_at,
19
19
  keyword_init: true
20
- )
20
+ ) do
21
+ # This lag describes how long it took for a message to be consumed from the moment it was
22
+ # created
23
+ def consumption_lag
24
+ time_distance_in_ms(processed_at, created_at)
25
+ end
26
+
27
+ # This lag describes how long a batch had to wait before it was picked up by one of the
28
+ # workers
29
+ def processing_lag
30
+ time_distance_in_ms(processed_at, scheduled_at)
31
+ end
32
+
33
+ private
34
+
35
+ # Computes time distance in between two times in ms
36
+ #
37
+ # @param time1 [Time]
38
+ # @param time2 [Time]
39
+ # @return [Integer] distance in between two times in ms
40
+ def time_distance_in_ms(time1, time2)
41
+ ((time1 - time2) * 1_000).round
42
+ end
43
+ end
21
44
  end
22
45
  end
@@ -8,42 +8,30 @@ module Karafka
8
8
  class << self
9
9
  # Creates metadata based on the kafka batch data.
10
10
  #
11
- # @param kafka_batch [Array<Rdkafka::Consumer::Message>] raw fetched messages
11
+ # @param messages [Array<Karafka::Messages::Message>] messages array
12
12
  # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
13
13
  # @param scheduled_at [Time] moment when the batch was scheduled for processing
14
14
  # @return [Karafka::Messages::BatchMetadata] batch metadata object
15
- # @note Regarding the time lags: we can use the current time here, as batch metadata is
16
- # created in the worker. So whenever this is being built, it means that the processing
17
- # of this batch has already started.
18
- def call(kafka_batch, topic, scheduled_at)
19
- now = Time.now
20
-
15
+ #
16
+ # @note We do not set `processed_at` as this needs to be assigned when the batch is
17
+ # picked up for processing.
18
+ def call(messages, topic, scheduled_at)
21
19
  Karafka::Messages::BatchMetadata.new(
22
- size: kafka_batch.count,
23
- first_offset: kafka_batch.first.offset,
24
- last_offset: kafka_batch.last.offset,
20
+ size: messages.count,
21
+ first_offset: messages.first.offset,
22
+ last_offset: messages.last.offset,
25
23
  deserializer: topic.deserializer,
26
- partition: kafka_batch[0].partition,
24
+ partition: messages.first.partition,
27
25
  topic: topic.name,
26
+ # We go with the assumption that the creation time of the whole batch is the last message's
27
+ # creation time
28
+ created_at: messages.last.timestamp,
29
+ # When this batch was built and scheduled for execution
28
30
  scheduled_at: scheduled_at,
29
- # This lag describes how long did it take for a message to be consumed from the
30
- # moment it was created
31
- consumption_lag: time_distance_in_ms(now, kafka_batch.last.timestamp),
32
- # This lag describes how long did a batch have to wait before it was picked up by
33
- # one of the workers
34
- processing_lag: time_distance_in_ms(now, scheduled_at)
35
- ).freeze
36
- end
37
-
38
- private
39
-
40
- # Computes time distance in between two times in ms
41
- #
42
- # @param time1 [Time]
43
- # @param time2 [Time]
44
- # @return [Integer] distance in between two times in ms
45
- def time_distance_in_ms(time1, time2)
46
- ((time1 - time2) * 1_000).round
31
+ # We build the batch metadata when we pick up the job in the worker, thus we can use
32
+ # current time here
33
+ processed_at: Time.now
34
+ )
47
35
  end
48
36
  end
49
37
  end
@@ -26,6 +26,7 @@ module Karafka
26
26
  received_at: received_at
27
27
  ).freeze
28
28
 
29
+ # Karafka messages cannot be frozen because of the lazy deserialization feature
29
30
  Karafka::Messages::Message.new(
30
31
  kafka_message.payload,
31
32
  metadata
@@ -9,27 +9,19 @@ module Karafka
9
9
  # Creates messages batch with messages inside based on the incoming messages and the
10
10
  # topic from which it comes.
11
11
  #
12
- # @param kafka_messages [Array<Rdkafka::Consumer::Message>] raw fetched messages
12
+ # @param messages [Array<Karafka::Messages::Message>] karafka messages array
13
13
  # @param topic [Karafka::Routing::Topic] topic for which we're received messages
14
14
  # @param received_at [Time] moment in time when the messages were received
15
15
  # @return [Karafka::Messages::Messages] messages batch object
16
- def call(kafka_messages, topic, received_at)
17
- messages_array = kafka_messages.map do |message|
18
- Karafka::Messages::Builders::Message.call(
19
- message,
20
- topic,
21
- received_at
22
- )
23
- end
24
-
16
+ def call(messages, topic, received_at)
25
17
  metadata = BatchMetadata.call(
26
- kafka_messages,
18
+ messages,
27
19
  topic,
28
20
  received_at
29
21
  ).freeze
30
22
 
31
23
  Karafka::Messages::Messages.new(
32
- messages_array,
24
+ messages,
33
25
  metadata
34
26
  ).freeze
35
27
  end
@@ -0,0 +1,49 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
12
+ module Karafka
13
+ module Pro
14
+ module ActiveJob
15
+ # Pro ActiveJob consumer that is supposed to handle long-running jobs as well as short
16
+ # running jobs
17
+ #
18
+ # When in LRJ, it will pause a given partition forever and will resume its processing only
19
+ # when all the jobs are done processing.
20
+ #
21
+ # It contains slightly better revocation guarantees than the regular blocking consumer as
22
+ # it can stop processing batch of jobs in the middle after the revocation.
23
+ class Consumer < Karafka::Pro::BaseConsumer
24
+ # Runs ActiveJob jobs processing and handles lrj if needed
25
+ def consume
26
+ messages.each do |message|
27
+ # If for any reason we've lost this partition, not worth iterating over new messages
28
+ # as they are no longer ours
29
+ return if revoked?
30
+ break if Karafka::App.stopping?
31
+
32
+ ::ActiveJob::Base.execute(
33
+ ::ActiveSupport::JSON.decode(message.raw_payload)
34
+ )
35
+
36
+ mark_as_consumed(message)
37
+
38
+ # We check it twice as the job may be long running
39
+ # If marking fails, it also means it got revoked and we can stop consuming
40
+ return if revoked?
41
+
42
+ # Do not process more if we are shutting down
43
+ break if Karafka::App.stopping?
44
+ end
45
+ end
46
+ end
47
+ end
48
+ end
49
+ end
@@ -1,24 +1,24 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
3
12
  module Karafka
4
13
  module Pro
5
14
  # Karafka Pro ActiveJob components
6
15
  module ActiveJob
7
- # This Karafka component is a Pro component.
8
- # All of the commercial components are present in the lib/karafka/pro directory of this
9
- # repository and their usage requires commercial license agreement.
10
- #
11
- # Karafka has also commercial-friendly license, commercial support and commercial components.
12
- #
13
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright
14
- # of your code to Maciej Mensfeld.
15
-
16
16
  # Pro dispatcher that sends the ActiveJob job to a proper topic based on the queue name
17
17
  # and that allows to inject additional options into the producer, effectively allowing for a
18
18
  # much better and more granular control over the dispatch and consumption process.
19
19
  class Dispatcher < ::Karafka::ActiveJob::Dispatcher
20
20
  # Defaults for dispatching
21
- # The can be updated by using `#karafka_options` on the job
21
+ # They can be updated by using `#karafka_options` on the job
22
22
  DEFAULTS = {
23
23
  dispatch_method: :produce_async,
24
24
  # We don't create a dummy proc based partitioner as we would have to evaluate it with
@@ -1,17 +1,17 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
3
12
  module Karafka
4
13
  module Pro
5
14
  module ActiveJob
6
- # This Karafka component is a Pro component.
7
- # All of the commercial components are present in the lib/karafka/pro directory of this
8
- # repository and their usage requires commercial license agreement.
9
- #
10
- # Karafka has also commercial-friendly license, commercial support and commercial components.
11
- #
12
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright
13
- # of your code to Maciej Mensfeld.
14
-
15
15
  # Contract for validating the options that can be altered with `#karafka_options` per job
16
16
  # class that works with Pro features.
17
17
  class JobOptionsContract < ::Karafka::ActiveJob::JobOptionsContract
@@ -0,0 +1,76 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
12
+ module Karafka
13
+ module Pro
14
+ # Karafka PRO consumer.
15
+ #
16
+ # If you use PRO, all your consumers should inherit (indirectly) from it.
17
+ #
18
+ # @note In case of using lrj, manual pausing may not be the best idea as resume needs to happen
19
+ # after each batch is processed.
20
+ class BaseConsumer < Karafka::BaseConsumer
21
+ # Pause for at most 31 years
22
+ MAX_PAUSE_TIME = 1_000_000_000_000
23
+
24
+ private_constant :MAX_PAUSE_TIME
25
+
26
+ # Pauses processing of a given partition until we're done with the processing
27
+ # This ensures that we can easily poll without reaching the `max.poll.interval`
28
+ def on_before_consume
29
+ # Pause at the first message in a batch. That way in case of a crash, we will not lose
30
+ # any messages
31
+ return unless topic.long_running_job?
32
+
33
+ pause(messages.first.offset, MAX_PAUSE_TIME)
34
+ end
35
+
36
+ # Runs extra logic after consumption that is related to handling long running jobs
37
+ # @note This overwrites the '#on_after_consume' from the base consumer
38
+ def on_after_consume
39
+ # Nothing to do if we lost the partition
40
+ return if revoked?
41
+
42
+ if @consumption.success?
43
+ pause_tracker.reset
44
+
45
+ # We use the non-blocking one here. If someone needs the blocking one, they can implement it
46
+ # with manual offset management
47
+ # Mark as consumed only if manual offset management is not on
48
+ mark_as_consumed(messages.last) unless topic.manual_offset_management?
49
+
50
+ # If this is not a long running job there is nothing for us to do here
51
+ return unless topic.long_running_job?
52
+
53
+ # Once processing is done, we move to the new offset based on commits
54
+ # Here, in case manual offset management is off, we have the new proper offset of a
55
+ # first message from another batch from `@seek_offset`. If manual offset management
56
+ # is on, we move to place where the user indicated it was finished.
57
+ seek(@seek_offset || messages.first.offset)
58
+ resume
59
+ else
60
+ # If processing failed, we need to pause
61
+ pause(@seek_offset || messages.first.offset)
62
+ end
63
+ end
64
+
65
+ # Marks this consumer revoked state as true
66
+ # This allows things like lrj to finish early as this state may change during lrj
67
+ # execution
68
+ def on_revoked
69
+ # @note This may already be set to true if we tried to commit offsets and failed. In case
70
+ # like this it will automatically be marked as revoked.
71
+ @revoked = true
72
+ super
73
+ end
74
+ end
75
+ end
76
+ end
@@ -1,31 +1,48 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
3
12
  module Karafka
4
13
  module Pro
5
- # This Karafka component is a Pro component.
6
- # All of the commercial components are present in the lib/karafka/pro directory of this
7
- # repository and their usage requires commercial license agreement.
8
- #
9
- # Karafka has also commercial-friendly license, commercial support and commercial components.
10
- #
11
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright
12
- # of your code to Maciej Mensfeld.
13
-
14
14
  # Loader requires and loads all the pro components only when they are needed
15
15
  class Loader
16
+ # All the pro components that need to be loaded
17
+ COMPONENTS = %w[
18
+ base_consumer
19
+ performance_tracker
20
+ scheduler
21
+ processing/jobs/consume_non_blocking
22
+ processing/jobs_builder
23
+ routing/extensions
24
+ active_job/consumer
25
+ active_job/dispatcher
26
+ active_job/job_options_contract
27
+ ].freeze
28
+
29
+ private_constant :COMPONENTS
30
+
16
31
  class << self
17
32
  # Loads all the pro components and configures them wherever it is expected
18
33
  # @param config [Dry::Configurable::Config] whole app config that we can alter with pro
19
34
  # components
20
35
  def setup(config)
21
- require_relative 'performance_tracker'
22
- require_relative 'active_job/dispatcher'
23
- require_relative 'active_job/job_options_contract'
36
+ COMPONENTS.each { |component| require_relative(component) }
24
37
 
38
+ config.internal.scheduler = Scheduler.new
39
+ config.internal.jobs_builder = Processing::JobsBuilder.new
40
+ config.internal.active_job.consumer = ActiveJob::Consumer
25
41
  config.internal.active_job.dispatcher = ActiveJob::Dispatcher.new
26
42
  config.internal.active_job.job_options_contract = ActiveJob::JobOptionsContract.new
27
43
 
28
- # Monitor time needed to process each message from a single partition
44
+ ::Karafka::Routing::Topic.include(Routing::Extensions)
45
+
29
46
  config.monitor.subscribe(PerformanceTracker.instance)
30
47
  end
31
48
  end
@@ -1,16 +1,16 @@
1
1
  # frozen_string_literal: true
2
2
 
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
3
12
  module Karafka
4
13
  module Pro
5
- # This Karafka component is a Pro component.
6
- # All of the commercial components are present in the lib/karafka/pro directory of this
7
- # repository and their usage requires commercial license agreement.
8
- #
9
- # Karafka has also commercial-friendly license, commercial support and commercial components.
10
- #
11
- # By sending a pull request to the pro components, you are agreeing to transfer the copyright
12
- # of your code to Maciej Mensfeld.
13
-
14
14
  # Tracker used to keep track of performance metrics
15
15
  # It provides insights that can be used to optimize processing flow
16
16
  class PerformanceTracker
@@ -0,0 +1,37 @@
1
+ # frozen_string_literal: true
2
+
3
+ # This Karafka component is a Pro component.
4
+ # All of the commercial components are present in the lib/karafka/pro directory of this
5
+ # repository and their usage requires commercial license agreement.
6
+ #
7
+ # Karafka has also commercial-friendly license, commercial support and commercial components.
8
+ #
9
+ # By sending a pull request to the pro components, you are agreeing to transfer the copyright of
10
+ # your code to Maciej Mensfeld.
11
+
12
+ module Karafka
13
+ module Pro
14
+ # Pro components related to processing part of Karafka
15
+ module Processing
16
+ # Pro jobs
17
+ module Jobs
18
+ # The main job type in a non-blocking variant.
19
+ # This variant works "like" the regular consumption but pauses the partition for as long
20
+ # as it is needed until a job is done.
21
+ #
22
+ # It can be useful when having long lasting jobs that would exceed `max.poll.interval`
23
+ # if it would block.
24
+ #
25
+ # @note It needs to be working with a proper consumer that will handle the partition
26
+ # management. This layer of the framework knows nothing about Kafka messages consumption.
27
+ class ConsumeNonBlocking < ::Karafka::Processing::Jobs::Consume
28
+ # Releases the blocking lock after it is done with the preparation phase for this job
29
+ def before_call
30
+ super
31
+ @non_blocking = true
32
+ end
33
+ end
34
+ end
35
+ end
36
+ end
37
+ end