karafka 2.5.4 → 2.5.5

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
Files changed (260)
  1. checksums.yaml +4 -4
  2. data/CHANGELOG.md +3 -0
  3. data/LICENSE-COMM +4 -2
  4. data/lib/karafka/admin/acl.rb +127 -80
  5. data/lib/karafka/admin/configs.rb +84 -70
  6. data/lib/karafka/admin/consumer_groups.rb +377 -330
  7. data/lib/karafka/admin/replication.rb +287 -263
  8. data/lib/karafka/admin/topics.rb +232 -186
  9. data/lib/karafka/admin.rb +277 -117
  10. data/lib/karafka/pro/active_job/consumer.rb +19 -2
  11. data/lib/karafka/pro/active_job/dispatcher.rb +19 -2
  12. data/lib/karafka/pro/active_job/job_options_contract.rb +19 -2
  13. data/lib/karafka/pro/base_consumer.rb +19 -2
  14. data/lib/karafka/pro/cleaner/errors.rb +19 -2
  15. data/lib/karafka/pro/cleaner/messages/message.rb +19 -2
  16. data/lib/karafka/pro/cleaner/messages/messages.rb +19 -2
  17. data/lib/karafka/pro/cleaner/messages/metadata.rb +19 -2
  18. data/lib/karafka/pro/cleaner.rb +19 -2
  19. data/lib/karafka/pro/cli/contracts/server.rb +19 -2
  20. data/lib/karafka/pro/cli/parallel_segments/base.rb +19 -2
  21. data/lib/karafka/pro/cli/parallel_segments/collapse.rb +19 -2
  22. data/lib/karafka/pro/cli/parallel_segments/distribute.rb +19 -2
  23. data/lib/karafka/pro/cli/parallel_segments.rb +19 -2
  24. data/lib/karafka/pro/connection/manager.rb +19 -2
  25. data/lib/karafka/pro/connection/multiplexing/listener.rb +19 -2
  26. data/lib/karafka/pro/contracts/base.rb +19 -2
  27. data/lib/karafka/pro/encryption/cipher.rb +19 -2
  28. data/lib/karafka/pro/encryption/contracts/config.rb +19 -2
  29. data/lib/karafka/pro/encryption/errors.rb +19 -2
  30. data/lib/karafka/pro/encryption/messages/middleware.rb +19 -2
  31. data/lib/karafka/pro/encryption/messages/parser.rb +19 -2
  32. data/lib/karafka/pro/encryption/setup/config.rb +19 -2
  33. data/lib/karafka/pro/encryption.rb +19 -2
  34. data/lib/karafka/pro/instrumentation/performance_tracker.rb +19 -2
  35. data/lib/karafka/pro/iterator/expander.rb +19 -2
  36. data/lib/karafka/pro/iterator/tpl_builder.rb +19 -2
  37. data/lib/karafka/pro/iterator.rb +19 -2
  38. data/lib/karafka/pro/loader.rb +19 -2
  39. data/lib/karafka/pro/processing/adaptive_iterator/consumer.rb +19 -2
  40. data/lib/karafka/pro/processing/adaptive_iterator/tracker.rb +19 -2
  41. data/lib/karafka/pro/processing/collapser.rb +19 -2
  42. data/lib/karafka/pro/processing/coordinator.rb +19 -2
  43. data/lib/karafka/pro/processing/coordinators/errors_tracker.rb +19 -2
  44. data/lib/karafka/pro/processing/coordinators/filters_applier.rb +19 -2
  45. data/lib/karafka/pro/processing/coordinators/virtual_offset_manager.rb +19 -2
  46. data/lib/karafka/pro/processing/executor.rb +19 -2
  47. data/lib/karafka/pro/processing/expansions_selector.rb +19 -2
  48. data/lib/karafka/pro/processing/filters/base.rb +19 -2
  49. data/lib/karafka/pro/processing/filters/delayer.rb +19 -2
  50. data/lib/karafka/pro/processing/filters/expirer.rb +19 -2
  51. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +19 -2
  52. data/lib/karafka/pro/processing/filters/throttler.rb +19 -2
  53. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +19 -2
  54. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +19 -2
  55. data/lib/karafka/pro/processing/jobs/eofed_non_blocking.rb +19 -2
  56. data/lib/karafka/pro/processing/jobs/periodic.rb +19 -2
  57. data/lib/karafka/pro/processing/jobs/periodic_non_blocking.rb +19 -2
  58. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +19 -2
  59. data/lib/karafka/pro/processing/jobs_builder.rb +19 -2
  60. data/lib/karafka/pro/processing/jobs_queue.rb +19 -2
  61. data/lib/karafka/pro/processing/offset_metadata/consumer.rb +19 -2
  62. data/lib/karafka/pro/processing/offset_metadata/fetcher.rb +19 -2
  63. data/lib/karafka/pro/processing/offset_metadata/listener.rb +19 -2
  64. data/lib/karafka/pro/processing/parallel_segments/filters/base.rb +19 -2
  65. data/lib/karafka/pro/processing/parallel_segments/filters/default.rb +19 -2
  66. data/lib/karafka/pro/processing/parallel_segments/filters/mom.rb +19 -2
  67. data/lib/karafka/pro/processing/partitioner.rb +19 -2
  68. data/lib/karafka/pro/processing/periodic_job/consumer.rb +19 -2
  69. data/lib/karafka/pro/processing/piping/consumer.rb +19 -2
  70. data/lib/karafka/pro/processing/schedulers/base.rb +19 -2
  71. data/lib/karafka/pro/processing/schedulers/default.rb +19 -2
  72. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +19 -2
  73. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +19 -2
  74. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +19 -2
  75. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +19 -2
  76. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +19 -2
  77. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +19 -2
  78. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +19 -2
  79. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +19 -2
  80. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +19 -2
  81. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +19 -2
  82. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +19 -2
  83. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +19 -2
  84. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +19 -2
  85. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +19 -2
  86. data/lib/karafka/pro/processing/strategies/aj/mom.rb +19 -2
  87. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +19 -2
  88. data/lib/karafka/pro/processing/strategies/base.rb +19 -2
  89. data/lib/karafka/pro/processing/strategies/default.rb +19 -2
  90. data/lib/karafka/pro/processing/strategies/dlq/default.rb +19 -2
  91. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +19 -2
  92. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +19 -2
  93. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +19 -2
  94. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +19 -2
  95. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +19 -2
  96. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +19 -2
  97. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +19 -2
  98. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +19 -2
  99. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +19 -2
  100. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +19 -2
  101. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +19 -2
  102. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +19 -2
  103. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +19 -2
  104. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +19 -2
  105. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +19 -2
  106. data/lib/karafka/pro/processing/strategies/ftr/default.rb +19 -2
  107. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +19 -2
  108. data/lib/karafka/pro/processing/strategies/lrj/default.rb +19 -2
  109. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +19 -2
  110. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +19 -2
  111. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +19 -2
  112. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +19 -2
  113. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +19 -2
  114. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +19 -2
  115. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +19 -2
  116. data/lib/karafka/pro/processing/strategies/mom/default.rb +19 -2
  117. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +19 -2
  118. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +19 -2
  119. data/lib/karafka/pro/processing/strategies/mom/vp.rb +19 -2
  120. data/lib/karafka/pro/processing/strategies/vp/default.rb +19 -2
  121. data/lib/karafka/pro/processing/strategies.rb +19 -2
  122. data/lib/karafka/pro/processing/strategy_selector.rb +19 -2
  123. data/lib/karafka/pro/processing/subscription_groups_coordinator.rb +19 -2
  124. data/lib/karafka/pro/processing/virtual_partitions/distributors/balanced.rb +19 -2
  125. data/lib/karafka/pro/processing/virtual_partitions/distributors/base.rb +19 -2
  126. data/lib/karafka/pro/processing/virtual_partitions/distributors/consistent.rb +19 -2
  127. data/lib/karafka/pro/recurring_tasks/consumer.rb +19 -2
  128. data/lib/karafka/pro/recurring_tasks/contracts/config.rb +19 -2
  129. data/lib/karafka/pro/recurring_tasks/contracts/task.rb +19 -2
  130. data/lib/karafka/pro/recurring_tasks/deserializer.rb +19 -2
  131. data/lib/karafka/pro/recurring_tasks/dispatcher.rb +19 -2
  132. data/lib/karafka/pro/recurring_tasks/errors.rb +19 -2
  133. data/lib/karafka/pro/recurring_tasks/executor.rb +19 -2
  134. data/lib/karafka/pro/recurring_tasks/listener.rb +19 -2
  135. data/lib/karafka/pro/recurring_tasks/matcher.rb +19 -2
  136. data/lib/karafka/pro/recurring_tasks/schedule.rb +19 -2
  137. data/lib/karafka/pro/recurring_tasks/serializer.rb +19 -2
  138. data/lib/karafka/pro/recurring_tasks/setup/config.rb +19 -2
  139. data/lib/karafka/pro/recurring_tasks/task.rb +19 -2
  140. data/lib/karafka/pro/recurring_tasks.rb +19 -2
  141. data/lib/karafka/pro/routing/features/active_job/builder.rb +19 -2
  142. data/lib/karafka/pro/routing/features/active_job.rb +19 -2
  143. data/lib/karafka/pro/routing/features/adaptive_iterator/config.rb +19 -2
  144. data/lib/karafka/pro/routing/features/adaptive_iterator/contracts/topic.rb +19 -2
  145. data/lib/karafka/pro/routing/features/adaptive_iterator/topic.rb +19 -2
  146. data/lib/karafka/pro/routing/features/adaptive_iterator.rb +19 -2
  147. data/lib/karafka/pro/routing/features/base.rb +19 -2
  148. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +19 -2
  149. data/lib/karafka/pro/routing/features/dead_letter_queue/topic.rb +19 -2
  150. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +19 -2
  151. data/lib/karafka/pro/routing/features/delaying/config.rb +19 -2
  152. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +19 -2
  153. data/lib/karafka/pro/routing/features/delaying/topic.rb +19 -2
  154. data/lib/karafka/pro/routing/features/delaying.rb +19 -2
  155. data/lib/karafka/pro/routing/features/direct_assignments/config.rb +19 -2
  156. data/lib/karafka/pro/routing/features/direct_assignments/contracts/consumer_group.rb +19 -2
  157. data/lib/karafka/pro/routing/features/direct_assignments/contracts/topic.rb +19 -2
  158. data/lib/karafka/pro/routing/features/direct_assignments/subscription_group.rb +19 -2
  159. data/lib/karafka/pro/routing/features/direct_assignments/topic.rb +19 -2
  160. data/lib/karafka/pro/routing/features/direct_assignments.rb +19 -2
  161. data/lib/karafka/pro/routing/features/expiring/config.rb +19 -2
  162. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +19 -2
  163. data/lib/karafka/pro/routing/features/expiring/topic.rb +19 -2
  164. data/lib/karafka/pro/routing/features/expiring.rb +19 -2
  165. data/lib/karafka/pro/routing/features/filtering/config.rb +19 -2
  166. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +19 -2
  167. data/lib/karafka/pro/routing/features/filtering/topic.rb +19 -2
  168. data/lib/karafka/pro/routing/features/filtering.rb +19 -2
  169. data/lib/karafka/pro/routing/features/inline_insights/config.rb +19 -2
  170. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +19 -2
  171. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +19 -2
  172. data/lib/karafka/pro/routing/features/inline_insights.rb +19 -2
  173. data/lib/karafka/pro/routing/features/long_running_job/config.rb +19 -2
  174. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +19 -2
  175. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +19 -2
  176. data/lib/karafka/pro/routing/features/long_running_job.rb +19 -2
  177. data/lib/karafka/pro/routing/features/multiplexing/config.rb +19 -2
  178. data/lib/karafka/pro/routing/features/multiplexing/contracts/topic.rb +19 -2
  179. data/lib/karafka/pro/routing/features/multiplexing/patches/contracts/consumer_group.rb +19 -2
  180. data/lib/karafka/pro/routing/features/multiplexing/proxy.rb +19 -2
  181. data/lib/karafka/pro/routing/features/multiplexing/subscription_group.rb +19 -2
  182. data/lib/karafka/pro/routing/features/multiplexing/subscription_groups_builder.rb +19 -2
  183. data/lib/karafka/pro/routing/features/multiplexing.rb +19 -2
  184. data/lib/karafka/pro/routing/features/non_blocking_job/topic.rb +19 -2
  185. data/lib/karafka/pro/routing/features/non_blocking_job.rb +19 -2
  186. data/lib/karafka/pro/routing/features/offset_metadata/config.rb +19 -2
  187. data/lib/karafka/pro/routing/features/offset_metadata/contracts/topic.rb +19 -2
  188. data/lib/karafka/pro/routing/features/offset_metadata/topic.rb +19 -2
  189. data/lib/karafka/pro/routing/features/offset_metadata.rb +19 -2
  190. data/lib/karafka/pro/routing/features/parallel_segments/builder.rb +19 -2
  191. data/lib/karafka/pro/routing/features/parallel_segments/config.rb +19 -2
  192. data/lib/karafka/pro/routing/features/parallel_segments/consumer_group.rb +19 -2
  193. data/lib/karafka/pro/routing/features/parallel_segments/contracts/consumer_group.rb +19 -2
  194. data/lib/karafka/pro/routing/features/parallel_segments/topic.rb +19 -2
  195. data/lib/karafka/pro/routing/features/parallel_segments.rb +19 -2
  196. data/lib/karafka/pro/routing/features/patterns/builder.rb +19 -2
  197. data/lib/karafka/pro/routing/features/patterns/config.rb +19 -2
  198. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +19 -2
  199. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +19 -2
  200. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +19 -2
  201. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +19 -2
  202. data/lib/karafka/pro/routing/features/patterns/detector.rb +19 -2
  203. data/lib/karafka/pro/routing/features/patterns/pattern.rb +19 -2
  204. data/lib/karafka/pro/routing/features/patterns/patterns.rb +19 -2
  205. data/lib/karafka/pro/routing/features/patterns/topic.rb +19 -2
  206. data/lib/karafka/pro/routing/features/patterns/topics.rb +19 -2
  207. data/lib/karafka/pro/routing/features/patterns.rb +19 -2
  208. data/lib/karafka/pro/routing/features/pausing/config.rb +19 -2
  209. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +19 -2
  210. data/lib/karafka/pro/routing/features/pausing/topic.rb +19 -2
  211. data/lib/karafka/pro/routing/features/pausing.rb +19 -2
  212. data/lib/karafka/pro/routing/features/periodic_job/config.rb +19 -2
  213. data/lib/karafka/pro/routing/features/periodic_job/contracts/topic.rb +19 -2
  214. data/lib/karafka/pro/routing/features/periodic_job/topic.rb +19 -2
  215. data/lib/karafka/pro/routing/features/periodic_job.rb +19 -2
  216. data/lib/karafka/pro/routing/features/recurring_tasks/builder.rb +19 -2
  217. data/lib/karafka/pro/routing/features/recurring_tasks/config.rb +19 -2
  218. data/lib/karafka/pro/routing/features/recurring_tasks/contracts/topic.rb +19 -2
  219. data/lib/karafka/pro/routing/features/recurring_tasks/proxy.rb +19 -2
  220. data/lib/karafka/pro/routing/features/recurring_tasks/topic.rb +19 -2
  221. data/lib/karafka/pro/routing/features/recurring_tasks.rb +19 -2
  222. data/lib/karafka/pro/routing/features/scheduled_messages/builder.rb +19 -2
  223. data/lib/karafka/pro/routing/features/scheduled_messages/config.rb +19 -2
  224. data/lib/karafka/pro/routing/features/scheduled_messages/contracts/topic.rb +19 -2
  225. data/lib/karafka/pro/routing/features/scheduled_messages/proxy.rb +19 -2
  226. data/lib/karafka/pro/routing/features/scheduled_messages/topic.rb +19 -2
  227. data/lib/karafka/pro/routing/features/scheduled_messages.rb +19 -2
  228. data/lib/karafka/pro/routing/features/swarm/config.rb +19 -2
  229. data/lib/karafka/pro/routing/features/swarm/contracts/routing.rb +19 -2
  230. data/lib/karafka/pro/routing/features/swarm/contracts/topic.rb +19 -2
  231. data/lib/karafka/pro/routing/features/swarm/topic.rb +19 -2
  232. data/lib/karafka/pro/routing/features/swarm.rb +19 -2
  233. data/lib/karafka/pro/routing/features/throttling/config.rb +19 -2
  234. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +19 -2
  235. data/lib/karafka/pro/routing/features/throttling/topic.rb +19 -2
  236. data/lib/karafka/pro/routing/features/throttling.rb +19 -2
  237. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +19 -2
  238. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +19 -2
  239. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +19 -2
  240. data/lib/karafka/pro/routing/features/virtual_partitions.rb +19 -2
  241. data/lib/karafka/pro/scheduled_messages/consumer.rb +19 -2
  242. data/lib/karafka/pro/scheduled_messages/contracts/config.rb +19 -2
  243. data/lib/karafka/pro/scheduled_messages/contracts/message.rb +19 -2
  244. data/lib/karafka/pro/scheduled_messages/daily_buffer.rb +19 -2
  245. data/lib/karafka/pro/scheduled_messages/day.rb +19 -2
  246. data/lib/karafka/pro/scheduled_messages/deserializers/headers.rb +19 -2
  247. data/lib/karafka/pro/scheduled_messages/deserializers/payload.rb +19 -2
  248. data/lib/karafka/pro/scheduled_messages/dispatcher.rb +19 -2
  249. data/lib/karafka/pro/scheduled_messages/errors.rb +19 -2
  250. data/lib/karafka/pro/scheduled_messages/max_epoch.rb +19 -2
  251. data/lib/karafka/pro/scheduled_messages/proxy.rb +19 -2
  252. data/lib/karafka/pro/scheduled_messages/schema_validator.rb +19 -2
  253. data/lib/karafka/pro/scheduled_messages/serializer.rb +19 -2
  254. data/lib/karafka/pro/scheduled_messages/setup/config.rb +19 -2
  255. data/lib/karafka/pro/scheduled_messages/state.rb +19 -2
  256. data/lib/karafka/pro/scheduled_messages/tracker.rb +19 -2
  257. data/lib/karafka/pro/scheduled_messages.rb +19 -2
  258. data/lib/karafka/pro/swarm/liveness_listener.rb +19 -2
  259. data/lib/karafka/version.rb +1 -1
  260. metadata +2 -2
data/lib/karafka/admin.rb CHANGED
@@ -6,9 +6,9 @@ module Karafka
   # @note It always initializes a new admin instance as we want to ensure it is always closed
   #   Since admin actions are not performed that often, that should be ok.
   #
-  # @note It always uses the primary defined cluster and does not support multi-cluster work.
-  #   Cluster on which operations are performed can be changed via `admin.kafka` config, however
-  #   there is no multi-cluster runtime support.
+  # @note By default it uses the primary defined cluster. For multi-cluster operations, create
+  #   an Admin instance with custom kafka configuration:
+  #   `Karafka::Admin.new(kafka: { 'bootstrap.servers': 'other:9092' })`
   class Admin
     extend Core::Helpers::Time
 
@@ -22,6 +22,22 @@ module Karafka
       admin_kafka: %i[admin kafka]
     )
 
+    # Custom kafka configuration for this admin instance
+    # @return [Hash] custom kafka settings to merge with defaults
+    attr_reader :custom_kafka
+
+    # Creates a new Admin instance
+    #
+    # @param kafka [Hash] custom kafka configuration to merge with app defaults.
+    #   Useful for multi-cluster operations where you want to target a different cluster.
+    #
+    # @example Create admin for a different cluster
+    #   admin = Karafka::Admin.new(kafka: { 'bootstrap.servers': 'other-cluster:9092' })
+    #   admin.cluster_info
+    def initialize(kafka: {})
+      @custom_kafka = kafka
+    end
+
     class << self
       # Delegate topic-related operations to Topics class
 
@@ -32,7 +48,7 @@ module Karafka
       # @param settings [Hash] kafka extra settings
       # @see Topics.read
       def read_topic(name, partition, count, start_offset = -1, settings = {})
-        Topics.read(name, partition, count, start_offset, settings)
+        new.read_topic(name, partition, count, start_offset, settings)
       end
 
       # @param name [String] topic name
@@ -41,33 +57,33 @@ module Karafka
       # @param topic_config [Hash] topic config details
       # @see Topics.create
       def create_topic(name, partitions, replication_factor, topic_config = {})
-        Topics.create(name, partitions, replication_factor, topic_config)
+        new.create_topic(name, partitions, replication_factor, topic_config)
       end
 
       # @param name [String] topic name
       # @see Topics.delete
       def delete_topic(name)
-        Topics.delete(name)
+        new.delete_topic(name)
       end
 
       # @param name [String] topic name
       # @param partitions [Integer] total number of partitions we expect to end up with
       # @see Topics.create_partitions
       def create_partitions(name, partitions)
-        Topics.create_partitions(name, partitions)
+        new.create_partitions(name, partitions)
       end
 
       # @param name_or_hash [String, Symbol, Hash] topic name or hash with topics and partitions
       # @param partition [Integer, nil] partition (nil when using hash format)
       # @see Topics.read_watermark_offsets
       def read_watermark_offsets(name_or_hash, partition = nil)
-        Topics.read_watermark_offsets(name_or_hash, partition)
+        new.read_watermark_offsets(name_or_hash, partition)
       end
 
       # @param topic_name [String] name of the topic we're interested in
       # @see Topics.info
       def topic_info(topic_name)
-        Topics.info(topic_name)
+        new.topic_info(topic_name)
      end
 
       # @param consumer_group_id [String] id of the consumer group for which we want to move the
@@ -75,7 +91,7 @@ module Karafka
       # @param topics_with_partitions_and_offsets [Hash] Hash with list of topics and settings
       # @see ConsumerGroups.seek
       def seek_consumer_group(consumer_group_id, topics_with_partitions_and_offsets)
-        ConsumerGroups.seek(consumer_group_id, topics_with_partitions_and_offsets)
+        new.seek_consumer_group(consumer_group_id, topics_with_partitions_and_offsets)
       end
 
       # Takes consumer group and its topics and copies all the offsets to a new named group
@@ -86,7 +102,7 @@ module Karafka
       # @return [Boolean] true if anything was migrated, otherwise false
       # @see ConsumerGroups.copy
       def copy_consumer_group(previous_name, new_name, topics)
-        ConsumerGroups.copy(previous_name, new_name, topics)
+        new.copy_consumer_group(previous_name, new_name, topics)
       end
 
       # Takes consumer group and its topics and migrates all the offsets to a new named group
@@ -100,7 +116,12 @@ module Karafka
       #   nothing really to rename
       # @see ConsumerGroups.rename
       def rename_consumer_group(previous_name, new_name, topics, delete_previous: true)
-        ConsumerGroups.rename(previous_name, new_name, topics, delete_previous: delete_previous)
+        new.rename_consumer_group(
+          previous_name,
+          new_name,
+          topics,
+          delete_previous: delete_previous
+        )
       end
 
       # Removes given consumer group (if exists)
@@ -108,7 +129,7 @@ module Karafka
       # @param consumer_group_id [String] consumer group name
       # @see ConsumerGroups.delete
       def delete_consumer_group(consumer_group_id)
-        ConsumerGroups.delete(consumer_group_id)
+        new.delete_consumer_group(consumer_group_id)
       end
 
       # Triggers a rebalance for the specified consumer group
@@ -117,7 +138,7 @@ module Karafka
       # @see ConsumerGroups.trigger_rebalance
       # @note This API should be used only for development.
       def trigger_rebalance(consumer_group_id)
-        ConsumerGroups.trigger_rebalance(consumer_group_id)
+        new.trigger_rebalance(consumer_group_id)
       end
 
       # Reads lags and offsets for given topics in the context of consumer groups defined in the
@@ -131,7 +152,7 @@ module Karafka
       #   partitions with lags and offsets
       # @see ConsumerGroups.read_lags_with_offsets
       def read_lags_with_offsets(consumer_groups_with_topics = {}, active_topics_only: true)
-        ConsumerGroups.read_lags_with_offsets(
+        new.read_lags_with_offsets(
           consumer_groups_with_topics,
           active_topics_only: active_topics_only
         )
@@ -177,16 +198,16 @@ module Karafka
       #
       # @see Replication.plan for more details
       def plan_topic_replication(topic:, replication_factor:, brokers: nil)
-        Replication.plan(
+        new.plan_topic_replication(
           topic: topic,
-          to: replication_factor,
+          replication_factor: replication_factor,
           brokers: brokers
         )
       end
 
       # @return [Rdkafka::Metadata] cluster metadata info
       def cluster_info
-        with_admin(&:metadata)
+        new.cluster_info
       end
 
       # Creates consumer instance and yields it. After usage it closes the consumer instance
@@ -196,126 +217,265 @@ module Karafka
       #
       # @note We always ship and yield a proxied consumer because admin API performance is not
       #   that relevant. That is, there are no high frequency calls that would have to be delegated
-      def with_consumer(settings = {})
-        bind_id = SecureRandom.uuid
-
-        consumer = config(:consumer, settings).consumer(native_kafka_auto_start: false)
-        bind_oauth(bind_id, consumer)
-
-        consumer.start
-        proxy = Karafka::Connection::Proxy.new(consumer)
-        yield(proxy)
-      ensure
-        # Always unsubscribe consumer just to be sure, that no metadata requests are running
-        # when we close the consumer. This in theory should prevent from some race-conditions
-        # that originate from librdkafka
-        begin
-          consumer&.unsubscribe
-        # Ignore any errors and continue to close consumer despite them
-        rescue Rdkafka::RdkafkaError
-          nil
-        end
-
-        consumer&.close
-
-        unbind_oauth(bind_id)
+      def with_consumer(settings = {}, &)
+        new.with_consumer(settings, &)
       end
 
       # Creates admin instance and yields it. After usage it closes the admin instance
-      def with_admin
-        bind_id = SecureRandom.uuid
+      def with_admin(&)
+        new.with_admin(&)
+      end
+    end
 
-        admin = config(:producer, {}).admin(
-          native_kafka_auto_start: false,
-          native_kafka_poll_timeout_ms: poll_timeout
-        )
+    # Instance methods - these use the custom kafka configuration
 
-        bind_oauth(bind_id, admin)
+    # @param name [String, Symbol] topic name
+    # @param partition [Integer] partition
+    # @param count [Integer] how many messages we want to get at most
+    # @param start_offset [Integer, Time] offset from which we should start
+    # @param settings [Hash] kafka extra settings (optional)
+    # @see Topics#read
+    def read_topic(name, partition, count, start_offset = -1, settings = {})
+      Topics.new(kafka: @custom_kafka).read(name, partition, count, start_offset, settings)
+    end
 
-        admin.start
-        proxy = Karafka::Connection::Proxy.new(admin)
-        yield(proxy)
-      ensure
-        admin&.close
+    # @param name [String] topic name
+    # @param partitions [Integer] number of partitions for this topic
+    # @param replication_factor [Integer] number of replicas
+    # @param topic_config [Hash] topic config details
+    # @see Topics#create
+    def create_topic(name, partitions, replication_factor, topic_config = {})
+      Topics.new(kafka: @custom_kafka).create(name, partitions, replication_factor, topic_config)
+    end
 
-        unbind_oauth(bind_id)
-      end
+    # @param name [String] topic name
+    # @see Topics#delete
+    def delete_topic(name)
+      Topics.new(kafka: @custom_kafka).delete(name)
+    end
+
+    # @param name [String] topic name
+    # @param partitions [Integer] total number of partitions we expect to end up with
+    # @see Topics#create_partitions
+    def create_partitions(name, partitions)
+      Topics.new(kafka: @custom_kafka).create_partitions(name, partitions)
+    end
+
+    # @param name_or_hash [String, Symbol, Hash] topic name or hash with topics and partitions
+    # @param partition [Integer, nil] partition (nil when using hash format)
+    # @see Topics#read_watermark_offsets
+    def read_watermark_offsets(name_or_hash, partition = nil)
+      Topics.new(kafka: @custom_kafka).read_watermark_offsets(name_or_hash, partition)
+    end
+
+    # @param topic_name [String] name of the topic we're interested in
+    # @see Topics#info
+    def topic_info(topic_name)
+      Topics.new(kafka: @custom_kafka).info(topic_name)
+    end
+
+    # @param consumer_group_id [String] consumer group for which we want to move offsets
+    # @param topics_with_partitions_and_offsets [Hash] hash with topics and settings
+    # @see ConsumerGroups#seek
+    def seek_consumer_group(consumer_group_id, topics_with_partitions_and_offsets)
+      ConsumerGroups.new(kafka: @custom_kafka).seek(
+        consumer_group_id,
+        topics_with_partitions_and_offsets
+      )
+    end
+
+    # @param previous_name [String] old consumer group name
+    # @param new_name [String] new consumer group name
+    # @param topics [Array<String>] topics for which we want to copy offsets
+    # @see ConsumerGroups#copy
+    def copy_consumer_group(previous_name, new_name, topics)
+      ConsumerGroups.new(kafka: @custom_kafka).copy(previous_name, new_name, topics)
+    end
+
+    # @param previous_name [String] old consumer group name
+    # @param new_name [String] new consumer group name
+    # @param topics [Array<String>] topics for which we want to migrate offsets
+    # @param delete_previous [Boolean] should we delete previous consumer group after rename
+    # @see ConsumerGroups#rename
+    def rename_consumer_group(previous_name, new_name, topics, delete_previous: true)
+      ConsumerGroups.new(kafka: @custom_kafka).rename(
+        previous_name,
+        new_name,
+        topics,
+        delete_previous: delete_previous
+      )
+    end
+
+    # @param consumer_group_id [String] consumer group name
+    # @see ConsumerGroups#delete
+    def delete_consumer_group(consumer_group_id)
+      ConsumerGroups.new(kafka: @custom_kafka).delete(consumer_group_id)
+    end
+
+    # @param consumer_group_id [String] consumer group id to trigger rebalance for
+    # @see ConsumerGroups#trigger_rebalance
+    def trigger_rebalance(consumer_group_id)
+      ConsumerGroups.new(kafka: @custom_kafka).trigger_rebalance(consumer_group_id)
+    end
 
-      private
+    # @param consumer_groups_with_topics [Hash{String => Array<String>}] hash with consumer
+    #   groups names with array of topics
+    # @param active_topics_only [Boolean] if set to false, will select also inactive topics
+    # @see ConsumerGroups#read_lags_with_offsets
+    def read_lags_with_offsets(consumer_groups_with_topics = {}, active_topics_only: true)
+      ConsumerGroups.new(kafka: @custom_kafka).read_lags_with_offsets(
+        consumer_groups_with_topics,
+        active_topics_only: active_topics_only
+      )
+    end
+
+    # @param topic [String] topic name to plan replication for
+    # @param replication_factor [Integer] target replication factor
+    # @param brokers [Hash, nil] optional manual broker assignments per partition
+    # @see Replication#plan
+    def plan_topic_replication(topic:, replication_factor:, brokers: nil)
+      Replication.new(kafka: @custom_kafka).plan(
+        topic: topic,
+        to: replication_factor,
+        brokers: brokers
+      )
+    end
 
246
- # @return [Integer] number of seconds to wait. `rdkafka` requires this value
247
- # (`max_wait_time`) to be provided in seconds while we define it in ms hence the conversion
248
- def max_wait_time_seconds
249
- max_wait_time / 1_000.0
344
+ # @return [Rdkafka::Metadata] cluster metadata info
345
+ def cluster_info
346
+ with_admin(&:metadata)
347
+ end
348
+
349
+ # Creates consumer instance and yields it. After usage it closes the consumer instance
350
+ # This API can be used in other pieces of code and allows for low-level consumer usage
351
+ #
352
+ # @param settings [Hash] extra settings to customize consumer
353
+ #
354
+ # @note We always ship and yield a proxied consumer because admin API performance is not
355
+ # that relevant. That is, there are no high frequency calls that would have to be delegated
356
+ def with_consumer(settings = {})
357
+ bind_id = SecureRandom.uuid
358
+
359
+ consumer = config(:consumer, settings).consumer(native_kafka_auto_start: false)
360
+ bind_oauth(bind_id, consumer)
361
+
362
+ consumer.start
363
+ proxy = Karafka::Connection::Proxy.new(consumer)
364
+ yield(proxy)
365
+ ensure
366
+ # Always unsubscribe consumer just to be sure, that no metadata requests are running
367
+ # when we close the consumer. This in theory should prevent from some race-conditions
368
+ # that originate from librdkafka
369
+ begin
370
+ consumer&.unsubscribe
371
+ # Ignore any errors and continue to close consumer despite them
372
+ rescue Rdkafka::RdkafkaError
373
+ nil
250
374
  end
251
375
 
252
- # Adds a new callback for given rdkafka instance for oauth token refresh (if needed)
253
- #
254
- # @param id [String, Symbol] unique (for the lifetime of instance) id that we use for
255
- # callback referencing
256
- # @param instance [Rdkafka::Consumer, Rdkafka::Admin] rdkafka instance to be used to set
257
- # appropriate oauth token when needed
258
- def bind_oauth(id, instance)
259
- Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.add(
260
- id,
261
- Instrumentation::Callbacks::OauthbearerTokenRefresh.new(
262
- instance
263
- )
376
+ consumer&.close
377
+
378
+ unbind_oauth(bind_id)
379
+ end
380
+
381
+ # Creates admin instance and yields it. After usage it closes the admin instance
382
+ def with_admin
383
+ bind_id = SecureRandom.uuid
384
+
385
+ admin = config(:producer, {}).admin(
386
+ native_kafka_auto_start: false,
387
+ native_kafka_poll_timeout_ms: self.class.poll_timeout
388
+ )
389
+
390
+ bind_oauth(bind_id, admin)
391
+
392
+ admin.start
393
+ proxy = Karafka::Connection::Proxy.new(admin)
394
+ yield(proxy)
395
+ ensure
396
+ admin&.close
397
+
398
+ unbind_oauth(bind_id)
399
+ end
400
+
401
+ private
402
+
403
+ # @return [Integer] number of seconds to wait. `rdkafka` requires this value
404
+ # (`max_wait_time`) to be provided in seconds while we define it in ms hence the conversion
405
+ def max_wait_time_seconds
406
+ self.class.max_wait_time / 1_000.0
407
+ end
408
+
409
+ # Adds a new callback for given rdkafka instance for oauth token refresh (if needed)
410
+ #
411
+ # @param id [String, Symbol] unique (for the lifetime of instance) id that we use for
412
+ # callback referencing
413
+ # @param instance [Rdkafka::Consumer, Rdkafka::Admin] rdkafka instance to be used to set
414
+ # appropriate oauth token when needed
415
+ def bind_oauth(id, instance)
416
+ Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.add(
417
+ id,
418
+ Instrumentation::Callbacks::OauthbearerTokenRefresh.new(
419
+ instance
264
420
  )
265
- end
421
+ )
422
+ end
266
423
 
267
- # Removes the callback from no longer used instance
268
- #
269
- # @param id [String, Symbol] unique (for the lifetime of instance) id that we use for
270
- # callback referencing
271
- def unbind_oauth(id)
272
- Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.delete(id)
273
- end
424
+ # Removes the callback from no longer used instance
425
+ #
426
+ # @param id [String, Symbol] unique (for the lifetime of instance) id that we use for
427
+ # callback referencing
428
+ def unbind_oauth(id)
429
+ Karafka::Core::Instrumentation.oauthbearer_token_refresh_callbacks.delete(id)
430
+ end
274
431
 
275
-      # There are some cases where rdkafka admin operations finish successfully but without the
-      # callback being triggered to materialize the post-promise object. Until this is fixed we
-      # can figure out, that operation we wanted to do finished successfully by checking that the
-      # effect of the command (new topic, more partitions, etc) is handled. Exactly for that we
-      # use the breaker. It we get a timeout, we can check that what we wanted to achieve has
-      # happened via the breaker check, hence we do not need to wait any longer.
-      #
-      # @param handler [Proc] the wait handler operation
-      # @param breaker [Proc] extra condition upon timeout that indicates things were finished ok
-      def with_re_wait(handler, breaker)
-        start_time = monotonic_now
-        # Convert milliseconds to seconds for sleep
-        sleep_time = retry_backoff / 1000.0
+    # There are some cases where rdkafka admin operations finish successfully but without the
+    # callback being triggered to materialize the post-promise object. Until this is fixed we
+    # can figure out, that operation we wanted to do finished successfully by checking that the
+    # effect of the command (new topic, more partitions, etc) is handled. Exactly for that we
+    # use the breaker. It we get a timeout, we can check that what we wanted to achieve has
+    # happened via the breaker check, hence we do not need to wait any longer.
+    #
+    # @param handler [Proc] the wait handler operation
+    # @param breaker [Proc] extra condition upon timeout that indicates things were finished ok
+    def with_re_wait(handler, breaker)
+      start_time = self.class.monotonic_now
+      # Convert milliseconds to seconds for sleep
+      sleep_time = self.class.retry_backoff / 1000.0
 
-        loop do
-          handler.call
+      loop do
+        handler.call
 
-          sleep(sleep_time)
+        sleep(sleep_time)
 
-          return if breaker.call
-        rescue Rdkafka::AbstractHandle::WaitTimeoutError
-          return if breaker.call
+        return if breaker.call
+      rescue Rdkafka::AbstractHandle::WaitTimeoutError
+        return if breaker.call
 
-          next if monotonic_now - start_time < max_retries_duration
+        next if self.class.monotonic_now - start_time < self.class.max_retries_duration
 
-          raise(Errors::ResultNotVisibleError)
-        end
+        raise(Errors::ResultNotVisibleError)
       end
+    end
 
-      # @param type [Symbol] type of config we want
-      # @param settings [Hash] extra settings for config (if needed)
-      # @return [::Rdkafka::Config] rdkafka config
-      def config(type, settings)
-        app_kafka
-          .then(&:dup)
-          .merge(admin_kafka)
-          .tap { |config| config[:'group.id'] = group_id }
-          # We merge after setting the group id so it can be altered if needed
-          # In general in admin we only should alter it when we need to impersonate a given
-          # consumer group or do something similar
-          .merge!(settings)
-          .then { |config| Karafka::Setup::AttributesMap.public_send(type, config) }
-          .then { |config| Rdkafka::Config.new(config) }
-      end
+    # @param type [Symbol] type of config we want
+    # @param settings [Hash] extra settings for config (if needed)
+    # @return [::Rdkafka::Config] rdkafka config
+    def config(type, settings)
+      kafka_config = self.class.app_kafka.dup
+      kafka_config.merge!(self.class.admin_kafka)
+      kafka_config[:'group.id'] = self.class.group_id
+      # We merge after setting the group id so it can be altered if needed
+      # In general in admin we only should alter it when we need to impersonate a given
+      # consumer group or do something similar
+      kafka_config.merge!(settings)
+      # Custom kafka config is merged last so it can override all other settings
+      # This enables multi-cluster support where custom_kafka specifies a different cluster
+      kafka_config.merge!(@custom_kafka)
+
+      mapped_config = Karafka::Setup::AttributesMap.public_send(type, kafka_config)
+
+      Rdkafka::Config.new(mapped_config)
     end
   end
 end
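
The practical upshot of the admin.rb change: the class-level API is preserved (each class method now delegates to `new.<method>`), while an Admin instance can carry its own kafka settings, which `#config` merges after the app defaults so they override them. A minimal usage sketch based only on the methods visible in this diff; the topic name and broker addresses below are placeholder values:

  # Class-level call, unchanged: delegates to a fresh instance bound to the
  # app's primary cluster.
  Karafka::Admin.create_topic('example-topic', 6, 3)

  # Instance bound to another cluster: the custom kafka hash is merged last
  # in #config, so 'bootstrap.servers' here wins over the app default.
  admin = Karafka::Admin.new(kafka: { 'bootstrap.servers': 'other-cluster:9092' })
  admin.cluster_info
  admin.read_topic('example-topic', 0, 10)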
data/lib/karafka/pro/* CHANGED (identical license-header hunk in every Pro file listed above; shown once)
@@ -1,7 +1,24 @@
 # frozen_string_literal: true
 
-# This code is part of Karafka Pro, a commercial component not licensed under LGPL.
-# See LICENSE for details.
+# Karafka Pro - Source Available Commercial Software
+# Copyright (c) 2017-present Maciej Mensfeld. All rights reserved.
+#
+# This software is NOT open source. It is source-available commercial software
+# requiring a paid license for use. It is NOT covered by LGPL.
+#
+# PROHIBITED:
+# - Use without a valid commercial license
+# - Redistribution, modification, or derivative works without authorization
+# - Use as training data for AI/ML models or inclusion in datasets
+# - Scraping, crawling, or automated collection for any purpose
+#
+# PERMITTED:
+# - Reading, referencing, and linking for personal or commercial use
+# - Runtime retrieval by AI assistants, coding agents, and RAG systems
+#   for the purpose of providing contextual help to Karafka users
+#
+# License: https://karafka.io/docs/Pro-License-Comm/
+# Contact: contact@karafka.io
 
 module Karafka
   module Pro