karafka 1.4.12 → 2.2.10

Files changed (359)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
  5. data/.github/workflows/ci.yml +169 -31
  6. data/.rspec +4 -0
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +716 -607
  9. data/CONTRIBUTING.md +10 -19
  10. data/Gemfile +7 -0
  11. data/Gemfile.lock +69 -92
  12. data/LICENSE +17 -0
  13. data/LICENSE-COMM +89 -0
  14. data/LICENSE-LGPL +165 -0
  15. data/README.md +48 -47
  16. data/bin/benchmarks +99 -0
  17. data/bin/create_token +22 -0
  18. data/bin/integrations +310 -0
  19. data/bin/karafka +5 -14
  20. data/bin/record_rss +50 -0
  21. data/bin/rspecs +6 -0
  22. data/bin/scenario +29 -0
  23. data/bin/stress_many +13 -0
  24. data/bin/stress_one +13 -0
  25. data/bin/verify_license_integrity +37 -0
  26. data/bin/wait_for_kafka +24 -0
  27. data/certs/cert_chain.pem +26 -0
  28. data/certs/karafka-pro.pem +11 -0
  29. data/config/locales/errors.yml +97 -0
  30. data/config/locales/pro_errors.yml +59 -0
  31. data/docker-compose.yml +19 -11
  32. data/karafka.gemspec +26 -22
  33. data/lib/active_job/karafka.rb +17 -0
  34. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  35. data/lib/karafka/active_job/consumer.rb +49 -0
  36. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  37. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  38. data/lib/karafka/active_job/current_attributes.rb +42 -0
  39. data/lib/karafka/active_job/dispatcher.rb +69 -0
  40. data/lib/karafka/active_job/job_extensions.rb +34 -0
  41. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  42. data/lib/karafka/admin.rb +313 -0
  43. data/lib/karafka/app.rb +47 -23
  44. data/lib/karafka/base_consumer.rb +260 -29
  45. data/lib/karafka/cli/base.rb +67 -36
  46. data/lib/karafka/cli/console.rb +18 -12
  47. data/lib/karafka/cli/help.rb +24 -0
  48. data/lib/karafka/cli/info.rb +47 -12
  49. data/lib/karafka/cli/install.rb +23 -14
  50. data/lib/karafka/cli/server.rb +101 -44
  51. data/lib/karafka/cli/topics.rb +146 -0
  52. data/lib/karafka/cli.rb +24 -27
  53. data/lib/karafka/connection/client.rb +553 -90
  54. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  55. data/lib/karafka/connection/listener.rb +294 -38
  56. data/lib/karafka/connection/listeners_batch.rb +40 -0
  57. data/lib/karafka/connection/messages_buffer.rb +84 -0
  58. data/lib/karafka/connection/pauses_manager.rb +46 -0
  59. data/lib/karafka/connection/proxy.rb +98 -0
  60. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  61. data/lib/karafka/connection/rebalance_manager.rb +105 -0
  62. data/lib/karafka/contracts/base.rb +17 -0
  63. data/lib/karafka/contracts/config.rb +130 -11
  64. data/lib/karafka/contracts/consumer_group.rb +32 -187
  65. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  66. data/lib/karafka/contracts/topic.rb +65 -0
  67. data/lib/karafka/contracts.rb +1 -1
  68. data/lib/karafka/embedded.rb +36 -0
  69. data/lib/karafka/env.rb +46 -0
  70. data/lib/karafka/errors.rb +37 -21
  71. data/lib/karafka/helpers/async.rb +33 -0
  72. data/lib/karafka/helpers/colorize.rb +26 -0
  73. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  74. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  75. data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
  76. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  77. data/lib/karafka/instrumentation/logger_listener.rb +303 -0
  78. data/lib/karafka/instrumentation/monitor.rb +13 -61
  79. data/lib/karafka/instrumentation/notifications.rb +79 -0
  80. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  81. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
  82. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
  83. data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
  84. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
  85. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
  86. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  87. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
  88. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  89. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  90. data/lib/karafka/licenser.rb +78 -0
  91. data/lib/karafka/messages/batch_metadata.rb +52 -0
  92. data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
  93. data/lib/karafka/messages/builders/message.rb +40 -0
  94. data/lib/karafka/messages/builders/messages.rb +36 -0
  95. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  96. data/lib/karafka/messages/messages.rb +71 -0
  97. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  98. data/lib/karafka/messages/parser.rb +14 -0
  99. data/lib/karafka/messages/seek.rb +12 -0
  100. data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
  101. data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
  102. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  103. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  104. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  105. data/lib/karafka/pro/cleaner/errors.rb +27 -0
  106. data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
  107. data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
  108. data/lib/karafka/pro/cleaner.rb +41 -0
  109. data/lib/karafka/pro/contracts/base.rb +23 -0
  110. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  111. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  112. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  113. data/lib/karafka/pro/encryption/errors.rb +27 -0
  114. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  115. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  116. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  117. data/lib/karafka/pro/encryption.rb +47 -0
  118. data/lib/karafka/pro/iterator/expander.rb +95 -0
  119. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  120. data/lib/karafka/pro/iterator.rb +170 -0
  121. data/lib/karafka/pro/loader.rb +106 -0
  122. data/lib/karafka/pro/performance_tracker.rb +84 -0
  123. data/lib/karafka/pro/processing/collapser.rb +62 -0
  124. data/lib/karafka/pro/processing/coordinator.rb +147 -0
  125. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  126. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  127. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  128. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
  129. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  130. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  131. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  132. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  133. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  134. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  135. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  136. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  137. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  138. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  139. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  140. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  141. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  142. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  143. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  144. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  146. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  147. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  148. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  149. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  150. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  151. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  152. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  153. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  154. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  155. data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
  156. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  157. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  158. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  159. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  160. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  161. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  162. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  163. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  164. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  165. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  166. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  167. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  168. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  169. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  170. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  171. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  172. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  173. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  174. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  175. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  176. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  177. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  178. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
  179. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  180. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  181. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  182. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  183. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  184. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  185. data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
  186. data/lib/karafka/pro/processing/strategies.rb +22 -0
  187. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  188. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  189. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  190. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  191. data/lib/karafka/pro/routing/features/base.rb +24 -0
  192. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  193. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  194. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  195. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  196. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  197. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  198. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  199. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  200. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  201. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  202. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  203. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  204. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  205. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  206. data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
  207. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
  208. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
  209. data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
  210. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  211. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
  212. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  213. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  214. data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
  215. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  216. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
  217. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  218. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  219. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  220. data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
  221. data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
  222. data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
  223. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  224. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  225. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  226. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  227. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  228. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  229. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  230. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  231. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  232. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  233. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  234. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  235. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  236. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  237. data/lib/karafka/pro.rb +13 -0
  238. data/lib/karafka/process.rb +24 -8
  239. data/lib/karafka/processing/coordinator.rb +181 -0
  240. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  241. data/lib/karafka/processing/executor.rb +155 -0
  242. data/lib/karafka/processing/executors_buffer.rb +72 -0
  243. data/lib/karafka/processing/expansions_selector.rb +22 -0
  244. data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
  245. data/lib/karafka/processing/inline_insights/listener.rb +19 -0
  246. data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
  247. data/lib/karafka/processing/jobs/base.rb +55 -0
  248. data/lib/karafka/processing/jobs/consume.rb +45 -0
  249. data/lib/karafka/processing/jobs/idle.rb +24 -0
  250. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  251. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  252. data/lib/karafka/processing/jobs_builder.rb +28 -0
  253. data/lib/karafka/processing/jobs_queue.rb +150 -0
  254. data/lib/karafka/processing/partitioner.rb +24 -0
  255. data/lib/karafka/processing/result.rb +42 -0
  256. data/lib/karafka/processing/scheduler.rb +22 -0
  257. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  258. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  259. data/lib/karafka/processing/strategies/base.rb +52 -0
  260. data/lib/karafka/processing/strategies/default.rb +158 -0
  261. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  262. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  263. data/lib/karafka/processing/strategies/mom.rb +29 -0
  264. data/lib/karafka/processing/strategy_selector.rb +47 -0
  265. data/lib/karafka/processing/worker.rb +93 -0
  266. data/lib/karafka/processing/workers_batch.rb +27 -0
  267. data/lib/karafka/railtie.rb +141 -0
  268. data/lib/karafka/routing/activity_manager.rb +84 -0
  269. data/lib/karafka/routing/builder.rb +45 -19
  270. data/lib/karafka/routing/consumer_group.rb +56 -20
  271. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  272. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  273. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  274. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  275. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  276. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  277. data/lib/karafka/routing/features/active_job.rb +13 -0
  278. data/lib/karafka/routing/features/base/expander.rb +59 -0
  279. data/lib/karafka/routing/features/base.rb +71 -0
  280. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  281. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  282. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  283. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  284. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  285. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  286. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  287. data/lib/karafka/routing/features/declaratives.rb +14 -0
  288. data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
  289. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
  290. data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
  291. data/lib/karafka/routing/features/inline_insights.rb +40 -0
  292. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  293. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  294. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  295. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  296. data/lib/karafka/routing/proxy.rb +22 -21
  297. data/lib/karafka/routing/router.rb +24 -10
  298. data/lib/karafka/routing/subscription_group.rb +110 -0
  299. data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
  300. data/lib/karafka/routing/topic.rb +87 -24
  301. data/lib/karafka/routing/topics.rb +46 -0
  302. data/lib/karafka/runner.rb +52 -0
  303. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  304. data/lib/karafka/server.rb +113 -37
  305. data/lib/karafka/setup/attributes_map.rb +348 -0
  306. data/lib/karafka/setup/config.rb +256 -175
  307. data/lib/karafka/status.rb +54 -7
  308. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  309. data/lib/karafka/templates/karafka.rb.erb +33 -55
  310. data/lib/karafka/time_trackers/base.rb +14 -0
  311. data/lib/karafka/time_trackers/pause.rb +122 -0
  312. data/lib/karafka/time_trackers/poll.rb +69 -0
  313. data/lib/karafka/version.rb +1 -1
  314. data/lib/karafka.rb +91 -17
  315. data/renovate.json +9 -0
  316. data.tar.gz.sig +0 -0
  317. metadata +330 -168
  318. metadata.gz.sig +0 -0
  319. data/MIT-LICENCE +0 -18
  320. data/certs/mensfeld.pem +0 -25
  321. data/config/errors.yml +0 -41
  322. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  323. data/lib/karafka/attributes_map.rb +0 -63
  324. data/lib/karafka/backends/inline.rb +0 -16
  325. data/lib/karafka/base_responder.rb +0 -226
  326. data/lib/karafka/cli/flow.rb +0 -48
  327. data/lib/karafka/cli/missingno.rb +0 -19
  328. data/lib/karafka/code_reloader.rb +0 -67
  329. data/lib/karafka/connection/api_adapter.rb +0 -158
  330. data/lib/karafka/connection/batch_delegator.rb +0 -55
  331. data/lib/karafka/connection/builder.rb +0 -23
  332. data/lib/karafka/connection/message_delegator.rb +0 -36
  333. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  334. data/lib/karafka/consumers/callbacks.rb +0 -71
  335. data/lib/karafka/consumers/includer.rb +0 -64
  336. data/lib/karafka/consumers/responders.rb +0 -24
  337. data/lib/karafka/consumers/single_params.rb +0 -15
  338. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  339. data/lib/karafka/contracts/responder_usage.rb +0 -54
  340. data/lib/karafka/fetcher.rb +0 -42
  341. data/lib/karafka/helpers/class_matcher.rb +0 -88
  342. data/lib/karafka/helpers/config_retriever.rb +0 -46
  343. data/lib/karafka/helpers/inflector.rb +0 -26
  344. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  345. data/lib/karafka/params/batch_metadata.rb +0 -26
  346. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  347. data/lib/karafka/params/builders/params.rb +0 -38
  348. data/lib/karafka/params/builders/params_batch.rb +0 -25
  349. data/lib/karafka/params/params_batch.rb +0 -60
  350. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  351. data/lib/karafka/persistence/client.rb +0 -29
  352. data/lib/karafka/persistence/consumers.rb +0 -45
  353. data/lib/karafka/persistence/topics.rb +0 -48
  354. data/lib/karafka/responders/builder.rb +0 -36
  355. data/lib/karafka/responders/topic.rb +0 -55
  356. data/lib/karafka/routing/topic_mapper.rb +0 -53
  357. data/lib/karafka/serialization/json/serializer.rb +0 -31
  358. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  359. data/lib/karafka/templates/application_responder.rb.erb +0 -11

data/lib/karafka/messages/batch_metadata.rb
@@ -0,0 +1,52 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    # Simple batch metadata object that stores all non-message information received from Kafka
+    # cluster while fetching the data.
+    #
+    # @note This metadata object refers to per batch metadata, not `#message.metadata`
+    BatchMetadata = Struct.new(
+      :size,
+      :first_offset,
+      :last_offset,
+      :deserializer,
+      :partition,
+      :topic,
+      :created_at,
+      :scheduled_at,
+      :processed_at,
+      keyword_init: true
+    ) do
+      # This lag describes how long did it take for a message to be consumed from the moment it was
+      # created
+      #
+      #
+      # @return [Integer] number of milliseconds
+      # @note In case of usage in workless flows, this value will be set to -1
+      def consumption_lag
+        processed_at ? time_distance_in_ms(processed_at, created_at) : -1
+      end
+
+      # This lag describes how long did a batch have to wait before it was picked up by one of the
+      # workers
+      #
+      # @return [Integer] number of milliseconds
+      # @note In case of usage in workless flows, this value will be set to -1
+      def processing_lag
+        processed_at ? time_distance_in_ms(processed_at, scheduled_at) : -1
+      end
+
+      private
+
+      # Computes time distance in between two times in ms
+      #
+      # @param time1 [Time]
+      # @param time2 [Time]
+      # @return [Integer] distance in between two times in ms
+      def time_distance_in_ms(time1, time2)
+        ((time1 - time2) * 1_000).round
+      end
+    end
+  end
+end
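
The two lag helpers above are plain time arithmetic over the struct fields. A minimal sketch of reading them from inside a consumer, assuming a hypothetical EventsConsumer and using the batch `#metadata` accessor introduced later in this changeset:

# Hypothetical consumer snippet illustrating the lag accessors above.
class EventsConsumer < Karafka::BaseConsumer
  def consume
    meta = messages.metadata

    # consumption_lag: ms between creation of the last message and processing start
    # processing_lag: ms the batch waited before a worker picked it up
    Karafka.logger.info(
      "Batch of #{meta.size} from #{meta.topic}/#{meta.partition}: " \
      "consumption_lag=#{meta.consumption_lag}ms processing_lag=#{meta.processing_lag}ms"
    )
  end
end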

data/lib/karafka/messages/builders/batch_metadata.rb
@@ -0,0 +1,60 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    module Builders
+      # Builder for creating batch metadata object based on the batch informations.
+      module BatchMetadata
+        class << self
+          # Creates metadata based on the kafka batch data.
+          #
+          # @param messages [Array<Karafka::Messages::Message>] messages array
+          # @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
+          # @param partition [Integer] partition of this metadata
+          # @param scheduled_at [Time] moment when the batch was scheduled for processing
+          # @return [Karafka::Messages::BatchMetadata] batch metadata object
+          #
+          # @note We do not set `processed_at` as this needs to be assigned when the batch is
+          #   picked up for processing.
+          def call(messages, topic, partition, scheduled_at)
+            Karafka::Messages::BatchMetadata.new(
+              size: messages.count,
+              first_offset: messages.first&.offset || -1001,
+              last_offset: messages.last&.offset || -1001,
+              deserializer: topic.deserializer,
+              partition: partition,
+              topic: topic.name,
+              # We go with the assumption that the creation of the whole batch is the last message
+              # creation time
+              created_at: local_created_at(messages.last),
+              # When this batch was built and scheduled for execution
+              scheduled_at: scheduled_at,
+              # This needs to be set to a correct value prior to processing starting
+              processed_at: nil
+            )
+          end
+
+          private
+
+          # Code that aligns the batch creation at into our local time. If time of current machine
+          # and the Kafka cluster drift, this helps not to allow this to leak into the framework.
+          #
+          # @param last_message [::Karafka::Messages::Message, nil] last message from the batch or
+          #   nil if no message
+          # @return [Time] batch creation time. Now if no messages (workless flow) or the last
+          #   message time as long as the message is not from the future
+          # @note Message can be from the future in case consumer machine and Kafka cluster drift
+          #   apart and the machine is behind the cluster.
+          def local_created_at(last_message)
+            now = ::Time.now
+
+            return now unless last_message
+
+            timestamp = last_message.timestamp
+            timestamp > now ? now : timestamp
+          end
+        end
+      end
+    end
+  end
+end

data/lib/karafka/messages/builders/message.rb
@@ -0,0 +1,40 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    # Builders encapsulate logic related to creating messages related objects.
+    module Builders
+      # Builder of a single message based on raw rdkafka message.
+      module Message
+        class << self
+          # @param kafka_message [Rdkafka::Consumer::Message] raw fetched message
+          # @param topic [Karafka::Routing::Topic] topic for which this message was fetched
+          # @param received_at [Time] moment when we've received the message
+          # @return [Karafka::Messages::Message] message object with payload and metadata
+          def call(kafka_message, topic, received_at)
+            metadata = Karafka::Messages::Metadata.new(
+              timestamp: kafka_message.timestamp,
+              headers: kafka_message.headers,
+              key: kafka_message.key,
+              offset: kafka_message.offset,
+              deserializer: topic.deserializer,
+              partition: kafka_message.partition,
+              topic: topic.name,
+              received_at: received_at
+            ).freeze
+
+            # Get the raw payload
+            payload = kafka_message.payload
+
+            # And nullify it in the kafka message. This can save a lot of memory when used with
+            # the Pro Cleaner API
+            kafka_message.instance_variable_set('@payload', nil)
+
+            # Karafka messages cannot be frozen because of the lazy deserialization feature
+            Karafka::Messages::Message.new(payload, metadata)
+          end
+        end
+      end
+    end
+  end
+end

data/lib/karafka/messages/builders/messages.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    module Builders
+      # Builder for creating message batch instances.
+      module Messages
+        class << self
+          # Creates messages batch with messages inside based on the incoming messages and the
+          # topic from which it comes.
+          #
+          # @param messages [Array<Karafka::Messages::Message>] karafka messages array
+          # @param topic [Karafka::Routing::Topic] topic for which we're received messages
+          # @param partition [Integer] partition of those messages
+          # @param received_at [Time] moment in time when the messages were received
+          # @return [Karafka::Messages::Messages] messages batch object
+          def call(messages, topic, partition, received_at)
+            # We cannot freeze the batch metadata because it is altered with the processed_at time
+            # prior to the consumption. It is being frozen there
+            metadata = BatchMetadata.call(
+              messages,
+              topic,
+              partition,
+              received_at
+            )
+
+            Karafka::Messages::Messages.new(
+              messages,
+              metadata
+            ).freeze
+          end
+        end
+      end
+    end
+  end
+end

data/lib/karafka/{params/params.rb → messages/message.rb}
@@ -1,20 +1,32 @@
 # frozen_string_literal: true
 
 module Karafka
-  # Params namespace encapsulating all the logic that is directly related to params handling
-  module Params
+  # Messages namespace encapsulating all the logic that is directly related to messages handling
+  module Messages
     # It provides lazy loading not only until the first usage, but also allows us to skip
     # using deserializer until we execute our logic. That way we can operate with
     # heavy-deserialization data without slowing down the whole application.
-    class Params
+    class Message
       extend Forwardable
 
-      attr_reader :raw_payload, :metadata
+      class << self
+        # @return [Object] general parser
+        # @note We cache it here for performance reasons. It is 2.5x times faster than getting it
+        #   via the config chain.
+        def parser
+          @parser ||= App.config.internal.messages.parser
+        end
+      end
+
+      attr_reader :metadata
+      # raw payload needs to be mutable as we want to have option to change it in the parser
+      # prior to the final deserialization
+      attr_accessor :raw_payload
 
       def_delegators :metadata, *Metadata.members
 
       # @param raw_payload [Object] incoming payload before deserialization
-      # @param metadata [Karafka::Params::Metadata] message metadata object
+      # @param metadata [Karafka::Messages::Metadata] message metadata object
       def initialize(raw_payload, metadata)
         @raw_payload = raw_payload
         @metadata = metadata
@@ -33,21 +45,16 @@ module Karafka
         @payload
       end
 
-      # @return [Boolean] did given params payload were deserialized already
+      # @return [Boolean] did we deserialize payload already
       def deserialized?
        @deserialized
      end

      private

-      # @return [Object] tries de-serializes data
+      # @return [Object] deserialized data
      def deserialize
-        Karafka.monitor.instrument('params.params.deserialize', caller: self) do
-          metadata.deserializer.call(self)
-        end
-      rescue ::StandardError => e
-        Karafka.monitor.instrument('params.params.deserialize.error', caller: self, error: e)
-        raise e
+        self.class.parser.call(self)
      end
    end
  end
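
The rename from `Params` to `Message` keeps the lazy-deserialization contract: `#raw_payload` returns the bytes as fetched, while the first `#payload` call runs the parser and caches the result. A hedged illustration, assuming `message` is a `Karafka::Messages::Message` obtained inside a consumer:

message = messages.first  # inside a consumer's #consume

message.deserialized?     # => false, nothing has been parsed yet
message.raw_payload       # => raw String exactly as fetched from Kafka
message.payload           # => deserialized object, runs the parser once
message.deserialized?     # => true, later #payload calls reuse the cached value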

data/lib/karafka/messages/messages.rb
@@ -0,0 +1,71 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    # Messages batch represents a set of messages received from Kafka of a single topic partition.
+    class Messages
+      include Enumerable
+
+      attr_reader :metadata
+
+      # @param messages_array [Array<Karafka::Messages::Message>] array with karafka messages
+      # @param metadata [Karafka::Messages::BatchMetadata]
+      # @return [Karafka::Messages::Messages] lazy evaluated messages batch object
+      def initialize(messages_array, metadata)
+        @messages_array = messages_array
+        @metadata = metadata
+      end
+
+      # @param block [Proc] block we want to execute per each message
+      # @note Invocation of this method will not cause loading and deserializing of messages.
+      def each(&block)
+        @messages_array.each(&block)
+      end
+
+      # Runs deserialization of all the messages and returns them
+      # @return [Array<Karafka::Messages::Message>]
+      def deserialize!
+        each(&:payload)
+      end
+
+      # @return [Array<Object>] array with deserialized payloads. This method can be useful when
+      #   we don't care about metadata and just want to extract all the data payloads from the
+      #   batch
+      def payloads
+        map(&:payload)
+      end
+
+      # @return [Array<String>] array with raw, not deserialized payloads
+      def raw_payloads
+        map(&:raw_payload)
+      end
+
+      # @return [Boolean] is the messages batch empty
+      def empty?
+        @messages_array.empty?
+      end
+
+      # @return [Karafka::Messages::Message] first message
+      def first
+        @messages_array.first
+      end
+
+      # @return [Karafka::Messages::Message] last message
+      def last
+        @messages_array.last
+      end
+
+      # @return [Integer] number of messages in the batch
+      def size
+        @messages_array.size
+      end
+
+      # @return [Array<Karafka::Messages::Message>] copy of the pure array with messages
+      def to_a
+        @messages_array.dup
+      end
+
+      alias count size
+    end
+  end
+end
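
Because the batch is Enumerable and deserialization stays lazy, `#each` and `#raw_payloads` never touch the deserializer while `#payloads` deserializes everything. A sketch under the assumption of JSON order payloads carrying a `total` field (consumer and topic names are illustrative):

# Hypothetical consumer using the batch API introduced above.
class OrdersConsumer < Karafka::BaseConsumer
  def consume
    # Iterating does not deserialize anything by itself
    messages.each do |message|
      puts "#{message.topic}/#{message.partition}@#{message.offset}"
    end

    # Deserialized payloads only, ignoring per-message metadata
    total = messages.payloads.sum { |order| order.fetch('total', 0) }

    # Raw payloads skip deserialization entirely
    raw_bytes = messages.raw_payloads.sum(&:bytesize)

    puts "#{messages.size} orders, total=#{total}, #{raw_bytes} bytes"
  end
end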

data/lib/karafka/{params → messages}/metadata.rb
@@ -1,18 +1,16 @@
 # frozen_string_literal: true
 
 module Karafka
-  module Params
-    # Single message / params metadata details that can be accessed without the need for the
-    # payload deserialization
+  module Messages
+    # Single message metadata details that can be accessed without the need of deserialization.
    Metadata = Struct.new(
-      :create_time,
+      :timestamp,
      :headers,
-      :is_control_record,
      :key,
      :offset,
      :deserializer,
      :partition,
-      :receive_time,
+      :received_at,
      :topic,
      keyword_init: true
    )

data/lib/karafka/messages/parser.rb
@@ -0,0 +1,14 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    # Default message parser. The only thing it does, is calling the deserializer
+    class Parser
+      # @param message [::Karafka::Messages::Message]
+      # @return [Object] deserialized payload
+      def call(message)
+        message.metadata.deserializer.call(message)
+      end
+    end
+  end
+end
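
Because `raw_payload` is mutable (see the `attr_accessor` above), a parser can rewrite the payload before the deserializer runs; this is the hook the Pro encryption parser in this changeset plugs into. A hypothetical gzip-aware parser, assuming it would be wired in through the same `config.internal.messages.parser` setting the `Message` class reads:

require 'zlib'

# Hypothetical parser: transparently inflates gzip-compressed payloads before
# handing control back to the default parser, which calls the deserializer.
class GzipAwareParser < Karafka::Messages::Parser
  GZIP_MAGIC = "\x1F\x8B".b

  def call(message)
    if message.raw_payload&.start_with?(GZIP_MAGIC)
      # raw_payload is intentionally mutable, so we can swap in the inflated bytes
      message.raw_payload = Zlib.gunzip(message.raw_payload)
    end

    super
  end
end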

data/lib/karafka/messages/seek.rb
@@ -0,0 +1,12 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Messages
+    # "Fake" message that we use as an abstraction layer when seeking back.
+    # This allows us to encapsulate a seek with a simple abstraction
+    #
+    # @note `#offset` can be either the offset value or the time of the offset
+    #   (first equal or greater)
+    Seek = Struct.new(:topic, :partition, :offset)
+  end
+end
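
For illustration, a `Seek` can therefore carry either form of offset (the values below are made up):

Karafka::Messages::Seek.new('events', 0, 1_024)           # seek to a concrete offset
Karafka::Messages::Seek.new('events', 0, Time.now - 3600) # first offset at or after this time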

data/lib/karafka/patches/rdkafka/bindings.rb
@@ -0,0 +1,122 @@
+# frozen_string_literal: true
+
+module Karafka
+  # Namespace for third-party libraries patches
+  module Patches
+    # Rdkafka patches specific to Karafka
+    module Rdkafka
+      # Binding patches that slightly change how rdkafka operates in certain places
+      module Bindings
+        include ::Rdkafka::Bindings
+
+        # Alias internally
+        RB = ::Rdkafka::Bindings
+
+        class << self
+          # Handle assignments on cooperative rebalance
+          #
+          # @param client_ptr [FFI::Pointer]
+          # @param code [Integer]
+          # @param partitions_ptr [FFI::Pointer]
+          # @param tpl [Rdkafka::Consumer::TopicPartitionList]
+          # @param opaque [Rdkafka::Opaque]
+          def on_cooperative_rebalance(client_ptr, code, partitions_ptr, tpl, opaque)
+            case code
+            when RB::RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
+              opaque&.call_on_partitions_assign(tpl)
+              RB.rd_kafka_incremental_assign(client_ptr, partitions_ptr)
+              opaque&.call_on_partitions_assigned(tpl)
+            when RB::RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS
+              opaque&.call_on_partitions_revoke(tpl)
+              RB.rd_kafka_commit(client_ptr, nil, false)
+              RB.rd_kafka_incremental_unassign(client_ptr, partitions_ptr)
+              opaque&.call_on_partitions_revoked(tpl)
+            else
+              opaque&.call_on_partitions_assign(tpl)
+              RB.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+              opaque&.call_on_partitions_assigned(tpl)
+            end
+          end
+
+          # Handle assignments on a eager rebalance
+          #
+          # @param client_ptr [FFI::Pointer]
+          # @param code [Integer]
+          # @param partitions_ptr [FFI::Pointer]
+          # @param tpl [Rdkafka::Consumer::TopicPartitionList]
+          # @param opaque [Rdkafka::Opaque]
+          def on_eager_rebalance(client_ptr, code, partitions_ptr, tpl, opaque)
+            case code
+            when RB::RD_KAFKA_RESP_ERR__ASSIGN_PARTITIONS
+              opaque&.call_on_partitions_assign(tpl)
+              RB.rd_kafka_assign(client_ptr, partitions_ptr)
+              opaque&.call_on_partitions_assigned(tpl)
+            when RB::RD_KAFKA_RESP_ERR__REVOKE_PARTITIONS
+              opaque&.call_on_partitions_revoke(tpl)
+              RB.rd_kafka_commit(client_ptr, nil, false)
+              RB.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+              opaque&.call_on_partitions_revoked(tpl)
+            else
+              opaque&.call_on_partitions_assign(tpl)
+              RB.rd_kafka_assign(client_ptr, FFI::Pointer::NULL)
+              opaque&.call_on_partitions_assigned(tpl)
+            end
+          end
+        end
+
+        # This patch changes few things:
+        # - it commits offsets (if any) upon partition revocation, so less jobs need to be
+        #   reprocessed if they are assigned to a different process
+        # - reports callback errors into the errors instrumentation instead of the logger
+        # - catches only StandardError instead of Exception as we fully control the directly
+        #   executed callbacks
+        #
+        # @see https://docs.confluent.io/2.0.0/clients/librdkafka/classRdKafka_1_1RebalanceCb.html
+        RebalanceCallback = FFI::Function.new(
+          :void, %i[pointer int pointer pointer]
+        ) do |client_ptr, code, partitions_ptr, opaque_ptr|
+          # Patch reference
+          pr = ::Karafka::Patches::Rdkafka::Bindings
+          tpl = ::Rdkafka::Consumer::TopicPartitionList.from_native_tpl(partitions_ptr).freeze
+          opaque = ::Rdkafka::Config.opaques[opaque_ptr.to_i]
+
+          if RB.rd_kafka_rebalance_protocol(client_ptr) == 'COOPERATIVE'
+            pr.on_cooperative_rebalance(client_ptr, code, partitions_ptr, tpl, opaque)
+          else
+            pr.on_eager_rebalance(client_ptr, code, partitions_ptr, tpl, opaque)
+          end
+        end
+      end
+    end
+  end
+end
+
+# We need to replace the original callback with ours.
+# At the moment there is no API in rdkafka-ruby to do so
+::Rdkafka::Bindings.send(
+  :remove_const,
+  'RebalanceCallback'
+)
+
+::Rdkafka::Bindings.const_set(
+  'RebalanceCallback',
+  Karafka::Patches::Rdkafka::Bindings::RebalanceCallback
+)
+
+::Rdkafka::Bindings.attach_function(
+  :rd_kafka_rebalance_protocol,
+  %i[pointer],
+  :string
+)
+
+::Rdkafka::Bindings.attach_function(
+  :rd_kafka_incremental_assign,
+  %i[pointer pointer],
+  :string
+)
+
+::Rdkafka::Bindings.attach_function(
+  :rd_kafka_incremental_unassign,
+  %i[pointer pointer],
+  :string
+)

data/lib/karafka/patches/rdkafka/opaque.rb
@@ -0,0 +1,36 @@
+# frozen_string_literal: true
+
+module Karafka
+  module Patches
+    module Rdkafka
+      # Patches allowing us to run events on both pre and post rebalance events.
+      # Thanks to that, we can easily connect to the whole flow despite of the moment when things
+      # are happening
+      module Opaque
+        # Handles pre-assign phase of rebalance
+        #
+        # @param tpl [Rdkafka::Consumer::TopicPartitionList]
+        def call_on_partitions_assign(tpl)
+          return unless consumer_rebalance_listener
+          return unless consumer_rebalance_listener.respond_to?(:on_partitions_assign)
+
+          consumer_rebalance_listener.on_partitions_assign(tpl)
+        end
+
+        # Handles pre-revoke phase of rebalance
+        #
+        # @param tpl [Rdkafka::Consumer::TopicPartitionList]
+        def call_on_partitions_revoke(tpl)
+          return unless consumer_rebalance_listener
+          return unless consumer_rebalance_listener.respond_to?(:on_partitions_revoke)
+
+          consumer_rebalance_listener.on_partitions_revoke(tpl)
+        end
+      end
+    end
+  end
+end
+
+::Rdkafka::Opaque.include(
+  Karafka::Patches::Rdkafka::Opaque
+)

data/lib/karafka/pro/active_job/consumer.rb
@@ -0,0 +1,47 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    module ActiveJob
+      # Pro ActiveJob consumer that is suppose to handle long-running jobs as well as short
+      # running jobs
+      #
+      # When in LRJ, it will pause a given partition forever and will resume its processing only
+      # when all the jobs are done processing.
+      #
+      # It contains slightly better revocation warranties than the regular blocking consumer as
+      # it can stop processing batch of jobs in the middle after the revocation.
+      class Consumer < ::Karafka::ActiveJob::Consumer
+        # Runs ActiveJob jobs processing and handles lrj if needed
+        def consume
+          messages.each(clean: true) do |message|
+            # If for any reason we've lost this partition, not worth iterating over new messages
+            # as they are no longer ours
+            break if revoked?
+
+            # We cannot early stop when running virtual partitions because the intermediate state
+            # would force us not to commit the offsets. This would cause extensive
+            # double-processing
+            break if Karafka::App.stopping? && !topic.virtual_partitions?
+
+            consume_job(message)
+
+            # We can always mark because of the virtual offset management that we have in VPs
+            mark_as_consumed(message)
+          end
+        end
+      end
+    end
+  end
+end

data/lib/karafka/pro/active_job/dispatcher.rb
@@ -0,0 +1,86 @@
+# frozen_string_literal: true
+
+# This Karafka component is a Pro component under a commercial license.
+# This Karafka component is NOT licensed under LGPL.
+#
+# All of the commercial components are present in the lib/karafka/pro directory of this
+# repository and their usage requires commercial license agreement.
+#
+# Karafka has also commercial-friendly license, commercial support and commercial components.
+#
+# By sending a pull request to the pro components, you are agreeing to transfer the copyright of
+# your code to Maciej Mensfeld.
+
+module Karafka
+  module Pro
+    # Karafka Pro ActiveJob components
+    module ActiveJob
+      # Pro dispatcher that sends the ActiveJob job to a proper topic based on the queue name
+      # and that allows to inject additional options into the producer, effectively allowing for a
+      # much better and more granular control over the dispatch and consumption process.
+      class Dispatcher < ::Karafka::ActiveJob::Dispatcher
+        # Defaults for dispatching
+        # They can be updated by using `#karafka_options` on the job
+        DEFAULTS = {
+          dispatch_method: :produce_async,
+          dispatch_many_method: :produce_many_async,
+          # We don't create a dummy proc based partitioner as we would have to evaluate it with
+          # each job.
+          partitioner: nil,
+          # Allows for usage of `:key` or `:partition_key`
+          partition_key_type: :key
+        }.freeze
+
+        private_constant :DEFAULTS
+
+        # @param job [ActiveJob::Base] job
+        def dispatch(job)
+          ::Karafka.producer.public_send(
+            fetch_option(job, :dispatch_method, DEFAULTS),
+            dispatch_details(job).merge!(
+              topic: job.queue_name,
+              payload: ::ActiveSupport::JSON.encode(serialize_job(job))
+            )
+          )
+        end
+
+        # Bulk dispatches multiple jobs using the Rails 7.1+ API
+        # @param jobs [Array<ActiveJob::Base>] jobs we want to dispatch
+        def dispatch_many(jobs)
+          dispatches = Hash.new { |hash, key| hash[key] = [] }
+
+          jobs.each do |job|
+            d_method = fetch_option(job, :dispatch_many_method, DEFAULTS)
+
+            dispatches[d_method] << dispatch_details(job).merge!(
+              topic: job.queue_name,
+              payload: ::ActiveSupport::JSON.encode(serialize_job(job))
+            )
+          end
+
+          dispatches.each do |type, messages|
+            ::Karafka.producer.public_send(
+              type,
+              messages
+            )
+          end
+        end
+
+        private
+
+        # @param job [ActiveJob::Base] job instance
+        # @return [Hash] hash with dispatch details to which we merge topic and payload
+        def dispatch_details(job)
+          partitioner = fetch_option(job, :partitioner, DEFAULTS)
+          key_type = fetch_option(job, :partition_key_type, DEFAULTS)
+
+          return {} unless partitioner
+
+          {
+            key_type => partitioner.call(job)
+          }
+        end
+      end
+    end
+  end
+end
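
As the `DEFAULTS` comment above notes, these options can be overridden per job class through `#karafka_options`. A hedged sketch; the job, queue, and partitioning rule are illustrative only:

# Hypothetical job overriding the dispatch defaults listed above.
# Assumes the `payments` queue maps to a `payments` Kafka topic.
class PaymentJob < ActiveJob::Base
  queue_as :payments

  karafka_options(
    # Wait for the broker ack instead of the default :produce_async
    dispatch_method: :produce_sync,
    # Route all jobs of one account to the same partition
    partitioner: ->(job) { job.arguments.first.to_s },
    # Produce with :partition_key instead of the default :key
    partition_key_type: :partition_key
  )

  def perform(account_id)
    # ...
  end
end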