karafka 1.4.12 → 2.2.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (359) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
  5. data/.github/workflows/ci.yml +169 -31
  6. data/.rspec +4 -0
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +716 -607
  9. data/CONTRIBUTING.md +10 -19
  10. data/Gemfile +7 -0
  11. data/Gemfile.lock +69 -92
  12. data/LICENSE +17 -0
  13. data/LICENSE-COMM +89 -0
  14. data/LICENSE-LGPL +165 -0
  15. data/README.md +48 -47
  16. data/bin/benchmarks +99 -0
  17. data/bin/create_token +22 -0
  18. data/bin/integrations +310 -0
  19. data/bin/karafka +5 -14
  20. data/bin/record_rss +50 -0
  21. data/bin/rspecs +6 -0
  22. data/bin/scenario +29 -0
  23. data/bin/stress_many +13 -0
  24. data/bin/stress_one +13 -0
  25. data/bin/verify_license_integrity +37 -0
  26. data/bin/wait_for_kafka +24 -0
  27. data/certs/cert_chain.pem +26 -0
  28. data/certs/karafka-pro.pem +11 -0
  29. data/config/locales/errors.yml +97 -0
  30. data/config/locales/pro_errors.yml +59 -0
  31. data/docker-compose.yml +19 -11
  32. data/karafka.gemspec +26 -22
  33. data/lib/active_job/karafka.rb +17 -0
  34. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  35. data/lib/karafka/active_job/consumer.rb +49 -0
  36. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  37. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  38. data/lib/karafka/active_job/current_attributes.rb +42 -0
  39. data/lib/karafka/active_job/dispatcher.rb +69 -0
  40. data/lib/karafka/active_job/job_extensions.rb +34 -0
  41. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  42. data/lib/karafka/admin.rb +313 -0
  43. data/lib/karafka/app.rb +47 -23
  44. data/lib/karafka/base_consumer.rb +260 -29
  45. data/lib/karafka/cli/base.rb +67 -36
  46. data/lib/karafka/cli/console.rb +18 -12
  47. data/lib/karafka/cli/help.rb +24 -0
  48. data/lib/karafka/cli/info.rb +47 -12
  49. data/lib/karafka/cli/install.rb +23 -14
  50. data/lib/karafka/cli/server.rb +101 -44
  51. data/lib/karafka/cli/topics.rb +146 -0
  52. data/lib/karafka/cli.rb +24 -27
  53. data/lib/karafka/connection/client.rb +553 -90
  54. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  55. data/lib/karafka/connection/listener.rb +294 -38
  56. data/lib/karafka/connection/listeners_batch.rb +40 -0
  57. data/lib/karafka/connection/messages_buffer.rb +84 -0
  58. data/lib/karafka/connection/pauses_manager.rb +46 -0
  59. data/lib/karafka/connection/proxy.rb +98 -0
  60. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  61. data/lib/karafka/connection/rebalance_manager.rb +105 -0
  62. data/lib/karafka/contracts/base.rb +17 -0
  63. data/lib/karafka/contracts/config.rb +130 -11
  64. data/lib/karafka/contracts/consumer_group.rb +32 -187
  65. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  66. data/lib/karafka/contracts/topic.rb +65 -0
  67. data/lib/karafka/contracts.rb +1 -1
  68. data/lib/karafka/embedded.rb +36 -0
  69. data/lib/karafka/env.rb +46 -0
  70. data/lib/karafka/errors.rb +37 -21
  71. data/lib/karafka/helpers/async.rb +33 -0
  72. data/lib/karafka/helpers/colorize.rb +26 -0
  73. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  74. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  75. data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
  76. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  77. data/lib/karafka/instrumentation/logger_listener.rb +303 -0
  78. data/lib/karafka/instrumentation/monitor.rb +13 -61
  79. data/lib/karafka/instrumentation/notifications.rb +79 -0
  80. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  81. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
  82. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
  83. data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
  84. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
  85. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
  86. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  87. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
  88. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  89. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  90. data/lib/karafka/licenser.rb +78 -0
  91. data/lib/karafka/messages/batch_metadata.rb +52 -0
  92. data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
  93. data/lib/karafka/messages/builders/message.rb +40 -0
  94. data/lib/karafka/messages/builders/messages.rb +36 -0
  95. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  96. data/lib/karafka/messages/messages.rb +71 -0
  97. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  98. data/lib/karafka/messages/parser.rb +14 -0
  99. data/lib/karafka/messages/seek.rb +12 -0
  100. data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
  101. data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
  102. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  103. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  104. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  105. data/lib/karafka/pro/cleaner/errors.rb +27 -0
  106. data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
  107. data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
  108. data/lib/karafka/pro/cleaner.rb +41 -0
  109. data/lib/karafka/pro/contracts/base.rb +23 -0
  110. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  111. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  112. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  113. data/lib/karafka/pro/encryption/errors.rb +27 -0
  114. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  115. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  116. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  117. data/lib/karafka/pro/encryption.rb +47 -0
  118. data/lib/karafka/pro/iterator/expander.rb +95 -0
  119. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  120. data/lib/karafka/pro/iterator.rb +170 -0
  121. data/lib/karafka/pro/loader.rb +106 -0
  122. data/lib/karafka/pro/performance_tracker.rb +84 -0
  123. data/lib/karafka/pro/processing/collapser.rb +62 -0
  124. data/lib/karafka/pro/processing/coordinator.rb +147 -0
  125. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  126. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  127. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  128. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
  129. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  130. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  131. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  132. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  133. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  134. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  135. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  136. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  137. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  138. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  139. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  140. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  141. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  142. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  143. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  144. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  146. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  147. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  148. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  149. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  150. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  151. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  152. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  153. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  154. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  155. data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
  156. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  157. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  158. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  159. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  160. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  161. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  162. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  163. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  164. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  165. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  166. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  167. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  168. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  169. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  170. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  171. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  172. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  173. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  174. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  175. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  176. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  177. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  178. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
  179. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  180. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  181. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  182. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  183. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  184. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  185. data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
  186. data/lib/karafka/pro/processing/strategies.rb +22 -0
  187. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  188. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  189. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  190. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  191. data/lib/karafka/pro/routing/features/base.rb +24 -0
  192. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  193. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  194. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  195. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  196. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  197. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  198. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  199. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  200. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  201. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  202. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  203. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  204. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  205. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  206. data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
  207. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
  208. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
  209. data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
  210. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  211. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
  212. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  213. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  214. data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
  215. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  216. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
  217. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  218. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  219. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  220. data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
  221. data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
  222. data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
  223. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  224. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  225. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  226. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  227. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  228. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  229. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  230. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  231. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  232. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  233. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  234. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  235. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  236. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  237. data/lib/karafka/pro.rb +13 -0
  238. data/lib/karafka/process.rb +24 -8
  239. data/lib/karafka/processing/coordinator.rb +181 -0
  240. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  241. data/lib/karafka/processing/executor.rb +155 -0
  242. data/lib/karafka/processing/executors_buffer.rb +72 -0
  243. data/lib/karafka/processing/expansions_selector.rb +22 -0
  244. data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
  245. data/lib/karafka/processing/inline_insights/listener.rb +19 -0
  246. data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
  247. data/lib/karafka/processing/jobs/base.rb +55 -0
  248. data/lib/karafka/processing/jobs/consume.rb +45 -0
  249. data/lib/karafka/processing/jobs/idle.rb +24 -0
  250. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  251. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  252. data/lib/karafka/processing/jobs_builder.rb +28 -0
  253. data/lib/karafka/processing/jobs_queue.rb +150 -0
  254. data/lib/karafka/processing/partitioner.rb +24 -0
  255. data/lib/karafka/processing/result.rb +42 -0
  256. data/lib/karafka/processing/scheduler.rb +22 -0
  257. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  258. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  259. data/lib/karafka/processing/strategies/base.rb +52 -0
  260. data/lib/karafka/processing/strategies/default.rb +158 -0
  261. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  262. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  263. data/lib/karafka/processing/strategies/mom.rb +29 -0
  264. data/lib/karafka/processing/strategy_selector.rb +47 -0
  265. data/lib/karafka/processing/worker.rb +93 -0
  266. data/lib/karafka/processing/workers_batch.rb +27 -0
  267. data/lib/karafka/railtie.rb +141 -0
  268. data/lib/karafka/routing/activity_manager.rb +84 -0
  269. data/lib/karafka/routing/builder.rb +45 -19
  270. data/lib/karafka/routing/consumer_group.rb +56 -20
  271. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  272. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  273. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  274. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  275. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  276. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  277. data/lib/karafka/routing/features/active_job.rb +13 -0
  278. data/lib/karafka/routing/features/base/expander.rb +59 -0
  279. data/lib/karafka/routing/features/base.rb +71 -0
  280. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  281. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  282. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  283. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  284. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  285. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  286. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  287. data/lib/karafka/routing/features/declaratives.rb +14 -0
  288. data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
  289. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
  290. data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
  291. data/lib/karafka/routing/features/inline_insights.rb +40 -0
  292. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  293. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  294. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  295. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  296. data/lib/karafka/routing/proxy.rb +22 -21
  297. data/lib/karafka/routing/router.rb +24 -10
  298. data/lib/karafka/routing/subscription_group.rb +110 -0
  299. data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
  300. data/lib/karafka/routing/topic.rb +87 -24
  301. data/lib/karafka/routing/topics.rb +46 -0
  302. data/lib/karafka/runner.rb +52 -0
  303. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  304. data/lib/karafka/server.rb +113 -37
  305. data/lib/karafka/setup/attributes_map.rb +348 -0
  306. data/lib/karafka/setup/config.rb +256 -175
  307. data/lib/karafka/status.rb +54 -7
  308. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  309. data/lib/karafka/templates/karafka.rb.erb +33 -55
  310. data/lib/karafka/time_trackers/base.rb +14 -0
  311. data/lib/karafka/time_trackers/pause.rb +122 -0
  312. data/lib/karafka/time_trackers/poll.rb +69 -0
  313. data/lib/karafka/version.rb +1 -1
  314. data/lib/karafka.rb +91 -17
  315. data/renovate.json +9 -0
  316. data.tar.gz.sig +0 -0
  317. metadata +330 -168
  318. metadata.gz.sig +0 -0
  319. data/MIT-LICENCE +0 -18
  320. data/certs/mensfeld.pem +0 -25
  321. data/config/errors.yml +0 -41
  322. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  323. data/lib/karafka/attributes_map.rb +0 -63
  324. data/lib/karafka/backends/inline.rb +0 -16
  325. data/lib/karafka/base_responder.rb +0 -226
  326. data/lib/karafka/cli/flow.rb +0 -48
  327. data/lib/karafka/cli/missingno.rb +0 -19
  328. data/lib/karafka/code_reloader.rb +0 -67
  329. data/lib/karafka/connection/api_adapter.rb +0 -158
  330. data/lib/karafka/connection/batch_delegator.rb +0 -55
  331. data/lib/karafka/connection/builder.rb +0 -23
  332. data/lib/karafka/connection/message_delegator.rb +0 -36
  333. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  334. data/lib/karafka/consumers/callbacks.rb +0 -71
  335. data/lib/karafka/consumers/includer.rb +0 -64
  336. data/lib/karafka/consumers/responders.rb +0 -24
  337. data/lib/karafka/consumers/single_params.rb +0 -15
  338. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  339. data/lib/karafka/contracts/responder_usage.rb +0 -54
  340. data/lib/karafka/fetcher.rb +0 -42
  341. data/lib/karafka/helpers/class_matcher.rb +0 -88
  342. data/lib/karafka/helpers/config_retriever.rb +0 -46
  343. data/lib/karafka/helpers/inflector.rb +0 -26
  344. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  345. data/lib/karafka/params/batch_metadata.rb +0 -26
  346. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  347. data/lib/karafka/params/builders/params.rb +0 -38
  348. data/lib/karafka/params/builders/params_batch.rb +0 -25
  349. data/lib/karafka/params/params_batch.rb +0 -60
  350. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  351. data/lib/karafka/persistence/client.rb +0 -29
  352. data/lib/karafka/persistence/consumers.rb +0 -45
  353. data/lib/karafka/persistence/topics.rb +0 -48
  354. data/lib/karafka/responders/builder.rb +0 -36
  355. data/lib/karafka/responders/topic.rb +0 -55
  356. data/lib/karafka/routing/topic_mapper.rb +0 -53
  357. data/lib/karafka/serialization/json/serializer.rb +0 -31
  358. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  359. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/certs/mensfeld.pem DELETED
@@ -1,25 +0,0 @@
1
- -----BEGIN CERTIFICATE-----
2
- MIIEODCCAqCgAwIBAgIBATANBgkqhkiG9w0BAQsFADAjMSEwHwYDVQQDDBhtYWNp
3
- ZWovREM9bWVuc2ZlbGQvREM9cGwwHhcNMjEwODExMTQxNTEzWhcNMjIwODExMTQx
4
- NTEzWjAjMSEwHwYDVQQDDBhtYWNpZWovREM9bWVuc2ZlbGQvREM9cGwwggGiMA0G
5
- CSqGSIb3DQEBAQUAA4IBjwAwggGKAoIBgQDV2jKH4Ti87GM6nyT6D+ESzTI0MZDj
6
- ak2/TEwnxvijMJyCCPKT/qIkbW4/f0VHM4rhPr1nW73sb5SZBVFCLlJcOSKOBdUY
7
- TMY+SIXN2EtUaZuhAOe8LxtxjHTgRHvHcqUQMBENXTISNzCo32LnUxweu66ia4Pd
8
- 1mNRhzOqNv9YiBZvtBf7IMQ+sYdOCjboq2dlsWmJiwiDpY9lQBTnWORnT3mQxU5x
9
- vPSwnLB854cHdCS8fQo4DjeJBRZHhEbcE5sqhEMB3RZA3EtFVEXOxlNxVTS3tncI
10
- qyNXiWDaxcipaens4ObSY1C2HTV7OWb7OMqSCIybeYTSfkaSdqmcl4S6zxXkjH1J
11
- tnjayAVzD+QVXGijsPLE2PFnJAh9iDET2cMsjabO1f6l1OQNyAtqpcyQcgfnyW0z
12
- g7tGxTYD+6wJHffM9d9txOUw6djkF6bDxyqB8lo4Z3IObCx18AZjI9XPS9QG7w6q
13
- LCWuMG2lkCcRgASqaVk9fEf9yMc2xxz5o3kCAwEAAaN3MHUwCQYDVR0TBAIwADAL
14
- BgNVHQ8EBAMCBLAwHQYDVR0OBBYEFBqUFCKCOe5IuueUVqOB991jyCLLMB0GA1Ud
15
- EQQWMBSBEm1hY2llakBtZW5zZmVsZC5wbDAdBgNVHRIEFjAUgRJtYWNpZWpAbWVu
16
- c2ZlbGQucGwwDQYJKoZIhvcNAQELBQADggGBADD0/UuTTFgW+CGk2U0RDw2RBOca
17
- W2LTF/G7AOzuzD0Tc4voc7WXyrgKwJREv8rgBimLnNlgmFJLmtUCh2U/MgxvcilH
18
- yshYcbseNvjkrtYnLRlWZR4SSB6Zei5AlyGVQLPkvdsBpNegcG6w075YEwzX/38a
19
- 8V9B/Yri2OGELBz8ykl7BsXUgNoUPA/4pHF6YRLz+VirOaUIQ4JfY7xGj6fSOWWz
20
- /rQ/d77r6o1mfJYM/3BRVg73a3b7DmRnE5qjwmSaSQ7u802pJnLesmArch0xGCT/
21
- fMmRli1Qb+6qOTl9mzD6UDMAyFR4t6MStLm0mIEqM0nBO5nUdUWbC7l9qXEf8XBE
22
- 2DP28p3EqSuS+lKbAWKcqv7t0iRhhmaod+Yn9mcrLN1sa3q3KSQ9BCyxezCD4Mk2
23
- R2P11bWoCtr70BsccVrN8jEhzwXngMyI2gVt750Y+dbTu1KgRqZKp/ECe7ZzPzXj
24
- pIy9vHxTANKYVyI4qj8OrFdEM5BQNu8oQpL0iQ==
25
- -----END CERTIFICATE-----
data/config/errors.yml DELETED
@@ -1,41 +0,0 @@
1
- en:
2
- dry_validation:
3
- errors:
4
- invalid_broker_schema: >
5
- has an invalid format
6
- Expected schema, host and port number
7
- Example: kafka://127.0.0.1:9092 or kafka+ssl://127.0.0.1:9092
8
- invalid_certificate: >
9
- is not a valid certificate
10
- invalid_certificate_from_path: >
11
- is not a valid certificate
12
- invalid_private_key: >
13
- is not a valid private key
14
- max_timeout_size_for_exponential: >
15
- pause_timeout cannot be more than pause_max_timeout
16
- max_wait_time_limit:
17
- max_wait_time cannot be more than socket_timeout
18
- topics_names_not_unique: >
19
- all topic names within a single consumer group must be unique
20
- ssl_client_cert_with_ssl_client_cert_key: >
21
- Both ssl_client_cert and ssl_client_cert_key need to be provided
22
- ssl_client_cert_key_with_ssl_client_cert: >
23
- Both ssl_client_cert_key and ssl_client_cert need to be provided
24
- ssl_client_cert_chain_with_ssl_client_cert: >
25
- Both ssl_client_cert_chain and ssl_client_cert need to be provided
26
- ssl_client_cert_chain_with_ssl_client_cert_key: >
27
- Both ssl_client_cert_chain and ssl_client_cert_key need to be provided
28
- ssl_client_cert_key_password_with_ssl_client_cert_key: >
29
- Both ssl_client_cert_key_password and ssl_client_cert_key need to be provided
30
- does_not_respond_to_token: >
31
- needs to respond to a #token method
32
- required_usage_count: >
33
- Given topic must be used at least once
34
- pid_already_exists: >
35
- Pidfile already exists
36
- consumer_groups_inclusion: >
37
- Unknown consumer group
38
- does_not_exist:
39
- Given file does not exist or cannot be read
40
- does_not_respond_to_call: >
41
- needs to respond to a #call method
@@ -1,13 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Strategies for Kafka partitions assignments
5
- module AssignmentStrategies
6
- # Standard RoundRobin strategy
7
- class RoundRobin < SimpleDelegator
8
- def initialize
9
- super(Kafka::RoundRobinAssignmentStrategy.new)
10
- end
11
- end
12
- end
13
- end
@@ -1,63 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Both Karafka and Ruby-Kafka contain a lot of settings that can be applied on multiple
5
- # levels. In Karafka that is on consumer group and on the topic level. In Ruby-Kafka it
6
- # is on consumer, subscription and consumption levels. In order to maintain an order
7
- # in managing those settings, this module was created. It contains details on what setting
8
- # where should go and which layer (both on Karafka and Ruby-Kafka) is responsible for
9
- # setting it and sending it forward
10
- # @note Settings presented here cover all the settings that are being used across Karafka
11
- module AttributesMap
12
- class << self
13
- # What settings should go where in ruby-kafka
14
- # @return [Hash] hash with proper sections on what to proxy where in Ruby-Kafka
15
- # @note All other settings will be passed to Kafka.new method invocation.
16
- # All elements in this hash are just edge cases
17
- def api_adapter
18
- {
19
- consumer: %i[
20
- session_timeout offset_commit_interval offset_commit_threshold
21
- offset_retention_time heartbeat_interval fetcher_max_queue_size
22
- assignment_strategy
23
- ],
24
- subscribe: %i[start_from_beginning max_bytes_per_partition],
25
- consumption: %i[min_bytes max_bytes max_wait_time],
26
- pause: %i[pause_timeout pause_max_timeout pause_exponential_backoff],
27
- # All the options that are under kafka config namespace, but are not used
28
- # directly with kafka api, but from the Karafka user perspective, they are
29
- # still related to kafka. They should not be proxied anywhere
30
- ignored: %i[reconnect_timeout automatically_mark_as_consumed]
31
- }
32
- end
33
-
34
- # @return [Array<Symbol>] properties that can be set on a per topic level
35
- def topic
36
- (api_adapter[:subscribe] + %i[
37
- backend
38
- name
39
- deserializer
40
- responder
41
- batch_consuming
42
- ]).uniq
43
- end
44
-
45
- # @return [Array<Symbol>] properties that can be set on a per consumer group level
46
- # @note Note that there are settings directly extracted from the config kafka namespace
47
- # I did this that way, so I won't have to repeat same setting keys over and over again
48
- # Thanks to this solution, if any new setting is available for ruby-kafka, we just need
49
- # to add it to our configuration class and it will be handled automatically.
50
- def consumer_group
51
- # @note We don't ignore the api_adapter[:ignored] values as they should be ignored
52
- # only when proxying details go ruby-kafka. We use ignored fields internally in karafka
53
- ignored_settings = api_adapter[:subscribe]
54
- defined_settings = api_adapter.values.flatten
55
- karafka_settings = %i[batch_fetching]
56
-
57
- dynamically_proxied = Karafka::Setup::Config.config.kafka.to_h.keys
58
-
59
- (defined_settings + dynamically_proxied).uniq + karafka_settings - ignored_settings
60
- end
61
- end
62
- end
63
- end
@@ -1,16 +0,0 @@
1
- # frozen_string_literal: true
2
-
3
- module Karafka
4
- # Namespace for all different backends Karafka supports
5
- module Backends
6
- # Backend that just runs stuff asap without any scheduling
7
- module Inline
8
- private
9
-
10
- # Executes consume code immediately (without enqueuing)
11
- def process
12
- Karafka.monitor.instrument('backends.inline.process', caller: self) { consume }
13
- end
14
- end
15
- end
16
- end
@@ -1,226 +0,0 @@
# frozen_string_literal: true

module Karafka
  # Base responder from which all Karafka responders should inherit
  # Similar to Rails responders concept. It allows us to design flow from one app to another
  # by isolating what responses should be sent (and where) based on a given action
  # It differs from Rails responders in the way it works: in std http request we can have one
  # response, here we can have unlimited number of them
  #
  # It has a simple API for defining where should we respond (and if it is required)
  #
  # @example Basic usage (each registered topic is required to be used by default)
  #   class Responder < BaseResponder
  #     topic :new_action
  #
  #     def respond(data)
  #       respond_to :new_action, data
  #     end
  #   end
  #
  # @example Responding to a topic with extra options
  #   class Responder < BaseResponder
  #     topic :new_action
  #
  #     def respond(data)
  #       respond_to :new_action, data, partition_key: 'thing'
  #     end
  #   end
  #
  # @example Marking topic as not required (we won't have to use it)
  #   class Responder < BaseResponder
  #     topic :required_topic
  #     topic :new_action, required: false
  #
  #     def respond(data)
  #       respond_to :required_topic, data
  #     end
  #   end
  #
  # @example Multiple times used topic
  #   class Responder < BaseResponder
  #     topic :required_topic
  #
  #     def respond(data)
  #       data.each do |subset|
  #         respond_to :required_topic, subset
  #       end
  #     end
  #   end
  #
  # @example Specify serializer for a topic
  #   class Responder < BaseResponder
  #     topic :xml_topic, serializer: MyXMLSerializer
  #
  #     def respond(data)
  #       data.each do |subset|
  #         respond_to :xml_topic, subset
  #       end
  #     end
  #   end
  #
  # @example Accept multiple arguments to a respond method
  #   class Responder < BaseResponder
  #     topic :users_actions
  #     topic :articles_viewed
  #
  #     def respond(user, article)
  #       respond_to :users_actions, user
  #       respond_to :articles_viewed, article
  #     end
  #   end
  class BaseResponder
    # Responder usage contract used to validate that all registered/used topics were
    # handled according to their requirements
    CONTRACT = Karafka::Contracts::ResponderUsage.new.freeze

    private_constant :CONTRACT

    class << self
      # Definitions of all topics that we want to be able to use in this responder should go here
      attr_accessor :topics
      # Contract that we can use to control and/or require some additional details upon options
      # that are being passed to the producer. This can be in particular useful if we want to make
      # sure that for example partition_key is always present.
      attr_accessor :options_contract

      # Registers a topic as on to which we will be able to respond
      # @param topic_name [Symbol, String] name of topic to which we want to respond
      # @param options [Hash] hash with optional configuration details
      # @note Defaults the serializer to the app-wide one and marks the topic as registered
      def topic(topic_name, options = {})
        options[:serializer] ||= Karafka::App.config.serializer
        options[:registered] = true
        self.topics ||= {}
        topic_obj = Responders::Topic.new(topic_name, options)
        self.topics[topic_obj.name] = topic_obj
      end

      # A simple alias for easier standalone responder usage.
      # Instead of building it with new.call it allows (in case of using JSON serializer)
      # to just run it directly from the class level
      # @param data Anything that we want to respond with
      # @example Send user data with a responder
      #   UsersCreatedResponder.call(@created_user)
      def call(*data)
        # Just in case there were no topics defined for a responder, we initialize with
        # empty hash not to handle a nil case
        self.topics ||= {}
        new.call(*data)
      end
    end

    # Buffer of pending messages: maps topic name (String) to an array of
    # [serialized_data, options] pairs collected by #respond_to
    attr_reader :messages_buffer

    # Creates a responder object
    # @return [Karafka::BaseResponder] base responder descendant responder
    def initialize
      @messages_buffer = {}
    end

    # Performs respond and validates that all the response requirement were met
    # @param data Anything that we want to respond with
    # @note We know that validators should be executed also before sending data to topics, however
    #   the implementation gets way more complicated then, that's why we check after everything
    #   was sent using responder
    # @example Send user data with a responder
    #   UsersCreatedResponder.new.call(@created_user)
    # @example Send user data with a responder using non default Parser
    #   UsersCreatedResponder.new(MyParser).call(@created_user)
    def call(*data)
      respond(*data)
      validate_usage!
      validate_options!
      deliver!
    end

    private

    # Checks if we met all the topics requirements. It will fail if we didn't send a message to
    # a registered required topic, etc.
    # @raise [Karafka::Errors::InvalidResponderUsageError] when the usage contract fails
    def validate_usage!
      registered_topics = self.class.topics.map do |name, topic|
        topic.to_h.merge!(
          usage_count: messages_buffer[name]&.count || 0
        )
      end

      used_topics = messages_buffer.map do |name, usage|
        topic = self.class.topics[name] || Responders::Topic.new(name, registered: false)
        topic.to_h.merge!(usage_count: usage.count)
      end

      result = CONTRACT.call(
        registered_topics: registered_topics,
        used_topics: used_topics
      )

      return if result.success?

      raise Karafka::Errors::InvalidResponderUsageError, result.errors.to_h
    end

    # Checks if we met all the options requirements before sending them to the producer.
    # @raise [Karafka::Errors::InvalidResponderMessageOptionsError] when any buffered message
    #   options fail the optional options contract
    def validate_options!
      return true unless self.class.options_contract

      messages_buffer.each_value do |messages_set|
        messages_set.each do |message_data|
          # message_data is a [data, options] pair - the contract validates the options part
          result = self.class.options_contract.call(message_data.last)
          next if result.success?

          raise Karafka::Errors::InvalidResponderMessageOptionsError, result.errors.to_h
        end
      end
    end

    # Takes all the messages from the buffer and delivers them one by one
    # @note This method is executed after the validation, so we're sure that
    #   what we send is legit and it will go to a proper topics
    def deliver!
      messages_buffer.each_value do |data_elements|
        data_elements.each do |data, options|
          # We map this topic name, so it will match namespaced/etc topic in Kafka
          # @note By default will not change topic (if default mapper used)
          mapped_topic = Karafka::App.config.topic_mapper.outgoing(options[:topic])
          external_options = options.merge(topic: mapped_topic)
          producer(options).call(data, external_options)
        end
      end
    end

    # Method that needs to be implemented in a subclass. It should handle responding
    # on registered topics
    # @param _data [Object] anything that we want to use to send to Kafka
    # @raise [NotImplementedError] This method needs to be implemented in a subclass
    def respond(*_data)
      raise NotImplementedError, 'Implement this in a subclass'
    end

    # This method allow us to respond to a single topic with a given data. It can be used
    # as many times as we need. Especially when we have 1:n flow
    # @param topic [Symbol, String] topic to which we want to respond
    # @param data [String, Object] string or object that we want to send
    # @param options [Hash] options for waterdrop (e.g. partition_key).
    # @note Respond to does not accept multiple data arguments.
    # @note NOTE(review): an unregistered topic raises NoMethodError here (nil serializer)
    #   before validate_usage! gets a chance to report it - confirm this is acceptable
    def respond_to(topic, data, options = {})
      # We normalize the format to string, as WaterDrop and Ruby-Kafka support only
      # string topics
      topic = topic.to_s

      messages_buffer[topic] ||= []
      messages_buffer[topic] << [
        self.class.topics[topic].serializer.call(data),
        options.merge(topic: topic)
      ]
    end

    # @param options [Hash] options for waterdrop
    # @return [Class] WaterDrop producer (sync or async based on the settings)
    def producer(options)
      if self.class.topics[options[:topic]].async?
        WaterDrop::AsyncProducer
      else
        WaterDrop::SyncProducer
      end
    end
  end
end
@@ -1,48 +0,0 @@
# frozen_string_literal: true

module Karafka
  # Karafka framework Cli
  class Cli < Thor
    # Prints the application data flow between topics (incoming => outgoing)
    class Flow < Base
      desc 'Print application data flow (incoming => outgoing)'

      # Logs every defined route in alphabetical order together with its outgoing topics
      def call
        topics.each do |topic|
          responder_topics = topic.responder&.topics

          lines =
            if responder_topics.nil?
              # No responder (or responder without topics) means nothing flows out
              ["#{topic.name} => (nothing)"]
            else
              entries = responder_topics.each_value.map do |responder_topic|
                # Required topics are always responded to, optional ones only sometimes
                delivery = responder_topic.required? ? 'always' : 'conditionally'

                format(responder_topic.name, "(#{delivery})")
              end

              ["#{topic.name} =>", *entries]
            end

          Karafka.logger.info(lines.join("\n"))
        end
      end

      private

      # @return [Array<Karafka::Routing::Topic>] all topics sorted in alphabetical order
      def topics
        Karafka::App.consumer_groups.flat_map(&:topics).sort_by(&:name)
      end

      # Formats a given value with label in a nice way
      # @param label [String] label describing value
      # @param value [String] value that should be printed
      def format(label, value)
        " - #{label}: #{value}"
      end
    end
  end
end
@@ -1,19 +0,0 @@
# frozen_string_literal: true

module Karafka
  class Cli < Thor
    # Fallback command invoked when the CLI was started without any command.
    # Exits with status 1 (instead of Thor's default 0) so scripts can detect
    # that nothing was actually executed
    # @see https://github.com/karafka/karafka/issues/619
    class Missingno < Base
      desc 'Hidden command that gets invoked when no command is provided', hide: true

      # Reports the missing command and terminates with a non-zero exit code
      def call
        Karafka.logger.error('No command provided')

        exit(1)
      end
    end
  end
end
@@ -1,67 +0,0 @@
# frozen_string_literal: true

module Karafka
  # Special type of a listener, that is not an instrumentation one, but one that triggers
  # code reload in the development mode after each fetched batch (or message)
  #
  # Please refer to the development code reload sections for details on the benefits and
  # downsides of the in-process code reloading
  class CodeReloader
    # Guards the reload procedure. Multiple consumer groups may run in separate threads
    # and none of them should trigger a reload before a previous thread fully reloaded
    # the app
    MUTEX = Mutex.new

    private_constant :MUTEX

    # @param reloaders [Array<Object>] any code loaders that we use in this app. Whether it
    #   is the Rails loader, Zeitwerk or anything else that allows reloading triggering
    # @param block [Proc] yields given block just before reloading. This can be used to
    #   hook custom reloading stuff, that ain't reloaders (for example for resetting
    #   dry-events registry)
    def initialize(*reloaders, &block)
      @reloaders = reloaders
      @block = block
    end

    # Binds to the instrumentation events and triggers reload
    # @param _event [Dry::Event] empty dry event
    # @note Since we de-register all the user defined objects and redraw routes, it means
    #   that we won't be able to do a multi-batch buffering in the development mode as each
    #   of the batches will be buffered on a newly created "per fetch" instance.
    def on_connection_listener_fetch_loop(_event)
      reload
    end

    private

    # Triggers reload of both standard and Rails reloaders as well as expires all internals
    # of Karafka, so it can be rediscovered and rebuilt
    def reload
      MUTEX.synchronize do
        # Rails-style reloaders expose #execute, everything else (e.g. Zeitwerk) #reload
        if @reloaders.first.respond_to?(:execute)
          reload_with_rails
        else
          reload_without_rails
        end
      end
    end

    # Rails reloading procedure - only runs when at least one reloader reports updates
    def reload_with_rails
      pending = @reloaders.select(&:updated?)

      return if pending.empty?

      pending.each(&:execute)
      finalize_reload
    end

    # Zeitwerk and other reloaders - always reloads them all
    def reload_without_rails
      @reloaders.each(&:reload)
      finalize_reload
    end

    # Shared tail of both reload paths: custom pre-reload hook first, then the Karafka app
    def finalize_reload
      @block&.call
      Karafka::App.reload
    end
  end
end
@@ -1,158 +0,0 @@
# frozen_string_literal: true

module Karafka
  # Namespace for all the things related to Kafka connection
  module Connection
    # Mapper used to convert our internal settings into ruby-kafka settings based on their
    # API requirements.
    # Since ruby-kafka has more and more options and there are few "levels" on which
    # we have to apply them (despite the fact, that in Karafka you configure all of it
    # in one place), we have to remap it into what ruby-kafka driver requires
    # @note The good thing about Kafka.new method is that it ignores all options that
    #   do nothing. So we don't have to worry about injecting our internal settings
    #   into the client and breaking stuff
    module ApiAdapter
      class << self
        # Builds all the configuration settings for Kafka.new method
        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
        # @return [Array<Hash>] Array with all the client arguments including hash with all
        #   the settings required by Kafka.new method
        # @note We return array, so we can inject any arguments we want, in case of changes in the
        #   raw driver
        def client(consumer_group)
          # This one is a default that takes all the settings except special
          # cases defined in the map
          settings = {
            logger: ::Karafka.logger,
            client_id: ::Karafka::App.config.client_id
          }

          kafka_configs.each_key do |setting_name|
            # All options for config adapter should be ignored as we're just interested
            # in what is left, as we want to pass all the options that are "typical"
            # and not listed in the api_adapter special cases mapping. All the values
            # from the api_adapter mapping go somewhere else, not to the client directly
            next if AttributesMap.api_adapter.values.flatten.include?(setting_name)

            # Settings for each consumer group are either defined per consumer group or are
            # inherited from the global/general settings level, thus we don't have to fetch them
            # from the kafka settings as they are already on a consumer group level
            settings[setting_name] = consumer_group.public_send(setting_name)
          end

          settings_hash = sanitize(settings)

          # Normalization for the way Kafka::Client accepts arguments from 0.5.3
          # (seed_brokers is a positional argument, everything else keyword options)
          [settings_hash.delete(:seed_brokers), settings_hash]
        end

        # Builds all the configuration settings for kafka#consumer method
        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
        # @return [Hash] all the consumer keyword arguments including hash with all
        #   the settings required by Kafka#consumer
        def consumer(consumer_group)
          settings = { group_id: consumer_group.id }
          settings = fetch_for(:consumer, consumer_group, settings)
          sanitize(settings)
        end

        # Builds all the configuration settings for kafka consumer consume_each_batch and
        # consume_each_message methods
        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
        # @return [Hash] hash with all the arguments required by consuming method
        #   including all the settings required by
        #   Kafka::Consumer#consume_each_message and Kafka::Consumer#consume_each_batch method
        def consumption(consumer_group)
          sanitize(
            fetch_for(
              :consumption,
              consumer_group,
              # Karafka name differs from the ruby-kafka one, hence the explicit remap
              automatically_mark_as_processed: consumer_group.automatically_mark_as_consumed
            )
          )
        end

        # Builds all the configuration settings for kafka consumer#subscribe method
        # @param topic [Karafka::Routing::Topic] topic that holds details for a given subscription
        # @return [Hash] hash with all the settings required by kafka consumer#subscribe method
        def subscribe(topic)
          settings = fetch_for(:subscribe, topic)
          [Karafka::App.config.topic_mapper.outgoing(topic.name), sanitize(settings)]
        end

        # Builds all the configuration settings required by kafka consumer#pause method
        # @param topic [String] topic that we want to pause
        # @param partition [Integer] number partition that we want to pause
        # @param consumer_group [Karafka::Routing::ConsumerGroup] consumer group details
        # @return [Hash] hash with all the details required to pause kafka consumer
        def pause(topic, partition, consumer_group)
          {
            args: [Karafka::App.config.topic_mapper.outgoing(topic), partition],
            kwargs: {
              timeout: consumer_group.pause_timeout,
              max_timeout: consumer_group.pause_max_timeout,
              exponential_backoff: consumer_group.pause_exponential_backoff
            }
          }
        end

        # Remaps topic details taking the topic mapper feature into consideration.
        # @param params [Karafka::Params::Params] params instance
        # @return [Array] array with all the details needed by ruby-kafka to mark message
        #   as processed
        # @note When default empty topic mapper is used, no need for any conversion as the
        #   internal and external format are exactly the same
        def mark_message_as_processed(params)
          # Majority of users don't use custom topic mappers. No need to change anything when it
          # is a default mapper that does not change anything. Only some cloud providers require
          # topics to be remapped
          return [params.metadata] if Karafka::App.config.topic_mapper.is_a?(
            Karafka::Routing::TopicMapper
          )

          # @note We don't use tap as it is around 13% slower than non-dup version
          dupped = params.metadata.dup
          dupped['topic'] = Karafka::App.config.topic_mapper.outgoing(params.metadata.topic)
          [dupped]
        end

        private

        # Fetches proper settings for a given map namespace
        # @param namespace_key [Symbol] namespace from attributes map config adapter hash
        # @param route_layer [Object] route topic or consumer group
        # @param preexisting_settings [Hash] hash with some preexisting settings that might have
        #   been loaded in a different way
        # @return [Hash] the preexisting_settings hash, mutated in place with the fetched values
        def fetch_for(namespace_key, route_layer, preexisting_settings = {})
          kafka_configs.each_key do |setting_name|
            # Ignore settings that are not related to our namespace
            next unless AttributesMap.api_adapter[namespace_key].include?(setting_name)

            # Ignore settings that are already initialized
            # In case they are in preexisting settings fetched differently
            next if preexisting_settings.key?(setting_name)

            # Fetch all the settings from a given layer object. Objects can handle the fallback
            # to the kafka settings, so
            preexisting_settings[setting_name] = route_layer.send(setting_name)
          end

          preexisting_settings
        end

        # Removes nil containing keys from the final settings so it can use Kafkas driver
        # defaults for those
        # @param settings [Hash] settings that may contain nil values
        # @return [Hash] settings without nil using keys (none of karafka options should be nil)
        def sanitize(settings)
          settings.reject { |_key, value| value.nil? }
        end

        # @return [Hash] Kafka config details as a hash
        def kafka_configs
          ::Karafka::App.config.kafka.to_h
        end
      end
    end
  end
end