karafka 1.4.12 → 2.2.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (359)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
  5. data/.github/workflows/ci.yml +169 -31
  6. data/.rspec +4 -0
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +716 -607
  9. data/CONTRIBUTING.md +10 -19
  10. data/Gemfile +7 -0
  11. data/Gemfile.lock +69 -92
  12. data/LICENSE +17 -0
  13. data/LICENSE-COMM +89 -0
  14. data/LICENSE-LGPL +165 -0
  15. data/README.md +48 -47
  16. data/bin/benchmarks +99 -0
  17. data/bin/create_token +22 -0
  18. data/bin/integrations +310 -0
  19. data/bin/karafka +5 -14
  20. data/bin/record_rss +50 -0
  21. data/bin/rspecs +6 -0
  22. data/bin/scenario +29 -0
  23. data/bin/stress_many +13 -0
  24. data/bin/stress_one +13 -0
  25. data/bin/verify_license_integrity +37 -0
  26. data/bin/wait_for_kafka +24 -0
  27. data/certs/cert_chain.pem +26 -0
  28. data/certs/karafka-pro.pem +11 -0
  29. data/config/locales/errors.yml +97 -0
  30. data/config/locales/pro_errors.yml +59 -0
  31. data/docker-compose.yml +19 -11
  32. data/karafka.gemspec +26 -22
  33. data/lib/active_job/karafka.rb +17 -0
  34. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  35. data/lib/karafka/active_job/consumer.rb +49 -0
  36. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  37. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  38. data/lib/karafka/active_job/current_attributes.rb +42 -0
  39. data/lib/karafka/active_job/dispatcher.rb +69 -0
  40. data/lib/karafka/active_job/job_extensions.rb +34 -0
  41. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  42. data/lib/karafka/admin.rb +313 -0
  43. data/lib/karafka/app.rb +47 -23
  44. data/lib/karafka/base_consumer.rb +260 -29
  45. data/lib/karafka/cli/base.rb +67 -36
  46. data/lib/karafka/cli/console.rb +18 -12
  47. data/lib/karafka/cli/help.rb +24 -0
  48. data/lib/karafka/cli/info.rb +47 -12
  49. data/lib/karafka/cli/install.rb +23 -14
  50. data/lib/karafka/cli/server.rb +101 -44
  51. data/lib/karafka/cli/topics.rb +146 -0
  52. data/lib/karafka/cli.rb +24 -27
  53. data/lib/karafka/connection/client.rb +553 -90
  54. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  55. data/lib/karafka/connection/listener.rb +294 -38
  56. data/lib/karafka/connection/listeners_batch.rb +40 -0
  57. data/lib/karafka/connection/messages_buffer.rb +84 -0
  58. data/lib/karafka/connection/pauses_manager.rb +46 -0
  59. data/lib/karafka/connection/proxy.rb +98 -0
  60. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  61. data/lib/karafka/connection/rebalance_manager.rb +105 -0
  62. data/lib/karafka/contracts/base.rb +17 -0
  63. data/lib/karafka/contracts/config.rb +130 -11
  64. data/lib/karafka/contracts/consumer_group.rb +32 -187
  65. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  66. data/lib/karafka/contracts/topic.rb +65 -0
  67. data/lib/karafka/contracts.rb +1 -1
  68. data/lib/karafka/embedded.rb +36 -0
  69. data/lib/karafka/env.rb +46 -0
  70. data/lib/karafka/errors.rb +37 -21
  71. data/lib/karafka/helpers/async.rb +33 -0
  72. data/lib/karafka/helpers/colorize.rb +26 -0
  73. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  74. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  75. data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
  76. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  77. data/lib/karafka/instrumentation/logger_listener.rb +303 -0
  78. data/lib/karafka/instrumentation/monitor.rb +13 -61
  79. data/lib/karafka/instrumentation/notifications.rb +79 -0
  80. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  81. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
  82. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
  83. data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
  84. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
  85. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
  86. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  87. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
  88. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  89. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  90. data/lib/karafka/licenser.rb +78 -0
  91. data/lib/karafka/messages/batch_metadata.rb +52 -0
  92. data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
  93. data/lib/karafka/messages/builders/message.rb +40 -0
  94. data/lib/karafka/messages/builders/messages.rb +36 -0
  95. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  96. data/lib/karafka/messages/messages.rb +71 -0
  97. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  98. data/lib/karafka/messages/parser.rb +14 -0
  99. data/lib/karafka/messages/seek.rb +12 -0
  100. data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
  101. data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
  102. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  103. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  104. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  105. data/lib/karafka/pro/cleaner/errors.rb +27 -0
  106. data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
  107. data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
  108. data/lib/karafka/pro/cleaner.rb +41 -0
  109. data/lib/karafka/pro/contracts/base.rb +23 -0
  110. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  111. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  112. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  113. data/lib/karafka/pro/encryption/errors.rb +27 -0
  114. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  115. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  116. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  117. data/lib/karafka/pro/encryption.rb +47 -0
  118. data/lib/karafka/pro/iterator/expander.rb +95 -0
  119. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  120. data/lib/karafka/pro/iterator.rb +170 -0
  121. data/lib/karafka/pro/loader.rb +106 -0
  122. data/lib/karafka/pro/performance_tracker.rb +84 -0
  123. data/lib/karafka/pro/processing/collapser.rb +62 -0
  124. data/lib/karafka/pro/processing/coordinator.rb +147 -0
  125. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  126. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  127. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  128. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
  129. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  130. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  131. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  132. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  133. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  134. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  135. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  136. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  137. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  138. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  139. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  140. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  141. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  142. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  143. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  144. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  146. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  147. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  148. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  149. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  150. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  151. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  152. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  153. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  154. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  155. data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
  156. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  157. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  158. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  159. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  160. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  161. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  162. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  163. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  164. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  165. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  166. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  167. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  168. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  169. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  170. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  171. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  172. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  173. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  174. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  175. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  176. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  177. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  178. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
  179. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  180. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  181. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  182. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  183. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  184. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  185. data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
  186. data/lib/karafka/pro/processing/strategies.rb +22 -0
  187. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  188. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  189. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  190. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  191. data/lib/karafka/pro/routing/features/base.rb +24 -0
  192. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  193. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  194. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  195. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  196. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  197. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  198. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  199. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  200. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  201. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  202. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  203. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  204. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  205. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  206. data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
  207. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
  208. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
  209. data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
  210. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  211. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
  212. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  213. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  214. data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
  215. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  216. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
  217. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  218. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  219. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  220. data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
  221. data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
  222. data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
  223. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  224. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  225. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  226. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  227. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  228. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  229. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  230. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  231. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  232. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  233. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  234. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  235. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  236. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  237. data/lib/karafka/pro.rb +13 -0
  238. data/lib/karafka/process.rb +24 -8
  239. data/lib/karafka/processing/coordinator.rb +181 -0
  240. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  241. data/lib/karafka/processing/executor.rb +155 -0
  242. data/lib/karafka/processing/executors_buffer.rb +72 -0
  243. data/lib/karafka/processing/expansions_selector.rb +22 -0
  244. data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
  245. data/lib/karafka/processing/inline_insights/listener.rb +19 -0
  246. data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
  247. data/lib/karafka/processing/jobs/base.rb +55 -0
  248. data/lib/karafka/processing/jobs/consume.rb +45 -0
  249. data/lib/karafka/processing/jobs/idle.rb +24 -0
  250. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  251. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  252. data/lib/karafka/processing/jobs_builder.rb +28 -0
  253. data/lib/karafka/processing/jobs_queue.rb +150 -0
  254. data/lib/karafka/processing/partitioner.rb +24 -0
  255. data/lib/karafka/processing/result.rb +42 -0
  256. data/lib/karafka/processing/scheduler.rb +22 -0
  257. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  258. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  259. data/lib/karafka/processing/strategies/base.rb +52 -0
  260. data/lib/karafka/processing/strategies/default.rb +158 -0
  261. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  262. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  263. data/lib/karafka/processing/strategies/mom.rb +29 -0
  264. data/lib/karafka/processing/strategy_selector.rb +47 -0
  265. data/lib/karafka/processing/worker.rb +93 -0
  266. data/lib/karafka/processing/workers_batch.rb +27 -0
  267. data/lib/karafka/railtie.rb +141 -0
  268. data/lib/karafka/routing/activity_manager.rb +84 -0
  269. data/lib/karafka/routing/builder.rb +45 -19
  270. data/lib/karafka/routing/consumer_group.rb +56 -20
  271. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  272. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  273. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  274. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  275. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  276. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  277. data/lib/karafka/routing/features/active_job.rb +13 -0
  278. data/lib/karafka/routing/features/base/expander.rb +59 -0
  279. data/lib/karafka/routing/features/base.rb +71 -0
  280. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  281. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  282. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  283. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  284. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  285. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  286. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  287. data/lib/karafka/routing/features/declaratives.rb +14 -0
  288. data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
  289. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
  290. data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
  291. data/lib/karafka/routing/features/inline_insights.rb +40 -0
  292. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  293. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  294. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  295. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  296. data/lib/karafka/routing/proxy.rb +22 -21
  297. data/lib/karafka/routing/router.rb +24 -10
  298. data/lib/karafka/routing/subscription_group.rb +110 -0
  299. data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
  300. data/lib/karafka/routing/topic.rb +87 -24
  301. data/lib/karafka/routing/topics.rb +46 -0
  302. data/lib/karafka/runner.rb +52 -0
  303. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  304. data/lib/karafka/server.rb +113 -37
  305. data/lib/karafka/setup/attributes_map.rb +348 -0
  306. data/lib/karafka/setup/config.rb +256 -175
  307. data/lib/karafka/status.rb +54 -7
  308. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  309. data/lib/karafka/templates/karafka.rb.erb +33 -55
  310. data/lib/karafka/time_trackers/base.rb +14 -0
  311. data/lib/karafka/time_trackers/pause.rb +122 -0
  312. data/lib/karafka/time_trackers/poll.rb +69 -0
  313. data/lib/karafka/version.rb +1 -1
  314. data/lib/karafka.rb +91 -17
  315. data/renovate.json +9 -0
  316. data.tar.gz.sig +0 -0
  317. metadata +330 -168
  318. metadata.gz.sig +0 -0
  319. data/MIT-LICENCE +0 -18
  320. data/certs/mensfeld.pem +0 -25
  321. data/config/errors.yml +0 -41
  322. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  323. data/lib/karafka/attributes_map.rb +0 -63
  324. data/lib/karafka/backends/inline.rb +0 -16
  325. data/lib/karafka/base_responder.rb +0 -226
  326. data/lib/karafka/cli/flow.rb +0 -48
  327. data/lib/karafka/cli/missingno.rb +0 -19
  328. data/lib/karafka/code_reloader.rb +0 -67
  329. data/lib/karafka/connection/api_adapter.rb +0 -158
  330. data/lib/karafka/connection/batch_delegator.rb +0 -55
  331. data/lib/karafka/connection/builder.rb +0 -23
  332. data/lib/karafka/connection/message_delegator.rb +0 -36
  333. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  334. data/lib/karafka/consumers/callbacks.rb +0 -71
  335. data/lib/karafka/consumers/includer.rb +0 -64
  336. data/lib/karafka/consumers/responders.rb +0 -24
  337. data/lib/karafka/consumers/single_params.rb +0 -15
  338. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  339. data/lib/karafka/contracts/responder_usage.rb +0 -54
  340. data/lib/karafka/fetcher.rb +0 -42
  341. data/lib/karafka/helpers/class_matcher.rb +0 -88
  342. data/lib/karafka/helpers/config_retriever.rb +0 -46
  343. data/lib/karafka/helpers/inflector.rb +0 -26
  344. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  345. data/lib/karafka/params/batch_metadata.rb +0 -26
  346. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  347. data/lib/karafka/params/builders/params.rb +0 -38
  348. data/lib/karafka/params/builders/params_batch.rb +0 -25
  349. data/lib/karafka/params/params_batch.rb +0 -60
  350. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  351. data/lib/karafka/persistence/client.rb +0 -29
  352. data/lib/karafka/persistence/consumers.rb +0 -45
  353. data/lib/karafka/persistence/topics.rb +0 -48
  354. data/lib/karafka/responders/builder.rb +0 -36
  355. data/lib/karafka/responders/topic.rb +0 -55
  356. data/lib/karafka/routing/topic_mapper.rb +0 -53
  357. data/lib/karafka/serialization/json/serializer.rb +0 -31
  358. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  359. data/lib/karafka/templates/application_responder.rb.erb +0 -11
@@ -0,0 +1,155 @@
1
# frozen_string_literal: true

module Karafka
  module Instrumentation
    # Namespace for vendor specific instrumentation
    module Vendors
      # Datadog specific instrumentation
      module Datadog
        # Karafka logger listener for Datadog that correlates job lifecycle logs
        # with Datadog traces. It depends on the 'ddtrace' gem.
        class LoggerListener
          include ::Karafka::Core::Configurable
          extend Forwardable

          def_delegators :config, :client, :service_name

          # `Datadog::Tracing` client that we should use to trace stuff
          setting :client

          # @see https://docs.datadoghq.com/tracing/trace_collection/dd_libraries/ruby
          setting :service_name, default: nil

          configure

          # Log levels that this listener delegates to the Karafka logger
          USED_LOG_LEVELS = %i[info error fatal].freeze

          private_constant :USED_LOG_LEVELS

          # Define one thin delegation method per used log level (info/error/fatal),
          # each forwarding straight to `Karafka.logger`
          USED_LOG_LEVELS.each do |log_level|
            define_method(log_level) do |*args|
              Karafka.logger.send(log_level, *args)
            end
          end

          # @param block [Proc] configuration block
          def initialize(&block)
            configure
            setup(&block) if block
          end

          # @param block [Proc] configuration block
          # @note We define this alias to be consistent with `WaterDrop#setup`
          def setup(&block)
            configure(&block)
          end

          # Opens a Datadog span and logs that a given job has started
          #
          # @param event [Karafka::Core::Monitoring::Event] event details including payload
          def on_worker_process(event)
            span = client.trace('karafka.consumer', service: service_name)
            push_tags

            started_job = event[:job]
            kind = started_job.class.to_s.split('::').last
            consumer = started_job.executor.topic.consumer
            topic = started_job.executor.topic.name

            span.resource = "#{consumer}#consume"
            info "[#{started_job.id}] #{kind} job for #{consumer} on #{topic} started"

            pop_tags
          end

          # Logs that a given job has finished and closes the active Datadog span
          #
          # @param event [Karafka::Core::Monitoring::Event] event details including payload
          def on_worker_processed(event)
            push_tags

            finished_job = event[:job]
            time = event[:time]
            kind = finished_job.class.to_s.split('::').last
            consumer = finished_job.executor.topic.consumer
            topic = finished_job.executor.topic.name

            info "[#{finished_job.id}] #{kind} job for #{consumer} on #{topic} finished in #{time}ms"

            client.active_span&.finish

            pop_tags
          end

          # There are many types of errors that can occur in many places, but we provide a single
          # handler for all of them to simplify error instrumentation.
          #
          # @param event [Karafka::Core::Monitoring::Event] event details including payload
          # @raise [Errors::UnsupportedCaseError] when an unrecognized error type arrives
          def on_error_occurred(event)
            push_tags

            # Named `err` (not `error`) so it does not shadow the `error` log method
            err = event[:error]
            client.active_span&.set_error(err)

            case event[:type]
            when 'consumer.consume.error'
              error "Consumer consuming error: #{err}"
            when 'consumer.revoked.error'
              error "Consumer on revoked failed due to an error: #{err}"
            when 'consumer.before_enqueue.error'
              error "Consumer before enqueue failed due to an error: #{err}"
            when 'consumer.before_consume.error'
              error "Consumer before consume failed due to an error: #{err}"
            when 'consumer.after_consume.error'
              error "Consumer after consume failed due to an error: #{err}"
            when 'consumer.shutdown.error'
              error "Consumer on shutdown failed due to an error: #{err}"
            when 'worker.process.error'
              fatal "Worker processing failed due to an error: #{err}"
            when 'connection.listener.fetch_loop.error'
              error "Listener fetch loop error: #{err}"
            when 'runner.call.error'
              fatal "Runner crashed due to an error: #{err}"
            when 'app.stopping.error'
              error 'Forceful Karafka server stop'
            when 'librdkafka.error'
              error "librdkafka internal error occurred: #{err}"
            # Those will only occur when retries in the client fail and when they did not stop
            # after back-offs
            when 'connection.client.poll.error'
              error "Data polling error occurred: #{err}"
            else
              pop_tags
              # This should never happen. Please contact the maintainers
              raise Errors::UnsupportedCaseError, event
            end

            pop_tags
          end

          # Pushes datadog's tags to the logger
          # This is required when tracing log lines asynchronously to correlate logs of the same
          # process together
          def push_tags
            return unless Karafka.logger.respond_to?(:push_tags)

            Karafka.logger.push_tags(client.log_correlation)
          end

          # Pops datadog's tags from the logger
          # This is required when tracing log lines asynchronously to avoid the logs of the
          # different processes to be correlated
          def pop_tags
            return unless Karafka.logger.respond_to?(:pop_tags)

            Karafka.logger.pop_tags
          end
        end
      end
    end
  end
end
@@ -0,0 +1,264 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module Instrumentation
5
+ # Namespace for vendor specific instrumentation
6
+ module Vendors
7
+ # Datadog specific instrumentation
8
+ module Datadog
9
+ # Listener that can be used to subscribe to Karafka to receive stats via StatsD
10
+ # and/or Datadog
11
+ #
12
+ # @note You need to setup the `dogstatsd-ruby` client and assign it
13
+ class MetricsListener
14
+ include ::Karafka::Core::Configurable
15
+ extend Forwardable
16
+
17
+ def_delegators :config, :client, :rd_kafka_metrics, :namespace, :default_tags
18
+
19
+ # Value object for storing a single rdkafka metric publishing details
20
+ RdKafkaMetric = Struct.new(:type, :scope, :name, :key_location)
21
+
22
+ # Namespace under which the DD metrics should be published
23
+ setting :namespace, default: 'karafka'
24
+
25
+ # Datadog client that we should use to publish the metrics
26
+ setting :client
27
+
28
+ # Default tags we want to publish (for example hostname)
29
+ # Format as followed (example for hostname): `["host:#{Socket.gethostname}"]`
30
+ setting :default_tags, default: []
31
+
32
+ # All the rdkafka metrics we want to publish
33
+ #
34
+ # By default we publish quite a lot so this can be tuned
35
+ # Note, that the once with `_d` come from Karafka, not rdkafka or Kafka
36
+ setting :rd_kafka_metrics, default: [
37
+ # Client metrics
38
+ RdKafkaMetric.new(:count, :root, 'messages.consumed', 'rxmsgs_d'),
39
+ RdKafkaMetric.new(:count, :root, 'messages.consumed.bytes', 'rxmsg_bytes'),
40
+
41
+ # Broker metrics
42
+ RdKafkaMetric.new(:count, :brokers, 'consume.attempts', 'txretries_d'),
43
+ RdKafkaMetric.new(:count, :brokers, 'consume.errors', 'txerrs_d'),
44
+ RdKafkaMetric.new(:count, :brokers, 'receive.errors', 'rxerrs_d'),
45
+ RdKafkaMetric.new(:count, :brokers, 'connection.connects', 'connects_d'),
46
+ RdKafkaMetric.new(:count, :brokers, 'connection.disconnects', 'disconnects_d'),
47
+ RdKafkaMetric.new(:gauge, :brokers, 'network.latency.avg', %w[rtt avg]),
48
+ RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p95', %w[rtt p95]),
49
+ RdKafkaMetric.new(:gauge, :brokers, 'network.latency.p99', %w[rtt p99]),
50
+
51
+ # Topics metrics
52
+ RdKafkaMetric.new(:gauge, :topics, 'consumer.lags', 'consumer_lag_stored'),
53
+ RdKafkaMetric.new(:gauge, :topics, 'consumer.lags_delta', 'consumer_lag_stored_d')
54
+ ].freeze
55
+
56
+ configure
57
+
58
# Builds the listener applying defaults and, when given, a configuration block
#
# @param block [Proc] configuration block
def initialize(&block)
  configure
  return unless block

  setup(&block)
end
63
+
64
# Runs the configuration block against this listener's settings
#
# @param block [Proc] configuration block
# @note We define this alias to be consistent with `WaterDrop#setup`
def setup(&block)
  configure(&block)
end
69
+
70
# Hooks up to Karafka instrumentation for emitted statistics and publishes
# every configured rdkafka metric tagged with the consumer group
#
# @param event [Karafka::Core::Monitoring::Event]
def on_statistics_emitted(event)
  stats = event[:statistics]
  tags = default_tags + ["consumer_group:#{event[:consumer_group_id]}"]

  rd_kafka_metrics.each { |metric| report_metric(metric, stats, tags) }
end
83
+
84
# Increases the errors count by 1, tagging it with the error type and (when the
# source responds to #messages, i.e. is a consumer) the consumer details
#
# @param event [Karafka::Core::Monitoring::Event]
def on_error_occurred(event)
  tags = ["type:#{event[:type]}"]

  source = event.payload[:caller]
  tags += consumer_tags(source) if source.respond_to?(:messages)

  count('error_occurred', 1, tags: default_tags + tags)
end
96
+
97
# Reports how many messages we've polled and how much time did we spend on it
#
# @param event [Karafka::Core::Monitoring::Event]
def on_connection_listener_fetch_loop_received(event)
  group_id = event[:subscription_group].consumer_group.id
  tags = default_tags + ["consumer_group:#{group_id}"]

  histogram('listener.polling.time_taken', event[:time], tags: tags)
  histogram('listener.polling.messages', event[:messages_buffer].size, tags: tags)
end
111
+
112
# Here we report majority of things related to processing as we have access to the
# consumer: batch counters, offset and the various lag histograms
#
# @param event [Karafka::Core::Monitoring::Event]
def on_consumer_consumed(event)
  consumer = event.payload[:caller]
  batch = consumer.messages
  batch_metadata = batch.metadata

  tags = default_tags + consumer_tags(consumer)

  count('consumer.messages', batch.count, tags: tags)
  count('consumer.batches', 1, tags: tags)
  gauge('consumer.offset', batch_metadata.last_offset, tags: tags)
  histogram('consumer.consumed.time_taken', event[:time], tags: tags)
  histogram('consumer.batch_size', batch.count, tags: tags)
  histogram('consumer.processing_lag', batch_metadata.processing_lag, tags: tags)
  histogram('consumer.consumption_lag', batch_metadata.consumption_lag, tags: tags)
end
130
+
131
# @param event [Karafka::Core::Monitoring::Event]
def on_consumer_revoked(event)
  count(
    'consumer.revoked',
    1,
    tags: default_tags + consumer_tags(event.payload[:caller])
  )
end
137
+
138
# @param event [Karafka::Core::Monitoring::Event]
def on_consumer_shutdown(event)
  count(
    'consumer.shutdown',
    1,
    tags: default_tags + consumer_tags(event.payload[:caller])
  )
end
144
+
145
# Worker related metrics
# @param event [Karafka::Core::Monitoring::Event]
def on_worker_process(event)
  stats = event[:jobs_queue].statistics

  gauge('worker.total_threads', Karafka::App.config.concurrency, tags: default_tags)
  histogram('worker.processing', stats[:busy], tags: default_tags)
  histogram('worker.enqueued_jobs', stats[:enqueued], tags: default_tags)
end
154
+
155
# We report this metric before and after processing for higher accuracy
# Without this, the utilization would not be fully reflected
# @param event [Karafka::Core::Monitoring::Event]
def on_worker_processed(event)
  busy_count = event[:jobs_queue].statistics[:busy]

  histogram('worker.processing', busy_count, tags: default_tags)
end
163
+
164
private

# Generates one thin delegation method per supported metric type (count, gauge,
# histogram, increment, decrement). Each generated method prefixes the metric key
# with the listener namespace (via #namespaced_metric) and forwards all remaining
# arguments (value, tags:, etc.) to the `client` untouched.
# `__FILE__, __LINE__ + 1` keeps backtraces pointing at this file.
%i[
  count
  gauge
  histogram
  increment
  decrement
].each do |metric_type|
  class_eval <<~METHODS, __FILE__, __LINE__ + 1
    def #{metric_type}(key, *args)
      client.#{metric_type}(
        namespaced_metric(key),
        *args
      )
    end
  METHODS
end
182
+
183
# Wraps metric name in listener's namespace
# @param metric_name [String] RdKafkaMetric name
# @return [String] metric name prefixed with the configured namespace
def namespaced_metric(metric_name)
  [namespace, metric_name].join('.')
end
189
+
190
# Reports a given metric statistics to Datadog
# @param metric [RdKafkaMetric] metric value object
# @param statistics [Hash] hash with all the statistics emitted
# @param base_tags [Array<String>] base tags we want to start with
# @raise [ArgumentError] when metric scope is none of :root, :brokers or :topics
def report_metric(metric, statistics, base_tags)
  case metric.scope
  # Single metric taken straight from the top level of the statistics payload
  when :root
    public_send(
      metric.type,
      metric.name,
      statistics.fetch(*metric.key_location),
      tags: base_tags
    )
  # One metric emission per connected broker node, tagged with the broker name
  when :brokers
    statistics.fetch('brokers').each_value do |broker_statistics|
      # Skip bootstrap nodes
      # Bootstrap nodes have nodeid -1, other nodes have positive
      # node ids
      next if broker_statistics['nodeid'] == -1

      public_send(
        metric.type,
        metric.name,
        broker_statistics.dig(*metric.key_location),
        tags: base_tags + ["broker:#{broker_statistics['nodename']}"]
      )
    end
  # One metric emission per topic partition, tagged with topic and partition
  when :topics
    statistics.fetch('topics').each do |topic_name, topic_values|
      topic_values['partitions'].each do |partition_name, partition_statistics|
        # NOTE(review): '-1' looks like a placeholder partition entry without a
        # real assignment — confirm against librdkafka statistics docs
        next if partition_name == '-1'
        # Skip until lag info is available
        next if partition_statistics['consumer_lag'] == -1
        next if partition_statistics['consumer_lag_stored'] == -1

        # Skip if we do not own the fetch assignment
        next if partition_statistics['fetch_state'] == 'stopped'
        next if partition_statistics['fetch_state'] == 'none'

        public_send(
          metric.type,
          metric.name,
          partition_statistics.dig(*metric.key_location),
          tags: base_tags + [
            "topic:#{topic_name}",
            "partition:#{partition_name}"
          ]
        )
      end
    end
  else
    raise ArgumentError, metric.scope
  end
end
244
+
245
# Builds basic per consumer tags for publication
#
# @param consumer [Karafka::BaseConsumer]
# @return [Array<String>] topic, partition and consumer group tags
def consumer_tags(consumer)
  batch_metadata = consumer.messages.metadata
  group_id = consumer.topic.consumer_group.id

  [
    "topic:#{batch_metadata.topic}",
    "partition:#{batch_metadata.partition}",
    "consumer_group:#{group_id}"
  ]
end
260
+ end
261
+ end
262
+ end
263
+ end
264
+ end
@@ -0,0 +1,176 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'socket'
4
+
5
+ module Karafka
6
+ module Instrumentation
7
+ module Vendors
8
+ # Namespace for instrumentation related with Kubernetes
9
+ module Kubernetes
10
# Kubernetes HTTP listener that does not only reply when process is not fully hanging, but
# also allows to define max time of processing and looping.
#
# Processes like Karafka server can hang while still being reachable. For example, in case
# something would hang inside of the user code, Karafka could stop polling and no new
# data would be processed, but process itself would still be active. This listener allows
# for defining of a ttl that gets bumped on each poll loop and before and after processing
# of a given messages batch.
class LivenessListener
  include ::Karafka::Core::Helpers::Time

  # All good with Karafka
  OK_CODE = '204 No Content'

  # Some timeouts, fail
  FAIL_CODE = '500 Internal Server Error'

  private_constant :OK_CODE, :FAIL_CODE

  # @param hostname [String, nil] hostname or nil to bind on all
  # @param port [Integer] TCP port on which we want to run our HTTP status server
  # @param consuming_ttl [Integer] time in ms after which we consider consumption hanging.
  #   It allows us to define max consumption time after which k8s should consider given
  #   process as hanging
  # @param polling_ttl [Integer] max time in ms for polling. If polling (any) does not
  #   happen that often, process should be considered dead.
  # @note The default TTL matches the default `max.poll.interval.ms`
  def initialize(
    hostname: nil,
    port: 3000,
    consuming_ttl: 5 * 60 * 1_000,
    polling_ttl: 5 * 60 * 1_000
  )
    @server = TCPServer.new(*[hostname, port].compact)
    @polling_ttl = polling_ttl
    @consuming_ttl = consuming_ttl
    @mutex = Mutex.new
    # Tick trackers are keyed by reporting thread object id, since each listener and
    # worker thread bumps its own liveness independently
    @pollings = {}
    @consumptions = {}

    # Background HTTP responder. It keeps serving until the server socket is closed
    # in #on_app_stopped
    Thread.new do
      loop do
        break unless respond
      end
    end
  end

  # Tick on each fetch
  # @param _event [Karafka::Core::Monitoring::Event]
  def on_connection_listener_fetch_loop(_event)
    mark_polling_tick
  end

  # Tick on starting work
  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_consume(_event)
    mark_consumption_tick
  end

  # Tick on finished work
  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_consumed(_event)
    clear_consumption_tick
  end

  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_revoke(_event)
    mark_consumption_tick
  end

  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_revoked(_event)
    clear_consumption_tick
  end

  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_shutting_down(_event)
    mark_consumption_tick
  end

  # @param _event [Karafka::Core::Monitoring::Event]
  def on_consumer_shutdown(_event)
    clear_consumption_tick
  end

  # @param _event [Karafka::Core::Monitoring::Event]
  def on_error_occurred(_event)
    clear_consumption_tick
    clear_polling_tick
  end

  # Stop the http server when we stop the process
  # @param _event [Karafka::Core::Monitoring::Event]
  def on_app_stopped(_event)
    @server.close
  end

  private

  # Wraps the logic with a mutex
  # @param block [Proc] code we want to run in mutex
  def synchronize(&block)
    @mutex.synchronize(&block)
  end

  # @return [Integer] object id of the current thread
  def thread_id
    Thread.current.object_id
  end

  # Update the polling tick time for current thread
  def mark_polling_tick
    synchronize do
      @pollings[thread_id] = monotonic_now
    end
  end

  # Clear current thread polling time tracker
  def clear_polling_tick
    synchronize do
      @pollings.delete(thread_id)
    end
  end

  # Update the processing tick time
  def mark_consumption_tick
    synchronize do
      @consumptions[thread_id] = monotonic_now
    end
  end

  # Clear current thread consumption time tracker
  def clear_consumption_tick
    synchronize do
      @consumptions.delete(thread_id)
    end
  end

  # Responds to a HTTP request with the process liveness status
  # @return [Boolean] true when we should keep serving, false when server is closed
  def respond
    client = @server.accept
    client.gets
    client.print "HTTP/1.1 #{status}\r\n"
    client.print "Content-Type: text/plain\r\n"
    client.print "\r\n"
    client.close

    true
  rescue Errno::ECONNRESET, Errno::EPIPE, IOError
    !@server.closed?
  end

  # Did we exceed any of the ttls
  # @return [String] 204 string if ok, 500 otherwise
  def status
    # Snapshot the tick values under the mutex. This method runs in the HTTP server
    # thread while other threads mutate the trackers, so an unsynchronized iteration
    # would not be safe (especially on runtimes without a GVL)
    polls, consumptions = synchronize { [@pollings.values, @consumptions.values] }

    time = monotonic_now

    return FAIL_CODE if polls.any? { |tick| (time - tick) > @polling_ttl }
    return FAIL_CODE if consumptions.any? { |tick| (time - tick) > @consuming_ttl }

    OK_CODE
  end
end
173
+ end
174
+ end
175
+ end
176
+ end
@@ -0,0 +1,78 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
# Checks the license presence for pro and loads pro components when needed (if any)
class Licenser
  # Location in the gem where we store the public key
  PUBLIC_KEY_LOCATION = File.join(Karafka.gem_root, 'certs', 'karafka-pro.pem')

  private_constant :PUBLIC_KEY_LOCATION

  class << self
    # Tries to load the license and yields if successful
    #
    # @return [Boolean] true when the license code was available, false otherwise
    def detect
      # If required, do not require again
      require('karafka-license') unless const_defined?('::Karafka::License')

      yield

      true
    rescue LoadError
      false
    end

    # Tries to prepare license and verifies it
    #
    # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
    def prepare_and_verify(license_config)
      # If license is not loaded, nothing to do
      return unless const_defined?('::Karafka::License')

      prepare(license_config)
      verify(license_config)
    end

    private

    # Moves the license token from the license code into the configuration
    # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
    def prepare(license_config)
      license_config.token = Karafka::License.token
    end

    # Check license and setup license details (if needed)
    # @param license_config [Karafka::Core::Configurable::Node] config related to the licensing
    def verify(license_config)
      public_key = OpenSSL::PKey::RSA.new(File.read(PUBLIC_KEY_LOCATION))

      # We gsub and strip in case someone copy-pasted it as a multi line string
      sanitized_token = license_config.token.strip.delete("\n").delete(' ')
      decoded_token = Base64.decode64(sanitized_token)

      decrypted =
        begin
          public_key.public_decrypt(decoded_token)
        rescue OpenSSL::OpenSSLError
          nil
        end

      details = decrypted ? JSON.parse(decrypted) : raise_invalid_license_token(license_config)

      license_config.entity = details.fetch('entity')
    end

    # Raises an error with info, that used token is invalid
    # @param license_config [Karafka::Core::Configurable::Node]
    # @raise [Errors::InvalidLicenseTokenError] always
    def raise_invalid_license_token(license_config)
      # We set it to false so `Karafka.pro?` method behaves as expected
      license_config.token = false

      raise(
        Errors::InvalidLicenseTokenError,
        <<~MSG.tr("\n", ' ')
          License key you provided is invalid.
          Please reach us at contact@karafka.io or visit https://karafka.io to obtain a valid one.
        MSG
      )
    end
  end
end
78
+ end