karafka 1.4.12 → 2.2.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (359) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
  5. data/.github/workflows/ci.yml +169 -31
  6. data/.rspec +4 -0
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +716 -607
  9. data/CONTRIBUTING.md +10 -19
  10. data/Gemfile +7 -0
  11. data/Gemfile.lock +69 -92
  12. data/LICENSE +17 -0
  13. data/LICENSE-COMM +89 -0
  14. data/LICENSE-LGPL +165 -0
  15. data/README.md +48 -47
  16. data/bin/benchmarks +99 -0
  17. data/bin/create_token +22 -0
  18. data/bin/integrations +310 -0
  19. data/bin/karafka +5 -14
  20. data/bin/record_rss +50 -0
  21. data/bin/rspecs +6 -0
  22. data/bin/scenario +29 -0
  23. data/bin/stress_many +13 -0
  24. data/bin/stress_one +13 -0
  25. data/bin/verify_license_integrity +37 -0
  26. data/bin/wait_for_kafka +24 -0
  27. data/certs/cert_chain.pem +26 -0
  28. data/certs/karafka-pro.pem +11 -0
  29. data/config/locales/errors.yml +97 -0
  30. data/config/locales/pro_errors.yml +59 -0
  31. data/docker-compose.yml +19 -11
  32. data/karafka.gemspec +26 -22
  33. data/lib/active_job/karafka.rb +17 -0
  34. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  35. data/lib/karafka/active_job/consumer.rb +49 -0
  36. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  37. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  38. data/lib/karafka/active_job/current_attributes.rb +42 -0
  39. data/lib/karafka/active_job/dispatcher.rb +69 -0
  40. data/lib/karafka/active_job/job_extensions.rb +34 -0
  41. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  42. data/lib/karafka/admin.rb +313 -0
  43. data/lib/karafka/app.rb +47 -23
  44. data/lib/karafka/base_consumer.rb +260 -29
  45. data/lib/karafka/cli/base.rb +67 -36
  46. data/lib/karafka/cli/console.rb +18 -12
  47. data/lib/karafka/cli/help.rb +24 -0
  48. data/lib/karafka/cli/info.rb +47 -12
  49. data/lib/karafka/cli/install.rb +23 -14
  50. data/lib/karafka/cli/server.rb +101 -44
  51. data/lib/karafka/cli/topics.rb +146 -0
  52. data/lib/karafka/cli.rb +24 -27
  53. data/lib/karafka/connection/client.rb +553 -90
  54. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  55. data/lib/karafka/connection/listener.rb +294 -38
  56. data/lib/karafka/connection/listeners_batch.rb +40 -0
  57. data/lib/karafka/connection/messages_buffer.rb +84 -0
  58. data/lib/karafka/connection/pauses_manager.rb +46 -0
  59. data/lib/karafka/connection/proxy.rb +98 -0
  60. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  61. data/lib/karafka/connection/rebalance_manager.rb +105 -0
  62. data/lib/karafka/contracts/base.rb +17 -0
  63. data/lib/karafka/contracts/config.rb +130 -11
  64. data/lib/karafka/contracts/consumer_group.rb +32 -187
  65. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  66. data/lib/karafka/contracts/topic.rb +65 -0
  67. data/lib/karafka/contracts.rb +1 -1
  68. data/lib/karafka/embedded.rb +36 -0
  69. data/lib/karafka/env.rb +46 -0
  70. data/lib/karafka/errors.rb +37 -21
  71. data/lib/karafka/helpers/async.rb +33 -0
  72. data/lib/karafka/helpers/colorize.rb +26 -0
  73. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  74. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  75. data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
  76. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  77. data/lib/karafka/instrumentation/logger_listener.rb +303 -0
  78. data/lib/karafka/instrumentation/monitor.rb +13 -61
  79. data/lib/karafka/instrumentation/notifications.rb +79 -0
  80. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  81. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
  82. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
  83. data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
  84. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
  85. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
  86. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  87. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
  88. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  89. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  90. data/lib/karafka/licenser.rb +78 -0
  91. data/lib/karafka/messages/batch_metadata.rb +52 -0
  92. data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
  93. data/lib/karafka/messages/builders/message.rb +40 -0
  94. data/lib/karafka/messages/builders/messages.rb +36 -0
  95. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  96. data/lib/karafka/messages/messages.rb +71 -0
  97. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  98. data/lib/karafka/messages/parser.rb +14 -0
  99. data/lib/karafka/messages/seek.rb +12 -0
  100. data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
  101. data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
  102. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  103. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  104. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  105. data/lib/karafka/pro/cleaner/errors.rb +27 -0
  106. data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
  107. data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
  108. data/lib/karafka/pro/cleaner.rb +41 -0
  109. data/lib/karafka/pro/contracts/base.rb +23 -0
  110. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  111. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  112. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  113. data/lib/karafka/pro/encryption/errors.rb +27 -0
  114. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  115. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  116. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  117. data/lib/karafka/pro/encryption.rb +47 -0
  118. data/lib/karafka/pro/iterator/expander.rb +95 -0
  119. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  120. data/lib/karafka/pro/iterator.rb +170 -0
  121. data/lib/karafka/pro/loader.rb +106 -0
  122. data/lib/karafka/pro/performance_tracker.rb +84 -0
  123. data/lib/karafka/pro/processing/collapser.rb +62 -0
  124. data/lib/karafka/pro/processing/coordinator.rb +147 -0
  125. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  126. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  127. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  128. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
  129. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  130. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  131. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  132. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  133. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  134. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  135. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  136. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  137. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  138. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  139. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  140. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  141. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  142. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  143. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  144. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  146. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  147. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  148. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  149. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  150. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  151. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  152. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  153. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  154. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  155. data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
  156. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  157. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  158. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  159. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  160. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  161. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  162. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  163. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  164. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  165. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  166. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  167. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  168. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  169. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  170. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  171. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  172. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  173. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  174. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  175. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  176. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  177. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  178. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
  179. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  180. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  181. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  182. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  183. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  184. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  185. data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
  186. data/lib/karafka/pro/processing/strategies.rb +22 -0
  187. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  188. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  189. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  190. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  191. data/lib/karafka/pro/routing/features/base.rb +24 -0
  192. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  193. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  194. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  195. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  196. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  197. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  198. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  199. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  200. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  201. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  202. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  203. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  204. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  205. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  206. data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
  207. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
  208. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
  209. data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
  210. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  211. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
  212. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  213. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  214. data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
  215. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  216. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
  217. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  218. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  219. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  220. data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
  221. data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
  222. data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
  223. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  224. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  225. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  226. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  227. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  228. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  229. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  230. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  231. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  232. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  233. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  234. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  235. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  236. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  237. data/lib/karafka/pro.rb +13 -0
  238. data/lib/karafka/process.rb +24 -8
  239. data/lib/karafka/processing/coordinator.rb +181 -0
  240. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  241. data/lib/karafka/processing/executor.rb +155 -0
  242. data/lib/karafka/processing/executors_buffer.rb +72 -0
  243. data/lib/karafka/processing/expansions_selector.rb +22 -0
  244. data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
  245. data/lib/karafka/processing/inline_insights/listener.rb +19 -0
  246. data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
  247. data/lib/karafka/processing/jobs/base.rb +55 -0
  248. data/lib/karafka/processing/jobs/consume.rb +45 -0
  249. data/lib/karafka/processing/jobs/idle.rb +24 -0
  250. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  251. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  252. data/lib/karafka/processing/jobs_builder.rb +28 -0
  253. data/lib/karafka/processing/jobs_queue.rb +150 -0
  254. data/lib/karafka/processing/partitioner.rb +24 -0
  255. data/lib/karafka/processing/result.rb +42 -0
  256. data/lib/karafka/processing/scheduler.rb +22 -0
  257. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  258. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  259. data/lib/karafka/processing/strategies/base.rb +52 -0
  260. data/lib/karafka/processing/strategies/default.rb +158 -0
  261. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  262. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  263. data/lib/karafka/processing/strategies/mom.rb +29 -0
  264. data/lib/karafka/processing/strategy_selector.rb +47 -0
  265. data/lib/karafka/processing/worker.rb +93 -0
  266. data/lib/karafka/processing/workers_batch.rb +27 -0
  267. data/lib/karafka/railtie.rb +141 -0
  268. data/lib/karafka/routing/activity_manager.rb +84 -0
  269. data/lib/karafka/routing/builder.rb +45 -19
  270. data/lib/karafka/routing/consumer_group.rb +56 -20
  271. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  272. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  273. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  274. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  275. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  276. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  277. data/lib/karafka/routing/features/active_job.rb +13 -0
  278. data/lib/karafka/routing/features/base/expander.rb +59 -0
  279. data/lib/karafka/routing/features/base.rb +71 -0
  280. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  281. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  282. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  283. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  284. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  285. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  286. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  287. data/lib/karafka/routing/features/declaratives.rb +14 -0
  288. data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
  289. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
  290. data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
  291. data/lib/karafka/routing/features/inline_insights.rb +40 -0
  292. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  293. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  294. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  295. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  296. data/lib/karafka/routing/proxy.rb +22 -21
  297. data/lib/karafka/routing/router.rb +24 -10
  298. data/lib/karafka/routing/subscription_group.rb +110 -0
  299. data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
  300. data/lib/karafka/routing/topic.rb +87 -24
  301. data/lib/karafka/routing/topics.rb +46 -0
  302. data/lib/karafka/runner.rb +52 -0
  303. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  304. data/lib/karafka/server.rb +113 -37
  305. data/lib/karafka/setup/attributes_map.rb +348 -0
  306. data/lib/karafka/setup/config.rb +256 -175
  307. data/lib/karafka/status.rb +54 -7
  308. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  309. data/lib/karafka/templates/karafka.rb.erb +33 -55
  310. data/lib/karafka/time_trackers/base.rb +14 -0
  311. data/lib/karafka/time_trackers/pause.rb +122 -0
  312. data/lib/karafka/time_trackers/poll.rb +69 -0
  313. data/lib/karafka/version.rb +1 -1
  314. data/lib/karafka.rb +91 -17
  315. data/renovate.json +9 -0
  316. data.tar.gz.sig +0 -0
  317. metadata +330 -168
  318. metadata.gz.sig +0 -0
  319. data/MIT-LICENCE +0 -18
  320. data/certs/mensfeld.pem +0 -25
  321. data/config/errors.yml +0 -41
  322. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  323. data/lib/karafka/attributes_map.rb +0 -63
  324. data/lib/karafka/backends/inline.rb +0 -16
  325. data/lib/karafka/base_responder.rb +0 -226
  326. data/lib/karafka/cli/flow.rb +0 -48
  327. data/lib/karafka/cli/missingno.rb +0 -19
  328. data/lib/karafka/code_reloader.rb +0 -67
  329. data/lib/karafka/connection/api_adapter.rb +0 -158
  330. data/lib/karafka/connection/batch_delegator.rb +0 -55
  331. data/lib/karafka/connection/builder.rb +0 -23
  332. data/lib/karafka/connection/message_delegator.rb +0 -36
  333. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  334. data/lib/karafka/consumers/callbacks.rb +0 -71
  335. data/lib/karafka/consumers/includer.rb +0 -64
  336. data/lib/karafka/consumers/responders.rb +0 -24
  337. data/lib/karafka/consumers/single_params.rb +0 -15
  338. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  339. data/lib/karafka/contracts/responder_usage.rb +0 -54
  340. data/lib/karafka/fetcher.rb +0 -42
  341. data/lib/karafka/helpers/class_matcher.rb +0 -88
  342. data/lib/karafka/helpers/config_retriever.rb +0 -46
  343. data/lib/karafka/helpers/inflector.rb +0 -26
  344. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  345. data/lib/karafka/params/batch_metadata.rb +0 -26
  346. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  347. data/lib/karafka/params/builders/params.rb +0 -38
  348. data/lib/karafka/params/builders/params_batch.rb +0 -25
  349. data/lib/karafka/params/params_batch.rb +0 -60
  350. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  351. data/lib/karafka/persistence/client.rb +0 -29
  352. data/lib/karafka/persistence/consumers.rb +0 -45
  353. data/lib/karafka/persistence/topics.rb +0 -48
  354. data/lib/karafka/responders/builder.rb +0 -36
  355. data/lib/karafka/responders/topic.rb +0 -55
  356. data/lib/karafka/routing/topic_mapper.rb +0 -53
  357. data/lib/karafka/serialization/json/serializer.rb +0 -31
  358. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  359. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/bin/scenario ADDED
@@ -0,0 +1,29 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ # Runner for non-parallel execution of a single scenario.
4
+ # It prints all the info to stdout, etc. and basically replaces itself with the scenario execution.
5
+ # It is useful when we work with a single spec and we need all the debug info
6
+
7
+ raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)
8
+
9
+ require 'open3'
10
+ require 'fileutils'
11
+ require 'pathname'
12
+ require 'tmpdir'
13
+ require 'etc'
14
+
15
+ ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))
16
+
17
+ # Load all the specs
18
+ specs = Dir[ROOT_PATH.join('spec/integrations/**/*.rb')]
19
+
20
+ # If filters is provided, apply
21
+ # Allows to provide several filters one after another and applies all of them
22
+ ARGV.each do |filter|
23
+ specs.delete_if { |name| !name.include?(filter) }
24
+ end
25
+
26
+ raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
27
+ raise ArgumentError, "Many specs found with filters: #{ARGV.join(', ')}" if specs.size != 1
28
+
29
+ exec("bundle exec ruby -r #{ROOT_PATH}/spec/integrations_helper.rb #{specs[0]}")
data/bin/stress_many ADDED
@@ -0,0 +1,13 @@
1
+ #!/bin/bash
2
+
3
+ # Runs integration specs in an endless loop
4
+ # This allows us to ensure (after long enough time) that the integration test suite is stable and
5
+ # that there are no anomalies when running it for a long period of time
6
+
7
+ set -e
8
+
9
+ while :
10
+ do
11
+ clear
12
+ bin/integrations $1
13
+ done
data/bin/stress_one ADDED
@@ -0,0 +1,13 @@
1
+ #!/bin/bash
2
+
3
+ # Runs a single integration spec in an endless loop
4
+ # This allows us to ensure (after long enough time) that the integration spec is stable and
5
+ # that there are no anomalies when running it for a long period of time
6
+
7
+ set -e
8
+
9
+ while :
10
+ do
11
+ clear
12
+ bin/scenario $1
13
+ done
@@ -0,0 +1,37 @@
1
+ #!/usr/bin/env bash
2
+
3
+ # This script verifies integrity of the Pro license
4
+ # Run it before bundle install to ensure that what you are fetching is what you expect
5
+ # Run it after bundle install to ensure that the local artifact was not compromised
6
+
7
+ #!/usr/bin/env bash
8
+
9
+ set -e
10
+
11
+ if [ "$MODE" != "after" ]; then
12
+ # Check the remote license prior to bundle installing
13
+ curl \
14
+ --retry 5 \
15
+ --retry-delay 1 \
16
+ --fail \
17
+ -u $KARAFKA_PRO_USERNAME:$KARAFKA_PRO_PASSWORD \
18
+ https://gems.karafka.io/gems/karafka-license-$KARAFKA_PRO_VERSION.gem \
19
+ -o ./karafka-license.gem
20
+ else
21
+ # Check the local cached one after bundle install
22
+ cache_path=`ruby -e 'puts "#{Gem.dir}/cache/"'`
23
+ cp "$cache_path/karafka-license-$KARAFKA_PRO_VERSION.gem" ./karafka-license.gem
24
+ fi
25
+
26
+ detected=`sha256sum ./karafka-license.gem | awk '{ print $1 }'`
27
+
28
+ rm ./karafka-license.gem
29
+
30
+ echo -n "Karafka Pro license artifact checksum verification result: "
31
+
32
+ if [ "$detected" = "$KARAFKA_PRO_LICENSE_CHECKSUM" ]; then
33
+ echo "Success"
34
+ else
35
+ echo -e "\033[0;31mFailure!\033[0m"
36
+ exit 1
37
+ fi
@@ -0,0 +1,24 @@
1
+ #!/usr/bin/env ruby
2
+
3
+ # Waits for Kafka to be ready
4
+ # Useful in CI where Kafka needs to be fully started before we run any tests
5
+
6
+ require 'karafka'
7
+
8
+ Karafka::App.setup do |config|
9
+ config.kafka[:'bootstrap.servers'] = '127.0.0.1:9092'
10
+ end
11
+
12
+ 60.times do
13
+ begin
14
+ # Stop if we can connect to the cluster and get info
15
+ exit if Karafka::Admin.cluster_info
16
+ rescue Rdkafka::RdkafkaError
17
+ puts "Kafka not available, retrying..."
18
+ sleep(1)
19
+ end
20
+ end
21
+
22
+ puts 'Kafka not available!'
23
+
24
+ exit 1
@@ -0,0 +1,26 @@
1
+ -----BEGIN CERTIFICATE-----
2
+ MIIEcDCCAtigAwIBAgIBATANBgkqhkiG9w0BAQsFADA/MRAwDgYDVQQDDAdjb250
3
+ YWN0MRcwFQYKCZImiZPyLGQBGRYHa2FyYWZrYTESMBAGCgmSJomT8ixkARkWAmlv
4
+ MB4XDTIzMDgyMTA3MjU1NFoXDTI0MDgyMDA3MjU1NFowPzEQMA4GA1UEAwwHY29u
5
+ dGFjdDEXMBUGCgmSJomT8ixkARkWB2thcmFma2ExEjAQBgoJkiaJk/IsZAEZFgJp
6
+ bzCCAaIwDQYJKoZIhvcNAQEBBQADggGPADCCAYoCggGBAOuZpyQKEwsTG9plLat7
7
+ 8bUaNuNBEnouTsNMr6X+XTgvyrAxTuocdsyP1sNCjdS1B8RiiDH1/Nt9qpvlBWon
8
+ sdJ1SYhaWNVfqiYStTDnCx3PRMmHRdD4KqUWKpN6VpZ1O/Zu+9Mw0COmvXgZuuO9
9
+ wMSJkXRo6dTCfMedLAIxjMeBIxtoLR2e6Jm6MR8+8WYYVWrO9kSOOt5eKQLBY7aK
10
+ b/Dc40EcJKPg3Z30Pia1M9ZyRlb6SOj6SKpHRqc7vbVQxjEw6Jjal1lZ49m3YZMd
11
+ ArMAs9lQZNdSw5/UX6HWWURLowg6k10RnhTUtYyzO9BFev0JFJftHnmuk8vtb+SD
12
+ 5VPmjFXg2VOcw0B7FtG75Vackk8QKfgVe3nSPhVpew2CSPlbJzH80wChbr19+e3+
13
+ YGr1tOiaJrL6c+PNmb0F31NXMKpj/r+n15HwlTMRxQrzFcgjBlxf2XFGnPQXHhBm
14
+ kp1OFnEq4GG9sON4glRldkwzi/f/fGcZmo5fm3d+0ZdNgwIDAQABo3cwdTAJBgNV
15
+ HRMEAjAAMAsGA1UdDwQEAwIEsDAdBgNVHQ4EFgQUPVH5+dLA80A1kJ2Uz5iGwfOa
16
+ 1+swHQYDVR0RBBYwFIESY29udGFjdEBrYXJhZmthLmlvMB0GA1UdEgQWMBSBEmNv
17
+ bnRhY3RAa2FyYWZrYS5pbzANBgkqhkiG9w0BAQsFAAOCAYEAnpa0jcN7JzREHMTQ
18
+ bfZ+xcvlrzuROMY6A3zIZmQgbnoZZNuX4cMRrT1p1HuwXpxdpHPw7dDjYqWw3+1h
19
+ 3mXLeMuk7amjQpYoSWU/OIZMhIsARra22UN8qkkUlUj3AwTaChVKN/bPJOM2DzfU
20
+ kz9vUgLeYYFfQbZqeI6SsM7ltilRV4W8D9yNUQQvOxCFxtLOetJ00fC/E7zMUzbK
21
+ IBwYFQYsbI6XQzgAIPW6nGSYKgRhkfpmquXSNKZRIQ4V6bFrufa+DzD0bt2ZA3ah
22
+ fMmJguyb5L2Gf1zpDXzFSPMG7YQFLzwYz1zZZvOU7/UCpQsHpID/YxqDp4+Dgb+Y
23
+ qma0whX8UG/gXFV2pYWpYOfpatvahwi+A1TwPQsuZwkkhi1OyF1At3RY+hjSXyav
24
+ AnG1dJU+yL2BK7vaVytLTstJME5mepSZ46qqIJXMuWob/YPDmVaBF39TDSG9e34s
25
+ msG3BiCqgOgHAnL23+CN3Rt8MsuRfEtoTKpJVcCfoEoNHOkc
26
+ -----END CERTIFICATE-----
@@ -0,0 +1,11 @@
1
+ -----BEGIN RSA PUBLIC KEY-----
2
+ MIIBigKCAYEApcd6ybskiNs9WUvBGVUE8GdWDehjZ9TyjSj/fDl/UcMYqY0R5YX9
3
+ tnYxEwZZRMdVltKWxr88Qmshh1IQz6CpJVbcfYjt/158pSGPm+AUua6tkLqIvZDM
4
+ ocFOMafmroI+BMuL+Zu5QH7HC2tkT16jclGYfMQkJjXVUQTk2UZr+94+8RlUz/CH
5
+ Y6hPA7xPgIyPfyPCxz1VWzAwXwT++NCJQPBr5MqT84LNSEzUSlR9pFNShf3UCUT+
6
+ 8LWOvjFSNGmMMSsbo2T7/+dz9/FM02YG00EO0x04qteggwcaEYLFrigDN6/fM0ih
7
+ BXZILnMUqC/qrfW2YFg4ZqKZJuxaALqqkPxrkBDYqoqcAloqn36jBSke6tc/2I/J
8
+ 2Afq3r53UoAbUH7h5I/L8YeaiA4MYjAuq724lHlrOmIr4D6yjYC0a1LGlPjLk869
9
+ 2nsVXNgomhVb071E6amR+rJJnfvkdZgCmEBFnqnBV5A1u4qgNsa2rVcD+gJRvb2T
10
+ aQtjlQWKPx5xAgMBAAE=
11
+ -----END RSA PUBLIC KEY-----
@@ -0,0 +1,97 @@
1
+ en:
2
+ validations:
3
+ config:
4
+ missing: needs to be present
5
+ client_id_format: 'needs to be a string with a Kafka accepted format'
6
+ license.entity_format: needs to be a string
7
+ license.token_format: needs to be either false or a string
8
+ license.expires_on_format: needs to be a valid date
9
+ concurrency_format: needs to be an integer bigger than 0
10
+ consumer_mapper_format: needs to be present
11
+ consumer_persistence_format: needs to be either true or false
12
+ pause_timeout_format: needs to be an integer bigger than 0
13
+ pause_max_timeout_format: needs to be an integer bigger than 0
14
+ pause_with_exponential_backoff_format: needs to be either true or false
15
+ shutdown_timeout_format: needs to be an integer bigger than 0
16
+ max_wait_time_format: needs to be an integer bigger than 0
17
+ kafka_format: needs to be a filled hash
18
+ internal.processing.jobs_builder_format: cannot be nil
19
+ internal.processing.scheduler_format: cannot be nil
20
+ internal.processing.coordinator_class_format: cannot be nil
21
+ internal.processing.partitioner_class_format: cannot be nil
22
+ internal.processing.strategy_selector_format: cannot be nil
23
+ internal.processing.expansions_selector_format: cannot be nil
24
+ internal.active_job.dispatcher: cannot be nil
25
+ internal.active_job.job_options_contract: cannot be nil
26
+ internal.active_job.consumer_class: cannot be nil
27
+ internal.status_format: needs to be present
28
+ internal.process_format: needs to be present
29
+ internal.routing.builder_format: needs to be present
30
+ internal.routing.subscription_groups_builder_format: needs to be present
31
+ internal.connection.proxy.query_watermark_offsets.timeout_format: needs to be an integer bigger than 0
32
+ internal.connection.proxy.query_watermark_offsets.max_attempts_format: needs to be an integer bigger than 0
33
+ internal.connection.proxy.query_watermark_offsets.wait_time_format: needs to be an integer bigger than 0
34
+ internal.connection.proxy.offsets_for_times.timeout_format: needs to be an integer bigger than 0
35
+ internal.connection.proxy.offsets_for_times.max_attempts_format: needs to be an integer bigger than 0
36
+ internal.connection.proxy.offsets_for_times.wait_time_format: needs to be an integer bigger than 0
37
+ key_must_be_a_symbol: All keys under the kafka settings scope need to be symbols
38
+ max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
39
+ shutdown_timeout_vs_max_wait_time: shutdown_timeout must be more than max_wait_time
40
+ admin.kafka_format: needs to be a hash
41
+ admin.group_id_format: 'needs to be a string with a Kafka accepted format'
42
+ admin.max_wait_time_format: 'needs to be an integer bigger than 0'
43
+ admin.max_attempts_format: 'needs to be an integer bigger than 0'
44
+
45
+ server_cli_options:
46
+ missing: needs to be present
47
+ consumer_groups_inclusion: Unknown consumer group name
48
+ subscription_groups_inclusion: Unknown subscription group name
49
+ topics_inclusion: Unknown topic name
50
+ topics_missing: No topics to subscribe to
51
+
52
+ topic:
53
+ kafka: needs to be a hash with kafka scope settings details
54
+ missing: needs to be present
55
+ max_messages_format: 'needs to be an integer bigger than 0'
56
+ max_wait_time_format: 'needs to be an integer bigger than 0'
57
+ name_format: 'needs to be a string with a Kafka accepted format'
58
+ deserializer_format: needs to be present
59
+ consumer_format: needs to be present
60
+ id_format: 'needs to be a string with a Kafka accepted format'
61
+ initial_offset_format: needs to be either earliest or latest
62
+ subscription_group_name_format: must be a non-empty string
63
+ manual_offset_management.active_format: needs to be either true or false
64
+ manual_offset_management_must_be_enabled: cannot be disabled for ActiveJob topics
65
+ inline_insights.active_format: needs to be either true or false
66
+ consumer_active_job_missing: ActiveJob needs to be available
67
+ dead_letter_queue.max_retries_format: needs to be equal or bigger than 0
68
+ dead_letter_queue.topic_format: 'needs to be a string with a Kafka accepted format'
69
+ dead_letter_queue.active_format: needs to be either true or false
70
+ active_format: needs to be either true or false
71
+ declaratives.partitions_format: needs to be more or equal to 1
72
+ declaratives.active_format: needs to be true
73
+ declaratives.replication_factor_format: needs to be more or equal to 1
74
+ declaratives.details_format: needs to be a hash with only symbol keys
75
+ inconsistent_namespacing: |
76
+ needs to be consistent namespacing style
77
+ disable this validation by setting config.strict_topics_namespacing to false
78
+
79
+ consumer_group:
80
+ missing: needs to be present
81
+ topics_names_not_unique: all topic names within a single consumer group must be unique
82
+ id_format: 'needs to be a string with a Kafka accepted format'
83
+ topics_format: needs to be a non-empty array
84
+ topics_namespaced_names_not_unique: |
85
+ all topic names within a single consumer group must be unique considering namespacing styles
86
+ disable this validation by setting config.strict_topics_namespacing to false
87
+
88
+ job_options:
89
+ missing: needs to be present
90
+ dispatch_method_format: needs to be either :produce_async or :produce_sync
91
+ dispatch_many_method_format: needs to be either :produce_many_async or :produce_many_sync
92
+ partitioner_format: 'needs to respond to #call'
93
+ partition_key_type_format: 'needs to be either :key or :partition_key'
94
+
95
+ test:
96
+ missing: needs to be present
97
+ id_format: needs to be a String
@@ -0,0 +1,59 @@
1
+ en:
2
+ validations:
3
+ topic:
4
+ virtual_partitions.partitioner_respond_to_call: needs to be defined and needs to respond to `#call`
5
+ virtual_partitions.max_partitions_format: needs to be equal or more than 1
6
+
7
+ long_running_job.active_format: needs to be either true or false
8
+
9
+ dead_letter_queue_with_virtual_partitions: when using Dead Letter Queue with Virtual Partitions, at least one retry is required.
10
+
11
+ throttling.active_format: needs to be either true or false
12
+ throttling.limit_format: needs to be equal or more than 1
13
+ throttling.interval_format: needs to be equal or more than 1
14
+
15
+ filtering.active_missing: needs to be present
16
+ filtering.factory_format: 'needs to respond to #call'
17
+ filtering.factories_format: 'needs to contain only factories responding to #call'
18
+ filtering.active_format: 'needs to be boolean'
19
+
20
+ expiring.ttl_format: 'needs to be equal or more than 0 and an integer'
21
+ expiring.active_format: 'needs to be boolean'
22
+
23
+ delaying.delay_format: 'needs to be equal or more than 0 and an integer'
24
+ delaying.active_format: 'needs to be boolean'
25
+
26
+ pause_timeout_format: needs to be an integer bigger than 0
27
+ pause_max_timeout_format: needs to be an integer bigger than 0
28
+ pause_with_exponential_backoff_format: needs to be either true or false
29
+ pause_timeout_max_timeout_vs_pause_max_timeout: pause_timeout must be less or equal to pause_max_timeout
30
+
31
+ patterns.active_format: 'needs to be boolean'
32
+ patterns.type_format: 'needs to be :matcher, :discovered or :regular'
33
+
34
+ inline_insights.active_format: 'needs to be boolean'
35
+ inline_insights.required_format: 'needs to be boolean'
36
+
37
+ consumer_group:
38
+ patterns_format: must be an array with hashes
39
+ patterns_missing: needs to be present
40
+ patterns_regexps_not_unique: 'must be unique within consumer group'
41
+
42
+ pattern:
43
+ regexp_format: must be a regular expression
44
+ name_format: 'needs to be a string with a Kafka accepted format'
45
+ regexp_string_format: 'needs to be a string and start with ^'
46
+ missing: needs to be present
47
+
48
+ config:
49
+ encryption.active_format: 'needs to be either true or false'
50
+ encryption.public_key_invalid: 'is not a valid public RSA key'
51
+ encryption.public_key_needs_to_be_public: 'is a private RSA key not a public one'
52
+ encryption.private_keys_format: 'needs to be a hash of version and private key value'
53
+ encryption.private_keys_need_to_be_private: 'all keys need to be private'
54
+ encryption.version_format: must be a non-empty string
55
+ encryption.public_key_format: 'is not a valid public RSA key'
56
+ encryption.private_keys_invalid: 'contains an invalid private RSA key string'
57
+
58
+ patterns.ttl_format: needs to be an integer bigger than 0
59
+ patterns.ttl_missing: needs to be present
data/docker-compose.yml CHANGED
@@ -1,17 +1,25 @@
1
1
  version: '2'
2
+
2
3
  services:
3
- zookeeper:
4
- image: wurstmeister/zookeeper
5
- ports:
6
- - "2181:2181"
7
4
  kafka:
8
- image: wurstmeister/kafka:1.0.1
5
+ container_name: kafka
6
+ image: confluentinc/cp-kafka:7.5.1
7
+
9
8
  ports:
10
- - "9092:9092"
9
+ - 9092:9092
10
+
11
11
  environment:
12
- KAFKA_ADVERTISED_HOST_NAME: localhost
13
- KAFKA_ADVERTISED_PORT: 9092
14
- KAFKA_ZOOKEEPER_CONNECT: zookeeper:2181
12
+ CLUSTER_ID: kafka-docker-cluster-1
13
+ KAFKA_INTER_BROKER_LISTENER_NAME: PLAINTEXT
14
+ KAFKA_OFFSETS_TOPIC_REPLICATION_FACTOR: 1
15
+ KAFKA_PROCESS_ROLES: broker,controller
16
+ KAFKA_CONTROLLER_LISTENER_NAMES: CONTROLLER
17
+ KAFKA_LISTENERS: PLAINTEXT://:9092,CONTROLLER://:9093
18
+ KAFKA_LISTENER_SECURITY_PROTOCOL_MAP: CONTROLLER:PLAINTEXT,PLAINTEXT:PLAINTEXT
19
+ KAFKA_ADVERTISED_LISTENERS: PLAINTEXT://127.0.0.1:9092
20
+ KAFKA_BROKER_ID: 1
21
+ KAFKA_CONTROLLER_QUORUM_VOTERS: 1@127.0.0.1:9093
22
+ ALLOW_PLAINTEXT_LISTENER: 'yes'
15
23
  KAFKA_AUTO_CREATE_TOPICS_ENABLE: 'true'
16
- volumes:
17
- - /var/run/docker.sock:/var/run/docker.sock
24
+ KAFKA_TRANSACTION_STATE_LOG_REPLICATION_FACTOR: 1
25
+ KAFKA_TRANSACTION_STATE_LOG_MIN_ISR: 1
data/karafka.gemspec CHANGED
@@ -5,38 +5,42 @@ $LOAD_PATH.unshift(lib) unless $LOAD_PATH.include?(lib)
5
5
 
6
6
  require 'karafka/version'
7
7
 
8
- # rubocop:disable Metrics/BlockLength
9
8
  Gem::Specification.new do |spec|
10
9
  spec.name = 'karafka'
11
10
  spec.version = ::Karafka::VERSION
12
11
  spec.platform = Gem::Platform::RUBY
13
- spec.authors = ['Maciej Mensfeld', 'Pavlo Vavruk', 'Adam Gwozdowski']
14
- spec.email = %w[maciej@mensfeld.pl pavlo.vavruk@gmail.com adam99g@gmail.com]
12
+ spec.authors = ['Maciej Mensfeld']
13
+ spec.email = %w[contact@karafka.io]
15
14
  spec.homepage = 'https://karafka.io'
16
- spec.summary = 'Ruby based framework for working with Apache Kafka'
17
- spec.description = 'Framework used to simplify Apache Kafka based Ruby applications development'
18
- spec.license = 'MIT'
19
-
20
- spec.add_dependency 'dry-configurable', '~> 0.13'
21
- spec.add_dependency 'dry-inflector', '~> 0.2'
22
- spec.add_dependency 'dry-monitor', '~> 0.5'
23
- spec.add_dependency 'dry-validation', '~> 1.7'
24
- spec.add_dependency 'envlogic', '~> 1.1'
25
- spec.add_dependency 'ruby-kafka', '>= 1.3.0'
26
- spec.add_dependency 'thor', '>= 1.1'
27
- spec.add_dependency 'waterdrop', '~> 1.4'
28
- spec.add_dependency 'zeitwerk', '~> 2.4'
29
-
30
- spec.required_ruby_version = '>= 2.6.0'
15
+ spec.licenses = %w[LGPL-3.0 Commercial]
16
+ spec.summary = 'Karafka is Ruby and Rails efficient Kafka processing framework.'
17
+ spec.description = <<-DESC
18
+ Karafka is Ruby and Rails efficient Kafka processing framework.
19
+
20
+ Karafka allows you to capture everything that happens in your systems in large scale,
21
+ without having to focus on things that are not your business domain.
22
+ DESC
23
+
24
+ spec.add_dependency 'karafka-core', '>= 2.2.2', '< 2.3.0'
25
+ spec.add_dependency 'waterdrop', '>= 2.6.10', '< 3.0.0'
26
+ spec.add_dependency 'zeitwerk', '~> 2.3'
31
27
 
32
28
  if $PROGRAM_NAME.end_with?('gem')
33
29
  spec.signing_key = File.expand_path('~/.ssh/gem-private_key.pem')
34
30
  end
35
31
 
36
- spec.cert_chain = %w[certs/mensfeld.pem]
32
+ spec.cert_chain = %w[certs/cert_chain.pem]
37
33
  spec.files = `git ls-files -z`.split("\x0").reject { |f| f.match(%r{^(spec)/}) }
38
- spec.executables = spec.files.grep(%r{^bin/}) { |f| File.basename(f) }
34
+ spec.executables = %w[karafka]
39
35
  spec.require_paths = %w[lib]
40
- spec.metadata = { 'source_code_uri' => 'https://github.com/karafka/karafka' }
36
+
37
+ spec.metadata = {
38
+ 'funding_uri' => 'https://karafka.io/#become-pro',
39
+ 'homepage_uri' => 'https://karafka.io',
40
+ 'changelog_uri' => 'https://karafka.io/docs/Changelog-Karafka',
41
+ 'bug_tracker_uri' => 'https://github.com/karafka/karafka/issues',
42
+ 'source_code_uri' => 'https://github.com/karafka/karafka',
43
+ 'documentation_uri' => 'https://karafka.io/docs',
44
+ 'rubygems_mfa_required' => 'true'
45
+ }
41
46
  end
42
- # rubocop:enable Metrics/BlockLength
@@ -0,0 +1,17 @@
1
+ # frozen_string_literal: true
2
+
3
+ begin
4
+ # Do not load active job if already loaded
5
+ require 'active_job' unless Object.const_defined?('ActiveJob')
6
+
7
+ require_relative 'queue_adapters/karafka_adapter'
8
+
9
+ module ActiveJob
10
+ # Namespace for usage simplification outside of Rails where Railtie will not kick in.
11
+ # That way a require 'active_job/karafka' should be enough to use it
12
+ module Karafka
13
+ end
14
+ end
15
+ rescue LoadError
16
+ # We extend ActiveJob stuff in the railtie
17
+ end
@@ -0,0 +1,32 @@
1
+ # frozen_string_literal: true
2
+
3
+ # ActiveJob components to allow for jobs consumption with Karafka
4
+ module ActiveJob
5
+ # ActiveJob queue adapters
6
+ module QueueAdapters
7
+ # Karafka adapter for enqueuing jobs
8
+ # This is here for ease of integration with ActiveJob.
9
+ class KarafkaAdapter
10
+ # Enqueues the job using the configured dispatcher
11
+ #
12
+ # @param job [Object] job that should be enqueued
13
+ def enqueue(job)
14
+ ::Karafka::App.config.internal.active_job.dispatcher.dispatch(job)
15
+ end
16
+
17
+ # Enqueues multiple jobs in one go
18
+ # @param jobs [Array<Object>] jobs that we want to enqueue
19
+ def enqueue_all(jobs)
20
+ ::Karafka::App.config.internal.active_job.dispatcher.dispatch_many(jobs)
21
+ end
22
+
23
+ # Raises info, that Karafka backend does not support scheduling jobs
24
+ #
25
+ # @param _job [Object] job we cannot enqueue
26
+ # @param _timestamp [Time] time when job should run
27
+ def enqueue_at(_job, _timestamp)
28
+ raise NotImplementedError, 'This queueing backend does not support scheduling jobs.'
29
+ end
30
+ end
31
+ end
32
+ end
@@ -0,0 +1,49 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ # Namespace for all the ActiveJob related things from within Karafka
5
+ module ActiveJob
6
+ # This is the consumer for ActiveJob that eats the messages enqueued with it one after another.
7
+ # It marks the offset after each message, so we make sure, none of the jobs is executed twice
8
+ class Consumer < ::Karafka::BaseConsumer
9
+ # Executes the ActiveJob logic
10
+ # @note ActiveJob does not support batches, so we just run one message after another
11
+ def consume
12
+ messages.each do |message|
13
+ break if Karafka::App.stopping?
14
+
15
+ consume_job(message)
16
+
17
+ mark_as_consumed(message)
18
+ end
19
+ end
20
+
21
+ private
22
+
23
+ # Consumes a message with the job and runs needed instrumentation
24
+ #
25
+ # @param job_message [Karafka::Messages::Message] message with active job
26
+ def consume_job(job_message)
27
+ with_deserialized_job(job_message) do |job|
28
+ tags.add(:job_class, job['job_class'])
29
+
30
+ payload = { caller: self, job: job, message: job_message }
31
+
32
+ # We publish both to make it consistent with `consumer.x` events
33
+ Karafka.monitor.instrument('active_job.consume', payload)
34
+ Karafka.monitor.instrument('active_job.consumed', payload) do
35
+ ::ActiveJob::Base.execute(job)
36
+ end
37
+ end
38
+ end
39
+
40
+ # @param job_message [Karafka::Messages::Message] message with active job
41
+ def with_deserialized_job(job_message)
42
+ # We technically speaking could set this as deserializer and reference it from the
43
+ # message instead of using the `#raw_payload`. This is not done on purpose to simplify
44
+ # the ActiveJob setup here
45
+ yield ::ActiveSupport::JSON.decode(job_message.raw_payload)
46
+ end
47
+ end
48
+ end
49
+ end
@@ -0,0 +1,36 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ module CurrentAttributes
6
+ # Module expanding the job deserialization to extract current attributes and load them
7
+ # for the time of the job execution
8
+ module Loading
9
+ # @param job_message [Karafka::Messages::Message] message with active job
10
+ def with_deserialized_job(job_message)
11
+ super(job_message) do |job|
12
+ resetable = []
13
+
14
+ _cattr_klasses.each do |key, cattr_klass_str|
15
+ next unless job.key?(key)
16
+
17
+ attributes = job.delete(key)
18
+
19
+ cattr_klass = cattr_klass_str.constantize
20
+
21
+ attributes.each do |name, value|
22
+ cattr_klass.public_send("#{name}=", value)
23
+ end
24
+
25
+ resetable << cattr_klass
26
+ end
27
+
28
+ yield(job)
29
+
30
+ resetable.each(&:reset)
31
+ end
32
+ end
33
+ end
34
+ end
35
+ end
36
+ end
@@ -0,0 +1,28 @@
1
+ # frozen_string_literal: true
2
+
3
+ module Karafka
4
+ module ActiveJob
5
+ module CurrentAttributes
6
+ # Module adding the current attributes persistence into the ActiveJob jobs
7
+ module Persistence
8
+ # Alters the job serialization to inject the current attributes into the json before we
9
+ # send it to Kafka
10
+ #
11
+ # @param job [ActiveJob::Base] job
12
+ def serialize_job(job)
13
+ json = super(job)
14
+
15
+ _cattr_klasses.each do |key, cattr_klass_str|
16
+ next if json.key?(key)
17
+
18
+ attrs = cattr_klass_str.constantize.attributes
19
+
20
+ json[key] = attrs unless attrs.empty?
21
+ end
22
+
23
+ json
24
+ end
25
+ end
26
+ end
27
+ end
28
+ end
@@ -0,0 +1,42 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'active_support/current_attributes'
4
+ require_relative 'current_attributes/loading'
5
+ require_relative 'current_attributes/persistence'
6
+
7
+ # This code is based on Sidekiqs approach to persisting current attributes
8
+ # @see https://github.com/sidekiq/sidekiq/blob/main/lib/sidekiq/middleware/current_attributes.rb
9
+ module Karafka
10
+ module ActiveJob
11
+ # Module that allows to persist current attributes on Karafka jobs
12
+ module CurrentAttributes
13
+ # Allows for persistence of given current attributes via AJ + Karafka
14
+ #
15
+ # @param klasses [Array<String, Class>] classes or names of the current attributes classes
16
+ def persist(*klasses)
17
+ # Support for providing multiple classes
18
+ klasses = Array(klasses).flatten
19
+
20
+ [Dispatcher, Consumer]
21
+ .reject { |expandable| expandable.respond_to?(:_cattr_klasses) }
22
+ .each { |expandable| expandable.class_attribute :_cattr_klasses, default: {} }
23
+
24
+ # Do not double inject in case of running persist multiple times
25
+ Dispatcher.prepend(Persistence) unless Dispatcher.ancestors.include?(Persistence)
26
+ Consumer.prepend(Loading) unless Consumer.ancestors.include?(Loading)
27
+
28
+ klasses.map(&:to_s).each do |stringified_klass|
29
+ # Prevent registering same klass multiple times
30
+ next if Dispatcher._cattr_klasses.value?(stringified_klass)
31
+
32
+ key = "cattr_#{Dispatcher._cattr_klasses.count}"
33
+
34
+ Dispatcher._cattr_klasses[key] = stringified_klass
35
+ Consumer._cattr_klasses[key] = stringified_klass
36
+ end
37
+ end
38
+
39
+ module_function :persist
40
+ end
41
+ end
42
+ end