karafka 1.4.12 → 2.2.10

Files changed (359)
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -0
  4. data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
  5. data/.github/workflows/ci.yml +169 -31
  6. data/.rspec +4 -0
  7. data/.ruby-version +1 -1
  8. data/CHANGELOG.md +716 -607
  9. data/CONTRIBUTING.md +10 -19
  10. data/Gemfile +7 -0
  11. data/Gemfile.lock +69 -92
  12. data/LICENSE +17 -0
  13. data/LICENSE-COMM +89 -0
  14. data/LICENSE-LGPL +165 -0
  15. data/README.md +48 -47
  16. data/bin/benchmarks +99 -0
  17. data/bin/create_token +22 -0
  18. data/bin/integrations +310 -0
  19. data/bin/karafka +5 -14
  20. data/bin/record_rss +50 -0
  21. data/bin/rspecs +6 -0
  22. data/bin/scenario +29 -0
  23. data/bin/stress_many +13 -0
  24. data/bin/stress_one +13 -0
  25. data/bin/verify_license_integrity +37 -0
  26. data/bin/wait_for_kafka +24 -0
  27. data/certs/cert_chain.pem +26 -0
  28. data/certs/karafka-pro.pem +11 -0
  29. data/config/locales/errors.yml +97 -0
  30. data/config/locales/pro_errors.yml +59 -0
  31. data/docker-compose.yml +19 -11
  32. data/karafka.gemspec +26 -22
  33. data/lib/active_job/karafka.rb +17 -0
  34. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  35. data/lib/karafka/active_job/consumer.rb +49 -0
  36. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  37. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  38. data/lib/karafka/active_job/current_attributes.rb +42 -0
  39. data/lib/karafka/active_job/dispatcher.rb +69 -0
  40. data/lib/karafka/active_job/job_extensions.rb +34 -0
  41. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  42. data/lib/karafka/admin.rb +313 -0
  43. data/lib/karafka/app.rb +47 -23
  44. data/lib/karafka/base_consumer.rb +260 -29
  45. data/lib/karafka/cli/base.rb +67 -36
  46. data/lib/karafka/cli/console.rb +18 -12
  47. data/lib/karafka/cli/help.rb +24 -0
  48. data/lib/karafka/cli/info.rb +47 -12
  49. data/lib/karafka/cli/install.rb +23 -14
  50. data/lib/karafka/cli/server.rb +101 -44
  51. data/lib/karafka/cli/topics.rb +146 -0
  52. data/lib/karafka/cli.rb +24 -27
  53. data/lib/karafka/connection/client.rb +553 -90
  54. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  55. data/lib/karafka/connection/listener.rb +294 -38
  56. data/lib/karafka/connection/listeners_batch.rb +40 -0
  57. data/lib/karafka/connection/messages_buffer.rb +84 -0
  58. data/lib/karafka/connection/pauses_manager.rb +46 -0
  59. data/lib/karafka/connection/proxy.rb +98 -0
  60. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  61. data/lib/karafka/connection/rebalance_manager.rb +105 -0
  62. data/lib/karafka/contracts/base.rb +17 -0
  63. data/lib/karafka/contracts/config.rb +130 -11
  64. data/lib/karafka/contracts/consumer_group.rb +32 -187
  65. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  66. data/lib/karafka/contracts/topic.rb +65 -0
  67. data/lib/karafka/contracts.rb +1 -1
  68. data/lib/karafka/embedded.rb +36 -0
  69. data/lib/karafka/env.rb +46 -0
  70. data/lib/karafka/errors.rb +37 -21
  71. data/lib/karafka/helpers/async.rb +33 -0
  72. data/lib/karafka/helpers/colorize.rb +26 -0
  73. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  74. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  75. data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
  76. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  77. data/lib/karafka/instrumentation/logger_listener.rb +303 -0
  78. data/lib/karafka/instrumentation/monitor.rb +13 -61
  79. data/lib/karafka/instrumentation/notifications.rb +79 -0
  80. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  81. data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
  82. data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
  83. data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
  84. data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
  85. data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
  86. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  87. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
  88. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  89. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  90. data/lib/karafka/licenser.rb +78 -0
  91. data/lib/karafka/messages/batch_metadata.rb +52 -0
  92. data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
  93. data/lib/karafka/messages/builders/message.rb +40 -0
  94. data/lib/karafka/messages/builders/messages.rb +36 -0
  95. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  96. data/lib/karafka/messages/messages.rb +71 -0
  97. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  98. data/lib/karafka/messages/parser.rb +14 -0
  99. data/lib/karafka/messages/seek.rb +12 -0
  100. data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
  101. data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
  102. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  103. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  104. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  105. data/lib/karafka/pro/cleaner/errors.rb +27 -0
  106. data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
  107. data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
  108. data/lib/karafka/pro/cleaner.rb +41 -0
  109. data/lib/karafka/pro/contracts/base.rb +23 -0
  110. data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
  111. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  112. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  113. data/lib/karafka/pro/encryption/errors.rb +27 -0
  114. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  115. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  116. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  117. data/lib/karafka/pro/encryption.rb +47 -0
  118. data/lib/karafka/pro/iterator/expander.rb +95 -0
  119. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  120. data/lib/karafka/pro/iterator.rb +170 -0
  121. data/lib/karafka/pro/loader.rb +106 -0
  122. data/lib/karafka/pro/performance_tracker.rb +84 -0
  123. data/lib/karafka/pro/processing/collapser.rb +62 -0
  124. data/lib/karafka/pro/processing/coordinator.rb +147 -0
  125. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  126. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  127. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  128. data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
  129. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  130. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  131. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  132. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  133. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  134. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  135. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  136. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  137. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  138. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  139. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  140. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  141. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  142. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  143. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  144. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  146. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  147. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  148. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  149. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  150. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  151. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  152. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  153. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  154. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  155. data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
  156. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  157. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  158. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  159. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  160. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  161. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  162. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  163. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  164. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  165. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  166. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  167. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  168. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  169. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  170. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  171. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  172. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  173. data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
  174. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  175. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  176. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  177. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  178. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
  179. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  180. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  181. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  182. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  183. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  184. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  185. data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
  186. data/lib/karafka/pro/processing/strategies.rb +22 -0
  187. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  188. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  189. data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
  190. data/lib/karafka/pro/routing/features/active_job.rb +26 -0
  191. data/lib/karafka/pro/routing/features/base.rb +24 -0
  192. data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
  193. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  194. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  195. data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
  196. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  197. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  198. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  199. data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
  200. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  201. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  202. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  203. data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
  204. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  205. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  206. data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
  207. data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
  208. data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
  209. data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
  210. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  211. data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
  212. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  213. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  214. data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
  215. data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
  216. data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
  217. data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
  218. data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
  219. data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
  220. data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
  221. data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
  222. data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
  223. data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
  224. data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
  225. data/lib/karafka/pro/routing/features/patterns.rb +33 -0
  226. data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
  227. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  228. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  229. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  230. data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
  231. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  232. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  233. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  234. data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
  235. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  236. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  237. data/lib/karafka/pro.rb +13 -0
  238. data/lib/karafka/process.rb +24 -8
  239. data/lib/karafka/processing/coordinator.rb +181 -0
  240. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  241. data/lib/karafka/processing/executor.rb +155 -0
  242. data/lib/karafka/processing/executors_buffer.rb +72 -0
  243. data/lib/karafka/processing/expansions_selector.rb +22 -0
  244. data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
  245. data/lib/karafka/processing/inline_insights/listener.rb +19 -0
  246. data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
  247. data/lib/karafka/processing/jobs/base.rb +55 -0
  248. data/lib/karafka/processing/jobs/consume.rb +45 -0
  249. data/lib/karafka/processing/jobs/idle.rb +24 -0
  250. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  251. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  252. data/lib/karafka/processing/jobs_builder.rb +28 -0
  253. data/lib/karafka/processing/jobs_queue.rb +150 -0
  254. data/lib/karafka/processing/partitioner.rb +24 -0
  255. data/lib/karafka/processing/result.rb +42 -0
  256. data/lib/karafka/processing/scheduler.rb +22 -0
  257. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  258. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  259. data/lib/karafka/processing/strategies/base.rb +52 -0
  260. data/lib/karafka/processing/strategies/default.rb +158 -0
  261. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  262. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  263. data/lib/karafka/processing/strategies/mom.rb +29 -0
  264. data/lib/karafka/processing/strategy_selector.rb +47 -0
  265. data/lib/karafka/processing/worker.rb +93 -0
  266. data/lib/karafka/processing/workers_batch.rb +27 -0
  267. data/lib/karafka/railtie.rb +141 -0
  268. data/lib/karafka/routing/activity_manager.rb +84 -0
  269. data/lib/karafka/routing/builder.rb +45 -19
  270. data/lib/karafka/routing/consumer_group.rb +56 -20
  271. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  272. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  273. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  274. data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
  275. data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
  276. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  277. data/lib/karafka/routing/features/active_job.rb +13 -0
  278. data/lib/karafka/routing/features/base/expander.rb +59 -0
  279. data/lib/karafka/routing/features/base.rb +71 -0
  280. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  281. data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
  282. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  283. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  284. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  285. data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
  286. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  287. data/lib/karafka/routing/features/declaratives.rb +14 -0
  288. data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
  289. data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
  290. data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
  291. data/lib/karafka/routing/features/inline_insights.rb +40 -0
  292. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  293. data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
  294. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  295. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  296. data/lib/karafka/routing/proxy.rb +22 -21
  297. data/lib/karafka/routing/router.rb +24 -10
  298. data/lib/karafka/routing/subscription_group.rb +110 -0
  299. data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
  300. data/lib/karafka/routing/topic.rb +87 -24
  301. data/lib/karafka/routing/topics.rb +46 -0
  302. data/lib/karafka/runner.rb +52 -0
  303. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  304. data/lib/karafka/server.rb +113 -37
  305. data/lib/karafka/setup/attributes_map.rb +348 -0
  306. data/lib/karafka/setup/config.rb +256 -175
  307. data/lib/karafka/status.rb +54 -7
  308. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  309. data/lib/karafka/templates/karafka.rb.erb +33 -55
  310. data/lib/karafka/time_trackers/base.rb +14 -0
  311. data/lib/karafka/time_trackers/pause.rb +122 -0
  312. data/lib/karafka/time_trackers/poll.rb +69 -0
  313. data/lib/karafka/version.rb +1 -1
  314. data/lib/karafka.rb +91 -17
  315. data/renovate.json +9 -0
  316. data.tar.gz.sig +0 -0
  317. metadata +330 -168
  318. metadata.gz.sig +0 -0
  319. data/MIT-LICENCE +0 -18
  320. data/certs/mensfeld.pem +0 -25
  321. data/config/errors.yml +0 -41
  322. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  323. data/lib/karafka/attributes_map.rb +0 -63
  324. data/lib/karafka/backends/inline.rb +0 -16
  325. data/lib/karafka/base_responder.rb +0 -226
  326. data/lib/karafka/cli/flow.rb +0 -48
  327. data/lib/karafka/cli/missingno.rb +0 -19
  328. data/lib/karafka/code_reloader.rb +0 -67
  329. data/lib/karafka/connection/api_adapter.rb +0 -158
  330. data/lib/karafka/connection/batch_delegator.rb +0 -55
  331. data/lib/karafka/connection/builder.rb +0 -23
  332. data/lib/karafka/connection/message_delegator.rb +0 -36
  333. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  334. data/lib/karafka/consumers/callbacks.rb +0 -71
  335. data/lib/karafka/consumers/includer.rb +0 -64
  336. data/lib/karafka/consumers/responders.rb +0 -24
  337. data/lib/karafka/consumers/single_params.rb +0 -15
  338. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  339. data/lib/karafka/contracts/responder_usage.rb +0 -54
  340. data/lib/karafka/fetcher.rb +0 -42
  341. data/lib/karafka/helpers/class_matcher.rb +0 -88
  342. data/lib/karafka/helpers/config_retriever.rb +0 -46
  343. data/lib/karafka/helpers/inflector.rb +0 -26
  344. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  345. data/lib/karafka/params/batch_metadata.rb +0 -26
  346. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  347. data/lib/karafka/params/builders/params.rb +0 -38
  348. data/lib/karafka/params/builders/params_batch.rb +0 -25
  349. data/lib/karafka/params/params_batch.rb +0 -60
  350. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  351. data/lib/karafka/persistence/client.rb +0 -29
  352. data/lib/karafka/persistence/consumers.rb +0 -45
  353. data/lib/karafka/persistence/topics.rb +0 -48
  354. data/lib/karafka/responders/builder.rb +0 -36
  355. data/lib/karafka/responders/topic.rb +0 -55
  356. data/lib/karafka/routing/topic_mapper.rb +0 -53
  357. data/lib/karafka/serialization/json/serializer.rb +0 -31
  358. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  359. data/lib/karafka/templates/application_responder.rb.erb +0 -11

data/lib/karafka/setup/config.rb

@@ -12,19 +12,54 @@ module Karafka
     # enough and will still keep the code simple
     # @see Karafka::Setup::Configurators::Base for more details about configurators api
     class Config
-      extend Dry::Configurable
+      extend ::Karafka::Core::Configurable

-      # Contract for checking the config provided by the user
-      CONTRACT = Karafka::Contracts::Config.new.freeze
+      # Defaults for kafka settings, that will be overwritten only if not present already
+      KAFKA_DEFAULTS = {
+        # We emit the statistics by default, so all the instrumentation and web-ui work out of
+        # the box, without requiring users to take any extra actions aside from enabling.
+        'statistics.interval.ms': 5_000,
+        'client.software.name': 'karafka',
+        'client.software.version': [
+          "v#{Karafka::VERSION}",
+          "rdkafka-ruby-v#{Rdkafka::VERSION}",
+          "librdkafka-v#{Rdkafka::LIBRDKAFKA_VERSION}"
+        ].join('-')
+      }.freeze

-      private_constant :CONTRACT
+      # Contains settings that should not be used in production but make life easier in dev
+      KAFKA_DEV_DEFAULTS = {
+        # Will create non-existing topics automatically.
+        # Note that the broker needs to be configured with `auto.create.topics.enable=true`
+        # While it is not recommended in prod, it simplifies work in dev
+        'allow.auto.create.topics': 'true',
+        # We refresh the cluster state often as newly created topics in dev may not be detected
+        # fast enough. Fast enough means within reasonable time to provide decent user experience
+        # While it's only a one time thing for new topics, it can still be irritating to have to
+        # restart the process.
+        'topic.metadata.refresh.interval.ms': 5_000
+      }.freeze
+
+      private_constant :KAFKA_DEFAULTS, :KAFKA_DEV_DEFAULTS

       # Available settings
+
+      # Namespace for Pro version related license management. If you use LGPL, no need to worry
+      # about any of this
+      setting :license do
+        # option token [String, false] - license token issued when you acquire a Pro license
+        # Leave false if using the LGPL version and all is going to work just fine :)
+        #
+        # @note By using the commercial components, you accept the LICENSE-COMM commercial license
+        #   terms and conditions
+        setting :token, default: false
+        # option entity [String] for whom we did issue the license
+        setting :entity, default: ''
+      end
+
       # option client_id [String] kafka client_id - used to provide
       # default Kafka groups namespaces and identify that app in kafka
-      setting :client_id
-      # What backend do we want to use to process messages
-      setting :backend, default: :inline
+      setting :client_id, default: 'karafka'
       # option logger [Instance] logger that we want to use
       setting :logger, default: ::Karafka::Instrumentation::Logger.new
       # option monitor [Instance] monitor that we will to use (defaults to Karafka::Monitor)
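
Note on the defaults above: KAFKA_DEFAULTS and KAFKA_DEV_DEFAULTS are only merged in for keys the user has not set, so overriding one of them is just a matter of providing the key yourself. A minimal, illustrative sketch (the broker address and the env var name are placeholders, and the license block is only relevant for Karafka Pro):

# karafka.rb (sketch)
Karafka::App.setup do |config|
  config.kafka = {
    'bootstrap.servers': '127.0.0.1:9092',
    # User-provided keys win; the default 5_000 ms interval applies only when this key is absent
    'statistics.interval.ms': 1_000
  }

  # Pro only; with the LGPL version the default (token: false) can stay untouched
  config.license.token = ENV['KARAFKA_PRO_LICENSE_TOKEN']
end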
@@ -33,195 +68,241 @@ module Karafka
       # or they need to maintain their own internal consumer group naming conventions, they
       # can easily do it, replacing the default client_id + consumer name pattern concept
       setting :consumer_mapper, default: Routing::ConsumerMapper.new
-      # Mapper used to remap names of topics, so we can have a clean internal topic naming
-      # despite using any Kafka provider that uses namespacing, etc
-      # It needs to implement two methods:
-      #   - #incoming - for remapping from the incoming message to our internal format
-      #   - #outgoing - for remapping from internal topic name into outgoing message
-      setting :topic_mapper, default: Routing::TopicMapper.new
-      # Default serializer for converting whatever we want to send to kafka to json
-      setting :serializer, default: Karafka::Serialization::Json::Serializer.new
+      # option [Boolean] should we reload consumers with each incoming batch thus effectively
+      # supporting code reload (if someone reloads code) or should we keep the persistence
+      setting :consumer_persistence, default: true
       # Default deserializer for converting incoming data into ruby objects
       setting :deserializer, default: Karafka::Serialization::Json::Deserializer.new
-      # If batch_fetching is true, we will fetch kafka messages in batches instead of 1 by 1
-      # @note Fetching does not equal consuming, see batch_consuming description for details
-      setting :batch_fetching, default: true
-      # If batch_consuming is true, we will have access to #params_batch instead of #params.
-      # #params_batch will contain params received from Kafka (may be more than 1) so we can
-      # process them in batches
-      setting :batch_consuming, default: false
-      # option shutdown_timeout [Integer, nil] the number of seconds after which Karafka no
-      # longer wait for the consumers to stop gracefully but instead we force terminate
+      # option [String] should we start with the earliest possible offset or latest
+      # This will set the `auto.offset.reset` value unless present in the kafka scope
+      setting :initial_offset, default: 'earliest'
+      # options max_messages [Integer] how many messages do we want to fetch from Kafka in one go
+      setting :max_messages, default: 100
+      # option [Integer] number of milliseconds we can wait while fetching data
+      setting :max_wait_time, default: 1_000
+      # option shutdown_timeout [Integer] the number of milliseconds after which Karafka no
+      # longer waits for the consumers to stop gracefully but instead we force terminate
       # everything.
-      setting :shutdown_timeout, default: 60
-
-      # option kafka [Hash] - optional - kafka configuration options
-      setting :kafka do
-        # Array with at least one host
-        setting :seed_brokers, default: %w[kafka://127.0.0.1:9092]
-        # option session_timeout [Integer] the number of seconds after which, if a client
-        # hasn't contacted the Kafka cluster, it will be kicked out of the group.
-        setting :session_timeout, default: 30
-        # Time that a given partition will be paused from fetching messages, when message
-        # consumption fails. It allows us to process other partitions, while the error is being
-        # resolved and also "slows" things down, so it prevents from "eating" up all messages and
-        # consuming them with failed code. Use `nil` if you want to pause forever and never retry.
-        setting :pause_timeout, default: 10
-        # option pause_max_timeout [Integer, nil] the maximum number of seconds to pause for,
-        # or `nil` if no maximum should be enforced.
-        setting :pause_max_timeout, default: nil
-        # option pause_exponential_backoff [Boolean] whether to enable exponential backoff
-        setting :pause_exponential_backoff, default: false
-        # option offset_commit_interval [Integer] the interval between offset commits,
-        # in seconds.
-        setting :offset_commit_interval, default: 10
-        # option offset_commit_threshold [Integer] the number of messages that can be
-        # processed before their offsets are committed. If zero, offset commits are
-        # not triggered by message consumption.
-        setting :offset_commit_threshold, default: 0
-        # option heartbeat_interval [Integer] the interval between heartbeats; must be less
-        # than the session window.
-        setting :heartbeat_interval, default: 10
-        # option offset_retention_time [Integer] The length of the retention window, known as
-        # offset retention time
-        setting :offset_retention_time, default: nil
-        # option fetcher_max_queue_size [Integer] max number of items in the fetch queue that
-        # are stored for further processing. Note, that each item in the queue represents a
-        # response from a single broker
-        setting :fetcher_max_queue_size, default: 10
-        # option assignment_strategy [Object] a strategy determining the assignment of
-        # partitions to the consumers.
-        setting :assignment_strategy, default: Karafka::AssignmentStrategies::RoundRobin.new
-        # option max_bytes_per_partition [Integer] the maximum amount of data fetched
-        # from a single partition at a time.
-        setting :max_bytes_per_partition, default: 1_048_576
-        # whether to consume messages starting at the beginning or to just consume new messages
-        setting :start_from_beginning, default: true
-        # option resolve_seed_brokers [Boolean] whether to resolve each hostname of the seed
-        # brokers
-        setting :resolve_seed_brokers, default: false
-        # option min_bytes [Integer] the minimum number of bytes to read before
-        # returning messages from the server; if `max_wait_time` is reached, this
-        # is ignored.
-        setting :min_bytes, default: 1
-        # option max_bytes [Integer] the maximum number of bytes to read before returning messages
-        # from each broker.
-        setting :max_bytes, default: 10_485_760
-        # option max_wait_time [Integer, Float] max_wait_time is the maximum number of seconds to
-        # wait before returning data from a single message fetch. By setting this high you also
-        # increase the fetching throughput - and by setting it low you set a bound on latency.
-        # This configuration overrides `min_bytes`, so you'll _always_ get data back within the
-        # time specified. The default value is one second. If you want to have at most five
-        # seconds of latency, set `max_wait_time` to 5. You should make sure
-        # max_wait_time * num brokers + heartbeat_interval is less than session_timeout.
-        setting :max_wait_time, default: 1
-        # option automatically_mark_as_consumed [Boolean] should we automatically mark received
-        # messages as consumed (processed) after non-error consumption
-        setting :automatically_mark_as_consumed, default: true
-        # option reconnect_timeout [Integer] How long should we wait before trying to reconnect to
-        # Kafka cluster that went down (in seconds)
-        setting :reconnect_timeout, default: 5
-        # option connect_timeout [Integer] Sets the number of seconds to wait while connecting to
-        # a broker for the first time. When ruby-kafka initializes, it needs to connect to at
-        # least one host.
-        setting :connect_timeout, default: 10
-        # option socket_timeout [Integer] Sets the number of seconds to wait when reading from or
-        # writing to a socket connection to a broker. After this timeout expires the connection
-        # will be killed. Note that some Kafka operations are by definition long-running, such as
-        # waiting for new messages to arrive in a partition, so don't set this value too low
-        setting :socket_timeout, default: 30
-        # option partitioner [Object, nil] the partitioner that should be used by the client
-        setting :partitioner, default: nil
-
-        # SSL authentication related settings
-        # option ca_cert [String, nil] SSL CA certificate
-        setting :ssl_ca_cert, default: nil
-        # option ssl_ca_cert_file_path [String, nil] SSL CA certificate file path
-        setting :ssl_ca_cert_file_path, default: nil
-        # option ssl_ca_certs_from_system [Boolean] Use the CA certs from your system's default
-        # certificate store
-        setting :ssl_ca_certs_from_system, default: false
-        # option ssl_verify_hostname [Boolean] Verify the hostname for client certs
-        setting :ssl_verify_hostname, default: true
-        # option ssl_client_cert [String, nil] SSL client certificate
-        setting :ssl_client_cert, default: nil
-        # option ssl_client_cert_key [String, nil] SSL client certificate password
-        setting :ssl_client_cert_key, default: nil
-        # option sasl_gssapi_principal [String, nil] sasl principal
-        setting :sasl_gssapi_principal, default: nil
-        # option sasl_gssapi_keytab [String, nil] sasl keytab
-        setting :sasl_gssapi_keytab, default: nil
-        # option sasl_plain_authzid [String] The authorization identity to use
-        setting :sasl_plain_authzid, default: ''
-        # option sasl_plain_username [String, nil] The username used to authenticate
-        setting :sasl_plain_username, default: nil
-        # option sasl_plain_password [String, nil] The password used to authenticate
-        setting :sasl_plain_password, default: nil
-        # option sasl_scram_username [String, nil] The username used to authenticate
-        setting :sasl_scram_username, default: nil
-        # option sasl_scram_password [String, nil] The password used to authenticate
-        setting :sasl_scram_password, default: nil
-        # option sasl_scram_mechanism [String, nil] Scram mechanism, either 'sha256' or 'sha512'
-        setting :sasl_scram_mechanism, default: nil
-        # option sasl_over_ssl [Boolean] whether to enforce SSL with SASL
-        setting :sasl_over_ssl, default: true
-        # option ssl_client_cert_chain [String, nil] client cert chain or nil if not used
-        setting :ssl_client_cert_chain, default: nil
-        # option ssl_client_cert_key_password [String, nil] the password required to read
-        # the ssl_client_cert_key
-        setting :ssl_client_cert_key_password, default: nil
-        # @param sasl_oauth_token_provider [Object, nil] OAuthBearer Token Provider instance that
-        # implements method token.
-        setting :sasl_oauth_token_provider, default: nil
+      setting :shutdown_timeout, default: 60_000
+      # option [Integer] number of threads in which we want to do parallel processing
+      setting :concurrency, default: 5
+      # option [Integer] how long should we wait upon processing error (milliseconds)
+      setting :pause_timeout, default: 1_000
+      # option [Integer] what is the max timeout in case of an exponential backoff (milliseconds)
+      setting :pause_max_timeout, default: 30_000
+      # option [Boolean] should we use exponential backoff
+      setting :pause_with_exponential_backoff, default: true
+      # option [::WaterDrop::Producer, nil]
+      # Unless configured, will be created once Karafka is configured based on user Karafka setup
+      setting :producer, default: nil
+      # option [Boolean] when set to true, Karafka will ensure that the routing topic naming
+      # convention is strict
+      # Disabling this may be needed in scenarios where we do not have control over topics names
+      # and/or we work with existing systems where we cannot change topics names.
+      setting :strict_topics_namespacing, default: true
+
+      # rdkafka default options
+      # @see https://github.com/edenhill/librdkafka/blob/master/CONFIGURATION.md
+      setting :kafka, default: {}
+
+      # Admin specific settings.
+      #
+      # Since admin operations are often specific, they may require specific librdkafka settings
+      # or other settings that are unique to admin.
+      setting :admin do
+        # Specific kafka settings that are tuned to operate within the Admin.
+        #
+        # Please do not change them unless you know what you are doing as their misconfiguration
+        # may cause Admin API to misbehave
+        # option [Hash] extra changes to the default root kafka settings
+        setting :kafka, default: {
+          # We want to know when there is no more data not to end up with an endless loop
+          'enable.partition.eof': true,
+          # Do not publish statistics from admin as they are not relevant
+          'statistics.interval.ms': 0,
+          # Fetch at most 5 MBs when using admin
+          'fetch.message.max.bytes': 5 * 1_048_576,
+          # Do not commit offset automatically, this prevents offset tracking for operations
+          # involving a consumer instance
+          'enable.auto.commit': false,
+          # Make sure that topic metadata lookups do not create topics accidentally
+          'allow.auto.create.topics': false
+        }
+
+        # option [String] default name for the admin consumer group. Please note, that this is a
+        # subject to be remapped by the consumer mapper as any other consumer group in the routes
+        setting :group_id, default: 'karafka_admin'
+
+        # option max_wait_time [Integer] We wait only for this amount of time before raising error
+        # as we intercept this error and retry after checking that the operation was finished or
+        # failed using external factor.
+        setting :max_wait_time, default: 1_000
+
+        # How many times should be try. 1 000 ms x 60 => 60 seconds wait in total and then we give
+        # up on pending operations
+        setting :max_attempts, default: 60
       end

-      # Namespace for internal settings that should not be modified
-      # It's a temporary step to "declassify" several things internally before we move to a
-      # non global state
+      # Namespace for internal settings that should not be modified directly
       setting :internal do
-        # option routing_builder [Karafka::Routing::Builder] builder instance
-        setting :routing_builder, default: Routing::Builder.new
         # option status [Karafka::Status] app status
         setting :status, default: Status.new
         # option process [Karafka::Process] process status
         # @note In the future, we need to have a single process representation for all the karafka
         #   instances
         setting :process, default: Process.new
-        # option fetcher [Karafka::Fetcher] fetcher instance
-        setting :fetcher, default: Fetcher.new
-        # option configurators [Array<Object>] all configurators that we want to run after
-        # the setup
-        setting :configurators, default: [Configurators::WaterDrop.new]
+
+        # Namespace for CLI related settings
+        setting :cli do
+          # option contract [Object] cli setup validation contract (in the context of options and
+          #   topics)
+          setting :contract, default: Contracts::ServerCliOptions.new
+        end
+
+        setting :routing do
+          # option builder [Karafka::Routing::Builder] builder instance
+          setting :builder, default: Routing::Builder.new
+          # option subscription_groups_builder [Routing::SubscriptionGroupsBuilder] subscription
+          #   group builder
+          setting :subscription_groups_builder, default: Routing::SubscriptionGroupsBuilder.new
+
+          # Internally assigned list of limits on routings active for the current process
+          # This can be altered by the CLI command
+          setting :activity_manager, default: Routing::ActivityManager.new
+        end
+
+        # Namespace for internal connection related settings
+        setting :connection do
+          # Settings that are altered by our client proxy layer
+          setting :proxy do
+            # Watermark offsets request settings
+            setting :query_watermark_offsets do
+              # timeout for this request. For busy or remote clusters, this should be high enough
+              setting :timeout, default: 5_000
+              # How many times should we try to run this call before raising an error
+              setting :max_attempts, default: 3
+              # How long should we wait before next attempt in case of a failure
+              setting :wait_time, default: 1_000
+            end
+
+            # Offsets for times request settings
+            setting :offsets_for_times do
+              # timeout for this request. For busy or remote clusters, this should be high enough
+              setting :timeout, default: 5_000
+              # How many times should we try to run this call before raising an error
+              setting :max_attempts, default: 3
+              # How long should we wait before next attempt in case of a failure
+              setting :wait_time, default: 1_000
+            end
+          end
+        end
+
+        setting :processing do
+          # option scheduler [Object] scheduler we will be using
+          setting :scheduler, default: Processing::Scheduler.new
+          # option jobs_builder [Object] jobs builder we want to use
+          setting :jobs_builder, default: Processing::JobsBuilder.new
+          # option coordinator [Class] work coordinator we want to user for processing coordination
+          setting :coordinator_class, default: Processing::Coordinator
+          # option partitioner_class [Class] partitioner we use against a batch of data
+          setting :partitioner_class, default: Processing::Partitioner
+          # option strategy_selector [Object] processing strategy selector to be used
+          setting :strategy_selector, default: Processing::StrategySelector.new
+          # option expansions_selector [Object] processing expansions selector to be used
+          setting :expansions_selector, default: Processing::ExpansionsSelector.new
+        end
+
+        # Things related to operating on messages
+        setting :messages do
+          # Parser is used to convert raw payload prior to deserialization
+          setting :parser, default: Messages::Parser.new
+        end
+
+        # Karafka components for ActiveJob
+        setting :active_job do
+          # option dispatcher [Karafka::ActiveJob::Dispatcher] default dispatcher for ActiveJob
+          setting :dispatcher, default: ActiveJob::Dispatcher.new
+          # option job_options_contract [Karafka::Contracts::JobOptionsContract] contract for
+          #   ensuring, that extra job options defined are valid
+          setting :job_options_contract, default: ActiveJob::JobOptionsContract.new
+          # option consumer [Class] consumer class that should be used to consume ActiveJob data
+          setting :consumer_class, default: ActiveJob::Consumer
+        end
       end

+      # This will load all the defaults that can be later overwritten.
+      # Thanks to that we have an initial state out of the box.
+      configure
+
       class << self
         # Configuring method
-        # @yield Runs a block of code providing a config singleton instance to it
-        # @yieldparam [Karafka::Setup::Config] Karafka config instance
-        def setup
-          configure { |config| yield(config) }
-        end
+        # @param block [Proc] block we want to execute with the config instance
+        def setup(&block)
+          # Will prepare and verify license if present
+          Licenser.prepare_and_verify(config.license)
+
+          # Pre-setup configure all routing features that would need this
+          Routing::Features::Base.pre_setup_all(config)
+
+          # Will configure all the pro components
+          # This needs to happen before end user configuration as the end user may overwrite some
+          # of the pro defaults with custom components
+          Pro::Loader.pre_setup_all(config) if Karafka.pro?
+
+          configure(&block)
+          merge_kafka_defaults!(config)
+
+          Contracts::Config.new.validate!(config.to_h)
+
+          configure_components

-        # Everything that should be initialized after the setup
-        # Components are in karafka/config directory and are all loaded one by one
-        # If you want to configure a next component, please add a proper file to config dir
-        def setup_components
-          config
-            .internal
-            .configurators
-            .each { |configurator| configurator.call(config) }
+          # Refreshes the references that are cached that might have been changed by the config
+          ::Karafka.refresh!
+
+          # Post-setup configure all routing features that would need this
+          Routing::Features::Base.post_setup_all(config)
+
+          # Runs things that need to be executed after config is defined and all the components
+          # are also configured
+          Pro::Loader.post_setup_all(config) if Karafka.pro?
+
+          Karafka::App.initialized!
         end

-        # Validate config based on the config contract
-        # @return [Boolean] true if configuration is valid
-        # @raise [Karafka::Errors::InvalidConfigurationError] raised when configuration
-        #   doesn't match with the config contract
-        def validate!
-          validation_result = CONTRACT.call(config.to_h)
+        private
+
+        # Propagates the kafka setting defaults unless they are already present
+        # This makes it easier to set some values that users usually don't change but still allows
+        # them to overwrite the whole hash if they want to
+        # @param config [Karafka::Core::Configurable::Node] config of this producer
+        def merge_kafka_defaults!(config)
+          KAFKA_DEFAULTS.each do |key, value|
+            next if config.kafka.key?(key)
+
+            config.kafka[key] = value
+          end

-          return true if validation_result.success?
+          # Use Karafka client_id as kafka client id if not set
+          config.kafka[:'client.id'] ||= config.client_id
+
+          return if Karafka::App.env.production?
+
+          KAFKA_DEV_DEFAULTS.each do |key, value|
+            next if config.kafka.key?(key)
+
+            config.kafka[key] = value
+          end
+        end

-          raise Errors::InvalidConfigurationError, validation_result.errors.to_h
+        # Sets up all the components that are based on the user configuration
+        # @note At the moment it is only WaterDrop
+        def configure_components
+          config.producer ||= ::WaterDrop::Producer.new do |producer_config|
+            # In some cases WaterDrop updates the config and we don't want our consumer config to
+            # be polluted by those updates, that's why we copy
+            producer_config.kafka = AttributesMap.producer(config.kafka.dup)
+            producer_config.logger = config.logger
+          end
         end
       end
     end
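
Taken together, the removed ruby-kafka style `setting :kafka do ... end` block is replaced by a flat hash handed straight to librdkafka, while processing knobs (concurrency, polling and pause timings) move to the top level and are expressed in milliseconds. A hedged sketch of what a migrated setup might look like (all values are illustrative, not recommendations):

Karafka::App.setup do |config|
  config.client_id = 'my_application'
  # Anything librdkafka understands goes into this hash verbatim
  # (replaces the old seed_brokers, session_timeout and sasl_*/ssl_* settings)
  config.kafka = {
    'bootstrap.servers': '127.0.0.1:9092',
    'session.timeout.ms': 30_000
  }
  # New in 2.x: number of worker threads and polling limits
  config.concurrency = 5
  config.max_messages = 100
  config.max_wait_time = 1_000
  # Replaces start_from_beginning; maps to auto.offset.reset unless set in the kafka hash
  config.initial_offset = 'earliest'
  # Milliseconds now, not seconds
  config.shutdown_timeout = 60_000
end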

data/lib/karafka/status.rb

@@ -3,15 +3,43 @@
 module Karafka
   # App status monitor
   class Status
-    # Available states and their transitions
+    # Available states and their transitions.
     STATES = {
       initializing: :initialize!,
       initialized: :initialized!,
       running: :run!,
-      stopping: :stop!
+      # will no longer pickup any work, but current work will be finished
+      quieting: :quiet!,
+      # no work is happening but we keep process with the assignments running
+      quiet: :quieted!,
+      # shutdown started
+      stopping: :stop!,
+      # all things are done and most of the things except critical are closed
+      stopped: :stopped!,
+      # immediately after this process exists
+      terminated: :terminate!
     }.freeze

-    private_constant :STATES
+    # Mutex to ensure that state transitions are thread-safe
+    MUTEX = Mutex.new
+
+    private_constant :MUTEX
+
+    # By default we are in the initializing state
+    def initialize
+      initialize!
+    end
+
+    # @return [String] stringified current app status
+    def to_s
+      @status.to_s
+    end
+
+    # Resets the status state
+    # This is used mostly in the integration suite
+    def reset!
+      @status = :initializing
+    end

     STATES.each do |state, transition|
       define_method :"#{state}?" do
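
Each `state: :transition!` pair in the STATES hash above feeds the `STATES.each` metaprogramming that follows it: one `state?` predicate and one bang transition method per entry. A stripped-down, standalone illustration of that idiom (not Karafka code, just the pattern):

class MiniStatus
  STATES = { running: :run!, stopping: :stop! }.freeze

  STATES.each do |state, transition|
    # Generates running? / stopping?
    define_method(:"#{state}?") { @status == state }
    # Generates run! / stop!
    define_method(transition) { @status = state }
  end
end

status = MiniStatus.new
status.run!
status.running?  # => true
status.stopping? # => false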
@@ -19,11 +47,30 @@ module Karafka
       end

       define_method transition do
-        @status = state
-        # Trap context disallows to run certain things that we instrument
-        # so the state changes are executed from a separate thread
-        Thread.new { Karafka.monitor.instrument("app.#{state}") }.join
+        MUTEX.synchronize do
+          # Do not allow reverse state transitions (we always go one way) or transition to the same
+          # state as currently
+          return if @status && STATES.keys.index(state) <= STATES.keys.index(@status)
+
+          @status = state
+
+          # Skip on creation (initializing)
+          # We skip as during this state we do not have yet a monitor
+          return if initializing?
+
+          Karafka.monitor.instrument("app.#{state}")
+        end
       end
     end
+
+    # @return [Boolean] true if we are in any of the status that would indicate we should no longer
+    #   process incoming data. It is a meta status built from others and not a separate state in
+    #   the sense of a state machine
+    def done?
+      # Short-track for the most common case not to invoke all others on normal execution
+      return false if running?
+
+      stopping? || stopped? || quieting? || quiet? || terminated?
+    end
   end
 end
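
Since every successful transition is instrumented as `app.<state>`, application code can react to lifecycle changes by subscribing to those events. A minimal sketch, assuming these `app.*` events are registered with the default notifications bus (as the `instrument` call above implies):

# e.g. in karafka.rb, after Karafka::App.setup
Karafka.monitor.subscribe('app.quieting') do |_event|
  Karafka.logger.info('Quieting: finishing current work, no new work will be picked up')
end

Karafka.monitor.subscribe('app.stopped') do |_event|
  Karafka.logger.info('All consumption work is done')
end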

data/lib/karafka/templates/example_consumer.rb.erb

@@ -0,0 +1,16 @@
+# frozen_string_literal: true
+
+# Example consumer that prints messages payloads
+class ExampleConsumer < ApplicationConsumer
+  def consume
+    messages.each { |message| puts message.payload }
+  end
+
+  # Run anything upon partition being revoked
+  # def revoked
+  # end
+
+  # Define here any teardown things you want when Karafka server stops
+  # def shutdown
+  # end
+end
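
This generated consumer only runs once it is referenced from the routing. A typical, illustrative wiring in `karafka.rb` (topic name and broker address are placeholders; `ApplicationConsumer` is assumed to inherit from Karafka::BaseConsumer as in the generated install files):

class KarafkaApp < Karafka::App
  setup do |config|
    config.kafka = { 'bootstrap.servers': '127.0.0.1:9092' }
  end

  routes.draw do
    topic :example do
      consumer ExampleConsumer
    end
  end
end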