karafka 1.4.4 → 2.1.10

Sign up to get free protection for your applications and to get access to all the features.
Files changed (315) hide show
  1. checksums.yaml +4 -4
  2. checksums.yaml.gz.sig +0 -0
  3. data/.github/FUNDING.yml +1 -3
  4. data/.github/workflows/ci.yml +117 -36
  5. data/.rspec +4 -0
  6. data/.ruby-version +1 -1
  7. data/CHANGELOG.md +611 -578
  8. data/CONTRIBUTING.md +10 -19
  9. data/Gemfile +7 -0
  10. data/Gemfile.lock +59 -100
  11. data/LICENSE +17 -0
  12. data/LICENSE-COMM +89 -0
  13. data/LICENSE-LGPL +165 -0
  14. data/README.md +64 -66
  15. data/bin/benchmarks +85 -0
  16. data/bin/create_token +22 -0
  17. data/bin/integrations +297 -0
  18. data/bin/karafka +4 -12
  19. data/bin/rspecs +6 -0
  20. data/bin/scenario +29 -0
  21. data/bin/stress_many +13 -0
  22. data/bin/stress_one +13 -0
  23. data/bin/verify_license_integrity +37 -0
  24. data/certs/cert_chain.pem +26 -0
  25. data/certs/karafka-pro.pem +11 -0
  26. data/config/locales/errors.yml +84 -0
  27. data/config/locales/pro_errors.yml +39 -0
  28. data/docker-compose.yml +13 -3
  29. data/karafka.gemspec +27 -22
  30. data/lib/active_job/karafka.rb +17 -0
  31. data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
  32. data/lib/karafka/active_job/consumer.rb +49 -0
  33. data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
  34. data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
  35. data/lib/karafka/active_job/current_attributes.rb +42 -0
  36. data/lib/karafka/active_job/dispatcher.rb +69 -0
  37. data/lib/karafka/active_job/job_extensions.rb +34 -0
  38. data/lib/karafka/active_job/job_options_contract.rb +32 -0
  39. data/lib/karafka/admin.rb +286 -0
  40. data/lib/karafka/app.rb +47 -23
  41. data/lib/karafka/base_consumer.rb +247 -29
  42. data/lib/karafka/cli/base.rb +24 -4
  43. data/lib/karafka/cli/console.rb +13 -8
  44. data/lib/karafka/cli/info.rb +45 -10
  45. data/lib/karafka/cli/install.rb +22 -12
  46. data/lib/karafka/cli/server.rb +63 -41
  47. data/lib/karafka/cli/topics.rb +146 -0
  48. data/lib/karafka/cli.rb +4 -11
  49. data/lib/karafka/connection/client.rb +502 -89
  50. data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
  51. data/lib/karafka/connection/listener.rb +294 -38
  52. data/lib/karafka/connection/listeners_batch.rb +40 -0
  53. data/lib/karafka/connection/messages_buffer.rb +84 -0
  54. data/lib/karafka/connection/pauses_manager.rb +46 -0
  55. data/lib/karafka/connection/proxy.rb +92 -0
  56. data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
  57. data/lib/karafka/connection/rebalance_manager.rb +90 -0
  58. data/lib/karafka/contracts/base.rb +17 -0
  59. data/lib/karafka/contracts/config.rb +88 -11
  60. data/lib/karafka/contracts/consumer_group.rb +32 -187
  61. data/lib/karafka/contracts/server_cli_options.rb +80 -19
  62. data/lib/karafka/contracts/topic.rb +65 -0
  63. data/lib/karafka/contracts.rb +1 -1
  64. data/lib/karafka/embedded.rb +36 -0
  65. data/lib/karafka/env.rb +46 -0
  66. data/lib/karafka/errors.rb +26 -21
  67. data/lib/karafka/helpers/async.rb +33 -0
  68. data/lib/karafka/helpers/colorize.rb +26 -0
  69. data/lib/karafka/helpers/multi_delegator.rb +2 -2
  70. data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
  71. data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
  72. data/lib/karafka/instrumentation/logger.rb +5 -9
  73. data/lib/karafka/instrumentation/logger_listener.rb +299 -0
  74. data/lib/karafka/instrumentation/monitor.rb +13 -61
  75. data/lib/karafka/instrumentation/notifications.rb +75 -0
  76. data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
  77. data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
  78. data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +153 -0
  79. data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
  80. data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
  81. data/lib/karafka/licenser.rb +78 -0
  82. data/lib/karafka/messages/batch_metadata.rb +52 -0
  83. data/lib/karafka/messages/builders/batch_metadata.rb +40 -0
  84. data/lib/karafka/messages/builders/message.rb +36 -0
  85. data/lib/karafka/messages/builders/messages.rb +36 -0
  86. data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
  87. data/lib/karafka/messages/messages.rb +71 -0
  88. data/lib/karafka/{params → messages}/metadata.rb +4 -6
  89. data/lib/karafka/messages/parser.rb +14 -0
  90. data/lib/karafka/messages/seek.rb +12 -0
  91. data/lib/karafka/patches/rdkafka/bindings.rb +139 -0
  92. data/lib/karafka/pro/active_job/consumer.rb +47 -0
  93. data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
  94. data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
  95. data/lib/karafka/pro/encryption/cipher.rb +58 -0
  96. data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
  97. data/lib/karafka/pro/encryption/errors.rb +24 -0
  98. data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
  99. data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
  100. data/lib/karafka/pro/encryption/setup/config.rb +48 -0
  101. data/lib/karafka/pro/encryption.rb +47 -0
  102. data/lib/karafka/pro/iterator/expander.rb +95 -0
  103. data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
  104. data/lib/karafka/pro/iterator.rb +170 -0
  105. data/lib/karafka/pro/loader.rb +102 -0
  106. data/lib/karafka/pro/performance_tracker.rb +84 -0
  107. data/lib/karafka/pro/processing/collapser.rb +62 -0
  108. data/lib/karafka/pro/processing/coordinator.rb +148 -0
  109. data/lib/karafka/pro/processing/filters/base.rb +61 -0
  110. data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
  111. data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
  112. data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
  113. data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
  114. data/lib/karafka/pro/processing/filters_applier.rb +105 -0
  115. data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
  116. data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
  117. data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
  118. data/lib/karafka/pro/processing/partitioner.rb +69 -0
  119. data/lib/karafka/pro/processing/scheduler.rb +75 -0
  120. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
  121. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
  122. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
  123. data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
  124. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
  125. data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
  126. data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
  127. data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
  128. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
  129. data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
  130. data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
  131. data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
  132. data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
  133. data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
  134. data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
  135. data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
  136. data/lib/karafka/pro/processing/strategies/base.rb +26 -0
  137. data/lib/karafka/pro/processing/strategies/default.rb +105 -0
  138. data/lib/karafka/pro/processing/strategies/dlq/default.rb +131 -0
  139. data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
  140. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
  141. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
  142. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
  143. data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
  144. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
  145. data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
  146. data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
  147. data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
  148. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
  149. data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
  150. data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
  151. data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
  152. data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
  153. data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
  154. data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
  155. data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
  156. data/lib/karafka/pro/processing/strategies/lrj/default.rb +87 -0
  157. data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
  158. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
  159. data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
  160. data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
  161. data/lib/karafka/pro/processing/strategies/lrj/mom.rb +82 -0
  162. data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
  163. data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
  164. data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
  165. data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
  166. data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
  167. data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
  168. data/lib/karafka/pro/processing/strategies/vp/default.rb +104 -0
  169. data/lib/karafka/pro/processing/strategies.rb +22 -0
  170. data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
  171. data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
  172. data/lib/karafka/pro/routing/features/base.rb +24 -0
  173. data/lib/karafka/pro/routing/features/dead_letter_queue/contract.rb +50 -0
  174. data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
  175. data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
  176. data/lib/karafka/pro/routing/features/delaying/contract.rb +38 -0
  177. data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
  178. data/lib/karafka/pro/routing/features/delaying.rb +29 -0
  179. data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
  180. data/lib/karafka/pro/routing/features/expiring/contract.rb +38 -0
  181. data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
  182. data/lib/karafka/pro/routing/features/expiring.rb +27 -0
  183. data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
  184. data/lib/karafka/pro/routing/features/filtering/contract.rb +41 -0
  185. data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
  186. data/lib/karafka/pro/routing/features/filtering.rb +27 -0
  187. data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
  188. data/lib/karafka/pro/routing/features/long_running_job/contract.rb +37 -0
  189. data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
  190. data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
  191. data/lib/karafka/pro/routing/features/pausing/contract.rb +48 -0
  192. data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
  193. data/lib/karafka/pro/routing/features/pausing.rb +25 -0
  194. data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
  195. data/lib/karafka/pro/routing/features/throttling/contract.rb +41 -0
  196. data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
  197. data/lib/karafka/pro/routing/features/throttling.rb +30 -0
  198. data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
  199. data/lib/karafka/pro/routing/features/virtual_partitions/contract.rb +52 -0
  200. data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
  201. data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
  202. data/lib/karafka/pro.rb +13 -0
  203. data/lib/karafka/process.rb +24 -8
  204. data/lib/karafka/processing/coordinator.rb +181 -0
  205. data/lib/karafka/processing/coordinators_buffer.rb +62 -0
  206. data/lib/karafka/processing/executor.rb +148 -0
  207. data/lib/karafka/processing/executors_buffer.rb +72 -0
  208. data/lib/karafka/processing/jobs/base.rb +55 -0
  209. data/lib/karafka/processing/jobs/consume.rb +45 -0
  210. data/lib/karafka/processing/jobs/idle.rb +24 -0
  211. data/lib/karafka/processing/jobs/revoked.rb +22 -0
  212. data/lib/karafka/processing/jobs/shutdown.rb +23 -0
  213. data/lib/karafka/processing/jobs_builder.rb +28 -0
  214. data/lib/karafka/processing/jobs_queue.rb +150 -0
  215. data/lib/karafka/processing/partitioner.rb +24 -0
  216. data/lib/karafka/processing/result.rb +42 -0
  217. data/lib/karafka/processing/scheduler.rb +22 -0
  218. data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
  219. data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
  220. data/lib/karafka/processing/strategies/base.rb +52 -0
  221. data/lib/karafka/processing/strategies/default.rb +158 -0
  222. data/lib/karafka/processing/strategies/dlq.rb +88 -0
  223. data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
  224. data/lib/karafka/processing/strategies/mom.rb +29 -0
  225. data/lib/karafka/processing/strategy_selector.rb +47 -0
  226. data/lib/karafka/processing/worker.rb +93 -0
  227. data/lib/karafka/processing/workers_batch.rb +27 -0
  228. data/lib/karafka/railtie.rb +125 -0
  229. data/lib/karafka/routing/activity_manager.rb +84 -0
  230. data/lib/karafka/routing/builder.rb +34 -23
  231. data/lib/karafka/routing/consumer_group.rb +47 -21
  232. data/lib/karafka/routing/consumer_mapper.rb +1 -12
  233. data/lib/karafka/routing/features/active_job/builder.rb +33 -0
  234. data/lib/karafka/routing/features/active_job/config.rb +15 -0
  235. data/lib/karafka/routing/features/active_job/contract.rb +41 -0
  236. data/lib/karafka/routing/features/active_job/topic.rb +33 -0
  237. data/lib/karafka/routing/features/active_job.rb +13 -0
  238. data/lib/karafka/routing/features/base/expander.rb +53 -0
  239. data/lib/karafka/routing/features/base.rb +34 -0
  240. data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
  241. data/lib/karafka/routing/features/dead_letter_queue/contract.rb +42 -0
  242. data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
  243. data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
  244. data/lib/karafka/routing/features/declaratives/config.rb +18 -0
  245. data/lib/karafka/routing/features/declaratives/contract.rb +30 -0
  246. data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
  247. data/lib/karafka/routing/features/declaratives.rb +14 -0
  248. data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
  249. data/lib/karafka/routing/features/manual_offset_management/contract.rb +24 -0
  250. data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
  251. data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
  252. data/lib/karafka/routing/proxy.rb +18 -20
  253. data/lib/karafka/routing/router.rb +28 -3
  254. data/lib/karafka/routing/subscription_group.rb +91 -0
  255. data/lib/karafka/routing/subscription_groups_builder.rb +58 -0
  256. data/lib/karafka/routing/topic.rb +77 -24
  257. data/lib/karafka/routing/topics.rb +46 -0
  258. data/lib/karafka/runner.rb +52 -0
  259. data/lib/karafka/serialization/json/deserializer.rb +7 -15
  260. data/lib/karafka/server.rb +108 -37
  261. data/lib/karafka/setup/attributes_map.rb +347 -0
  262. data/lib/karafka/setup/config.rb +183 -179
  263. data/lib/karafka/status.rb +54 -7
  264. data/lib/karafka/templates/example_consumer.rb.erb +16 -0
  265. data/lib/karafka/templates/karafka.rb.erb +34 -56
  266. data/lib/karafka/time_trackers/base.rb +14 -0
  267. data/lib/karafka/time_trackers/pause.rb +122 -0
  268. data/lib/karafka/time_trackers/poll.rb +69 -0
  269. data/lib/karafka/version.rb +1 -1
  270. data/lib/karafka.rb +90 -16
  271. data/renovate.json +6 -0
  272. data.tar.gz.sig +0 -0
  273. metadata +290 -172
  274. metadata.gz.sig +0 -0
  275. data/MIT-LICENCE +0 -18
  276. data/certs/mensfeld.pem +0 -25
  277. data/config/errors.yml +0 -41
  278. data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
  279. data/lib/karafka/attributes_map.rb +0 -63
  280. data/lib/karafka/backends/inline.rb +0 -16
  281. data/lib/karafka/base_responder.rb +0 -226
  282. data/lib/karafka/cli/flow.rb +0 -48
  283. data/lib/karafka/cli/missingno.rb +0 -19
  284. data/lib/karafka/code_reloader.rb +0 -67
  285. data/lib/karafka/connection/api_adapter.rb +0 -159
  286. data/lib/karafka/connection/batch_delegator.rb +0 -55
  287. data/lib/karafka/connection/builder.rb +0 -23
  288. data/lib/karafka/connection/message_delegator.rb +0 -36
  289. data/lib/karafka/consumers/batch_metadata.rb +0 -10
  290. data/lib/karafka/consumers/callbacks.rb +0 -71
  291. data/lib/karafka/consumers/includer.rb +0 -64
  292. data/lib/karafka/consumers/responders.rb +0 -24
  293. data/lib/karafka/consumers/single_params.rb +0 -15
  294. data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
  295. data/lib/karafka/contracts/responder_usage.rb +0 -54
  296. data/lib/karafka/fetcher.rb +0 -42
  297. data/lib/karafka/helpers/class_matcher.rb +0 -88
  298. data/lib/karafka/helpers/config_retriever.rb +0 -46
  299. data/lib/karafka/helpers/inflector.rb +0 -26
  300. data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
  301. data/lib/karafka/params/batch_metadata.rb +0 -26
  302. data/lib/karafka/params/builders/batch_metadata.rb +0 -30
  303. data/lib/karafka/params/builders/params.rb +0 -38
  304. data/lib/karafka/params/builders/params_batch.rb +0 -25
  305. data/lib/karafka/params/params_batch.rb +0 -60
  306. data/lib/karafka/patches/ruby_kafka.rb +0 -47
  307. data/lib/karafka/persistence/client.rb +0 -29
  308. data/lib/karafka/persistence/consumers.rb +0 -45
  309. data/lib/karafka/persistence/topics.rb +0 -48
  310. data/lib/karafka/responders/builder.rb +0 -36
  311. data/lib/karafka/responders/topic.rb +0 -55
  312. data/lib/karafka/routing/topic_mapper.rb +0 -53
  313. data/lib/karafka/serialization/json/serializer.rb +0 -31
  314. data/lib/karafka/setup/configurators/water_drop.rb +0 -36
  315. data/lib/karafka/templates/application_responder.rb.erb +0 -11
data/README.md CHANGED
@@ -2,100 +2,98 @@
2
2
 
3
3
  [![Build Status](https://github.com/karafka/karafka/actions/workflows/ci.yml/badge.svg)](https://github.com/karafka/karafka/actions/workflows/ci.yml)
4
4
  [![Gem Version](https://badge.fury.io/rb/karafka.svg)](http://badge.fury.io/rb/karafka)
5
- [![Join the chat at https://gitter.im/karafka/karafka](https://badges.gitter.im/karafka/karafka.svg)](https://gitter.im/karafka/karafka)
5
+ [![Join the chat at https://slack.karafka.io](https://raw.githubusercontent.com/karafka/misc/master/slack.svg)](https://slack.karafka.io)
6
6
 
7
- **Note**: We're finishing the new Karafka `2.0` but for now, please use `1.4`. All the documentation presented here refers to `1.4`
8
- ..
9
- Documentation presented here refers to Karafka `1.4`.
7
+ **Note**: Upgrade instructions for migration from Karafka `1.4` to Karafka `2.0` can be found [here](https://karafka.io/docs/Upgrades-2.0/).
10
8
 
11
9
  ## About Karafka
12
10
 
13
- Framework used to simplify Apache Kafka based Ruby applications development.
14
-
15
- Karafka allows you to capture everything that happens in your systems in large scale, providing you with a seamless and stable core for consuming and processing this data, without having to focus on things that are not your business domain.
16
-
17
- Karafka not only handles incoming messages but also provides tools for building complex data-flow applications that receive and send messages.
18
-
19
- ## How does it work
20
-
21
- Karafka provides a higher-level abstraction that allows you to focus on your business logic development, instead of focusing on implementing lower level abstraction layers. It provides developers with a set of tools that are dedicated for building multi-topic applications similar to how Rails applications are being built.
22
-
23
- ### Some things you might wonder about:
11
+ Karafka is a Ruby and Rails multi-threaded efficient Kafka processing framework that:
12
+
13
+ - Has a built-in [Web UI](https://karafka.io/docs/Web-UI-Features/) providing a convenient way to monitor and manage Karafka-based applications.
14
+ - Supports parallel processing in [multiple threads](https://karafka.io/docs/Concurrency-and-multithreading) (also for a [single topic partition](https://karafka.io/docs/Pro-Virtual-Partitions) work)
15
+ - [Automatically integrates](https://karafka.io/docs/Integrating-with-Ruby-on-Rails-and-other-frameworks#integrating-with-ruby-on-rails) with Ruby on Rails
16
+ - Has [ActiveJob backend](https://karafka.io/docs/Active-Job) support (including [ordered jobs](https://karafka.io/docs/Pro-Enhanced-Active-Job#ordered-jobs))
17
+ - Has a seamless [Dead Letter Queue](https://karafka.io/docs/Dead-Letter-Queue/) functionality built-in
18
+ - Supports in-development [code reloading](https://karafka.io/docs/Auto-reload-of-code-changes-in-development)
19
+ - Is powered by [librdkafka](https://github.com/edenhill/librdkafka) (the Apache Kafka C/C++ client library)
20
+ - Has an out-of-the-box [StatsD/DataDog monitoring](https://karafka.io/docs/Monitoring-and-logging) with a dashboard template.
21
+
22
+ ```ruby
23
+ # Define what topics you want to consume with which consumers in karafka.rb
24
+ Karafka::App.routes.draw do
25
+ topic 'system_events' do
26
+ consumer EventsConsumer
27
+ end
28
+ end
29
+
30
+ # And create your consumers, within which your messages will be processed
31
+ class EventsConsumer < ApplicationConsumer
32
+ # Example that utilizes ActiveRecord#insert_all and Karafka batch processing
33
+ def consume
34
+ # Store all of the incoming Kafka events locally in an efficient way
35
+ Event.insert_all messages.payloads
36
+ end
37
+ end
38
+ ```
24
39
 
25
- - You can integrate Karafka with **any** Ruby-based application.
26
- - Karafka does **not** require Sidekiq or any other third party software (apart from Kafka itself).
27
- - Karafka works with Ruby on Rails but it is a **standalone** framework that can work without it.
28
- - Karafka has a **minimal** set of dependencies, so adding it won't be a huge burden for your already existing applications.
29
- - Karafka processes can be executed for a **given subset** of consumer groups and/or topics, so you can fine tune it depending on your business logic.
40
+ Karafka **uses** threads to handle many messages simultaneously in the same process. It does not require Rails but will integrate tightly with any Ruby on Rails applications to make event processing dead simple.
30
41
 
31
- Karafka based applications can be easily deployed to any type of infrastructure, including those based on:
42
+ ## Getting started
32
43
 
33
- * Heroku
34
- * Capistrano
35
- * Docker
36
- * Terraform
44
+ ![karafka web ui](https://raw.githubusercontent.com/karafka/misc/master/printscreens/web-ui.png)
37
45
 
38
- ## Support
46
+ If you're entirely new to the subject, you can start with our "Kafka on Rails" articles series, which will get you up and running with the terminology and basic ideas behind using Kafka:
39
47
 
40
- Karafka has a [Wiki pages](https://github.com/karafka/karafka/wiki) for almost everything and a pretty decent [FAQ](https://github.com/karafka/karafka/wiki/FAQ). It covers the whole installation, setup, and deployment along with other useful details on how to run Karafka.
48
+ - [Kafka on Rails: Using Kafka with Ruby on Rails Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
49
+ - [Kafka on Rails: Using Kafka with Ruby on Rails – Part 2 – Getting started with Rails and Kafka](https://mensfeld.pl/2018/01/kafka-on-rails-using-kafka-with-ruby-on-rails-part-2-getting-started-with-ruby-and-kafka/)
41
50
 
42
- If you have any questions about using Karafka, feel free to join our [Gitter](https://gitter.im/karafka/karafka) chat channel.
51
+ If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to visit our [Getting started](https://karafka.io/docs/Getting-Started) guides and the [example apps repository](https://github.com/karafka/example-apps).
43
52
 
44
- ## Getting started
53
+ We also maintain many [integration specs](https://github.com/karafka/karafka/tree/master/spec/integrations) illustrating various use-cases and features of the framework.
45
54
 
46
- If you're completely new to the subject, you can start with our "Kafka on Rails" articles series, that will get you up and running with the terminology and basic ideas behind using Kafka:
55
+ ### TL;DR (1 minute from setup to publishing and consuming messages)
47
56
 
48
- - [Kafka on Rails: Using Kafka with Ruby on Rails Part 1 – Kafka basics and its advantages](https://mensfeld.pl/2017/11/kafka-on-rails-using-kafka-with-ruby-on-rails-part-1-kafka-basics-and-its-advantages/)
49
- - [Kafka on Rails: Using Kafka with Ruby on Rails – Part 2 – Getting started with Ruby and Kafka](https://mensfeld.pl/2018/01/kafka-on-rails-using-kafka-with-ruby-on-rails-part-2-getting-started-with-ruby-and-kafka/)
57
+ **Prerequisites**: Kafka running. You can start it by following instructions from [here](https://karafka.io/docs/Setting-up-Kafka).
50
58
 
51
- If you want to get started with Kafka and Karafka as fast as possible, then the best idea is to just clone our example repository:
59
+ 1. Add and install Karafka:
52
60
 
53
61
  ```bash
54
- git clone https://github.com/karafka/example-app ./example_app
55
- ```
62
+ # Make sure to install Karafka 2.1
63
+ bundle add karafka --version ">= 2.1.2"
56
64
 
57
- then, just bundle install all the dependencies:
58
-
59
- ```bash
60
- cd ./example_app
61
- bundle install
65
+ bundle exec karafka install
62
66
  ```
63
67
 
64
- and follow the instructions from the [example app Wiki](https://github.com/karafka/example-app/blob/master/README.md).
65
-
66
- **Note**: you need to ensure, that you have Kafka up and running and you need to configure Kafka seed_brokers in the ```karafka.rb``` file.
67
-
68
- If you need more details and know how on how to start Karafka with a clean installation, read the [Getting started page](https://github.com/karafka/karafka/wiki/Getting-started) section of our Wiki.
69
-
70
- ## Notice
71
-
72
- Karafka framework and Karafka team are __not__ related to Kafka streaming service called CloudKarafka in any matter. We don't recommend nor discourage usage of their platform.
68
+ 2. Dispatch a message to the example topic using the Rails or Ruby console:
73
69
 
74
- ## References
75
-
76
- * [Karafka framework](https://github.com/karafka/karafka)
77
- * [Karafka GitHub Actions](https://github.com/karafka/karafka/actions)
78
- * [Karafka Coditsu](https://app.coditsu.io/karafka/repositories/karafka)
70
+ ```ruby
71
+ Karafka.producer.produce_sync(topic: 'example', payload: { 'ping' => 'pong' }.to_json)
72
+ ```
79
73
 
80
- ## Note on contributions
74
+ 3. Run Karafka server and see the consumption magic happen:
81
75
 
82
- First, thank you for considering contributing to Karafka! It's people like you that make the open source community such a great community!
76
+ ```bash
77
+ bundle exec karafka server
83
78
 
84
- Each pull request must pass all the RSpec specs and meet our quality requirements.
79
+ [86d47f0b92f7] Polled 1 message in 1000ms
80
+ [3732873c8a74] Consume job for ExampleConsumer on example started
81
+ {"ping"=>"pong"}
82
+ [3732873c8a74] Consume job for ExampleConsumer on example finished in 0ms
83
+ ```
85
84
 
86
- To check if everything is as it should be, we use [Coditsu](https://coditsu.io) that combines multiple linters and code analyzers for both code and documentation. Once you're done with your changes, submit a pull request.
85
+ ## Want to Upgrade? LGPL is not for you? Want to help?
87
86
 
88
- Coditsu will automatically check your work against our quality standards. You can find your commit check results on the [builds page](https://app.coditsu.io/karafka/commit_builds) of Karafka organization.
87
+ I also sell Karafka Pro subscriptions. It includes a commercial-friendly license, priority support, architecture consultations, enhanced Web UI and high throughput data processing-related features (virtual partitions, long-running jobs, and more).
89
88
 
90
- [![coditsu](https://coditsu.io/assets/quality_bar.svg)](https://app.coditsu.io/karafka/commit_builds)
89
+ **10%** of the income will be distributed back to other OSS projects that Karafka uses under the hood.
91
90
 
92
- ## Contributors
91
+ Help me provide high-quality open-source software. Please see the Karafka [homepage](https://karafka.io/#become-pro) for more details.
93
92
 
94
- This project exists thanks to all the people who contribute.
95
- <a href="https://github.com/karafka/karafka/graphs/contributors"><img src="https://opencollective.com/karafka/contributors.svg?width=890" /></a>
93
+ ## Support
96
94
 
97
- ## Sponsors
95
+ Karafka has [Wiki pages](https://karafka.io/docs) for almost everything and a pretty decent [FAQ](https://karafka.io/docs/FAQ). It covers the installation, setup, and deployment, along with other useful details on how to run Karafka.
98
96
 
99
- We are looking for sustainable sponsorship. If your company is relying on Karafka framework or simply want to see Karafka evolve faster to meet your requirements, please consider backing the project.
97
+ If you have questions about using Karafka, feel free to join our [Slack](https://slack.karafka.io) channel.
100
98
 
101
- Please contact [Maciej Mensfeld](mailto:maciej@mensfeld.pl) directly for more details.
99
+ Karafka has [priority support](https://karafka.io/docs/Pro-Support) for technical and architectural questions that is part of the Karafka Pro subscription.
data/bin/benchmarks ADDED
@@ -0,0 +1,85 @@
1
#!/usr/bin/env ruby

# Runner for running given benchmark cases
# Some of the cases require pre-populated data and we populate this in places that need it
# In other cases we generate this data in a background process, so the partitions data stream
# is consistent and we don't end up consuming huge batches of a single partition.

require 'open3'
require 'pathname'
# Needed for Hash#to_json during seeding; stdlib, loaded explicitly so seeding does not
# depend on what the benchmarks helper happens to require
require 'json'

$LOAD_PATH.unshift(File.dirname(__FILE__))
$LOAD_PATH.unshift(File.join(File.dirname(__FILE__), '..'))

ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))

# Load all the benchmarks
benchmarks = Dir[ROOT_PATH.join('spec/benchmarks/**/*.rb')]

# If filter is provided, apply
benchmarks.delete_if { |name| !name.include?(ARGV[0]) } if ARGV[0]

raise ArgumentError, "No benchmarks with filter: #{ARGV[0]}" if benchmarks.empty?

# We may skip seeding if we are running the benchmarks multiple times, then since we do not
# commit offsets we can skip generating more data
if ENV['SEED']
  require 'spec/benchmarks_helper'

  # We need to setup karafka here to have producer for data seeding
  setup_karafka

  # This takes some time but needs to run only once per benchmark session
  puts 'Seeding benchmarks data...'

  producer = Karafka::App.producer

  # We make our data json compatible so we can also benchmark serialization
  elements = Array.new(100_000) { { a: :b }.to_json }

  # We do not populate data of benchmarks_0_10 as we use it with live-stream data only
  %w[
    benchmarks_00_01
    benchmarks_00_05
  ].each do |topic_name|
    partitions_count = topic_name.split('_').last.to_i

    partitions_count.times do |partition|
      puts "Seeding #{topic_name}:#{partition}"

      elements.each_slice(10_000) do |data_slice|
        # NOTE: the block parameter was renamed from `data` — the original shadowed the
        # outer `data` messages array it was building
        messages = data_slice.map do |payload|
          { topic: topic_name, payload: payload, partition: partition }
        end

        producer.buffer_many(messages)
        producer.flush_sync
      end
    end
  end
end

# Selects requested benchmarks and runs them one after another
benchmarks.each do |benchmark_path|
  puts "Running #{benchmark_path.gsub("#{ROOT_PATH}/spec/benchmarks/", '')}"

  benchmark = "bundle exec ruby -r ./spec/benchmarks_helper.rb #{benchmark_path}"

  Open3.popen3(benchmark) do |_stdin, stdout, stderr, thread|
    # One reader thread per stream so the child never blocks on a full pipe buffer
    readers = [stdout, stderr].map do |stream|
      Thread.new do
        while (line = stream.gets)
          puts(line)
        end
      rescue IOError
        # Stream closed while reading - benign, the benchmark process is done
      end
    end

    thread.join
    # Join the readers too so we do not drop trailing output of the benchmark
    # (the original never joined them)
    readers.each(&:join)
  end
end
data/bin/create_token ADDED
@@ -0,0 +1,22 @@
1
#!/usr/bin/env ruby

# Generates a Karafka Pro token for a given entity by encrypting the token payload
# with the maintainer's private RSA key and printing it Base64-encoded to stdout.

require 'openssl'
require 'base64'
require 'json'
require 'date'

PRIVATE_KEY_LOCATION = File.join(Dir.home, '.ssh', 'karafka-pro', 'id_rsa')

# Name of the entity that acquires the license
ENTITY = ARGV[0]

raise ArgumentError, 'Entity missing' if ENTITY.nil? || ENTITY.empty?

# This code uses my private key to generate a new token for Karafka Pro capabilities
rsa_key = OpenSSL::PKey::RSA.new(File.read(PRIVATE_KEY_LOCATION))

token_payload = { entity: ENTITY }.to_json
encrypted_token = rsa_key.private_encrypt(token_payload)

puts Base64.encode64(encrypted_token)
data/bin/integrations ADDED
@@ -0,0 +1,297 @@
1
#!/usr/bin/env ruby

# Runner to run integration specs in parallel

# Part of integration specs run linear without bundler.
# If we would run bundle exec when running this code, bundler would inject its own context
# into them, messing things up heavily
#
# Types of specs:
# - regular - can run in parallel, includes all the helpers
# - pristine - cannot run in parallel, uses custom bundler but includes helpers
# - poro - cannot run in parallel, uses custom bundler, does not include any helpers
raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)

require 'open3'
require 'fileutils'
require 'pathname'
require 'tmpdir'
require 'etc'

ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))

# How many child processes with integration specs do we want to run in parallel
# When the value is high, there's a problem with thread allocation on Github CI, that is why
# we limit it. Locally we can run a lot of those, as many of them have sleeps and do not use a lot
# of CPU. Locally we also cannot go beyond certain limit due to how often and how many topics we
# create in Kafka. With an overloaded system, we start getting timeouts.
CONCURRENCY = ENV.key?('CI') ? 5 : Etc.nprocessors * 3

# How many bytes do we want to keep from the stdout in the buffer for when we need to print it
MAX_BUFFER_OUTPUT = 51_200
32
+
33
# Abstraction around a single test scenario execution process
class Scenario
  # How long a scenario can run before we kill it
  # This is a fail-safe just in case something would hang
  MAX_RUN_TIME = 5 * 60 # 5 minutes tops

  # There are rare cases where Karafka may force shutdown for some of the integration cases
  # This includes exactly those
  EXIT_CODES = {
    default: [0],
    'consumption/worker_critical_error_behaviour_spec.rb' => [0, 2].freeze,
    'shutdown/on_hanging_jobs_and_a_shutdown_spec.rb' => [2].freeze,
    'shutdown/on_hanging_on_shutdown_job_and_a_shutdown_spec.rb' => [2].freeze,
    'shutdown/on_hanging_listener_and_shutdown_spec.rb' => [2].freeze
  }.freeze

  private_constant :MAX_RUN_TIME, :EXIT_CODES

  # Creates scenario instance and runs in the background process
  #
  # @param path [String] path to the scenarios file
  def initialize(path)
    @path = path
    # First MAX_BUFFER_OUTPUT bytes captured from stdout
    @stdout_head = ''
    # Last MAX_BUFFER_OUTPUT bytes captured from stdout
    @stdout_tail = ''
  end

  # Starts running given scenario in a separate process
  def start
    @stdin, @stdout, @stderr, @wait_thr = Open3.popen3(init_and_build_cmd)
    @started_at = current_time
  end

  # @return [String] integration spec name
  def name
    @path.gsub("#{ROOT_PATH}/spec/integrations/", '')
  end

  # @return [Symbol] type of spec (:poro, :pristine or :regular) based on its directory name
  def type
    scenario_dir = File.dirname(@path)

    return :poro if scenario_dir.include?('_poro')
    return :pristine if scenario_dir.include?('_pristine')

    :regular
  end

  # @return [Boolean] any spec that is not a regular one should not run in parallel with others
  def linear?
    type != :regular
  end

  # @return [Boolean] did this scenario finish or is it still running
  def finished?
    # If the thread is running too long, kill it
    if current_time - @started_at > MAX_RUN_TIME
      begin
        Process.kill('TERM', pid)
      # It may finish right after we want to kill it, that's why we ignore this
      rescue Errno::ESRCH
      end
    end

    # We read it so it won't grow as we use our default logger that prints to both test.log and
    # to stdout. Otherwise after reaching the buffer size, it would hang
    buffer = ''
    @stdout.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)
    @stdout_head = buffer if @stdout_head.empty?
    @stdout_tail << buffer
    # Keep only the last MAX_BUFFER_OUTPUT bytes of the tail
    @stdout_tail = @stdout_tail[-MAX_BUFFER_OUTPUT..-1] || @stdout_tail

    !@wait_thr.alive?
  end

  # @return [Boolean] did this scenario finish successfully or not
  def success?
    expected_exit_codes = EXIT_CODES[name] || EXIT_CODES[:default]

    expected_exit_codes.include?(exit_code)
  end

  # @return [Integer] pid of the process of this scenario
  def pid
    @wait_thr.pid
  end

  # @return [Integer] exit code of the process running given scenario
  def exit_code
    # There may be no exit status if we killed the thread
    @wait_thr.value&.exitstatus || 123
  end

  # @return [String] exit status of the process
  def exit_status
    @wait_thr.value.to_s
  end

  # Prints a status report when scenario is finished and stdout if it failed
  def report
    if success?
      print "\e[#{32}m#{'.'}\e[0m"
    else
      buffer = ''

      @stderr.read_nonblock(MAX_BUFFER_OUTPUT, buffer, exception: false)

      puts
      puts "\e[#{31}m#{'[FAILED]'}\e[0m #{name}"
      puts "Time taken: #{current_time - @started_at} seconds"
      puts "Exit code: #{exit_code}"
      puts "Exit status: #{exit_status}"
      puts @stdout_head
      puts '...'
      puts @stdout_tail
      puts buffer
      puts
    end
  end

  # @return [Float] number of seconds that a given spec took to run
  def time_taken
    @finished_at - @started_at
  end

  # Close all the files that are open, so they do not pile up
  def close
    @finished_at = current_time
    @stdin.close
    @stdout.close
    @stderr.close
  end

  private

  # Sets up a proper environment for a given spec to run and returns the run command
  # @return [String] run command
  def init_and_build_cmd
    case type
    when :poro
      scenario_dir = File.dirname(@path)
      # We copy the spec into a temp dir, not to pollute the spec location with logs, etc
      temp_dir = Dir.mktmpdir
      file_name = File.basename(@path)

      FileUtils.cp_r("#{scenario_dir}/.", temp_dir)

      # NOTE(review): PRISTINE_MODE is set for :poro as well as :pristine — confirm intended
      <<~CMD
        cd #{temp_dir} &&
        KARAFKA_GEM_DIR=#{ROOT_PATH} \
        BUNDLE_AUTO_INSTALL=true \
        PRISTINE_MODE=true \
        bundle exec ruby #{file_name}
      CMD
    when :pristine
      scenario_dir = File.dirname(@path)
      # We copy the spec into a temp dir, not to pollute the spec location with logs, etc
      temp_dir = Dir.mktmpdir
      file_name = File.basename(@path)

      FileUtils.cp_r("#{scenario_dir}/.", temp_dir)

      <<~CMD
        cd #{temp_dir} &&
        KARAFKA_GEM_DIR=#{ROOT_PATH} \
        BUNDLE_AUTO_INSTALL=true \
        PRISTINE_MODE=true \
        bundle exec ruby -r #{ROOT_PATH}/spec/integrations_helper.rb #{file_name}
      CMD
    else
      <<~CMD
        KARAFKA_GEM_DIR=#{ROOT_PATH} \
        bundle exec ruby -r ./spec/integrations_helper.rb #{@path}
      CMD
    end
  end

  # @return [Float] current machine time (monotonic, immune to wall-clock adjustments)
  def current_time
    Process.clock_gettime(Process::CLOCK_MONOTONIC)
  end
end
217
+
218
# Load all the specs
specs = Dir[ROOT_PATH.join('spec/integrations/**/*_spec.rb')]

# If filters is provided, apply
# Allows to provide several filters one after another and applies all of them
ARGV.each do |filter|
  specs.delete_if { |name| !name.include?(filter) }
end

raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?

# Randomize order
seed = (ENV['SEED'] || rand(0..10_000)).to_i

puts "Random seed: #{seed}"

scenarios = specs
            .shuffle(random: Random.new(seed))
            .map { |integration_test| Scenario.new(integration_test) }

regulars = scenarios.reject(&:linear?)
linears = scenarios - regulars

active_scenarios = []
finished_scenarios = []

while finished_scenarios.size < scenarios.size
  # Fill all the free slots instead of starting at most one scenario per tick; the original
  # started a single scenario per 0.1s sleep, so reaching full CONCURRENCY took
  # CONCURRENCY * 0.1s for no reason
  while active_scenarios.size < CONCURRENCY
    scenario = nil
    # We can run only one linear at the same time due to concurrency issues within bundler
    # Since they usually take longer than others, we try to run them as fast as possible when
    # there is a slot
    scenario = linears.pop unless active_scenarios.any?(&:linear?)
    scenario ||= regulars.pop

    # Nothing left to schedule right now
    break unless scenario

    scenario.start
    active_scenarios << scenario
  end

  # Reap whatever has finished so its slot can be reused on the next tick
  active_scenarios.select(&:finished?).each do |exited|
    scenario = active_scenarios.delete(exited)
    scenario.report
    scenario.close
    finished_scenarios << scenario
  end

  sleep(0.1)
end

# Report longest scenarios
puts
puts "\nLongest scenarios:\n\n"

finished_scenarios.sort_by(&:time_taken).reverse.first(10).each do |long_scenario|
  puts "[#{'%6.2f' % long_scenario.time_taken}] #{long_scenario.name}"
end

failed_scenarios = finished_scenarios.reject(&:success?)

if failed_scenarios.empty?
  puts
else
  # Report once more on the failed jobs
  # This will only list scenarios that failed without printing their stdout here.
  puts
  puts "\nFailed scenarios:\n\n"

  failed_scenarios.each do |scenario|
    puts "\e[#{31}m#{'[FAILED]'}\e[0m #{scenario.name}"
  end

  puts

  # Exit with 1 if not all scenarios were successful
  exit 1
end
data/bin/karafka CHANGED
@@ -2,18 +2,10 @@
2
2
 
3
3
require 'karafka'

# We set this to indicate that the process in which we are (whatever it does) was started using
# our bin/karafka cli
ENV['KARAFKA_CLI'] = 'true'

# Load available CLI commands, register them, then dispatch based on ARGV
Karafka::Cli::Base.load
Karafka::Cli.prepare
Karafka::Cli.start
data/bin/rspecs ADDED
@@ -0,0 +1,6 @@
1
#!/usr/bin/env bash

# Runs the RSpec suite in two passes: first the regular (non-Pro) specs, then the
# Pro-tagged ones, each with the matching SPECS_TYPE env flag so the helpers can
# adjust their setup accordingly.

set -e

SPECS_TYPE=regular bundle exec rspec --tag ~type:pro
SPECS_TYPE=pro bundle exec rspec --tag type:pro
data/bin/scenario ADDED
@@ -0,0 +1,29 @@
1
#!/usr/bin/env ruby

# Runner for non-parallel execution of a single scenario.
# It prints all the info stdout, etc and basically replaces itself with the scenario execution.
# It is useful when we work with a single spec and we need all the debug info

raise 'This code needs to be executed WITHOUT bundle exec' if Kernel.const_defined?(:Bundler)

require 'open3'
require 'fileutils'
require 'pathname'
require 'tmpdir'
require 'etc'

ROOT_PATH = Pathname.new(File.expand_path(File.join(File.dirname(__FILE__), '../')))

# Load all the specs
specs = Dir[ROOT_PATH.join('spec/integrations/**/*.rb')]

# If filters is provided, apply
# Allows to provide several filters one after another and applies all of them
ARGV.each do |filter|
  specs.keep_if { |spec_path| spec_path.include?(filter) }
end

raise ArgumentError, "No integration specs with filters: #{ARGV.join(', ')}" if specs.empty?
raise ArgumentError, "Many specs found with filters: #{ARGV.join(', ')}" if specs.size != 1

# Replace this process with the single matched scenario so all its output goes straight through
exec("bundle exec ruby -r #{ROOT_PATH}/spec/integrations_helper.rb #{specs[0]}")
data/bin/stress_many ADDED
@@ -0,0 +1,13 @@
1
#!/bin/bash

# Runs integration specs in an endless loop
# This allows us to ensure (after long enough time) that the integrations test suit is stable and
# that there are no anomalies when running it for a long period of time

set -e

while :
do
  clear
  # Forward all CLI arguments quoted; the original unquoted $1 was subject to word
  # splitting/globbing and silently dropped any additional filter arguments
  bin/integrations "$@"
done
data/bin/stress_one ADDED
@@ -0,0 +1,13 @@
1
#!/bin/bash

# Runs a single integration spec in an endless loop
# This allows us to ensure (after long enough time) that the integration spec is stable and
# that there are no anomalies when running it for a long period of time

set -e

while :
do
  clear
  # Forward all CLI arguments quoted; the original unquoted $1 was subject to word
  # splitting/globbing and silently dropped any additional filter arguments
  bin/scenario "$@"
done