karafka 1.4.12 → 2.2.10
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +4 -4
- checksums.yaml.gz.sig +0 -0
- data/.github/FUNDING.yml +1 -0
- data/.github/ISSUE_TEMPLATE/bug_report.md +10 -9
- data/.github/workflows/ci.yml +169 -31
- data/.rspec +4 -0
- data/.ruby-version +1 -1
- data/CHANGELOG.md +716 -607
- data/CONTRIBUTING.md +10 -19
- data/Gemfile +7 -0
- data/Gemfile.lock +69 -92
- data/LICENSE +17 -0
- data/LICENSE-COMM +89 -0
- data/LICENSE-LGPL +165 -0
- data/README.md +48 -47
- data/bin/benchmarks +99 -0
- data/bin/create_token +22 -0
- data/bin/integrations +310 -0
- data/bin/karafka +5 -14
- data/bin/record_rss +50 -0
- data/bin/rspecs +6 -0
- data/bin/scenario +29 -0
- data/bin/stress_many +13 -0
- data/bin/stress_one +13 -0
- data/bin/verify_license_integrity +37 -0
- data/bin/wait_for_kafka +24 -0
- data/certs/cert_chain.pem +26 -0
- data/certs/karafka-pro.pem +11 -0
- data/config/locales/errors.yml +97 -0
- data/config/locales/pro_errors.yml +59 -0
- data/docker-compose.yml +19 -11
- data/karafka.gemspec +26 -22
- data/lib/active_job/karafka.rb +17 -0
- data/lib/active_job/queue_adapters/karafka_adapter.rb +32 -0
- data/lib/karafka/active_job/consumer.rb +49 -0
- data/lib/karafka/active_job/current_attributes/loading.rb +36 -0
- data/lib/karafka/active_job/current_attributes/persistence.rb +28 -0
- data/lib/karafka/active_job/current_attributes.rb +42 -0
- data/lib/karafka/active_job/dispatcher.rb +69 -0
- data/lib/karafka/active_job/job_extensions.rb +34 -0
- data/lib/karafka/active_job/job_options_contract.rb +32 -0
- data/lib/karafka/admin.rb +313 -0
- data/lib/karafka/app.rb +47 -23
- data/lib/karafka/base_consumer.rb +260 -29
- data/lib/karafka/cli/base.rb +67 -36
- data/lib/karafka/cli/console.rb +18 -12
- data/lib/karafka/cli/help.rb +24 -0
- data/lib/karafka/cli/info.rb +47 -12
- data/lib/karafka/cli/install.rb +23 -14
- data/lib/karafka/cli/server.rb +101 -44
- data/lib/karafka/cli/topics.rb +146 -0
- data/lib/karafka/cli.rb +24 -27
- data/lib/karafka/connection/client.rb +553 -90
- data/lib/karafka/connection/consumer_group_coordinator.rb +48 -0
- data/lib/karafka/connection/listener.rb +294 -38
- data/lib/karafka/connection/listeners_batch.rb +40 -0
- data/lib/karafka/connection/messages_buffer.rb +84 -0
- data/lib/karafka/connection/pauses_manager.rb +46 -0
- data/lib/karafka/connection/proxy.rb +98 -0
- data/lib/karafka/connection/raw_messages_buffer.rb +101 -0
- data/lib/karafka/connection/rebalance_manager.rb +105 -0
- data/lib/karafka/contracts/base.rb +17 -0
- data/lib/karafka/contracts/config.rb +130 -11
- data/lib/karafka/contracts/consumer_group.rb +32 -187
- data/lib/karafka/contracts/server_cli_options.rb +80 -19
- data/lib/karafka/contracts/topic.rb +65 -0
- data/lib/karafka/contracts.rb +1 -1
- data/lib/karafka/embedded.rb +36 -0
- data/lib/karafka/env.rb +46 -0
- data/lib/karafka/errors.rb +37 -21
- data/lib/karafka/helpers/async.rb +33 -0
- data/lib/karafka/helpers/colorize.rb +26 -0
- data/lib/karafka/helpers/multi_delegator.rb +2 -2
- data/lib/karafka/instrumentation/callbacks/error.rb +39 -0
- data/lib/karafka/instrumentation/callbacks/rebalance.rb +64 -0
- data/lib/karafka/instrumentation/callbacks/statistics.rb +51 -0
- data/lib/karafka/instrumentation/logger_listener.rb +303 -0
- data/lib/karafka/instrumentation/monitor.rb +13 -61
- data/lib/karafka/instrumentation/notifications.rb +79 -0
- data/lib/karafka/instrumentation/proctitle_listener.rb +7 -16
- data/lib/karafka/instrumentation/vendors/appsignal/base.rb +30 -0
- data/lib/karafka/instrumentation/vendors/appsignal/client.rb +122 -0
- data/lib/karafka/instrumentation/vendors/appsignal/dashboard.json +222 -0
- data/lib/karafka/instrumentation/vendors/appsignal/errors_listener.rb +30 -0
- data/lib/karafka/instrumentation/vendors/appsignal/metrics_listener.rb +331 -0
- data/lib/karafka/instrumentation/vendors/datadog/dashboard.json +1 -0
- data/lib/karafka/instrumentation/vendors/datadog/logger_listener.rb +155 -0
- data/lib/karafka/instrumentation/vendors/datadog/metrics_listener.rb +264 -0
- data/lib/karafka/instrumentation/vendors/kubernetes/liveness_listener.rb +176 -0
- data/lib/karafka/licenser.rb +78 -0
- data/lib/karafka/messages/batch_metadata.rb +52 -0
- data/lib/karafka/messages/builders/batch_metadata.rb +60 -0
- data/lib/karafka/messages/builders/message.rb +40 -0
- data/lib/karafka/messages/builders/messages.rb +36 -0
- data/lib/karafka/{params/params.rb → messages/message.rb} +20 -13
- data/lib/karafka/messages/messages.rb +71 -0
- data/lib/karafka/{params → messages}/metadata.rb +4 -6
- data/lib/karafka/messages/parser.rb +14 -0
- data/lib/karafka/messages/seek.rb +12 -0
- data/lib/karafka/patches/rdkafka/bindings.rb +122 -0
- data/lib/karafka/patches/rdkafka/opaque.rb +36 -0
- data/lib/karafka/pro/active_job/consumer.rb +47 -0
- data/lib/karafka/pro/active_job/dispatcher.rb +86 -0
- data/lib/karafka/pro/active_job/job_options_contract.rb +45 -0
- data/lib/karafka/pro/cleaner/errors.rb +27 -0
- data/lib/karafka/pro/cleaner/messages/message.rb +46 -0
- data/lib/karafka/pro/cleaner/messages/messages.rb +42 -0
- data/lib/karafka/pro/cleaner.rb +41 -0
- data/lib/karafka/pro/contracts/base.rb +23 -0
- data/lib/karafka/pro/contracts/server_cli_options.rb +111 -0
- data/lib/karafka/pro/encryption/cipher.rb +58 -0
- data/lib/karafka/pro/encryption/contracts/config.rb +79 -0
- data/lib/karafka/pro/encryption/errors.rb +27 -0
- data/lib/karafka/pro/encryption/messages/middleware.rb +46 -0
- data/lib/karafka/pro/encryption/messages/parser.rb +56 -0
- data/lib/karafka/pro/encryption/setup/config.rb +48 -0
- data/lib/karafka/pro/encryption.rb +47 -0
- data/lib/karafka/pro/iterator/expander.rb +95 -0
- data/lib/karafka/pro/iterator/tpl_builder.rb +155 -0
- data/lib/karafka/pro/iterator.rb +170 -0
- data/lib/karafka/pro/loader.rb +106 -0
- data/lib/karafka/pro/performance_tracker.rb +84 -0
- data/lib/karafka/pro/processing/collapser.rb +62 -0
- data/lib/karafka/pro/processing/coordinator.rb +147 -0
- data/lib/karafka/pro/processing/filters/base.rb +61 -0
- data/lib/karafka/pro/processing/filters/delayer.rb +70 -0
- data/lib/karafka/pro/processing/filters/expirer.rb +51 -0
- data/lib/karafka/pro/processing/filters/inline_insights_delayer.rb +78 -0
- data/lib/karafka/pro/processing/filters/throttler.rb +84 -0
- data/lib/karafka/pro/processing/filters/virtual_limiter.rb +52 -0
- data/lib/karafka/pro/processing/filters_applier.rb +105 -0
- data/lib/karafka/pro/processing/jobs/consume_non_blocking.rb +39 -0
- data/lib/karafka/pro/processing/jobs/revoked_non_blocking.rb +37 -0
- data/lib/karafka/pro/processing/jobs_builder.rb +50 -0
- data/lib/karafka/pro/processing/partitioner.rb +69 -0
- data/lib/karafka/pro/processing/scheduler.rb +75 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom.rb +70 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_lrj_mom_vp.rb +76 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom.rb +72 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_ftr_mom_vp.rb +76 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom.rb +66 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_lrj_mom_vp.rb +70 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_mom.rb +64 -0
- data/lib/karafka/pro/processing/strategies/aj/dlq_mom_vp.rb +69 -0
- data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom.rb +38 -0
- data/lib/karafka/pro/processing/strategies/aj/ftr_lrj_mom_vp.rb +66 -0
- data/lib/karafka/pro/processing/strategies/aj/ftr_mom.rb +38 -0
- data/lib/karafka/pro/processing/strategies/aj/ftr_mom_vp.rb +58 -0
- data/lib/karafka/pro/processing/strategies/aj/lrj_mom.rb +37 -0
- data/lib/karafka/pro/processing/strategies/aj/lrj_mom_vp.rb +82 -0
- data/lib/karafka/pro/processing/strategies/aj/mom.rb +36 -0
- data/lib/karafka/pro/processing/strategies/aj/mom_vp.rb +52 -0
- data/lib/karafka/pro/processing/strategies/base.rb +26 -0
- data/lib/karafka/pro/processing/strategies/default.rb +105 -0
- data/lib/karafka/pro/processing/strategies/dlq/default.rb +137 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr.rb +61 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj.rb +75 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom.rb +71 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_mom_vp.rb +43 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_lrj_vp.rb +41 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_mom.rb +69 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_mom_vp.rb +41 -0
- data/lib/karafka/pro/processing/strategies/dlq/ftr_vp.rb +40 -0
- data/lib/karafka/pro/processing/strategies/dlq/lrj.rb +64 -0
- data/lib/karafka/pro/processing/strategies/dlq/lrj_mom.rb +65 -0
- data/lib/karafka/pro/processing/strategies/dlq/lrj_mom_vp.rb +36 -0
- data/lib/karafka/pro/processing/strategies/dlq/lrj_vp.rb +39 -0
- data/lib/karafka/pro/processing/strategies/dlq/mom.rb +68 -0
- data/lib/karafka/pro/processing/strategies/dlq/mom_vp.rb +37 -0
- data/lib/karafka/pro/processing/strategies/dlq/vp.rb +40 -0
- data/lib/karafka/pro/processing/strategies/ftr/default.rb +111 -0
- data/lib/karafka/pro/processing/strategies/ftr/vp.rb +40 -0
- data/lib/karafka/pro/processing/strategies/lrj/default.rb +85 -0
- data/lib/karafka/pro/processing/strategies/lrj/ftr.rb +69 -0
- data/lib/karafka/pro/processing/strategies/lrj/ftr_mom.rb +67 -0
- data/lib/karafka/pro/processing/strategies/lrj/ftr_mom_vp.rb +40 -0
- data/lib/karafka/pro/processing/strategies/lrj/ftr_vp.rb +39 -0
- data/lib/karafka/pro/processing/strategies/lrj/mom.rb +77 -0
- data/lib/karafka/pro/processing/strategies/lrj/mom_vp.rb +38 -0
- data/lib/karafka/pro/processing/strategies/lrj/vp.rb +36 -0
- data/lib/karafka/pro/processing/strategies/mom/default.rb +46 -0
- data/lib/karafka/pro/processing/strategies/mom/ftr.rb +53 -0
- data/lib/karafka/pro/processing/strategies/mom/ftr_vp.rb +37 -0
- data/lib/karafka/pro/processing/strategies/mom/vp.rb +35 -0
- data/lib/karafka/pro/processing/strategies/vp/default.rb +124 -0
- data/lib/karafka/pro/processing/strategies.rb +22 -0
- data/lib/karafka/pro/processing/strategy_selector.rb +84 -0
- data/lib/karafka/pro/processing/virtual_offset_manager.rb +147 -0
- data/lib/karafka/pro/routing/features/active_job/builder.rb +45 -0
- data/lib/karafka/pro/routing/features/active_job.rb +26 -0
- data/lib/karafka/pro/routing/features/base.rb +24 -0
- data/lib/karafka/pro/routing/features/dead_letter_queue/contracts/topic.rb +53 -0
- data/lib/karafka/pro/routing/features/dead_letter_queue.rb +27 -0
- data/lib/karafka/pro/routing/features/delaying/config.rb +27 -0
- data/lib/karafka/pro/routing/features/delaying/contracts/topic.rb +41 -0
- data/lib/karafka/pro/routing/features/delaying/topic.rb +59 -0
- data/lib/karafka/pro/routing/features/delaying.rb +29 -0
- data/lib/karafka/pro/routing/features/expiring/config.rb +27 -0
- data/lib/karafka/pro/routing/features/expiring/contracts/topic.rb +41 -0
- data/lib/karafka/pro/routing/features/expiring/topic.rb +59 -0
- data/lib/karafka/pro/routing/features/expiring.rb +27 -0
- data/lib/karafka/pro/routing/features/filtering/config.rb +40 -0
- data/lib/karafka/pro/routing/features/filtering/contracts/topic.rb +44 -0
- data/lib/karafka/pro/routing/features/filtering/topic.rb +51 -0
- data/lib/karafka/pro/routing/features/filtering.rb +27 -0
- data/lib/karafka/pro/routing/features/inline_insights/config.rb +32 -0
- data/lib/karafka/pro/routing/features/inline_insights/contracts/topic.rb +41 -0
- data/lib/karafka/pro/routing/features/inline_insights/topic.rb +52 -0
- data/lib/karafka/pro/routing/features/inline_insights.rb +26 -0
- data/lib/karafka/pro/routing/features/long_running_job/config.rb +28 -0
- data/lib/karafka/pro/routing/features/long_running_job/contracts/topic.rb +40 -0
- data/lib/karafka/pro/routing/features/long_running_job/topic.rb +42 -0
- data/lib/karafka/pro/routing/features/long_running_job.rb +28 -0
- data/lib/karafka/pro/routing/features/patterns/builder.rb +38 -0
- data/lib/karafka/pro/routing/features/patterns/config.rb +54 -0
- data/lib/karafka/pro/routing/features/patterns/consumer_group.rb +72 -0
- data/lib/karafka/pro/routing/features/patterns/contracts/consumer_group.rb +62 -0
- data/lib/karafka/pro/routing/features/patterns/contracts/pattern.rb +46 -0
- data/lib/karafka/pro/routing/features/patterns/contracts/topic.rb +41 -0
- data/lib/karafka/pro/routing/features/patterns/detector.rb +71 -0
- data/lib/karafka/pro/routing/features/patterns/pattern.rb +95 -0
- data/lib/karafka/pro/routing/features/patterns/patterns.rb +35 -0
- data/lib/karafka/pro/routing/features/patterns/topic.rb +50 -0
- data/lib/karafka/pro/routing/features/patterns/topics.rb +53 -0
- data/lib/karafka/pro/routing/features/patterns.rb +33 -0
- data/lib/karafka/pro/routing/features/pausing/contracts/topic.rb +51 -0
- data/lib/karafka/pro/routing/features/pausing/topic.rb +44 -0
- data/lib/karafka/pro/routing/features/pausing.rb +25 -0
- data/lib/karafka/pro/routing/features/throttling/config.rb +32 -0
- data/lib/karafka/pro/routing/features/throttling/contracts/topic.rb +44 -0
- data/lib/karafka/pro/routing/features/throttling/topic.rb +69 -0
- data/lib/karafka/pro/routing/features/throttling.rb +30 -0
- data/lib/karafka/pro/routing/features/virtual_partitions/config.rb +30 -0
- data/lib/karafka/pro/routing/features/virtual_partitions/contracts/topic.rb +55 -0
- data/lib/karafka/pro/routing/features/virtual_partitions/topic.rb +56 -0
- data/lib/karafka/pro/routing/features/virtual_partitions.rb +27 -0
- data/lib/karafka/pro.rb +13 -0
- data/lib/karafka/process.rb +24 -8
- data/lib/karafka/processing/coordinator.rb +181 -0
- data/lib/karafka/processing/coordinators_buffer.rb +62 -0
- data/lib/karafka/processing/executor.rb +155 -0
- data/lib/karafka/processing/executors_buffer.rb +72 -0
- data/lib/karafka/processing/expansions_selector.rb +22 -0
- data/lib/karafka/processing/inline_insights/consumer.rb +41 -0
- data/lib/karafka/processing/inline_insights/listener.rb +19 -0
- data/lib/karafka/processing/inline_insights/tracker.rb +128 -0
- data/lib/karafka/processing/jobs/base.rb +55 -0
- data/lib/karafka/processing/jobs/consume.rb +45 -0
- data/lib/karafka/processing/jobs/idle.rb +24 -0
- data/lib/karafka/processing/jobs/revoked.rb +22 -0
- data/lib/karafka/processing/jobs/shutdown.rb +23 -0
- data/lib/karafka/processing/jobs_builder.rb +28 -0
- data/lib/karafka/processing/jobs_queue.rb +150 -0
- data/lib/karafka/processing/partitioner.rb +24 -0
- data/lib/karafka/processing/result.rb +42 -0
- data/lib/karafka/processing/scheduler.rb +22 -0
- data/lib/karafka/processing/strategies/aj_dlq_mom.rb +44 -0
- data/lib/karafka/processing/strategies/aj_mom.rb +21 -0
- data/lib/karafka/processing/strategies/base.rb +52 -0
- data/lib/karafka/processing/strategies/default.rb +158 -0
- data/lib/karafka/processing/strategies/dlq.rb +88 -0
- data/lib/karafka/processing/strategies/dlq_mom.rb +49 -0
- data/lib/karafka/processing/strategies/mom.rb +29 -0
- data/lib/karafka/processing/strategy_selector.rb +47 -0
- data/lib/karafka/processing/worker.rb +93 -0
- data/lib/karafka/processing/workers_batch.rb +27 -0
- data/lib/karafka/railtie.rb +141 -0
- data/lib/karafka/routing/activity_manager.rb +84 -0
- data/lib/karafka/routing/builder.rb +45 -19
- data/lib/karafka/routing/consumer_group.rb +56 -20
- data/lib/karafka/routing/consumer_mapper.rb +1 -12
- data/lib/karafka/routing/features/active_job/builder.rb +33 -0
- data/lib/karafka/routing/features/active_job/config.rb +15 -0
- data/lib/karafka/routing/features/active_job/contracts/topic.rb +44 -0
- data/lib/karafka/routing/features/active_job/proxy.rb +14 -0
- data/lib/karafka/routing/features/active_job/topic.rb +33 -0
- data/lib/karafka/routing/features/active_job.rb +13 -0
- data/lib/karafka/routing/features/base/expander.rb +59 -0
- data/lib/karafka/routing/features/base.rb +71 -0
- data/lib/karafka/routing/features/dead_letter_queue/config.rb +19 -0
- data/lib/karafka/routing/features/dead_letter_queue/contracts/topic.rb +46 -0
- data/lib/karafka/routing/features/dead_letter_queue/topic.rb +41 -0
- data/lib/karafka/routing/features/dead_letter_queue.rb +16 -0
- data/lib/karafka/routing/features/declaratives/config.rb +18 -0
- data/lib/karafka/routing/features/declaratives/contracts/topic.rb +33 -0
- data/lib/karafka/routing/features/declaratives/topic.rb +44 -0
- data/lib/karafka/routing/features/declaratives.rb +14 -0
- data/lib/karafka/routing/features/inline_insights/config.rb +15 -0
- data/lib/karafka/routing/features/inline_insights/contracts/topic.rb +27 -0
- data/lib/karafka/routing/features/inline_insights/topic.rb +31 -0
- data/lib/karafka/routing/features/inline_insights.rb +40 -0
- data/lib/karafka/routing/features/manual_offset_management/config.rb +15 -0
- data/lib/karafka/routing/features/manual_offset_management/contracts/topic.rb +27 -0
- data/lib/karafka/routing/features/manual_offset_management/topic.rb +35 -0
- data/lib/karafka/routing/features/manual_offset_management.rb +18 -0
- data/lib/karafka/routing/proxy.rb +22 -21
- data/lib/karafka/routing/router.rb +24 -10
- data/lib/karafka/routing/subscription_group.rb +110 -0
- data/lib/karafka/routing/subscription_groups_builder.rb +65 -0
- data/lib/karafka/routing/topic.rb +87 -24
- data/lib/karafka/routing/topics.rb +46 -0
- data/lib/karafka/runner.rb +52 -0
- data/lib/karafka/serialization/json/deserializer.rb +7 -15
- data/lib/karafka/server.rb +113 -37
- data/lib/karafka/setup/attributes_map.rb +348 -0
- data/lib/karafka/setup/config.rb +256 -175
- data/lib/karafka/status.rb +54 -7
- data/lib/karafka/templates/example_consumer.rb.erb +16 -0
- data/lib/karafka/templates/karafka.rb.erb +33 -55
- data/lib/karafka/time_trackers/base.rb +14 -0
- data/lib/karafka/time_trackers/pause.rb +122 -0
- data/lib/karafka/time_trackers/poll.rb +69 -0
- data/lib/karafka/version.rb +1 -1
- data/lib/karafka.rb +91 -17
- data/renovate.json +9 -0
- data.tar.gz.sig +0 -0
- metadata +330 -168
- metadata.gz.sig +0 -0
- data/MIT-LICENCE +0 -18
- data/certs/mensfeld.pem +0 -25
- data/config/errors.yml +0 -41
- data/lib/karafka/assignment_strategies/round_robin.rb +0 -13
- data/lib/karafka/attributes_map.rb +0 -63
- data/lib/karafka/backends/inline.rb +0 -16
- data/lib/karafka/base_responder.rb +0 -226
- data/lib/karafka/cli/flow.rb +0 -48
- data/lib/karafka/cli/missingno.rb +0 -19
- data/lib/karafka/code_reloader.rb +0 -67
- data/lib/karafka/connection/api_adapter.rb +0 -158
- data/lib/karafka/connection/batch_delegator.rb +0 -55
- data/lib/karafka/connection/builder.rb +0 -23
- data/lib/karafka/connection/message_delegator.rb +0 -36
- data/lib/karafka/consumers/batch_metadata.rb +0 -10
- data/lib/karafka/consumers/callbacks.rb +0 -71
- data/lib/karafka/consumers/includer.rb +0 -64
- data/lib/karafka/consumers/responders.rb +0 -24
- data/lib/karafka/consumers/single_params.rb +0 -15
- data/lib/karafka/contracts/consumer_group_topic.rb +0 -19
- data/lib/karafka/contracts/responder_usage.rb +0 -54
- data/lib/karafka/fetcher.rb +0 -42
- data/lib/karafka/helpers/class_matcher.rb +0 -88
- data/lib/karafka/helpers/config_retriever.rb +0 -46
- data/lib/karafka/helpers/inflector.rb +0 -26
- data/lib/karafka/instrumentation/stdout_listener.rb +0 -140
- data/lib/karafka/params/batch_metadata.rb +0 -26
- data/lib/karafka/params/builders/batch_metadata.rb +0 -30
- data/lib/karafka/params/builders/params.rb +0 -38
- data/lib/karafka/params/builders/params_batch.rb +0 -25
- data/lib/karafka/params/params_batch.rb +0 -60
- data/lib/karafka/patches/ruby_kafka.rb +0 -47
- data/lib/karafka/persistence/client.rb +0 -29
- data/lib/karafka/persistence/consumers.rb +0 -45
- data/lib/karafka/persistence/topics.rb +0 -48
- data/lib/karafka/responders/builder.rb +0 -36
- data/lib/karafka/responders/topic.rb +0 -55
- data/lib/karafka/routing/topic_mapper.rb +0 -53
- data/lib/karafka/serialization/json/serializer.rb +0 -31
- data/lib/karafka/setup/configurators/water_drop.rb +0 -36
- data/lib/karafka/templates/application_responder.rb.erb +0 -11
@@ -1,140 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module Karafka
|
4
|
-
module Instrumentation
|
5
|
-
# Default listener that hooks up to our instrumentation and uses its events for logging
|
6
|
-
# It can be removed/replaced or anything without any harm to the Karafka app flow
|
7
|
-
class StdoutListener
|
8
|
-
# Log levels that we use in this particular listener
|
9
|
-
USED_LOG_LEVELS = %i[
|
10
|
-
debug
|
11
|
-
info
|
12
|
-
error
|
13
|
-
fatal
|
14
|
-
].freeze
|
15
|
-
|
16
|
-
# Logs details about incoming batches and with which consumer we will consume them
|
17
|
-
# @param event [Dry::Events::Event] event details including payload
|
18
|
-
def on_connection_batch_delegator_call(event)
|
19
|
-
consumer = event[:consumer]
|
20
|
-
topic = consumer.topic.name
|
21
|
-
kafka_messages = event[:kafka_batch].messages
|
22
|
-
info(
|
23
|
-
<<~MSG.chomp.tr("\n", ' ')
|
24
|
-
#{kafka_messages.count} messages
|
25
|
-
on #{topic} topic
|
26
|
-
delegated to #{consumer.class}
|
27
|
-
MSG
|
28
|
-
)
|
29
|
-
end
|
30
|
-
|
31
|
-
# Logs details about incoming message and with which consumer we will consume it
|
32
|
-
# @param event [Dry::Events::Event] event details including payload
|
33
|
-
def on_connection_message_delegator_call(event)
|
34
|
-
consumer = event[:consumer]
|
35
|
-
topic = consumer.topic.name
|
36
|
-
info "1 message on #{topic} topic delegated to #{consumer.class}"
|
37
|
-
end
|
38
|
-
|
39
|
-
# Logs details about each received message value deserialization
|
40
|
-
# @param event [Dry::Events::Event] event details including payload
|
41
|
-
def on_params_params_deserialize(event)
|
42
|
-
# Keep in mind, that a caller here is a param object not a controller,
|
43
|
-
# so it returns a topic as a string, not a routing topic
|
44
|
-
debug(
|
45
|
-
<<~MSG.chomp.tr("\n", ' ')
|
46
|
-
Params deserialization for #{event[:caller].metadata.topic} topic
|
47
|
-
successful in #{event[:time]} ms
|
48
|
-
MSG
|
49
|
-
)
|
50
|
-
end
|
51
|
-
|
52
|
-
# Logs unsuccessful deserialization attempts of incoming data
|
53
|
-
# @param event [Dry::Events::Event] event details including payload
|
54
|
-
def on_params_params_deserialize_error(event)
|
55
|
-
topic = event[:caller].metadata.topic
|
56
|
-
error = event[:error]
|
57
|
-
error "Params deserialization error for #{topic} topic: #{error}"
|
58
|
-
end
|
59
|
-
|
60
|
-
# Logs errors that occurred in a listener fetch loop
|
61
|
-
# @param event [Dry::Events::Event] event details including payload
|
62
|
-
# @note It's an error as we can recover from it not a fatal
|
63
|
-
def on_connection_listener_fetch_loop_error(event)
|
64
|
-
error "Listener fetch loop error: #{event[:error]}"
|
65
|
-
end
|
66
|
-
|
67
|
-
# Logs errors that are related to the connection itself
|
68
|
-
# @param event [Dry::Events::Event] event details including payload
|
69
|
-
# @note Karafka will attempt to reconnect, so an error not a fatal
|
70
|
-
def on_connection_client_fetch_loop_error(event)
|
71
|
-
error "Client fetch loop error: #{event[:error]}"
|
72
|
-
end
|
73
|
-
|
74
|
-
# Logs info about crashed fetcher
|
75
|
-
# @param event [Dry::Events::Event] event details including payload
|
76
|
-
# @note If this happens, Karafka will shutdown as it means a critical error
|
77
|
-
# in one of the threads
|
78
|
-
def on_fetcher_call_error(event)
|
79
|
-
fatal "Fetcher crash due to an error: #{event[:error]}"
|
80
|
-
end
|
81
|
-
|
82
|
-
# Logs info about processing of a certain dataset with an inline backend
|
83
|
-
# @param event [Dry::Events::Event] event details including payload
|
84
|
-
def on_backends_inline_process(event)
|
85
|
-
count = event[:caller].send(:params_batch).to_a.size
|
86
|
-
topic = event[:caller].topic.name
|
87
|
-
time = event[:time]
|
88
|
-
info "Inline processing of topic #{topic} with #{count} messages took #{time} ms"
|
89
|
-
end
|
90
|
-
|
91
|
-
# Logs info about system signals that Karafka received
|
92
|
-
# @param event [Dry::Events::Event] event details including payload
|
93
|
-
def on_process_notice_signal(event)
|
94
|
-
info "Received #{event[:signal]} system signal"
|
95
|
-
end
|
96
|
-
|
97
|
-
# Logs info about responder usage withing a controller flow
|
98
|
-
# @param event [Dry::Events::Event] event details including payload
|
99
|
-
def on_consumers_responders_respond_with(event)
|
100
|
-
calling = event[:caller]
|
101
|
-
responder = calling.topic.responder
|
102
|
-
data = event[:data]
|
103
|
-
info "Responded from #{calling.class} using #{responder} with following data #{data}"
|
104
|
-
end
|
105
|
-
|
106
|
-
# Logs info that we're initializing Karafka framework components
|
107
|
-
# @param _event [Dry::Events::Event] event details including payload
|
108
|
-
def on_app_initializing(_event)
|
109
|
-
info "Initializing Karafka framework #{::Process.pid}"
|
110
|
-
end
|
111
|
-
|
112
|
-
# Logs info that we're running Karafka app
|
113
|
-
# @param _event [Dry::Events::Event] event details including payload
|
114
|
-
def on_app_running(_event)
|
115
|
-
info "Running Karafka server #{::Process.pid}"
|
116
|
-
end
|
117
|
-
|
118
|
-
# Logs info that we're going to stop the Karafka server
|
119
|
-
# @param _event [Dry::Events::Event] event details including payload
|
120
|
-
def on_app_stopping(_event)
|
121
|
-
# We use a separate thread as logging can't be called from trap context
|
122
|
-
Thread.new { info "Stopping Karafka server #{::Process.pid}" }
|
123
|
-
end
|
124
|
-
|
125
|
-
# Logs an error that Karafka was unable to stop the server gracefully and it had to do a
|
126
|
-
# forced exit
|
127
|
-
# @param _event [Dry::Events::Event] event details including payload
|
128
|
-
def on_app_stopping_error(_event)
|
129
|
-
# We use a separate thread as logging can't be called from trap context
|
130
|
-
Thread.new { error "Forceful Karafka server #{::Process.pid} stop" }
|
131
|
-
end
|
132
|
-
|
133
|
-
USED_LOG_LEVELS.each do |log_level|
|
134
|
-
define_method log_level do |*args|
|
135
|
-
Karafka.logger.send(log_level, *args)
|
136
|
-
end
|
137
|
-
end
|
138
|
-
end
|
139
|
-
end
|
140
|
-
end
|
@@ -1,26 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module Karafka
|
4
|
-
module Params
|
5
|
-
# Simple batch metadata object that stores all non-message information received from Kafka
|
6
|
-
# cluster while fetching the data
|
7
|
-
# @note This metadata object refers to per batch metadata, not `#params.metadata`
|
8
|
-
BatchMetadata = Struct.new(
|
9
|
-
:batch_size,
|
10
|
-
:first_offset,
|
11
|
-
:highwater_mark_offset,
|
12
|
-
:unknown_last_offset,
|
13
|
-
:last_offset,
|
14
|
-
:offset_lag,
|
15
|
-
:deserializer,
|
16
|
-
:partition,
|
17
|
-
:topic,
|
18
|
-
keyword_init: true
|
19
|
-
) do
|
20
|
-
# @return [Boolean] is the last offset known or unknown
|
21
|
-
def unknown_last_offset?
|
22
|
-
unknown_last_offset
|
23
|
-
end
|
24
|
-
end
|
25
|
-
end
|
26
|
-
end
|
@@ -1,30 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module Karafka
|
4
|
-
module Params
|
5
|
-
module Builders
|
6
|
-
# Builder for creating batch metadata object based on the batch informations
|
7
|
-
module BatchMetadata
|
8
|
-
class << self
|
9
|
-
# Creates metadata based on the kafka batch data
|
10
|
-
# @param kafka_batch [Kafka::FetchedBatch] kafka batch details
|
11
|
-
# @param topic [Karafka::Routing::Topic] topic for which we've fetched the batch
|
12
|
-
# @return [Karafka::Params::BatchMetadata] batch metadata object
|
13
|
-
def from_kafka_batch(kafka_batch, topic)
|
14
|
-
Karafka::Params::BatchMetadata.new(
|
15
|
-
batch_size: kafka_batch.messages.count,
|
16
|
-
first_offset: kafka_batch.first_offset,
|
17
|
-
highwater_mark_offset: kafka_batch.highwater_mark_offset,
|
18
|
-
unknown_last_offset: kafka_batch.unknown_last_offset?,
|
19
|
-
last_offset: kafka_batch.last_offset,
|
20
|
-
offset_lag: kafka_batch.offset_lag,
|
21
|
-
deserializer: topic.deserializer,
|
22
|
-
partition: kafka_batch.partition,
|
23
|
-
topic: topic.name
|
24
|
-
).freeze
|
25
|
-
end
|
26
|
-
end
|
27
|
-
end
|
28
|
-
end
|
29
|
-
end
|
30
|
-
end
|
@@ -1,38 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module Karafka
|
4
|
-
module Params
|
5
|
-
# Due to the fact, that we create params related objects in couple contexts / places
|
6
|
-
# plus backends can build up them their own way we have this namespace.
|
7
|
-
# It allows to isolate actual params objects from their building process that can be
|
8
|
-
# context dependent.
|
9
|
-
module Builders
|
10
|
-
# Builder for params
|
11
|
-
module Params
|
12
|
-
class << self
|
13
|
-
# @param kafka_message [Kafka::FetchedMessage] message fetched from Kafka
|
14
|
-
# @param topic [Karafka::Routing::Topic] topic for which this message was fetched
|
15
|
-
# @return [Karafka::Params::Params] params object with payload and message metadata
|
16
|
-
def from_kafka_message(kafka_message, topic)
|
17
|
-
metadata = Karafka::Params::Metadata.new(
|
18
|
-
create_time: kafka_message.create_time,
|
19
|
-
headers: kafka_message.headers || {},
|
20
|
-
is_control_record: kafka_message.is_control_record,
|
21
|
-
key: kafka_message.key,
|
22
|
-
offset: kafka_message.offset,
|
23
|
-
deserializer: topic.deserializer,
|
24
|
-
partition: kafka_message.partition,
|
25
|
-
receive_time: Time.now,
|
26
|
-
topic: topic.name
|
27
|
-
).freeze
|
28
|
-
|
29
|
-
Karafka::Params::Params.new(
|
30
|
-
kafka_message.value,
|
31
|
-
metadata
|
32
|
-
)
|
33
|
-
end
|
34
|
-
end
|
35
|
-
end
|
36
|
-
end
|
37
|
-
end
|
38
|
-
end
|
@@ -1,25 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true
|
2
|
-
|
3
|
-
module Karafka
|
4
|
-
module Params
|
5
|
-
module Builders
|
6
|
-
# Builder for creating params batch instances
|
7
|
-
module ParamsBatch
|
8
|
-
class << self
|
9
|
-
# Creates params batch with params inside based on the incoming messages
|
10
|
-
# and the topic from which it comes
|
11
|
-
# @param kafka_messages [Array<Kafka::FetchedMessage>] raw fetched messages
|
12
|
-
# @param topic [Karafka::Routing::Topic] topic for which we're received messages
|
13
|
-
# @return [Karafka::Params::ParamsBatch<Karafka::Params::Params>] batch with params
|
14
|
-
def from_kafka_messages(kafka_messages, topic)
|
15
|
-
params_array = kafka_messages.map do |message|
|
16
|
-
Karafka::Params::Builders::Params.from_kafka_message(message, topic)
|
17
|
-
end
|
18
|
-
|
19
|
-
Karafka::Params::ParamsBatch.new(params_array).freeze
|
20
|
-
end
|
21
|
-
end
|
22
|
-
end
|
23
|
-
end
|
24
|
-
end
|
25
|
-
end
|
@@ -1,60 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  module Params
    # Params batch represents a set of messages received from Kafka.
    # @note Params internally are lazy loaded before first use. That way we can skip
    #   the deserialization process if we have an after_fetch that rejects some incoming
    #   messages without using params. It can also be used when handling really heavy data.
    class ParamsBatch
      include Enumerable

      # @param params_array [Array<Karafka::Params::Params>] array with karafka params
      # @return [Karafka::Params::ParamsBatch] lazy evaluated params batch object
      def initialize(params_array)
        @params_array = params_array
      end

      # @return [Enumerator] enumerator when no block is given, following the
      #   Enumerable convention (previously this raised LocalJumpError)
      # @yieldparam [Karafka::Params::Params] each params instance
      # @note Invocation of this method will not cause loading and deserializing each
      #   param one after another.
      def each
        return to_enum(:each) unless block_given?

        @params_array.each { |param| yield(param) }
      end

      # @return [Array<Karafka::Params::Params>] returns all the params in a loaded state,
      #   so they can be used for batch insert, etc. Without invoking all, up until first
      #   use, they won't be deserialized
      def deserialize!
        each(&:payload)
      end

      # @return [Array<Object>] array with deserialized payloads. This method can be useful
      #   when we don't care about metadata and just want to extract all the data payloads
      #   from the batch
      def payloads
        map(&:payload)
      end

      # @return [Karafka::Params::Params] first element
      def first
        @params_array.first
      end

      # @return [Karafka::Params::Params] last element
      def last
        @params_array.last
      end

      # @return [Integer] number of messages in the batch
      def size
        @params_array.size
      end

      # @return [Array<Karafka::Params::Params>] pure array with params
      def to_a
        @params_array
      end
    end
  end
end
|
@@ -1,47 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  # Namespace for various other libs patches
  module Patches
    # Patches for the ruby-kafka gem
    module RubyKafka
      # This patch allows us to inject business logic in between fetches and before the
      # consumer stop, so we can perform a stop commit or anything else that we need, since
      # the ruby-kafka fetch loop does not allow that directly
      # We don't want to use the poll ruby-kafka API, as it brings many more problems that
      # we would have to take care of. That way, nothing like that ever happens, but we get
      # the control over the stopping process that we need (since we're the ones that
      # initiate it for each thread)
      def consumer_loop
        super do
          # Only consumers whose class responds to after_fetch participate in the
          # instrumentation callbacks below
          consumers = Karafka::Persistence::Consumers
                      .current
                      .values
                      .flat_map(&:values)
                      .select { |consumer| consumer.class.respond_to?(:after_fetch) }

          if Karafka::App.stopping?
            publish_event(consumers, 'before_stop')
            # Stop the current thread's client instead of yielding back to the fetch loop
            Karafka::Persistence::Client.read.stop
          else
            publish_event(consumers, 'before_poll')
            yield
            publish_event(consumers, 'after_poll')
          end
        end
      end

      private

      # Notifies consumers about particular events happening
      # @param consumers [Array<Object>] all consumers that want to be notified about an event
      # @param event_name [String] name of the event that happened
      def publish_event(consumers, event_name)
        consumers.each do |consumer|
          key = "consumers.#{Helpers::Inflector.map(consumer.class.to_s)}.#{event_name}"
          Karafka::App.monitor.instrument(key, context: consumer)
        end
      end
    end
  end
end
|
@@ -1,29 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  module Persistence
    # Persistence layer to store the current thread's messages consumer client for further use
    class Client
      # Thread.current key under which we store the current thread's messages consumer client
      PERSISTENCE_SCOPE = :client

      private_constant :PERSISTENCE_SCOPE

      class << self
        # @param client [Karafka::Connection::Client] messages consumer client of
        #   a current thread
        # @return [Karafka::Connection::Client] persisted messages consumer client
        def write(client)
          Thread.current[PERSISTENCE_SCOPE] = client
        end

        # @return [Karafka::Connection::Client] persisted messages consumer client
        # @raise [Karafka::Errors::MissingClientError] raised when there is no thread
        #   messages consumer client but we try to use it anyway
        def read
          persisted = Thread.current[PERSISTENCE_SCOPE]
          persisted || raise(Errors::MissingClientError)
        end
      end
    end
  end
end
|
@@ -1,45 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  # Module used to provide a persistent cache layer for Karafka components that need to be
  # shared inside of a same thread
  module Persistence
    # Module used to provide a persistent cache across batch requests for a given
    # topic and partition to store some additional details when the persistent mode
    # for a given topic is turned on
    class Consumers
      # Thread.current scope under which we store consumers data
      PERSISTENCE_SCOPE = :consumers

      private_constant :PERSISTENCE_SCOPE

      class << self
        # @return [Hash] current thread's persistence scope hash with all the consumers
        def current
          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |topics, topic|
            topics[topic] = Concurrent::Hash.new
          end
        end

        # Used to build (if block given) and/or fetch a current consumer instance that
        # will be used to process messages from a given topic and partition
        # @param topic [Karafka::Routing::Topic] topic instance for which we might cache
        # @param partition [Integer] number of partition for which we want to cache
        # @return [Karafka::BaseConsumer] base consumer descendant
        def fetch(topic, partition)
          partitions = current[topic]
          partitions[partition] ||= topic.consumer.new(topic)
        end

        # Removes all persisted instances of consumers from the consumer cache
        # @note This is used to reload consumer instances when code reloading in
        #   development mode is present. This should not be used in production.
        def clear
          Thread.list.each do |thread|
            scope = thread[PERSISTENCE_SCOPE]
            scope.clear if scope
          end
        end
      end
    end
  end
end
|
@@ -1,48 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  module Persistence
    # Local cache for routing topics
    # We use it in order not to build string instances and remap the incoming topic upon
    # each message / message batch received
    class Topics
      # Thread.current scope under which we store topics data
      PERSISTENCE_SCOPE = :topics

      private_constant :PERSISTENCE_SCOPE

      class << self
        # @return [Concurrent::Hash] hash with all the topics from given groups
        def current
          Thread.current[PERSISTENCE_SCOPE] ||= Concurrent::Hash.new do |groups, group_id|
            groups[group_id] = Concurrent::Hash.new
          end
        end

        # @param group_id [String] group id for which we fetch a topic representation
        # @param raw_topic_name [String] raw topic name (before remapping) for which we
        #   fetch a topic representation
        # @return [Karafka::Routing::Topics] remapped topic representation that can be
        #   used further on when working with given parameters
        def fetch(group_id, raw_topic_name)
          group_scope = current[group_id]

          group_scope[raw_topic_name] ||= begin
            # We map from the incoming topic name, as it might be namespaced, etc.
            # @see topic_mapper internal docs
            mapped_name = Karafka::App.config.topic_mapper.incoming(raw_topic_name)
            Routing::Router.find("#{group_id}_#{mapped_name}")
          end
        end

        # Clears the whole topics cache for all the threads
        # This is used for in-development code reloading as we need to get rid of all the
        # preloaded and cached instances of objects to make it work
        def clear
          Thread.list.each do |thread|
            scope = thread[PERSISTENCE_SCOPE]
            scope.clear if scope
          end
        end
      end
    end
  end
end
|
@@ -1,36 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  # Responders namespace encapsulates all the internal responder implementation parts
  module Responders
    # Responders builder is used for finding (based on the consumer class name) a responder
    # that matches the consumer. We use it when the user does not provide a responder inside
    # routing, but still names the responder with the same convention (and namespaces) as
    # the consumer
    #
    # @example Matching responder exists
    #   Karafka::Responder::Builder(NewEventsConsumer).build #=> NewEventsResponder
    # @example Matching responder does not exist
    #   Karafka::Responder::Builder(NewBuildsConsumer).build #=> nil
    class Builder
      # @param consumer_class [Karafka::BaseConsumer, nil] descendant of
      #   Karafka::BaseConsumer
      # @example Tries to find a responder that matches a given consumer. If nothing found,
      #   will return nil (nil is accepted, because it means that a given consumer don't
      #   pipe stuff further on)
      def initialize(consumer_class)
        @consumer_class = consumer_class
      end

      # Tries to figure out a responder based on a consumer class name
      # @return [Class] Responder class (not an instance)
      # @return [nil] or nil if there's no matching responding class
      def build
        matcher = Helpers::ClassMatcher.new(
          @consumer_class,
          from: 'Consumer',
          to: 'Responder'
        )

        matcher.match
      end
    end
  end
end
|
@@ -1,55 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  module Responders
    # Topic describes a single topic on which we want to respond with responding requirements
    # @example Define topic (required by default)
    #   Karafka::Responders::Topic.new(:topic_name, {}) #=> #<Karafka::Responders::Topic...
    # @example Define optional topic
    #   Karafka::Responders::Topic.new(:topic_name, required: false)
    class Topic
      # Name of the topic on which we want to respond
      attr_reader :name

      # @param name [Symbol, String] name of a topic on which we want to respond
      # @param options [Hash] non-default options for this topic
      # @return [Karafka::Responders::Topic] topic description object
      def initialize(name, options)
        @name = name.to_s
        @options = options
      end

      # @return [Boolean] is this a required topic (if not, it is optional)
      def required?
        # Hash#fetch with a default expresses "true unless explicitly set" directly,
        # replacing the key?/ternary dance
        @options.fetch(:required, true)
      end

      # @return [Boolean] was usage of this topic registered or not
      def registered?
        @options[:registered] == true
      end

      # @return [Class] class to use to serialize messages for this topic
      def serializer
        @options[:serializer]
      end

      # @return [Boolean] do we want to use an async producer. Defaults to false as the
      #   sync producer is safer and introduces fewer problems
      def async?
        @options.fetch(:async, false)
      end

      # @return [Hash] hash with this topic attributes and options
      def to_h
        {
          name: name,
          required: required?,
          registered: registered?,
          serializer: serializer,
          async: async?
        }
      end
    end
  end
end
|
@@ -1,53 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  module Routing
    # Default topic mapper that does not remap anything
    # A mapper can be used for Kafka providers that require namespaced topic names. Instead
    # of being provider dependent, we can then define a mapper and use internally "pure"
    # topic names in routes and responders
    #
    # @example Mapper for mapping prefixed topics
    #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
    #       topic.to_s.gsub(PREFIX, '')
    #     end
    #
    #     def outgoing(topic)
    #       "#{PREFIX}#{topic}"
    #     end
    #   end
    #
    # @example Mapper for replacing "." with "_" in topic names
    #   class MyMapper
    #     PREFIX = "my_user_name."
    #
    #     def incoming(topic)
    #       topic.to_s.gsub('.', '_')
    #     end
    #
    #     def outgoing(topic)
    #       topic.to_s.gsub('_', '.')
    #     end
    #   end
    class TopicMapper
      # @param topic_name [String, Symbol] topic
      # @return [String, Symbol] same topic as on input (identity mapping)
      # @example
      #   incoming('topic_name') #=> 'topic_name'
      def incoming(topic_name)
        topic_name
      end

      # @param topic_name [String, Symbol] topic
      # @return [String, Symbol] same topic as on input (identity mapping)
      # @example
      #   outgoing('topic_name') #=> 'topic_name'
      def outgoing(topic_name)
        topic_name
      end
    end
  end
end
|
@@ -1,31 +0,0 @@
|
|
1
|
-
# frozen_string_literal: true

module Karafka
  # Module for all serialization and deserialization ways supported by default
  module Serialization
    module Json
      # Default Karafka Json serializer for serializing data
      class Serializer
        # @param content [Object] any object that we want to convert to a json string
        # @return [String] valid JSON string containing serialized data
        # @raise [Karafka::Errors::SerializationError] raised when we don't have a way to
        #   serialize provided data to json
        # @note When a string is passed to this method, we assume that it is already a json
        #   string and we don't serialize it again. This allows us to serialize data before
        #   it is being forwarded to this serializer if we want to have a custom (not that
        #   simple) json serialization
        #
        # @example From an ActiveRecord object
        #   Serializer.call(Repository.first) #=> "{\"repository\":{\"id\":\"04b504e0\"}}"
        # @example From a string (no changes)
        #   Serializer.call("{\"a\":1}") #=> "{\"a\":1}"
        def call(content)
          if content.is_a?(String)
            content
          elsif content.respond_to?(:to_json)
            content.to_json
          else
            raise Karafka::Errors::SerializationError, content
          end
        end
      end
    end
  end
end
|