deimos-temp-fork 0.0.1
Sign up to get free protection for your applications and to get access to all the features.
- checksums.yaml +7 -0
- data/.circleci/config.yml +83 -0
- data/.gitignore +41 -0
- data/.gitmodules +0 -0
- data/.rspec +1 -0
- data/.rubocop.yml +333 -0
- data/.ruby-gemset +1 -0
- data/.ruby-version +1 -0
- data/CHANGELOG.md +349 -0
- data/CODE_OF_CONDUCT.md +77 -0
- data/Dockerfile +23 -0
- data/Gemfile +6 -0
- data/Gemfile.lock +286 -0
- data/Guardfile +22 -0
- data/LICENSE.md +195 -0
- data/README.md +1099 -0
- data/Rakefile +13 -0
- data/bin/deimos +4 -0
- data/deimos-ruby.gemspec +44 -0
- data/docker-compose.yml +71 -0
- data/docs/ARCHITECTURE.md +140 -0
- data/docs/CONFIGURATION.md +236 -0
- data/docs/DATABASE_BACKEND.md +147 -0
- data/docs/INTEGRATION_TESTS.md +52 -0
- data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
- data/docs/UPGRADING.md +128 -0
- data/lib/deimos-temp-fork.rb +95 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
- data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
- data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
- data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
- data/lib/deimos/active_record_consumer.rb +67 -0
- data/lib/deimos/active_record_producer.rb +87 -0
- data/lib/deimos/backends/base.rb +32 -0
- data/lib/deimos/backends/db.rb +41 -0
- data/lib/deimos/backends/kafka.rb +33 -0
- data/lib/deimos/backends/kafka_async.rb +33 -0
- data/lib/deimos/backends/test.rb +20 -0
- data/lib/deimos/batch_consumer.rb +7 -0
- data/lib/deimos/config/configuration.rb +381 -0
- data/lib/deimos/config/phobos_config.rb +137 -0
- data/lib/deimos/consume/batch_consumption.rb +150 -0
- data/lib/deimos/consume/message_consumption.rb +94 -0
- data/lib/deimos/consumer.rb +104 -0
- data/lib/deimos/instrumentation.rb +76 -0
- data/lib/deimos/kafka_message.rb +60 -0
- data/lib/deimos/kafka_source.rb +128 -0
- data/lib/deimos/kafka_topic_info.rb +102 -0
- data/lib/deimos/message.rb +79 -0
- data/lib/deimos/metrics/datadog.rb +47 -0
- data/lib/deimos/metrics/mock.rb +39 -0
- data/lib/deimos/metrics/provider.rb +36 -0
- data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
- data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
- data/lib/deimos/poll_info.rb +9 -0
- data/lib/deimos/producer.rb +224 -0
- data/lib/deimos/railtie.rb +8 -0
- data/lib/deimos/schema_backends/avro_base.rb +140 -0
- data/lib/deimos/schema_backends/avro_local.rb +30 -0
- data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
- data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
- data/lib/deimos/schema_backends/avro_validation.rb +21 -0
- data/lib/deimos/schema_backends/base.rb +150 -0
- data/lib/deimos/schema_backends/mock.rb +42 -0
- data/lib/deimos/shared_config.rb +63 -0
- data/lib/deimos/test_helpers.rb +360 -0
- data/lib/deimos/tracing/datadog.rb +35 -0
- data/lib/deimos/tracing/mock.rb +40 -0
- data/lib/deimos/tracing/provider.rb +29 -0
- data/lib/deimos/utils/db_poller.rb +150 -0
- data/lib/deimos/utils/db_producer.rb +243 -0
- data/lib/deimos/utils/deadlock_retry.rb +68 -0
- data/lib/deimos/utils/inline_consumer.rb +150 -0
- data/lib/deimos/utils/lag_reporter.rb +175 -0
- data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
- data/lib/deimos/version.rb +5 -0
- data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
- data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
- data/lib/generators/deimos/active_record_generator.rb +79 -0
- data/lib/generators/deimos/db_backend/templates/migration +25 -0
- data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
- data/lib/generators/deimos/db_backend_generator.rb +48 -0
- data/lib/generators/deimos/db_poller/templates/migration +11 -0
- data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
- data/lib/generators/deimos/db_poller_generator.rb +48 -0
- data/lib/tasks/deimos.rake +34 -0
- data/spec/active_record_batch_consumer_spec.rb +481 -0
- data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
- data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
- data/spec/active_record_consumer_spec.rb +154 -0
- data/spec/active_record_producer_spec.rb +85 -0
- data/spec/backends/base_spec.rb +10 -0
- data/spec/backends/db_spec.rb +54 -0
- data/spec/backends/kafka_async_spec.rb +11 -0
- data/spec/backends/kafka_spec.rb +11 -0
- data/spec/batch_consumer_spec.rb +256 -0
- data/spec/config/configuration_spec.rb +248 -0
- data/spec/consumer_spec.rb +209 -0
- data/spec/deimos_spec.rb +169 -0
- data/spec/generators/active_record_generator_spec.rb +56 -0
- data/spec/handlers/my_batch_consumer.rb +10 -0
- data/spec/handlers/my_consumer.rb +10 -0
- data/spec/kafka_listener_spec.rb +55 -0
- data/spec/kafka_source_spec.rb +381 -0
- data/spec/kafka_topic_info_spec.rb +111 -0
- data/spec/message_spec.rb +19 -0
- data/spec/phobos.bad_db.yml +73 -0
- data/spec/phobos.yml +77 -0
- data/spec/producer_spec.rb +498 -0
- data/spec/rake_spec.rb +19 -0
- data/spec/schema_backends/avro_base_shared.rb +199 -0
- data/spec/schema_backends/avro_local_spec.rb +32 -0
- data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
- data/spec/schema_backends/avro_validation_spec.rb +24 -0
- data/spec/schema_backends/base_spec.rb +33 -0
- data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
- data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
- data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
- data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
- data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
- data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
- data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
- data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
- data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
- data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
- data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
- data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
- data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
- data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
- data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
- data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
- data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
- data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
- data/spec/spec_helper.rb +267 -0
- data/spec/utils/db_poller_spec.rb +320 -0
- data/spec/utils/db_producer_spec.rb +514 -0
- data/spec/utils/deadlock_retry_spec.rb +74 -0
- data/spec/utils/inline_consumer_spec.rb +31 -0
- data/spec/utils/lag_reporter_spec.rb +76 -0
- data/spec/utils/platform_schema_validation_spec.rb +0 -0
- data/spec/utils/schema_controller_mixin_spec.rb +84 -0
- data/support/deimos-solo.png +0 -0
- data/support/deimos-with-name-next.png +0 -0
- data/support/deimos-with-name.png +0 -0
- data/support/flipp-logo.png +0 -0
- metadata +551 -0
# frozen_string_literal: true

require 'deimos/metrics/provider'

module Deimos
  module Metrics
    # A Metrics wrapper class for Datadog.
    class Datadog < Metrics::Provider
      # @param config [Hash] must contain :host_ip, :host_port and :namespace;
      #   may also contain :tags for default tags on every metric.
      # @param logger [Logger]
      # @raise [RuntimeError] if a required config key is missing.
      def initialize(config, logger)
        raise 'Metrics config must specify host_ip' if config[:host_ip].nil?
        raise 'Metrics config must specify host_port' if config[:host_port].nil?
        raise 'Metrics config must specify namespace' if config[:namespace].nil?

        logger.info("DatadogMetricsProvider configured with: #{config}")
        # Must be fully qualified: inside this class the bare constant
        # `Datadog` resolves lexically to Deimos::Metrics::Datadog itself,
        # so `Datadog::Statsd` would raise NameError instead of finding the
        # dogstatsd-ruby client.
        @client = ::Datadog::Statsd.new(
          config[:host_ip],
          config[:host_port]
        )
        @client.tags = config[:tags]
        @client.namespace = config[:namespace]
      end

      # :nodoc:
      def increment(metric_name, options={})
        @client.increment(metric_name, options)
      end

      # :nodoc:
      def gauge(metric_name, count, options={})
        @client.gauge(metric_name, count, options)
      end

      # :nodoc:
      def histogram(metric_name, count, options={})
        @client.histogram(metric_name, count, options)
      end

      # :nodoc:
      def time(metric_name, options={})
        @client.time(metric_name, options) do
          yield
        end
      end
    end
  end
end
@@ -0,0 +1,39 @@
|
|
1
|
+
# frozen_string_literal: true
|
2
|
+
|
3
|
+
require 'deimos/metrics/provider'
|
4
|
+
|
5
|
+
module Deimos
  module Metrics
    # A mock Metrics wrapper which just logs the metrics
    class Mock
      # @param logger [Logger,nil] destination for the logged metrics;
      #   a STDOUT logger is created when none is given.
      def initialize(logger=nil)
        @logger = logger || Logger.new(STDOUT)
        @logger.info('MockMetricsProvider initialized')
      end

      # Log a counter increment instead of sending it anywhere.
      def increment(metric_name, options={})
        log_metric('increment', metric_name, options)
      end

      # Log a gauge value instead of sending it anywhere.
      def gauge(metric_name, count, options={})
        log_metric('gauge', metric_name, count, options)
      end

      # Log a histogram sample instead of sending it anywhere.
      def histogram(metric_name, count, options={})
        log_metric('histogram', metric_name, count, options)
      end

      # Time the yielded block and log the elapsed whole seconds.
      def time(metric_name, options={})
        started_at = Time.now
        yield
        elapsed = (Time.now - started_at).to_i
        log_metric('time', metric_name, elapsed, options)
      end

      private

      # Emit one "MockMetricsProvider.<method>: <args>" info line.
      def log_metric(method_name, *parts)
        @logger.info("MockMetricsProvider.#{method_name}: #{parts.join(', ')}")
      end
    end
  end
end
# frozen_string_literal: true

module Deimos
  module Metrics
    # Base class for all metrics providers.
    # Subclasses must implement every public method below; calling them on
    # this class raises NotImplementedError.
    class Provider
      # Send a counter increment metric.
      # @param metric_name [String] The name of the counter metric
      # @param options [Hash] Any additional options, e.g. :tags
      def increment(metric_name, options={})
        raise NotImplementedError
      end

      # Send a gauge metric recording the current value of a quantity.
      # @param metric_name [String] The name of the gauge metric
      # @param count [Numeric] The value to record
      # @param options [Hash] Any additional options, e.g. :tags
      def gauge(metric_name, count, options={})
        raise NotImplementedError
      end

      # Send a histogram metric sampling the distribution of a quantity.
      # @param metric_name [String] The name of the histogram metric
      # @param count [Numeric] The value to sample
      # @param options [Hash] Any additional options, e.g. :tags
      def histogram(metric_name, count, options={})
        raise NotImplementedError
      end

      # Time a yielded block, and send a timer metric
      # @param metric_name [String] The name of the metric
      # @param options [Hash] Any additional options, e.g. :tags
      def time(metric_name, options={})
        raise NotImplementedError
      end
    end
  end
end
# frozen_string_literal: true

require 'phobos/cli/start'

# Monkey patch of the Phobos CLI: re-opens Phobos::CLI::Start and replaces
# its listener validation (see #validate_listeners! below).
module Phobos
  # :nodoc:
  module CLI
    # :nodoc:
    class Start
      # Replaces the stock Phobos validation of configured listeners:
      # * exits with an error if a listener's handler class name does not
      #   resolve to a defined constant;
      # * only warns (rather than failing) when no `delivery` option is
      #   configured, presumably letting Phobos fall back to its 'batch'
      #   default;
      # * exits with an error when an unrecognized `delivery` value is given.
      # NOTE(review): relies on Phobos internals (error_exit,
      # Listener::DELIVERY_OPTS, Phobos::CLI.logger) — re-verify this patch
      # whenever the phobos gem is upgraded.
      def validate_listeners!
        Phobos.config.listeners.each do |listener|
          handler = listener.handler
          begin
            # Handlers are configured as strings; they must name a real class.
            handler.constantize
          rescue NameError
            error_exit("Handler '#{handler}' not defined")
          end

          delivery = listener.delivery
          if delivery.nil?
            Phobos::CLI.logger.warn do
              Hash(message: "Delivery option should be specified, defaulting to 'batch'"\
                ' - specify this option to silence this message')
            end
          elsif !Listener::DELIVERY_OPTS.include?(delivery)
            error_exit("Invalid delivery option '#{delivery}'. Please specify one of: "\
              "#{Listener::DELIVERY_OPTS.join(', ')}")
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'phobos/producer'

# Monkey patch of Phobos::Producer: re-opens its PublicAPI classes so that
# publish/async_publish accept an optional partition_key and forward it down
# to the underlying produce call.
module Phobos
  module Producer
    # Instance-level publishing API.
    class PublicAPI
      # Synchronous publish, forwarding the optional partition_key.
      def publish(topic, payload, key=nil, partition_key=nil)
        class_producer.publish(topic, payload, key, partition_key)
      end

      # Asynchronous publish, forwarding the optional partition_key.
      def async_publish(topic, payload, key=nil, partition_key=nil)
        class_producer.async_publish(topic, payload, key, partition_key)
      end
    end

    # Class-level publishing API.
    module ClassMethods
      # :nodoc:
      class PublicAPI
        # Wrap the single message in a one-element list, carrying the
        # partition_key through to publish_list.
        def publish(topic, payload, key=nil, partition_key=nil)
          publish_list([{ topic: topic, payload: payload, key: key,
                          partition_key: partition_key }])
        end

        # Async variant of the above.
        def async_publish(topic, payload, key=nil, partition_key=nil)
          async_publish_list([{ topic: topic, payload: payload, key: key,
                                partition_key: partition_key }])
        end

        private

        # Hand each message hash to the wrapped producer, falling back to
        # the message key as the partition key when no explicit
        # partition_key was provided.
        # NOTE(review): overrides a private Phobos method — re-verify this
        # patch whenever the phobos gem is upgraded.
        def produce_messages(producer, messages)
          messages.each do |message|
            partition_key = message[:partition_key] || message[:key]
            producer.produce(message[:payload],
                             topic: message[:topic],
                             key: message[:key],
                             partition_key: partition_key)
          end
        end
      end
    end
  end
end
# frozen_string_literal: true

require 'deimos/message'
require 'deimos/shared_config'
require 'phobos/producer'
require 'active_support/notifications'

# :nodoc:
module Deimos
  class << self
    # Run a block without allowing any messages to be produced to Kafka.
    # Optionally add a list of producer classes to limit the disabling to those
    # classes.
    # @param producer_classes [Array<Class>|Class]
    def disable_producers(*producer_classes, &block)
      if producer_classes.any?
        _disable_producer_classes(producer_classes, &block)
        return
      end

      if Thread.current[:frk_disable_all_producers] # nested disable block
        yield
        return
      end

      begin
        Thread.current[:frk_disable_all_producers] = true
        yield
      ensure
        Thread.current[:frk_disable_all_producers] = false
      end
    end

    # :nodoc:
    def _disable_producer_classes(producer_classes)
      Thread.current[:frk_disabled_producers] ||= Set.new
      producers_to_disable = producer_classes -
                             Thread.current[:frk_disabled_producers].to_a
      Thread.current[:frk_disabled_producers] += producers_to_disable
      # Re-enable in an ensure block so an exception raised inside the
      # yielded block cannot leave these producers disabled for the rest
      # of the thread's life (matches the ensure in #disable_producers).
      begin
        yield
      ensure
        Thread.current[:frk_disabled_producers] -= producers_to_disable
      end
    end

    # Are producers disabled? If a class is passed in, check only that class.
    # Otherwise check if the global disable flag is set.
    # @return [Boolean]
    def producers_disabled?(producer_class=nil)
      Thread.current[:frk_disable_all_producers] ||
        Thread.current[:frk_disabled_producers]&.include?(producer_class)
    end
  end

  # Producer to publish messages to a given kafka topic.
  class Producer
    include SharedConfig

    # Maximum number of messages to send to the backend in one batch.
    MAX_BATCH_SIZE = 500

    class << self
      # Producer-level configuration, seeded with defaults on first access.
      # @return [Hash]
      def config
        @config ||= {
          encode_key: true,
          namespace: Deimos.config.producers.schema_namespace
        }
      end

      # Set the topic.
      # @param topic [String]
      # @return [String] the current topic (with the configured topic prefix
      #   prepended) if no argument given.
      def topic(topic=nil)
        if topic
          config[:topic] = topic
          return
        end
        # accessor
        "#{Deimos.config.producers.topic_prefix}#{config[:topic]}"
      end

      # Override the default partition key (which is the payload key).
      # @param _payload [Hash] the payload being passed into the produce method.
      # Will include `payload_key` if it is part of the original payload.
      # @return [String]
      def partition_key(_payload)
        nil
      end

      # Publish the payload to the topic.
      # @param payload [Hash] with an optional payload_key hash key.
      # @param topic [String] if specifying the topic
      def publish(payload, topic: self.topic)
        publish_list([payload], topic: topic)
      end

      # Publish a list of messages.
      # @param payloads [Hash|Array<Hash>] with optional payload_key hash key.
      # @param sync [Boolean] if given, override the default setting of
      # whether to publish synchronously.
      # @param force_send [Boolean] if true, ignore the configured backend
      # and send immediately to Kafka.
      # @param topic [String] if specifying the topic
      def publish_list(payloads, sync: nil, force_send: false, topic: self.topic)
        # Silently no-op when Kafka is unconfigured or producing is disabled
        # globally or for this class.
        return if Deimos.config.kafka.seed_brokers.blank? ||
                  Deimos.config.producers.disabled ||
                  Deimos.producers_disabled?(self)

        raise 'Topic not specified. Please specify the topic.' if topic.blank?

        backend_class = determine_backend_class(sync, force_send)
        Deimos.instrument(
          'encode_messages',
          producer: self,
          topic: topic,
          payloads: payloads
        ) do
          messages = Array(payloads).map { |p| Deimos::Message.new(p, self) }
          messages.each { |m| _process_message(m, topic) }
          messages.in_groups_of(MAX_BATCH_SIZE, false) do |batch|
            self.produce_batch(backend_class, batch)
          end
        end
      end

      # Resolve the backend class to publish through: force_send always means
      # synchronous Kafka; otherwise the configured backend, flipped between
      # kafka/kafka_async when the sync argument contradicts it.
      # @param sync [Boolean]
      # @param force_send [Boolean]
      # @return [Class < Deimos::Backend]
      def determine_backend_class(sync, force_send)
        backend = if force_send
                    :kafka
                  else
                    Deimos.config.producers.backend
                  end
        if backend == :kafka_async && sync
          backend = :kafka
        elsif backend == :kafka && sync == false
          backend = :kafka_async
        end
        "Deimos::Backends::#{backend.to_s.classify}".constantize
      end

      # Send a batch to the backend.
      # @param backend [Class < Deimos::Backend]
      # @param batch [Array<Deimos::Message>]
      def produce_batch(backend, batch)
        backend.publish(producer_class: self, messages: batch)
      end

      # Memoized payload encoder for this producer's schema.
      # @return [Deimos::SchemaBackends::Base]
      def encoder
        @encoder ||= Deimos.schema_backend(schema: config[:schema],
                                           namespace: config[:namespace])
      end

      # Memoized key encoder for this producer's key schema.
      # @return [Deimos::SchemaBackends::Base]
      def key_encoder
        @key_encoder ||= Deimos.schema_backend(schema: config[:key_schema],
                                               namespace: config[:namespace])
      end

      # Override this in active record producers to add
      # non-schema fields to check for updates
      # @return [Array<String>] fields to check for updates
      def watched_attributes
        self.encoder.schema_fields.map(&:name)
      end

      private

      # Fill in one message's fields, keys and encoded forms before sending.
      # @param message [Message]
      # @param topic [String]
      def _process_message(message, topic)
        # this violates the Law of Demeter but it has to happen in a very
        # specific order and requires a bunch of methods on the producer
        # to work correctly.
        message.add_fields(encoder.schema_fields.map(&:name))
        message.partition_key = self.partition_key(message.payload)
        message.key = _retrieve_key(message.payload)
        # need to do this before _coerce_fields because that might result
        # in an empty payload which is an *error* whereas this is intended.
        message.payload = nil if message.payload.blank?
        message.coerce_fields(encoder)
        message.encoded_key = _encode_key(message.key)
        message.topic = topic
        message.encoded_payload = if message.payload.nil?
                                    nil
                                  else
                                    encoder.encode(message.payload,
                                                   topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-value")
                                  end
      end

      # Encode the message key according to the key_config, or raise when a
      # key is required but missing or the key config is absent.
      # @param key [Object]
      # @return [String|Object]
      def _encode_key(key)
        if key.nil?
          return nil if config[:no_keys] # no key is fine, otherwise it's a problem

          raise 'No key given but a key is required! Use `key_config none: true` to avoid using keys.'
        end
        if config[:encode_key] && config[:key_field].nil? &&
           config[:key_schema].nil?
          raise 'No key config given - if you are not encoding keys, please use `key_config plain: true`'
        end

        if config[:key_field]
          encoder.encode_key(config[:key_field], key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
        elsif config[:key_schema]
          key_encoder.encode(key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
        else
          key
        end
      end

      # Pull the key out of the payload: an explicit :payload_key wins,
      # otherwise fall back to the configured key_field (the :payload_key
      # entry is removed from the payload either way).
      # @param payload [Hash]
      # @return [String]
      def _retrieve_key(payload)
        key = payload.delete(:payload_key)
        return key if key

        config[:key_field] ? payload[config[:key_field]] : nil
      end
    end
  end
end