deimos-ruby 1.0.0.pre.beta22

Files changed (100)
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +74 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +321 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +32 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +165 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +752 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-kafka.gemspec +42 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/DATABASE_BACKEND.md +147 -0
  22. data/docs/PULL_REQUEST_TEMPLATE.md +34 -0
  23. data/lib/deimos/active_record_consumer.rb +81 -0
  24. data/lib/deimos/active_record_producer.rb +64 -0
  25. data/lib/deimos/avro_data_coder.rb +89 -0
  26. data/lib/deimos/avro_data_decoder.rb +36 -0
  27. data/lib/deimos/avro_data_encoder.rb +51 -0
  28. data/lib/deimos/backends/db.rb +27 -0
  29. data/lib/deimos/backends/kafka.rb +27 -0
  30. data/lib/deimos/backends/kafka_async.rb +27 -0
  31. data/lib/deimos/configuration.rb +90 -0
  32. data/lib/deimos/consumer.rb +164 -0
  33. data/lib/deimos/instrumentation.rb +71 -0
  34. data/lib/deimos/kafka_message.rb +27 -0
  35. data/lib/deimos/kafka_source.rb +126 -0
  36. data/lib/deimos/kafka_topic_info.rb +86 -0
  37. data/lib/deimos/message.rb +74 -0
  38. data/lib/deimos/metrics/datadog.rb +47 -0
  39. data/lib/deimos/metrics/mock.rb +39 -0
  40. data/lib/deimos/metrics/provider.rb +38 -0
  41. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  42. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  43. data/lib/deimos/monkey_patches/ruby_kafka_heartbeat.rb +85 -0
  44. data/lib/deimos/monkey_patches/schema_store.rb +19 -0
  45. data/lib/deimos/producer.rb +218 -0
  46. data/lib/deimos/publish_backend.rb +30 -0
  47. data/lib/deimos/railtie.rb +8 -0
  48. data/lib/deimos/schema_coercer.rb +108 -0
  49. data/lib/deimos/shared_config.rb +59 -0
  50. data/lib/deimos/test_helpers.rb +356 -0
  51. data/lib/deimos/tracing/datadog.rb +35 -0
  52. data/lib/deimos/tracing/mock.rb +40 -0
  53. data/lib/deimos/tracing/provider.rb +31 -0
  54. data/lib/deimos/utils/db_producer.rb +122 -0
  55. data/lib/deimos/utils/executor.rb +117 -0
  56. data/lib/deimos/utils/inline_consumer.rb +144 -0
  57. data/lib/deimos/utils/lag_reporter.rb +182 -0
  58. data/lib/deimos/utils/platform_schema_validation.rb +0 -0
  59. data/lib/deimos/utils/signal_handler.rb +68 -0
  60. data/lib/deimos/version.rb +5 -0
  61. data/lib/deimos.rb +133 -0
  62. data/lib/generators/deimos/db_backend/templates/migration +24 -0
  63. data/lib/generators/deimos/db_backend/templates/rails3_migration +30 -0
  64. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  65. data/lib/tasks/deimos.rake +27 -0
  66. data/spec/active_record_consumer_spec.rb +81 -0
  67. data/spec/active_record_producer_spec.rb +107 -0
  68. data/spec/avro_data_decoder_spec.rb +18 -0
  69. data/spec/avro_data_encoder_spec.rb +37 -0
  70. data/spec/backends/db_spec.rb +35 -0
  71. data/spec/backends/kafka_async_spec.rb +11 -0
  72. data/spec/backends/kafka_spec.rb +11 -0
  73. data/spec/consumer_spec.rb +169 -0
  74. data/spec/deimos_spec.rb +120 -0
  75. data/spec/kafka_source_spec.rb +168 -0
  76. data/spec/kafka_topic_info_spec.rb +88 -0
  77. data/spec/phobos.bad_db.yml +73 -0
  78. data/spec/phobos.yml +73 -0
  79. data/spec/producer_spec.rb +397 -0
  80. data/spec/publish_backend_spec.rb +10 -0
  81. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  82. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  83. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  84. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  85. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  86. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  87. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  88. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  89. data/spec/spec_helper.rb +207 -0
  90. data/spec/updateable_schema_store_spec.rb +36 -0
  91. data/spec/utils/db_producer_spec.rb +259 -0
  92. data/spec/utils/executor_spec.rb +42 -0
  93. data/spec/utils/lag_reporter_spec.rb +69 -0
  94. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  95. data/spec/utils/signal_handler_spec.rb +16 -0
  96. data/support/deimos-solo.png +0 -0
  97. data/support/deimos-with-name-next.png +0 -0
  98. data/support/deimos-with-name.png +0 -0
  99. data/support/flipp-logo.png +0 -0
  100. metadata +452 -0
data/lib/deimos/metrics/datadog.rb
@@ -0,0 +1,47 @@
+ # frozen_string_literal: true
+
+ require 'deimos/metrics/provider'
+
+ module Deimos
+   module Metrics
+     # A Metrics wrapper class for Datadog.
+     class Datadog < Metrics::Provider
+       # :nodoc:
+       def initialize(config, logger)
+         raise 'Metrics config must specify host_ip' if config[:host_ip].nil?
+         raise 'Metrics config must specify host_port' if config[:host_port].nil?
+         raise 'Metrics config must specify namespace' if config[:namespace].nil?
+
+         logger.info("DatadogMetricsProvider configured with: #{config}")
+         @client = Datadog::Statsd.new(
+           config[:host_ip],
+           config[:host_port]
+         )
+         @client.tags = config[:tags]
+         @client.namespace = config[:namespace]
+       end
+
+       # :nodoc:
+       def increment(metric_name, options={})
+         @client.increment(metric_name, options)
+       end
+
+       # :nodoc:
+       def gauge(metric_name, count, options={})
+         @client.gauge(metric_name, count, options)
+       end
+
+       # :nodoc:
+       def histogram(metric_name, count, options={})
+         @client.histogram(metric_name, count, options)
+       end
+
+       # :nodoc:
+       def time(metric_name, options={})
+         @client.time(metric_name, options) do
+           yield
+         end
+       end
+     end
+   end
+ end
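For context, a minimal wiring sketch (not part of this diff; the host/port/namespace values are hypothetical, and it assumes the `Deimos.configure` block exposes a `metrics` setting as suggested by `configuration.rb` in the file list above):

    require 'logger'
    require 'datadog/statsd'
    require 'deimos'
    require 'deimos/metrics/datadog'

    # Hypothetical values; host_ip/host_port must point at a running statsd agent.
    Deimos.configure do |config|
      config.metrics = Deimos::Metrics::Datadog.new(
        {
          host_ip: '127.0.0.1',
          host_port: 8125,
          namespace: 'my_app',
          tags: ['env:staging']
        },
        Logger.new(STDOUT)
      )
    end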
data/lib/deimos/metrics/mock.rb
@@ -0,0 +1,39 @@
+ # frozen_string_literal: true
+
+ require 'deimos/metrics/provider'
+
+ module Deimos
+   module Metrics
+     # A mock Metrics wrapper which just logs the metrics
+     class Mock
+       # :nodoc:
+       def initialize(logger=nil)
+         @logger = logger || Logger.new(STDOUT)
+         @logger.info('MockMetricsProvider initialized')
+       end
+
+       # :nodoc:
+       def increment(metric_name, options={})
+         @logger.info("MockMetricsProvider.increment: #{metric_name}, #{options}")
+       end
+
+       # :nodoc:
+       def gauge(metric_name, count, options={})
+         @logger.info("MockMetricsProvider.gauge: #{metric_name}, #{count}, #{options}")
+       end
+
+       # :nodoc:
+       def histogram(metric_name, count, options={})
+         @logger.info("MockMetricsProvider.histogram: #{metric_name}, #{count}, #{options}")
+       end
+
+       # :nodoc:
+       def time(metric_name, options={})
+         start_time = Time.now
+         yield
+         total_time = (Time.now - start_time).to_i
+         @logger.info("MockMetricsProvider.time: #{metric_name}, #{total_time}, #{options}")
+       end
+     end
+   end
+ end
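Since the mock only logs, it can be exercised directly; a usage sketch:

    require 'deimos/metrics/mock'

    metrics = Deimos::Metrics::Mock.new # logs to STDOUT by default
    metrics.increment('publish_attempts', tags: ['topic:my-topic'])
    metrics.time('process_batch') do
      sleep(0.2) # stand-in for real work; logged as 0, since the duration is truncated to whole seconds
    end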
data/lib/deimos/metrics/provider.rb
@@ -0,0 +1,38 @@
+ # frozen_string_literal: true
+
+ # rubocop:disable Lint/UnusedMethodArgument
+ module Deimos
+   module Metrics
+     # Base class for all metrics providers.
+     class Provider
+       # Send a counter increment metric
+       # @param metric_name [String] The name of the counter metric
+       # @param options [Hash] Any additional options, e.g. :tags
+       def increment(metric_name, options={})
+         raise NotImplementedError
+       end
+
+       # Send a gauge metric
+       # @param metric_name [String] The name of the gauge metric
+       # @param options [Hash] Any additional options, e.g. :tags
+       def gauge(metric_name, count, options={})
+         raise NotImplementedError
+       end
+
+       # Send a histogram metric
+       # @param metric_name [String] The name of the histogram metric
+       # @param options [Hash] Any additional options, e.g. :tags
+       def histogram(metric_name, count, options={})
+         raise NotImplementedError
+       end
+
+       # Time a yielded block, and send a timer metric
+       # @param metric_name [String] The name of the metric
+       # @param options [Hash] Any additional options, e.g. :tags
+       def time(metric_name, options={})
+         raise NotImplementedError
+       end
+     end
+   end
+ end
+ # rubocop:enable Lint/UnusedMethodArgument
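A custom provider only has to implement these four methods; a hypothetical in-memory implementation, useful for asserting on metrics in tests:

    require 'deimos/metrics/provider'

    # Hypothetical: accumulates metrics in memory instead of sending them anywhere.
    class InMemoryMetrics < Deimos::Metrics::Provider
      attr_reader :counters, :values

      def initialize
        @counters = Hash.new(0)
        @values = {}
      end

      def increment(metric_name, options={})
        @counters[metric_name] += 1
      end

      def gauge(metric_name, count, options={})
        @values[metric_name] = count
      end

      def histogram(metric_name, count, options={})
        @values[metric_name] = count # simplistic: keep only the last value
      end

      def time(metric_name, options={})
        start = Time.now
        result = yield
        @values[metric_name] = Time.now - start
        result
      end
    end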
data/lib/deimos/monkey_patches/phobos_cli.rb
@@ -0,0 +1,35 @@
+ # frozen_string_literal: true
+
+ require 'phobos/cli/start'
+
+ # :nodoc:
+ module Phobos
+   # :nodoc:
+   module CLI
+     # :nodoc:
+     class Start
+       # :nodoc:
+       def validate_listeners!
+         Phobos.config.listeners.each do |listener|
+           handler = listener.handler
+           begin
+             handler.constantize
+           rescue NameError
+             error_exit("Handler '#{handler}' not defined")
+           end
+
+           delivery = listener.delivery
+           if delivery.nil?
+             Phobos::CLI.logger.warn do
+               Hash(message: "Delivery option should be specified, defaulting to 'batch'"\
+                 ' - specify this option to silence this message')
+             end
+           elsif !Listener::DELIVERY_OPTS.include?(delivery)
+             error_exit("Invalid delivery option '#{delivery}'. Please specify one of: "\
+               "#{Listener::DELIVERY_OPTS.join(', ')}")
+           end
+         end
+       end
+     end
+   end
+ end
data/lib/deimos/monkey_patches/phobos_producer.rb
@@ -0,0 +1,51 @@
+ # frozen_string_literal: true
+
+ require 'phobos/producer'
+
+ module Phobos
+   module Producer
+     # :nodoc:
+     class PublicAPI
+       # :nodoc:
+       def publish(topic, payload, key=nil, partition_key=nil)
+         class_producer.publish(topic, payload, key, partition_key)
+       end
+
+       # :nodoc:
+       def async_publish(topic, payload, key=nil, partition_key=nil)
+         class_producer.async_publish(topic, payload, key, partition_key)
+       end
+     end
+
+     # :nodoc:
+     module ClassMethods
+       # :nodoc:
+       class PublicAPI
+         # :nodoc:
+         def publish(topic, payload, key=nil, partition_key=nil)
+           publish_list([{ topic: topic, payload: payload, key: key,
+                           partition_key: partition_key }])
+         end
+
+         # :nodoc:
+         def async_publish(topic, payload, key=nil, partition_key=nil)
+           async_publish_list([{ topic: topic, payload: payload, key: key,
+                                 partition_key: partition_key }])
+         end
+
+         private
+
+         # :nodoc:
+         def produce_messages(producer, messages)
+           messages.each do |message|
+             partition_key = message[:partition_key] || message[:key]
+             producer.produce(message[:payload],
+                              topic: message[:topic],
+                              key: message[:key],
+                              partition_key: partition_key)
+           end
+         end
+       end
+     end
+   end
+ end
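This patch threads a `partition_key` argument through Phobos' publish API; when it is omitted, `produce_messages` falls back to the message key. A usage sketch (class and topic names hypothetical):

    class MyPhobosProducer
      include Phobos::Producer
    end

    # Routed by the explicit partition key:
    MyPhobosProducer.producer.publish('my-topic', '{"id": 1}', 'key-1', 'partition-key-1')
    # Without one, the key doubles as the partition key:
    MyPhobosProducer.producer.publish('my-topic', '{"id": 2}', 'key-2')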
data/lib/deimos/monkey_patches/ruby_kafka_heartbeat.rb
@@ -0,0 +1,85 @@
+ # frozen_string_literal: true
+
+ module Kafka
+   class Heartbeat
+     def initialize(group:, interval:, instrumenter:)
+       @group = group
+       @interval = interval
+       @last_heartbeat = Time.now
+       @instrumenter = instrumenter
+     end
+
+     def trigger!
+       @instrumenter.instrument('heartbeat.consumer',
+                                group_id: @group.group_id,
+                                topic_partitions: @group.assigned_partitions) do
+         @group.heartbeat
+         @last_heartbeat = Time.now
+       end
+     end
+   end
+
+   class Client
+     def consumer(
+       group_id:,
+       session_timeout: 30,
+       offset_commit_interval: 10,
+       offset_commit_threshold: 0,
+       heartbeat_interval: 10,
+       offset_retention_time: nil,
+       fetcher_max_queue_size: 100
+     )
+       cluster = initialize_cluster
+
+       instrumenter = DecoratingInstrumenter.new(@instrumenter,
+                                                 group_id: group_id)
+
+       # The Kafka protocol expects the retention time to be in ms.
+       retention_time = (offset_retention_time && offset_retention_time * 1_000) || -1
+
+       group = ConsumerGroup.new(
+         cluster: cluster,
+         logger: @logger,
+         group_id: group_id,
+         session_timeout: session_timeout,
+         retention_time: retention_time,
+         instrumenter: instrumenter
+       )
+
+       fetcher = Fetcher.new(
+         cluster: initialize_cluster,
+         group: group,
+         logger: @logger,
+         instrumenter: instrumenter,
+         max_queue_size: fetcher_max_queue_size
+       )
+
+       offset_manager = OffsetManager.new(
+         cluster: cluster,
+         group: group,
+         fetcher: fetcher,
+         logger: @logger,
+         commit_interval: offset_commit_interval,
+         commit_threshold: offset_commit_threshold,
+         offset_retention_time: offset_retention_time
+       )
+
+       heartbeat = Heartbeat.new(
+         group: group,
+         interval: heartbeat_interval,
+         instrumenter: instrumenter
+       )
+
+       Consumer.new(
+         cluster: cluster,
+         logger: @logger,
+         instrumenter: instrumenter,
+         group: group,
+         offset_manager: offset_manager,
+         fetcher: fetcher,
+         session_timeout: session_timeout,
+         heartbeat: heartbeat
+       )
+     end
+   end
+ end
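The patch mirrors ruby-kafka's `Client#consumer` while wiring the instrumented `Heartbeat` into it, so each heartbeat now fires a `heartbeat.consumer` event through the instrumenter that subscribers (such as the lag reporter in this gem) can observe. A consumer constructed as usual picks it up (broker address hypothetical):

    require 'kafka'

    kafka = Kafka.new(['localhost:9092'], client_id: 'my-app')
    # heartbeat_interval is the knob this patch threads through to Heartbeat.
    consumer = kafka.consumer(group_id: 'my-group', heartbeat_interval: 10)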
data/lib/deimos/monkey_patches/schema_store.rb
@@ -0,0 +1,19 @@
+ # frozen_string_literal: true
+
+ require 'avro_turf/schema_store'
+
+ # Allows us to add in-memory schemas to the schema store in
+ # addition to the ones stored in the file system.
+ class AvroTurf::SchemaStore
+   attr_accessor :schemas
+
+   # @param schema_hash [Hash]
+   def add_schema(schema_hash)
+     name = schema_hash['name']
+     namespace = schema_hash['namespace']
+     full_name = Avro::Name.make_fullname(name, namespace)
+     return if @schemas.key?(full_name)
+
+     Avro::Schema.real_parse(schema_hash, @schemas)
+   end
+ end
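A usage sketch of the patched store (the inline schema definition is hypothetical):

    require 'deimos/monkey_patches/schema_store'

    store = AvroTurf::SchemaStore.new(path: 'spec/schemas')
    store.add_schema(
      'type' => 'record',
      'name' => 'InlineRecord',
      'namespace' => 'com.my-namespace',
      'fields' => [{ 'name' => 'id', 'type' => 'int' }]
    )
    # The schema now resolves without a corresponding .avsc file on disk:
    store.find('InlineRecord', 'com.my-namespace')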
data/lib/deimos/producer.rb
@@ -0,0 +1,218 @@
+ # frozen_string_literal: true
+
+ require 'deimos/avro_data_encoder'
+ require 'deimos/message'
+ require 'deimos/shared_config'
+ require 'deimos/schema_coercer'
+ require 'phobos/producer'
+ require 'active_support/notifications'
+
+ # :nodoc:
+ module Deimos
+   class << self
+     # Run a block without allowing any messages to be produced to Kafka.
+     # Optionally add a list of producer classes to limit the disabling to those
+     # classes.
+     # @param producer_classes [Array<Class>|Class]
+     def disable_producers(*producer_classes, &block)
+       if producer_classes.any?
+         _disable_producer_classes(producer_classes, &block)
+         return
+       end
+
+       if Thread.current[:frk_disable_all_producers] # nested disable block
+         yield
+         return
+       end
+
+       Thread.current[:frk_disable_all_producers] = true
+       yield
+       Thread.current[:frk_disable_all_producers] = false
+     end
+
+     # :nodoc:
+     def _disable_producer_classes(producer_classes)
+       Thread.current[:frk_disabled_producers] ||= Set.new
+       producers_to_disable = producer_classes -
+                              Thread.current[:frk_disabled_producers].to_a
+       Thread.current[:frk_disabled_producers] += producers_to_disable
+       yield
+       Thread.current[:frk_disabled_producers] -= producers_to_disable
+     end
+
+     # Are producers disabled? If a class is passed in, check only that class.
+     # Otherwise check if the global disable flag is set.
+     # @return [Boolean]
+     def producers_disabled?(producer_class=nil)
+       Thread.current[:frk_disable_all_producers] ||
+         Thread.current[:frk_disabled_producers]&.include?(producer_class)
+     end
+   end
+
+   # Producer to publish messages to a given Kafka topic.
+   class Producer
+     include SharedConfig
+
+     MAX_BATCH_SIZE = 500
+
+     class << self
+       # @return [Hash]
+       def config
+         @config ||= {
+           encode_key: true,
+           namespace: Deimos.config.producer_schema_namespace
+         }
+       end
+
+       # Set the topic.
+       # @param topic [String]
+       # @return [String] the current topic if no argument given.
+       def topic(topic=nil)
+         if topic
+           config[:topic] = topic
+           return
+         end
+         # accessor
+         "#{Deimos.config.producer_topic_prefix}#{config[:topic]}"
+       end
+
+       # Override the default partition key (which is the payload key).
+       # @param _payload [Hash] the payload being passed into the produce method.
+       #   Will include `payload_key` if it is part of the original payload.
+       # @return [String]
+       def partition_key(_payload)
+         nil
+       end
+
+       # Publish the payload to the topic.
+       # @param payload [Hash] with an optional payload_key hash key.
+       def publish(payload)
+         publish_list([payload])
+       end
+
+       # Publish a list of messages.
+       # @param payloads [Hash|Array<Hash>] with optional payload_key hash key.
+       # @param sync [Boolean] if given, override the default setting of
+       #   whether to publish synchronously.
+       # @param force_send [Boolean] if true, ignore the configured backend
+       #   and send immediately to Kafka.
+       def publish_list(payloads, sync: nil, force_send: false)
+         return if Deimos.config.seed_broker.blank? ||
+                   Deimos.config.disable_producers ||
+                   Deimos.producers_disabled?(self)
+
+         backend_class = determine_backend_class(sync, force_send)
+         Deimos.instrument(
+           'encode_messages',
+           producer: self,
+           topic: topic,
+           payloads: payloads
+         ) do
+           messages = Array(payloads).map { |p| Deimos::Message.new(p, self) }
+           messages.each(&method(:_process_message))
+           messages.in_groups_of(MAX_BATCH_SIZE, false) do |batch|
+             self.produce_batch(backend_class, batch)
+           end
+         end
+       end
+
+       # @param sync [Boolean]
+       # @param force_send [Boolean]
+       # @return [Class < Deimos::Backend]
+       def determine_backend_class(sync, force_send)
+         backend = if force_send
+                     :kafka
+                   else
+                     Deimos.config.publish_backend
+                   end
+         if backend == :kafka_async && sync
+           backend = :kafka
+         elsif backend == :kafka && sync == false
+           backend = :kafka_async
+         end
+         "Deimos::Backends::#{backend.to_s.classify}".constantize
+       end
+
+       # Send a batch to the backend.
+       # @param backend [Class < Deimos::Backend]
+       # @param batch [Array<Deimos::Message>]
+       def produce_batch(backend, batch)
+         backend.publish(producer_class: self, messages: batch)
+       end
+
+       # @return [AvroDataEncoder]
+       def encoder
+         @encoder ||= AvroDataEncoder.new(schema: config[:schema],
+                                          namespace: config[:namespace])
+       end
+
+       # @return [AvroDataEncoder]
+       def key_encoder
+         @key_encoder ||= AvroDataEncoder.new(schema: config[:key_schema],
+                                              namespace: config[:namespace])
+       end
+
+       # Override this in active record producers to add
+       # non-schema fields to check for updates
+       # @return [Array<String>] fields to check for updates
+       def watched_attributes
+         self.encoder.avro_schema.fields.map(&:name)
+       end
+
+       private
+
+       # @param message [Message]
+       def _process_message(message)
+         # this violates the Law of Demeter but it has to happen in a very
+         # specific order and requires a bunch of methods on the producer
+         # to work correctly.
+         message.add_fields(encoder.avro_schema)
+         message.partition_key = self.partition_key(message.payload)
+         message.key = _retrieve_key(message.payload)
+         # need to do this before _coerce_fields because that might result
+         # in an empty payload which is an *error* whereas this is intended.
+         message.payload = nil if message.payload.blank?
+         message.coerce_fields(encoder.avro_schema)
+         message.encoded_key = _encode_key(message.key)
+         message.topic = self.topic
+         message.encoded_payload = if message.payload.nil?
+                                     nil
+                                   else
+                                     encoder.encode(message.payload,
+                                                    topic: "#{config[:topic]}-value")
+                                   end
+       end
+
+       # @param key [Object]
+       # @return [String|Object]
+       def _encode_key(key)
+         if key.nil?
+           return nil if config[:no_keys] # no key is fine, otherwise it's a problem
+
+           raise 'No key given but a key is required! Use `key_config none: true` to avoid using keys.'
+         end
+         if config[:encode_key] && config[:key_field].nil? &&
+            config[:key_schema].nil?
+           raise 'No key config given - if you are not encoding keys, please use `key_config plain: true`'
+         end
+
+         if config[:key_field]
+           encoder.encode_key(config[:key_field], key, "#{config[:topic]}-key")
+         elsif config[:key_schema]
+           key_encoder.encode(key, topic: "#{config[:topic]}-key")
+         else
+           key
+         end
+       end
+
+       # @param payload [Hash]
+       # @return [String]
+       def _retrieve_key(payload)
+         key = payload.delete(:payload_key)
+         return key if key
+
+         config[:key_field] ? payload[config[:key_field]] : nil
+       end
+     end
+   end
+ end
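Tying the class methods together, a hypothetical producer definition (the `schema`/`namespace`/`topic`/`key_config` DSL comes from `shared_config.rb` in the file list above; all names are illustrative):

    class WidgetProducer < Deimos::Producer
      schema 'MySchema'
      namespace 'com.my-namespace'
      topic 'my-topic'
      key_config field: 'test_id'
    end

    # Encodes the payload with Avro and publishes via the configured backend:
    WidgetProducer.publish('test_id' => 'id1', 'some_int' => 3)

    # Temporarily suppress just this producer, e.g. during a backfill:
    Deimos.disable_producers(WidgetProducer) do
      WidgetProducer.publish('test_id' => 'id2', 'some_int' => 4) # dropped by publish_list
    end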
data/lib/deimos/publish_backend.rb
@@ -0,0 +1,30 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   # Abstract class for all publish backends.
+   class PublishBackend
+     class << self
+       # @param producer_class [Class < Deimos::Producer]
+       # @param messages [Array<Deimos::Message>]
+       def publish(producer_class:, messages:)
+         Deimos.config.logger.info(
+           message: 'Publishing messages',
+           topic: producer_class.topic,
+           payloads: messages.map do |message|
+             {
+               payload: message.payload,
+               key: message.key
+             }
+           end
+         )
+         execute(producer_class: producer_class, messages: messages)
+       end
+
+       # @param producer_class [Class < Deimos::Producer]
+       # @param messages [Array<Deimos::Message>]
+       def execute(producer_class:, messages:)
+         raise NotImplementedError
+       end
+     end
+   end
+ end
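Because `determine_backend_class` constantizes `Deimos::Backends::<name>`, a new backend only has to subclass `PublishBackend` under that namespace and implement `execute`. A hypothetical in-memory backend:

    module Deimos
      module Backends
        # Hypothetical: buffers messages instead of sending them anywhere.
        class Memory < Deimos::PublishBackend
          class << self
            def buffer
              @buffer ||= []
            end

            # Called by PublishBackend.publish after it logs the payloads.
            def execute(producer_class:, messages:)
              buffer.concat(messages)
            end
          end
        end
      end
    end

    # Selected via configuration, assuming the publish_backend setting shown
    # in producer.rb: config.publish_backend = :memory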
data/lib/deimos/railtie.rb
@@ -0,0 +1,8 @@
+ # frozen_string_literal: true
+
+ # Add rake task to Rails.
+ class Deimos::Railtie < Rails::Railtie
+   rake_tasks do
+     load 'tasks/deimos.rake'
+   end
+ end
data/lib/deimos/schema_coercer.rb
@@ -0,0 +1,108 @@
+ # frozen_string_literal: true
+
+ module Deimos
+   # Class to coerce values in a payload to match a schema.
+   class SchemaCoercer
+     # @param schema [Avro::Schema]
+     def initialize(schema)
+       @schema = schema
+     end
+
+     # @param payload [Hash]
+     # @return [HashWithIndifferentAccess]
+     def coerce(payload)
+       result = {}
+       @schema.fields.each do |field|
+         name = field.name
+         next unless payload.key?(name)
+
+         val = payload[name]
+         result[name] = _coerce_type(field.type, val)
+       end
+       result.with_indifferent_access
+     end
+
+     private
+
+     # @param val [String]
+     # @return [Boolean]
+     def _is_integer_string?(val)
+       return false unless val.is_a?(String)
+
+       begin
+         true if Integer(val)
+       rescue StandardError
+         false
+       end
+     end
+
+     # @param val [String]
+     # @return [Boolean]
+     def _is_float_string?(val)
+       return false unless val.is_a?(String)
+
+       begin
+         true if Float(val)
+       rescue StandardError
+         false
+       end
+     end
+
+     # @param val [Object]
+     # @return [Boolean]
+     def _is_to_s_defined?(val)
+       return false if val.nil?
+
+       Object.instance_method(:to_s).bind(val).call != val.to_s
+     end
+
+     # @param type [Symbol]
+     # @param val [Object]
+     # @return [Object]
+     def _coerce_type(type, val)
+       int_classes = [Time, DateTime, ActiveSupport::TimeWithZone]
+       field_type = type.type.to_sym
+       if field_type == :union
+         union_types = type.schemas.map { |s| s.type.to_sym }
+         return nil if val.nil? && union_types.include?(:null)
+
+         field_type = union_types.find { |t| t != :null }
+       end
+
+       case field_type
+       when :int, :long
+         if val.is_a?(Integer) ||
+            _is_integer_string?(val) ||
+            int_classes.any? { |klass| val.is_a?(klass) }
+           val.to_i
+         else
+           val # this will fail
+         end
+
+       when :float, :double
+         if val.is_a?(Numeric) || _is_float_string?(val)
+           val.to_f
+         else
+           val # this will fail
+         end
+
+       when :string
+         if val.respond_to?(:to_str)
+           val.to_s
+         elsif _is_to_s_defined?(val)
+           val.to_s
+         else
+           val # this will fail
+         end
+       when :boolean
+         if val.nil? || val == false
+           false
+         else
+           true
+         end
+       else
+         val
+       end
+     end
+   end
+ end
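A coercion sketch, assuming a schema whose `test_id` field is a string and whose `some_int` field is an int (as in the spec's MySchema.avsc):

    schema = Avro::Schema.parse(File.read('spec/schemas/com/my-namespace/MySchema.avsc'))
    coercer = Deimos::SchemaCoercer.new(schema)

    coercer.coerce('test_id' => 123, 'some_int' => '42', 'extra' => 'ignored')
    # => { 'test_id' => '123', 'some_int' => 42 }
    # Keys not in the schema are dropped, and values are converted
    # (integer to string, integer string to integer) to match the field types.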