deimos-ruby 1.24.2 → 2.0.0.pre.alpha1
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.rubocop_todo.yml +0 -17
- data/.tool-versions +1 -0
- data/CHANGELOG.md +5 -0
- data/README.md +287 -498
- data/deimos-ruby.gemspec +4 -4
- data/docs/CONFIGURATION.md +133 -226
- data/docs/UPGRADING.md +237 -0
- data/lib/deimos/active_record_consume/batch_consumption.rb +29 -28
- data/lib/deimos/active_record_consume/mass_updater.rb +59 -4
- data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
- data/lib/deimos/active_record_consumer.rb +36 -21
- data/lib/deimos/active_record_producer.rb +28 -9
- data/lib/deimos/backends/base.rb +4 -35
- data/lib/deimos/backends/kafka.rb +6 -22
- data/lib/deimos/backends/kafka_async.rb +6 -22
- data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
- data/lib/deimos/config/configuration.rb +116 -379
- data/lib/deimos/consume/batch_consumption.rb +24 -124
- data/lib/deimos/consume/message_consumption.rb +36 -63
- data/lib/deimos/consumer.rb +16 -75
- data/lib/deimos/ext/consumer_route.rb +35 -0
- data/lib/deimos/ext/producer_middleware.rb +94 -0
- data/lib/deimos/ext/producer_route.rb +22 -0
- data/lib/deimos/ext/redraw.rb +29 -0
- data/lib/deimos/ext/routing_defaults.rb +72 -0
- data/lib/deimos/ext/schema_route.rb +70 -0
- data/lib/deimos/kafka_message.rb +2 -2
- data/lib/deimos/kafka_source.rb +2 -7
- data/lib/deimos/kafka_topic_info.rb +1 -1
- data/lib/deimos/logging.rb +71 -0
- data/lib/deimos/message.rb +2 -11
- data/lib/deimos/metrics/datadog.rb +40 -1
- data/lib/deimos/metrics/provider.rb +4 -4
- data/lib/deimos/producer.rb +39 -116
- data/lib/deimos/railtie.rb +6 -0
- data/lib/deimos/schema_backends/avro_base.rb +21 -21
- data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
- data/lib/deimos/schema_backends/avro_validation.rb +2 -2
- data/lib/deimos/schema_backends/base.rb +19 -12
- data/lib/deimos/schema_backends/mock.rb +6 -1
- data/lib/deimos/schema_backends/plain.rb +47 -0
- data/lib/deimos/schema_class/base.rb +2 -2
- data/lib/deimos/schema_class/enum.rb +1 -1
- data/lib/deimos/schema_class/record.rb +2 -2
- data/lib/deimos/test_helpers.rb +95 -320
- data/lib/deimos/tracing/provider.rb +6 -6
- data/lib/deimos/transcoder.rb +88 -0
- data/lib/deimos/utils/db_poller/base.rb +16 -14
- data/lib/deimos/utils/db_poller/state_based.rb +3 -3
- data/lib/deimos/utils/db_poller/time_based.rb +4 -4
- data/lib/deimos/utils/db_poller.rb +1 -1
- data/lib/deimos/utils/deadlock_retry.rb +1 -1
- data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
- data/lib/deimos/utils/schema_class.rb +0 -7
- data/lib/deimos/version.rb +1 -1
- data/lib/deimos.rb +79 -26
- data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
- data/lib/generators/deimos/schema_class_generator.rb +0 -1
- data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
- data/lib/generators/deimos/v2_generator.rb +193 -0
- data/lib/tasks/deimos.rake +5 -7
- data/spec/active_record_batch_consumer_association_spec.rb +22 -13
- data/spec/active_record_batch_consumer_spec.rb +84 -65
- data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
- data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
- data/spec/active_record_consume/mass_updater_spec.rb +137 -0
- data/spec/active_record_consumer_spec.rb +29 -13
- data/spec/active_record_producer_spec.rb +36 -26
- data/spec/backends/base_spec.rb +0 -23
- data/spec/backends/kafka_async_spec.rb +1 -3
- data/spec/backends/kafka_spec.rb +1 -3
- data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
- data/spec/batch_consumer_spec.rb +66 -116
- data/spec/consumer_spec.rb +53 -147
- data/spec/deimos_spec.rb +10 -126
- data/spec/kafka_source_spec.rb +19 -52
- data/spec/karafka/karafka.rb +69 -0
- data/spec/karafka_config/karafka_spec.rb +97 -0
- data/spec/logging_spec.rb +25 -0
- data/spec/message_spec.rb +9 -9
- data/spec/producer_spec.rb +112 -254
- data/spec/rake_spec.rb +1 -3
- data/spec/schema_backends/avro_validation_spec.rb +1 -1
- data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
- data/spec/snapshots/consumers-no-nest.snap +49 -0
- data/spec/snapshots/consumers.snap +49 -0
- data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
- data/spec/snapshots/consumers_and_producers.snap +49 -0
- data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
- data/spec/snapshots/consumers_circular.snap +49 -0
- data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
- data/spec/snapshots/consumers_complex_types.snap +49 -0
- data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
- data/spec/snapshots/consumers_nested.snap +49 -0
- data/spec/snapshots/namespace_folders.snap +49 -0
- data/spec/snapshots/namespace_map.snap +49 -0
- data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
- data/spec/snapshots/producers_with_key.snap +49 -0
- data/spec/spec_helper.rb +61 -29
- data/spec/utils/db_poller_spec.rb +49 -39
- data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
- metadata +58 -67
- data/lib/deimos/batch_consumer.rb +0 -7
- data/lib/deimos/config/phobos_config.rb +0 -163
- data/lib/deimos/instrumentation.rb +0 -95
- data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
- data/lib/deimos/utils/inline_consumer.rb +0 -158
- data/lib/deimos/utils/lag_reporter.rb +0 -186
- data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
- data/spec/config/configuration_spec.rb +0 -321
- data/spec/kafka_listener_spec.rb +0 -55
- data/spec/phobos.bad_db.yml +0 -73
- data/spec/phobos.yml +0 -77
- data/spec/utils/inline_consumer_spec.rb +0 -31
- data/spec/utils/lag_reporter_spec.rb +0 -76
- data/spec/utils/platform_schema_validation_spec.rb +0 -0
- data/spec/utils/schema_controller_mixin_spec.rb +0 -84
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
- /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/lib/deimos/utils/inline_consumer.rb
@@ -1,158 +0,0 @@
-# frozen_string_literal: true
-
-# Class to consume messages. Can be used with integration testing frameworks.
-# Assumes that you have a topic with only one partition.
-module Deimos
-  module Utils
-    # Listener that can seek to get the last X messages in a topic.
-    class SeekListener < Phobos::Listener
-      # @return [Integer]
-      MAX_SEEK_RETRIES = 3
-      # @return [Integer]
-      attr_accessor :num_messages
-
-      # @return [void]
-      def start_listener
-        @num_messages ||= 10
-        @consumer = create_kafka_consumer
-        @consumer.subscribe(topic, @subscribe_opts)
-        attempt = 0
-
-        begin
-          attempt += 1
-          last_offset = @kafka_client.last_offset_for(topic, 0)
-          offset = last_offset - num_messages
-          if offset.positive?
-            Deimos.config.logger.info("Seeking to #{offset}")
-            @consumer.seek(topic, 0, offset)
-          end
-        rescue StandardError => e
-          if attempt < MAX_SEEK_RETRIES
-            sleep(1.seconds * attempt)
-            retry
-          end
-          log_error("Could not seek to offset: #{e.message} after #{MAX_SEEK_RETRIES} retries", listener_metadata)
-        end
-
-        instrument('listener.start_handler', listener_metadata) do
-          @handler_class.start(@kafka_client)
-        end
-        log_info('Listener started', listener_metadata)
-      end
-    end
-
-    # Class to return the messages consumed.
-    class MessageBankHandler < Deimos::Consumer
-      include Phobos::Handler
-
-      cattr_accessor :total_messages
-
-      # @param klass [Class<Deimos::Consumer>]
-      # @return [void]
-      def self.config_class=(klass)
-        self.config.merge!(klass.config)
-      end
-
-      # @param _kafka_client [Kafka::Client]
-      # @return [void]
-      def self.start(_kafka_client)
-        self.total_messages = []
-      end
-
-      # @param payload [Hash]
-      # @param metadata [Hash]
-      def consume(payload, metadata)
-        self.class.total_messages << {
-          key: metadata[:key],
-          payload: payload
-        }
-      end
-    end
-
-    # Class which can process/consume messages inline.
-    class InlineConsumer
-      # @return [Integer]
-      MAX_MESSAGE_WAIT_TIME = 1.second
-      # @return [Integer]
-      MAX_TOPIC_WAIT_TIME = 10.seconds
-
-      # Get the last X messages from a topic. You can specify a subclass of
-      # Deimos::Consumer or Deimos::Producer, or provide the
-      # schema, namespace and key_config directly.
-      # @param topic [String]
-      # @param config_class [Class<Deimos::Consumer>,Class<Deimos::Producer>]
-      # @param schema [String]
-      # @param namespace [String]
-      # @param key_config [Hash]
-      # @param num_messages [Integer]
-      # @return [Array<Hash>]
-      def self.get_messages_for(topic:, schema: nil, namespace: nil, key_config: nil,
-                                config_class: nil, num_messages: 10)
-        if config_class
-          MessageBankHandler.config_class = config_class
-        elsif schema.nil? || key_config.nil?
-          raise 'You must specify either a config_class or a schema, namespace and key_config!'
-        else
-          MessageBankHandler.class_eval do
-            schema schema
-            namespace namespace
-            key_config key_config
-            @decoder = nil
-            @key_decoder = nil
-          end
-        end
-        self.consume(topic: topic,
-                     frk_consumer: MessageBankHandler,
-                     num_messages: num_messages)
-        messages = MessageBankHandler.total_messages
-        messages.size <= num_messages ? messages : messages[-num_messages..-1]
-      end
-
-      # Consume the last X messages from a topic.
-      # @param topic [String]
-      # @param frk_consumer [Class]
-      # @param num_messages [Integer] If this number is >= the number
-      # of messages in the topic, all messages will be consumed.
-      # @return [void]
-      def self.consume(topic:, frk_consumer:, num_messages: 10)
-        listener = SeekListener.new(
-          handler: frk_consumer,
-          group_id: SecureRandom.hex,
-          topic: topic,
-          heartbeat_interval: 1
-        )
-        listener.num_messages = num_messages
-
-        # Add the start_time and last_message_time attributes to the
-        # consumer class so we can kill it if it's gone on too long
-        class << frk_consumer
-          attr_accessor :start_time, :last_message_time
-        end
-
-        subscribers = []
-        subscribers << ActiveSupport::Notifications.
-          subscribe('phobos.listener.process_message') do
-            frk_consumer.last_message_time = Time.zone.now
-          end
-        subscribers << ActiveSupport::Notifications.
-          subscribe('phobos.listener.start_handler') do
-            frk_consumer.start_time = Time.zone.now
-            frk_consumer.last_message_time = nil
-          end
-        subscribers << ActiveSupport::Notifications.
-          subscribe('heartbeat.consumer.kafka') do
-            if frk_consumer.last_message_time
-              if Time.zone.now - frk_consumer.last_message_time > MAX_MESSAGE_WAIT_TIME
-                raise Phobos::AbortError
-              end
-            elsif Time.zone.now - frk_consumer.start_time > MAX_TOPIC_WAIT_TIME
-              Deimos.config.logger.error('Aborting - initial wait too long')
-              raise Phobos::AbortError
-            end
-          end
-        listener.start
-        subscribers.each { |s| ActiveSupport::Notifications.unsubscribe(s) }
-      end
-    end
-  end
-end
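The removed `Deimos::Utils::InlineConsumer` (the v1 test/debug helper deleted above) exposed `get_messages_for` as its main entry point. Below is a hedged usage sketch based purely on the method signature in the diff; the topic, schema, namespace and key_config values are illustrative, not taken from the gem.

```ruby
# Hypothetical invocation of the removed v1 helper; keyword arguments mirror the
# `get_messages_for` signature shown in the deleted file, values are made up.
messages = Deimos::Utils::InlineConsumer.get_messages_for(
  topic: 'my-topic',                  # assumed topic name
  schema: 'MySchema',                 # alternatively, pass config_class: MyConsumer
  namespace: 'com.my-namespace',
  key_config: { field: 'test_id' },
  num_messages: 5
)
# Each entry is a { key:, payload: } hash collected by MessageBankHandler.
messages.each { |m| puts "#{m[:key]} => #{m[:payload]}" }
```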
data/lib/deimos/utils/lag_reporter.rb
@@ -1,186 +0,0 @@
-# frozen_string_literal: true
-
-require 'mutex_m'
-
-# :nodoc:
-module Deimos
-  module Utils
-    # Class that manages reporting lag.
-    class LagReporter
-      extend Mutex_m
-
-      # Class that has a list of topics
-      class ConsumerGroup
-        # @return [Hash<String, Topic>]
-        attr_accessor :topics
-        # @return [String]
-        attr_accessor :id
-
-        # @param id [String]
-        def initialize(id)
-          self.id = id
-          self.topics = {}
-        end
-
-        # @param topic [String]
-        # @param partition [Integer]
-        # @return [void]
-        def report_lag(topic, partition)
-          self.topics[topic.to_s] ||= Topic.new(topic, self)
-          self.topics[topic.to_s].report_lag(partition)
-        end
-
-        # @param topic [String]
-        # @param partition [Integer]
-        # @param offset [Integer]
-        # @return [void]
-        def assign_current_offset(topic, partition, offset)
-          self.topics[topic.to_s] ||= Topic.new(topic, self)
-          self.topics[topic.to_s].assign_current_offset(partition, offset)
-        end
-      end
-
-      # Topic which has a hash of partition => last known current offsets
-      class Topic
-        # @return [String]
-        attr_accessor :topic_name
-        # @return [Hash<Integer, Integer>]
-        attr_accessor :partition_current_offsets
-        # @return [ConsumerGroup]
-        attr_accessor :consumer_group
-
-        # @param topic_name [String]
-        # @param group [ConsumerGroup]
-        def initialize(topic_name, group)
-          self.topic_name = topic_name
-          self.consumer_group = group
-          self.partition_current_offsets = {}
-        end
-
-        # @param partition [Integer]
-        # @param offset [Integer]
-        # @return [void]
-        def assign_current_offset(partition, offset)
-          self.partition_current_offsets[partition.to_i] = offset
-        end
-
-        # @param partition [Integer]
-        # @param offset [Integer]
-        # @return [Integer]
-        def compute_lag(partition, offset)
-          begin
-            client = Phobos.create_kafka_client
-            last_offset = client.last_offset_for(self.topic_name, partition)
-            lag = last_offset - offset
-          rescue StandardError # don't do anything, just wait
-            Deimos.config.logger.
-              debug("Error computing lag for #{self.topic_name}, will retry")
-          end
-          lag || 0
-        end
-
-        # @param partition [Integer]
-        # @return [void]
-        def report_lag(partition)
-          current_offset = self.partition_current_offsets[partition.to_i]
-          return unless current_offset
-
-          lag = compute_lag(partition, current_offset)
-          group = self.consumer_group.id
-          Deimos.config.logger.
-            debug("Sending lag: #{group}/#{partition}: #{lag}")
-          Deimos.config.metrics&.gauge('consumer_lag', lag, tags: %W(
-            consumer_group:#{group}
-            partition:#{partition}
-            topic:#{self.topic_name}
-          ))
-        end
-      end
-
-      @groups = {}
-
-      class << self
-        # Reset all group information.
-        # @return [void]
-        def reset
-          @groups = {}
-        end
-
-        # offset_lag = event.payload.fetch(:offset_lag)
-        # group_id = event.payload.fetch(:group_id)
-        # topic = event.payload.fetch(:topic)
-        # partition = event.payload.fetch(:partition)
-        # @param payload [Hash]
-        # @return [void]
-        def message_processed(payload)
-          offset = payload[:offset] || payload[:last_offset]
-          topic = payload[:topic]
-          group = payload[:group_id]
-          partition = payload[:partition]
-
-          synchronize do
-            @groups[group.to_s] ||= ConsumerGroup.new(group)
-            @groups[group.to_s].assign_current_offset(topic, partition, offset)
-          end
-        end
-
-        # @param payload [Hash]
-        # @return [void]
-        def offset_seek(payload)
-          offset = payload[:offset]
-          topic = payload[:topic]
-          group = payload[:group_id]
-          partition = payload[:partition]
-
-          synchronize do
-            @groups[group.to_s] ||= ConsumerGroup.new(group)
-            @groups[group.to_s].assign_current_offset(topic, partition, offset)
-          end
-        end
-
-        # @param payload [Hash]
-        # @return [void]
-        def heartbeat(payload)
-          group = payload[:group_id]
-          synchronize do
-            @groups[group.to_s] ||= ConsumerGroup.new(group)
-            consumer_group = @groups[group.to_s]
-            payload[:topic_partitions].each do |topic, partitions|
-              partitions.each do |partition|
-                consumer_group.report_lag(topic, partition)
-              end
-            end
-          end
-        end
-      end
-    end
-  end
-
-  ActiveSupport::Notifications.subscribe('start_process_message.consumer.kafka') do |*args|
-    next unless Deimos.config.consumers.report_lag
-
-    event = ActiveSupport::Notifications::Event.new(*args)
-    Deimos::Utils::LagReporter.message_processed(event.payload)
-  end
-
-  ActiveSupport::Notifications.subscribe('start_process_batch.consumer.kafka') do |*args|
-    next unless Deimos.config.consumers.report_lag
-
-    event = ActiveSupport::Notifications::Event.new(*args)
-    Deimos::Utils::LagReporter.message_processed(event.payload)
-  end
-
-  ActiveSupport::Notifications.subscribe('seek.consumer.kafka') do |*args|
-    next unless Deimos.config.consumers.report_lag
-
-    event = ActiveSupport::Notifications::Event.new(*args)
-    Deimos::Utils::LagReporter.offset_seek(event.payload)
-  end
-
-  ActiveSupport::Notifications.subscribe('heartbeat.consumer.kafka') do |*args|
-    next unless Deimos.config.consumers.report_lag
-
-    event = ActiveSupport::Notifications::Event.new(*args)
-    Deimos::Utils::LagReporter.heartbeat(event.payload)
-  end
-end
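The removed `LagReporter` worked entirely off `ActiveSupport::Notifications` events emitted by the Phobos/ruby-kafka stack, was gated by the v1 `Deimos.config.consumers.report_lag` setting, and published a `consumer_lag` gauge per consumer group, topic and partition. A minimal sketch of the same subscription pattern, printing instead of emitting metrics; the payload keys come from the deleted file, everything else is illustrative.

```ruby
# Sketch only: observe the heartbeat event the removed reporter consumed.
# Payload keys (:group_id, :topic_partitions) are taken from the deleted code above.
ActiveSupport::Notifications.subscribe('heartbeat.consumer.kafka') do |*args|
  event = ActiveSupport::Notifications::Event.new(*args)
  group = event.payload[:group_id]
  event.payload[:topic_partitions].each do |topic, partitions|
    puts "group=#{group} topic=#{topic} partitions=#{partitions.inspect}"
  end
end
```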
data/lib/deimos/utils/schema_controller_mixin.rb
@@ -1,129 +0,0 @@
-# frozen_string_literal: true
-
-module Deimos
-  module Utils
-    # Mixin to automatically decode schema-encoded payloads when given the correct content type,
-    # and provide the `render_schema` method to encode the payload for responses.
-    module SchemaControllerMixin
-      extend ActiveSupport::Concern
-
-      included do
-        Mime::Type.register('avro/binary', :avro)
-
-        attr_accessor :payload
-
-        if respond_to?(:before_filter)
-          before_filter(:decode_schema, if: :schema_format?)
-        else
-          before_action(:decode_schema, if: :schema_format?)
-        end
-      end
-
-      # :nodoc:
-      module ClassMethods
-        # @return [Hash<String, Hash<Symbol, String>>]
-        def schema_mapping
-          @schema_mapping ||= {}
-        end
-
-        # Indicate which schemas should be assigned to actions.
-        # @param actions [Symbol]
-        # @param kwactions [String]
-        # @param request [String]
-        # @param response [String]
-        # @return [void]
-        def schemas(*actions, request: nil, response: nil, **kwactions)
-          actions.each do |action|
-            request ||= action.to_s.titleize
-            response ||= action.to_s.titleize
-            schema_mapping[action.to_s] = { request: request, response: response }
-          end
-          kwactions.each do |key, val|
-            schema_mapping[key.to_s] = { request: val, response: val }
-          end
-        end
-
-        # @return [Hash<Symbol, String>]
-        def namespaces
-          @namespaces ||= {}
-        end
-
-        # Set the namespace for both requests and responses.
-        # @param name [String]
-        # @return [void]
-        def namespace(name)
-          request_namespace(name)
-          response_namespace(name)
-        end
-
-        # Set the namespace for requests.
-        # @param name [String]
-        # @return [void]
-        def request_namespace(name)
-          namespaces[:request] = name
-        end
-
-        # Set the namespace for repsonses.
-        # @param name [String]
-        # @return [void]
-        def response_namespace(name)
-          namespaces[:response] = name
-        end
-      end
-
-      # @return [Boolean]
-      def schema_format?
-        request.content_type == Deimos.schema_backend_class.content_type
-      end
-
-      # Get the namespace from either an existing instance variable, or tease it out of the schema.
-      # @param type [Symbol] :request or :response
-      # @return [Array<String, String>] the namespace and schema.
-      def parse_namespace(type)
-        namespace = self.class.namespaces[type]
-        schema = self.class.schema_mapping[params['action']][type]
-        if schema.nil?
-          raise "No #{type} schema defined for #{params[:controller]}##{params[:action]}!"
-        end
-
-        if namespace.nil?
-          last_period = schema.rindex('.')
-          namespace, schema = schema.split(last_period)
-        end
-        if namespace.nil? || schema.nil?
-          raise "No request namespace defined for #{params[:controller]}##{params[:action]}!"
-        end
-
-        [namespace, schema]
-      end
-
-      # Decode the payload with the parameters.
-      # @return [void]
-      def decode_schema
-        namespace, schema = parse_namespace(:request)
-        decoder = Deimos.schema_backend(schema: schema, namespace: namespace)
-        @payload = decoder.decode(request.body.read).with_indifferent_access
-        @payload.each do |key, value|
-          Deimos.config.tracer&.set_tag("body.#{key}", value)
-        end
-        if Deimos.config.schema.use_schema_classes
-          @payload = Utils::SchemaClass.instance(@payload, schema, namespace)
-        end
-        request.body.rewind if request.body.respond_to?(:rewind)
-      end
-
-      # Render a hash into a payload as specified by the configured schema and namespace.
-      # @param payload [Hash]
-      # @param schema [String]
-      # @param namespace [String]
-      # @return [void]
-      def render_schema(payload, schema: nil, namespace: nil)
-        namespace, schema = parse_namespace(:response) if !schema && !namespace
-        encoder = Deimos.schema_backend(schema: schema, namespace: namespace)
-        encoded = encoder.encode(payload.to_h, topic: "#{namespace}.#{schema}")
-        response.headers['Content-Type'] = encoder.class.content_type
-        send_data(encoded)
-      end
-    end
-  end
-end
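The removed `SchemaControllerMixin` let Rails controllers decode schema-encoded request bodies and encode schema-encoded responses. Below is a hedged sketch of how it was typically wired in, reconstructed from the class methods in the deleted file; the controller, model, schema names and namespaces are invented for illustration.

```ruby
# Illustrative only: wiring of the removed mixin in a v1 app.
class WidgetsController < ApplicationController
  include Deimos::Utils::SchemaControllerMixin

  request_namespace 'com.my-namespace.request'    # assumed namespace
  response_namespace 'com.my-namespace.response'  # assumed namespace
  schemas :create, request: 'CreateWidget', response: 'CreateWidgetResponse'

  def create
    # `payload` holds the decoded request body (decode_schema ran as a before_action).
    widget = Widget.create!(payload.to_h)
    render_schema({ 'id' => widget.id })
  end
end
```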