deimos-temp-fork 0.0.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (146) hide show
  1. checksums.yaml +7 -0
  2. data/.circleci/config.yml +83 -0
  3. data/.gitignore +41 -0
  4. data/.gitmodules +0 -0
  5. data/.rspec +1 -0
  6. data/.rubocop.yml +333 -0
  7. data/.ruby-gemset +1 -0
  8. data/.ruby-version +1 -0
  9. data/CHANGELOG.md +349 -0
  10. data/CODE_OF_CONDUCT.md +77 -0
  11. data/Dockerfile +23 -0
  12. data/Gemfile +6 -0
  13. data/Gemfile.lock +286 -0
  14. data/Guardfile +22 -0
  15. data/LICENSE.md +195 -0
  16. data/README.md +1099 -0
  17. data/Rakefile +13 -0
  18. data/bin/deimos +4 -0
  19. data/deimos-ruby.gemspec +44 -0
  20. data/docker-compose.yml +71 -0
  21. data/docs/ARCHITECTURE.md +140 -0
  22. data/docs/CONFIGURATION.md +236 -0
  23. data/docs/DATABASE_BACKEND.md +147 -0
  24. data/docs/INTEGRATION_TESTS.md +52 -0
  25. data/docs/PULL_REQUEST_TEMPLATE.md +35 -0
  26. data/docs/UPGRADING.md +128 -0
  27. data/lib/deimos-temp-fork.rb +95 -0
  28. data/lib/deimos/active_record_consume/batch_consumption.rb +164 -0
  29. data/lib/deimos/active_record_consume/batch_slicer.rb +27 -0
  30. data/lib/deimos/active_record_consume/message_consumption.rb +79 -0
  31. data/lib/deimos/active_record_consume/schema_model_converter.rb +52 -0
  32. data/lib/deimos/active_record_consumer.rb +67 -0
  33. data/lib/deimos/active_record_producer.rb +87 -0
  34. data/lib/deimos/backends/base.rb +32 -0
  35. data/lib/deimos/backends/db.rb +41 -0
  36. data/lib/deimos/backends/kafka.rb +33 -0
  37. data/lib/deimos/backends/kafka_async.rb +33 -0
  38. data/lib/deimos/backends/test.rb +20 -0
  39. data/lib/deimos/batch_consumer.rb +7 -0
  40. data/lib/deimos/config/configuration.rb +381 -0
  41. data/lib/deimos/config/phobos_config.rb +137 -0
  42. data/lib/deimos/consume/batch_consumption.rb +150 -0
  43. data/lib/deimos/consume/message_consumption.rb +94 -0
  44. data/lib/deimos/consumer.rb +104 -0
  45. data/lib/deimos/instrumentation.rb +76 -0
  46. data/lib/deimos/kafka_message.rb +60 -0
  47. data/lib/deimos/kafka_source.rb +128 -0
  48. data/lib/deimos/kafka_topic_info.rb +102 -0
  49. data/lib/deimos/message.rb +79 -0
  50. data/lib/deimos/metrics/datadog.rb +47 -0
  51. data/lib/deimos/metrics/mock.rb +39 -0
  52. data/lib/deimos/metrics/provider.rb +36 -0
  53. data/lib/deimos/monkey_patches/phobos_cli.rb +35 -0
  54. data/lib/deimos/monkey_patches/phobos_producer.rb +51 -0
  55. data/lib/deimos/poll_info.rb +9 -0
  56. data/lib/deimos/producer.rb +224 -0
  57. data/lib/deimos/railtie.rb +8 -0
  58. data/lib/deimos/schema_backends/avro_base.rb +140 -0
  59. data/lib/deimos/schema_backends/avro_local.rb +30 -0
  60. data/lib/deimos/schema_backends/avro_schema_coercer.rb +119 -0
  61. data/lib/deimos/schema_backends/avro_schema_registry.rb +34 -0
  62. data/lib/deimos/schema_backends/avro_validation.rb +21 -0
  63. data/lib/deimos/schema_backends/base.rb +150 -0
  64. data/lib/deimos/schema_backends/mock.rb +42 -0
  65. data/lib/deimos/shared_config.rb +63 -0
  66. data/lib/deimos/test_helpers.rb +360 -0
  67. data/lib/deimos/tracing/datadog.rb +35 -0
  68. data/lib/deimos/tracing/mock.rb +40 -0
  69. data/lib/deimos/tracing/provider.rb +29 -0
  70. data/lib/deimos/utils/db_poller.rb +150 -0
  71. data/lib/deimos/utils/db_producer.rb +243 -0
  72. data/lib/deimos/utils/deadlock_retry.rb +68 -0
  73. data/lib/deimos/utils/inline_consumer.rb +150 -0
  74. data/lib/deimos/utils/lag_reporter.rb +175 -0
  75. data/lib/deimos/utils/schema_controller_mixin.rb +115 -0
  76. data/lib/deimos/version.rb +5 -0
  77. data/lib/generators/deimos/active_record/templates/migration.rb.tt +28 -0
  78. data/lib/generators/deimos/active_record/templates/model.rb.tt +5 -0
  79. data/lib/generators/deimos/active_record_generator.rb +79 -0
  80. data/lib/generators/deimos/db_backend/templates/migration +25 -0
  81. data/lib/generators/deimos/db_backend/templates/rails3_migration +31 -0
  82. data/lib/generators/deimos/db_backend_generator.rb +48 -0
  83. data/lib/generators/deimos/db_poller/templates/migration +11 -0
  84. data/lib/generators/deimos/db_poller/templates/rails3_migration +16 -0
  85. data/lib/generators/deimos/db_poller_generator.rb +48 -0
  86. data/lib/tasks/deimos.rake +34 -0
  87. data/spec/active_record_batch_consumer_spec.rb +481 -0
  88. data/spec/active_record_consume/batch_slicer_spec.rb +42 -0
  89. data/spec/active_record_consume/schema_model_converter_spec.rb +105 -0
  90. data/spec/active_record_consumer_spec.rb +154 -0
  91. data/spec/active_record_producer_spec.rb +85 -0
  92. data/spec/backends/base_spec.rb +10 -0
  93. data/spec/backends/db_spec.rb +54 -0
  94. data/spec/backends/kafka_async_spec.rb +11 -0
  95. data/spec/backends/kafka_spec.rb +11 -0
  96. data/spec/batch_consumer_spec.rb +256 -0
  97. data/spec/config/configuration_spec.rb +248 -0
  98. data/spec/consumer_spec.rb +209 -0
  99. data/spec/deimos_spec.rb +169 -0
  100. data/spec/generators/active_record_generator_spec.rb +56 -0
  101. data/spec/handlers/my_batch_consumer.rb +10 -0
  102. data/spec/handlers/my_consumer.rb +10 -0
  103. data/spec/kafka_listener_spec.rb +55 -0
  104. data/spec/kafka_source_spec.rb +381 -0
  105. data/spec/kafka_topic_info_spec.rb +111 -0
  106. data/spec/message_spec.rb +19 -0
  107. data/spec/phobos.bad_db.yml +73 -0
  108. data/spec/phobos.yml +77 -0
  109. data/spec/producer_spec.rb +498 -0
  110. data/spec/rake_spec.rb +19 -0
  111. data/spec/schema_backends/avro_base_shared.rb +199 -0
  112. data/spec/schema_backends/avro_local_spec.rb +32 -0
  113. data/spec/schema_backends/avro_schema_registry_spec.rb +32 -0
  114. data/spec/schema_backends/avro_validation_spec.rb +24 -0
  115. data/spec/schema_backends/base_spec.rb +33 -0
  116. data/spec/schemas/com/my-namespace/Generated.avsc +71 -0
  117. data/spec/schemas/com/my-namespace/MyNestedSchema.avsc +62 -0
  118. data/spec/schemas/com/my-namespace/MySchema-key.avsc +13 -0
  119. data/spec/schemas/com/my-namespace/MySchema.avsc +18 -0
  120. data/spec/schemas/com/my-namespace/MySchemaCompound-key.avsc +18 -0
  121. data/spec/schemas/com/my-namespace/MySchemaWithBooleans.avsc +18 -0
  122. data/spec/schemas/com/my-namespace/MySchemaWithDateTimes.avsc +33 -0
  123. data/spec/schemas/com/my-namespace/MySchemaWithId.avsc +28 -0
  124. data/spec/schemas/com/my-namespace/MySchemaWithUniqueId.avsc +32 -0
  125. data/spec/schemas/com/my-namespace/Wibble.avsc +43 -0
  126. data/spec/schemas/com/my-namespace/Widget.avsc +27 -0
  127. data/spec/schemas/com/my-namespace/WidgetTheSecond.avsc +27 -0
  128. data/spec/schemas/com/my-namespace/request/CreateTopic.avsc +11 -0
  129. data/spec/schemas/com/my-namespace/request/Index.avsc +11 -0
  130. data/spec/schemas/com/my-namespace/request/UpdateRequest.avsc +11 -0
  131. data/spec/schemas/com/my-namespace/response/CreateTopic.avsc +11 -0
  132. data/spec/schemas/com/my-namespace/response/Index.avsc +11 -0
  133. data/spec/schemas/com/my-namespace/response/UpdateResponse.avsc +11 -0
  134. data/spec/spec_helper.rb +267 -0
  135. data/spec/utils/db_poller_spec.rb +320 -0
  136. data/spec/utils/db_producer_spec.rb +514 -0
  137. data/spec/utils/deadlock_retry_spec.rb +74 -0
  138. data/spec/utils/inline_consumer_spec.rb +31 -0
  139. data/spec/utils/lag_reporter_spec.rb +76 -0
  140. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  141. data/spec/utils/schema_controller_mixin_spec.rb +84 -0
  142. data/support/deimos-solo.png +0 -0
  143. data/support/deimos-with-name-next.png +0 -0
  144. data/support/deimos-with-name.png +0 -0
  145. data/support/flipp-logo.png +0 -0
  146. metadata +551 -0
# frozen_string_literal: true

module Deimos
  module Utils
    # Utility class to retry a given block if a deadlock is encountered.
    # Supports Postgres and MySQL deadlocks and lock wait timeouts.
    class DeadlockRetry
      class << self
        # Maximum number of times to retry the block after encountering a deadlock
        RETRY_COUNT = 2

        # Need to match on error messages to support older Rails versions
        DEADLOCK_MESSAGES = [
          # MySQL
          'Deadlock found when trying to get lock',
          'Lock wait timeout exceeded',

          # Postgres
          'deadlock detected'
        ].freeze

        # Run the given block inside a transaction, retrying when a known
        # deadlock error is raised. Any other exception is re-raised
        # immediately. The block should be idempotent, since it may execute
        # more than once; it is always wrapped in a transaction.
        # Sleeps for a random number of seconds so that competing
        # transactions do not all retry at the same instant.
        # @param tags [Array] Tags to attach when logging and reporting metrics.
        # @yield Yields to the block that may deadlock.
        def wrap(tags=[])
          retries_left = RETRY_COUNT

          begin
            ActiveRecord::Base.transaction { yield }
          rescue ActiveRecord::StatementInvalid => e
            # Anything that isn't a recognized deadlock message is re-raised.
            raise unless DEADLOCK_MESSAGES.any? { |msg| e.message.include?(msg) }

            # All retries exhausted -- give up and propagate the error.
            raise unless retries_left.positive?

            Deimos.config.logger.warn(
              message: 'Deadlock encountered when trying to execute query. '\
                       "Retrying. #{retries_left} attempt(s) remaining",
              tags: tags
            )

            Deimos.config.metrics&.increment('deadlock', tags: tags)

            retries_left -= 1

            # Randomized backoff so that multiple deadlocking transactions
            # don't all retry in lockstep.
            sleep(Random.rand(5.0) + 0.5)

            retry
          end
        end
      end
    end
  end
end
@@ -0,0 +1,150 @@
1
+ # frozen_string_literal: true
2
+
3
+ # Class to consume messages. Can be used with integration testing frameworks.
4
+ # Assumes that you have a topic with only one partition.
5
+ module Deimos
6
+ module Utils
7
# Listener that can seek to get the last X messages in a topic.
class SeekListener < Phobos::Listener
  MAX_SEEK_RETRIES = 3
  attr_accessor :num_messages

  # :nodoc:
  def start_listener
    @num_messages ||= 10
    @consumer = create_kafka_consumer
    @consumer.subscribe(topic, @subscribe_opts)
    seek_to_tail

    instrument('listener.start_handler', listener_metadata) do
      @handler_class.start(@kafka_client)
    end
    log_info('Listener started', listener_metadata)
  end

  private

  # Position the consumer at (last offset - num_messages) on partition 0,
  # retrying up to MAX_SEEK_RETRIES times with a growing sleep between
  # attempts. Failures after the final attempt are logged, not raised.
  def seek_to_tail
    tries = 0
    begin
      tries += 1
      target = @kafka_client.last_offset_for(topic, 0) - num_messages
      if target.positive?
        Deimos.config.logger.info("Seeking to #{target}")
        @consumer.seek(topic, 0, target)
      end
    rescue StandardError => e
      if tries < MAX_SEEK_RETRIES
        sleep(1.seconds * tries)
        retry
      end
      log_error("Could not seek to offset: #{e.message} after #{MAX_SEEK_RETRIES} retries", listener_metadata)
    end
  end
end
41
+
42
# Class to return the messages consumed.
class MessageBankHandler < Deimos::Consumer
  include Phobos::Handler

  cattr_accessor :total_messages

  # Merge the given consumer class's config into this handler's config.
  # @param klass [Class < Deimos::Consumer]
  def self.config_class=(klass)
    config.merge!(klass.config)
  end

  # :nodoc:
  def self.start(_kafka_client)
    self.total_messages = []
  end

  # Record each consumed message (key + payload) in the class-level bank.
  # :nodoc:
  def consume(payload, metadata)
    entry = { key: metadata[:key], payload: payload }
    self.class.total_messages << entry
  end
end
66
+
67
# Class which can process/consume messages inline.
class InlineConsumer
  # Abort if no new message arrives within this window once consumption began.
  MAX_MESSAGE_WAIT_TIME = 1.second
  # Abort if the very first message never arrives within this window.
  MAX_TOPIC_WAIT_TIME = 10.seconds

  # Get the last X messages from a topic. You can specify a subclass of
  # Deimos::Consumer or Deimos::Producer, or provide the
  # schema, namespace and key_config directly.
  # @param topic [String]
  # @param config_class [Class < Deimos::Consumer|Deimos::Producer>]
  # @param schema [String]
  # @param namespace [String]
  # @param key_config [Hash]
  # @param num_messages [Number]
  # @return [Array<Hash>]
  def self.get_messages_for(topic:, schema: nil, namespace: nil, key_config: nil,
                            config_class: nil, num_messages: 10)
    if config_class
      MessageBankHandler.config_class = config_class
    elsif schema.nil? || key_config.nil?
      raise 'You must specify either a config_class or a schema, namespace and key_config!'
    else
      # Configure the shared handler in place and clear its memoized
      # decoders so they are rebuilt against the newly assigned schema.
      MessageBankHandler.class_eval do
        schema schema
        namespace namespace
        key_config key_config
        @decoder = nil
        @key_decoder = nil
      end
    end
    self.consume(topic: topic,
                 frk_consumer: MessageBankHandler,
                 num_messages: num_messages)
    messages = MessageBankHandler.total_messages
    # The handler may have banked more than requested; keep only the tail.
    messages.size <= num_messages ? messages : messages[-num_messages..-1]
  end

  # Consume the last X messages from a topic.
  # @param topic [String]
  # @param frk_consumer [Class]
  # @param num_messages [Integer] If this number is >= the number
  # of messages in the topic, all messages will be consumed.
  def self.consume(topic:, frk_consumer:, num_messages: 10)
    listener = SeekListener.new(
      handler: frk_consumer,
      group_id: SecureRandom.hex,
      topic: topic,
      heartbeat_interval: 1
    )
    listener.num_messages = num_messages

    # Add the start_time and last_message_time attributes to the
    # consumer class so we can kill it if it's gone on too long
    class << frk_consumer
      attr_accessor :start_time, :last_message_time
    end

    subscribers = []
    subscribers << ActiveSupport::Notifications.
      subscribe('phobos.listener.process_message') do
        frk_consumer.last_message_time = Time.zone.now
      end
    subscribers << ActiveSupport::Notifications.
      subscribe('phobos.listener.start_handler') do
        frk_consumer.start_time = Time.zone.now
        frk_consumer.last_message_time = nil
      end
    subscribers << ActiveSupport::Notifications.
      subscribe('heartbeat.consumer.kafka') do
        # Abort once messages stop arriving, or if the topic never produces
        # a first message within the maximum initial wait.
        if frk_consumer.last_message_time
          if Time.zone.now - frk_consumer.last_message_time > MAX_MESSAGE_WAIT_TIME
            raise Phobos::AbortError
          end
        elsif Time.zone.now - frk_consumer.start_time > MAX_TOPIC_WAIT_TIME
          Deimos.config.logger.error('Aborting - initial wait too long')
          raise Phobos::AbortError
        end
      end
    begin
      listener.start
    ensure
      # Always detach the temporary subscribers, even if the listener raises;
      # otherwise they leak and keep firing for unrelated listeners.
      subscribers.each { |s| ActiveSupport::Notifications.unsubscribe(s) }
    end
  end
end
149
+ end
150
+ end
@@ -0,0 +1,175 @@
1
+ # frozen_string_literal: true
2
+
3
+ require 'mutex_m'
4
+
5
+ # :nodoc:
6
+ module Deimos
7
+ module Utils
8
+ # Class that manages reporting lag.
9
+ class LagReporter
10
+ extend Mutex_m
11
+
12
# Class that has a list of topics
class ConsumerGroup
  # @return [Hash<String, Topic>]
  attr_accessor :topics
  # @return [String]
  attr_accessor :id

  # @param id [String]
  def initialize(id)
    self.id = id
    self.topics = {}
  end

  # Report lag for one partition of the given topic.
  # @param topic [String]
  # @param partition [Integer]
  def report_lag(topic, partition)
    topic_for(topic).report_lag(partition)
  end

  # Record the latest consumed offset for a topic/partition pair.
  # @param topic [String]
  # @param partition [Integer]
  # @param offset [Integer]
  def assign_current_offset(topic, partition, offset)
    topic_for(topic).assign_current_offset(partition, offset)
  end

  private

  # Find or lazily create the Topic entry for the given topic name.
  # @param topic [String]
  # @return [Topic]
  def topic_for(topic)
    self.topics[topic.to_s] ||= Topic.new(topic, self)
  end
end
40
+
41
# Topic which has a hash of partition => last known current offsets
class Topic
  # @return [String]
  attr_accessor :topic_name
  # @return [Hash<Integer, Integer>]
  attr_accessor :partition_current_offsets
  # @return [ConsumerGroup]
  attr_accessor :consumer_group

  # @param topic_name [String]
  # @param group [ConsumerGroup]
  def initialize(topic_name, group)
    self.topic_name = topic_name
    self.consumer_group = group
    self.partition_current_offsets = {}
  end

  # Record the latest consumed offset for a partition.
  # @param partition [Integer]
  # @param offset [Integer]
  def assign_current_offset(partition, offset)
    self.partition_current_offsets[partition.to_i] = offset
  end

  # Compute lag (latest broker offset minus the given offset) for a
  # partition. Returns 0 if the lookup fails (e.g. broker unreachable).
  # @param partition [Integer]
  # @param offset [Integer]
  # @return [Integer]
  def compute_lag(partition, offset)
    begin
      client = Phobos.create_kafka_client
      last_offset = client.last_offset_for(self.topic_name, partition)
      lag = last_offset - offset
    rescue StandardError # don't do anything, just wait
      Deimos.config.logger.
        debug("Error computing lag for #{self.topic_name}, will retry")
    ensure
      # Close the client we created above; previously it was leaked on
      # every call, accumulating open broker connections.
      begin
        client&.close
      rescue StandardError
        nil
      end
    end
    lag || 0
  end

  # Compute and emit the lag gauge for a partition, if we have seen an
  # offset for it; otherwise do nothing.
  # @param partition [Integer]
  def report_lag(partition)
    current_offset = self.partition_current_offsets[partition.to_i]
    return unless current_offset

    lag = compute_lag(partition, current_offset)
    group = self.consumer_group.id
    Deimos.config.logger.
      debug("Sending lag: #{group}/#{partition}: #{lag}")
    Deimos.config.metrics&.gauge('consumer_lag', lag, tags: %W(
      consumer_group:#{group}
      partition:#{partition}
      topic:#{self.topic_name}
    ))
  end
end
92
+
93
+ @groups = {}
94
+
95
class << self
  # Reset all group information.
  def reset
    @groups = {}
  end

  # offset_lag = event.payload.fetch(:offset_lag)
  # group_id = event.payload.fetch(:group_id)
  # topic = event.payload.fetch(:topic)
  # partition = event.payload.fetch(:partition)
  # @param payload [Hash]
  def message_processed(payload)
    record_offset(payload, payload[:offset] || payload[:last_offset])
  end

  # @param payload [Hash]
  def offset_seek(payload)
    record_offset(payload, payload[:offset])
  end

  # @param payload [Hash]
  def heartbeat(payload)
    group = payload[:group_id]
    synchronize do
      consumer_group = (@groups[group.to_s] ||= ConsumerGroup.new(group))
      payload[:topic_partitions].each do |topic, partitions|
        partitions.each { |partition| consumer_group.report_lag(topic, partition) }
      end
    end
  end

  private

  # Store the given offset for the payload's group/topic/partition,
  # creating the group entry on first sight. Runs under the shared mutex.
  # @param payload [Hash]
  # @param offset [Integer]
  def record_offset(payload, offset)
    topic = payload[:topic]
    group = payload[:group_id]
    partition = payload[:partition]

    synchronize do
      @groups[group.to_s] ||= ConsumerGroup.new(group)
      @groups[group.to_s].assign_current_offset(topic, partition, offset)
    end
  end
end
145
+ end
146
+ end
147
+
148
# Wire lag tracking up to the instrumentation events emitted by
# ruby-kafka/Phobos. Every handler is a no-op unless lag reporting is
# enabled in the consumer configuration.
{
  'start_process_message.consumer.kafka' => :message_processed,
  'start_process_batch.consumer.kafka' => :message_processed,
  'seek.consumer.kafka' => :offset_seek,
  'heartbeat.consumer.kafka' => :heartbeat
}.each do |event_name, handler_method|
  ActiveSupport::Notifications.subscribe(event_name) do |*args|
    next unless Deimos.config.consumers.report_lag

    event = ActiveSupport::Notifications::Event.new(*args)
    Deimos::Utils::LagReporter.public_send(handler_method, event.payload)
  end
end
175
+ end
# frozen_string_literal: true

module Deimos
  module Utils
    # Mixin to automatically decode schema-encoded payloads when given the correct content type,
    # and provide the `render_schema` method to encode the payload for responses.
    module SchemaControllerMixin
      extend ActiveSupport::Concern

      included do
        Mime::Type.register('avro/binary', :avro)

        attr_accessor :payload

        # Rails < 4 only has before_filter; newer versions use before_action.
        if respond_to?(:before_filter)
          before_filter(:decode_schema, if: :schema_format?)
        else
          before_action(:decode_schema, if: :schema_format?)
        end
      end

      # :nodoc:
      module ClassMethods
        # @return [Hash<String, Hash<Symbol, String>>]
        def schema_mapping
          @schema_mapping ||= {}
        end

        # Indicate which schemas should be assigned to actions.
        # @param actions [Symbol]
        # @param kwactions [String]
        # @param request [String]
        # @param response [String]
        def schemas(*actions, request: nil, response: nil, **kwactions)
          actions.each do |action|
            # Compute defaults into per-iteration locals. Assigning back to
            # the keyword arguments (request ||= ...) leaked the first
            # action's titleized name into every subsequent action.
            request_schema = request || action.to_s.titleize
            response_schema = response || action.to_s.titleize
            schema_mapping[action.to_s] = { request: request_schema, response: response_schema }
          end
          kwactions.each do |key, val|
            schema_mapping[key.to_s] = { request: val, response: val }
          end
        end

        # @return [Hash<Symbol, String>]
        def namespaces
          @namespaces ||= {}
        end

        # Set the namespace for both requests and responses.
        # @param name [String]
        def namespace(name)
          request_namespace(name)
          response_namespace(name)
        end

        # Set the namespace for requests.
        # @param name [String]
        def request_namespace(name)
          namespaces[:request] = name
        end

        # Set the namespace for responses.
        # @param name [String]
        def response_namespace(name)
          namespaces[:response] = name
        end
      end

      # @return [Boolean] true when the request content type matches the schema backend's.
      def schema_format?
        request.content_type == Deimos.schema_backend_class.content_type
      end

      # Get the namespace from either an existing instance variable, or tease it out of the schema.
      # @param type [Symbol] :request or :response
      # @return [Array<String, String>] the namespace and schema.
      def parse_namespace(type)
        namespace = self.class.namespaces[type]
        schema = self.class.schema_mapping[params['action']][type]
        if schema.nil?
          raise "No #{type} schema defined for #{params[:controller]}##{params[:action]}!"
        end

        if namespace.nil?
          # Split a fully qualified name like "com.my-company.MySchema" on its
          # final period. String#split takes a pattern, not an index, so the
          # previous `schema.split(last_period)` raised a TypeError -- slice
          # by position instead, and skip when there is no period at all.
          last_period = schema.rindex('.')
          if last_period
            namespace = schema[0...last_period]
            schema = schema[(last_period + 1)..-1]
          end
        end
        if namespace.nil? || schema.nil?
          raise "No request namespace defined for #{params[:controller]}##{params[:action]}!"
        end

        [namespace, schema]
      end

      # Decode the payload with the parameters.
      def decode_schema
        namespace, schema = parse_namespace(:request)
        decoder = Deimos.schema_backend(schema: schema, namespace: namespace)
        @payload = decoder.decode(request.body.read).with_indifferent_access
        request.body.rewind if request.body.respond_to?(:rewind)
      end

      # Render a hash into a payload as specified by the configured schema and namespace.
      # @param payload [Hash]
      def render_schema(payload, schema: nil, namespace: nil)
        namespace, schema = parse_namespace(:response) if !schema && !namespace
        encoder = Deimos.schema_backend(schema: schema, namespace: namespace)
        encoded = encoder.encode(payload)
        response.headers['Content-Type'] = encoder.class.content_type
        send_data(encoded)
      end
    end
  end
end