deimos-ruby 1.24.2 → 2.0.0.pre.alpha1

This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries, and is provided for informational purposes only.
Files changed (120)
  1. checksums.yaml +4 -4
  2. data/.rubocop_todo.yml +0 -17
  3. data/.tool-versions +1 -0
  4. data/CHANGELOG.md +5 -0
  5. data/README.md +287 -498
  6. data/deimos-ruby.gemspec +4 -4
  7. data/docs/CONFIGURATION.md +133 -226
  8. data/docs/UPGRADING.md +237 -0
  9. data/lib/deimos/active_record_consume/batch_consumption.rb +29 -28
  10. data/lib/deimos/active_record_consume/mass_updater.rb +59 -4
  11. data/lib/deimos/active_record_consume/message_consumption.rb +15 -21
  12. data/lib/deimos/active_record_consumer.rb +36 -21
  13. data/lib/deimos/active_record_producer.rb +28 -9
  14. data/lib/deimos/backends/base.rb +4 -35
  15. data/lib/deimos/backends/kafka.rb +6 -22
  16. data/lib/deimos/backends/kafka_async.rb +6 -22
  17. data/lib/deimos/backends/{db.rb → outbox.rb} +13 -9
  18. data/lib/deimos/config/configuration.rb +116 -379
  19. data/lib/deimos/consume/batch_consumption.rb +24 -124
  20. data/lib/deimos/consume/message_consumption.rb +36 -63
  21. data/lib/deimos/consumer.rb +16 -75
  22. data/lib/deimos/ext/consumer_route.rb +35 -0
  23. data/lib/deimos/ext/producer_middleware.rb +94 -0
  24. data/lib/deimos/ext/producer_route.rb +22 -0
  25. data/lib/deimos/ext/redraw.rb +29 -0
  26. data/lib/deimos/ext/routing_defaults.rb +72 -0
  27. data/lib/deimos/ext/schema_route.rb +70 -0
  28. data/lib/deimos/kafka_message.rb +2 -2
  29. data/lib/deimos/kafka_source.rb +2 -7
  30. data/lib/deimos/kafka_topic_info.rb +1 -1
  31. data/lib/deimos/logging.rb +71 -0
  32. data/lib/deimos/message.rb +2 -11
  33. data/lib/deimos/metrics/datadog.rb +40 -1
  34. data/lib/deimos/metrics/provider.rb +4 -4
  35. data/lib/deimos/producer.rb +39 -116
  36. data/lib/deimos/railtie.rb +6 -0
  37. data/lib/deimos/schema_backends/avro_base.rb +21 -21
  38. data/lib/deimos/schema_backends/avro_schema_registry.rb +1 -2
  39. data/lib/deimos/schema_backends/avro_validation.rb +2 -2
  40. data/lib/deimos/schema_backends/base.rb +19 -12
  41. data/lib/deimos/schema_backends/mock.rb +6 -1
  42. data/lib/deimos/schema_backends/plain.rb +47 -0
  43. data/lib/deimos/schema_class/base.rb +2 -2
  44. data/lib/deimos/schema_class/enum.rb +1 -1
  45. data/lib/deimos/schema_class/record.rb +2 -2
  46. data/lib/deimos/test_helpers.rb +95 -320
  47. data/lib/deimos/tracing/provider.rb +6 -6
  48. data/lib/deimos/transcoder.rb +88 -0
  49. data/lib/deimos/utils/db_poller/base.rb +16 -14
  50. data/lib/deimos/utils/db_poller/state_based.rb +3 -3
  51. data/lib/deimos/utils/db_poller/time_based.rb +4 -4
  52. data/lib/deimos/utils/db_poller.rb +1 -1
  53. data/lib/deimos/utils/deadlock_retry.rb +1 -1
  54. data/lib/deimos/utils/{db_producer.rb → outbox_producer.rb} +16 -47
  55. data/lib/deimos/utils/schema_class.rb +0 -7
  56. data/lib/deimos/version.rb +1 -1
  57. data/lib/deimos.rb +79 -26
  58. data/lib/generators/deimos/{db_backend_generator.rb → outbox_backend_generator.rb} +4 -4
  59. data/lib/generators/deimos/schema_class_generator.rb +0 -1
  60. data/lib/generators/deimos/v2/templates/karafka.rb.tt +149 -0
  61. data/lib/generators/deimos/v2_generator.rb +193 -0
  62. data/lib/tasks/deimos.rake +5 -7
  63. data/spec/active_record_batch_consumer_association_spec.rb +22 -13
  64. data/spec/active_record_batch_consumer_spec.rb +84 -65
  65. data/spec/active_record_consume/batch_consumption_spec.rb +10 -10
  66. data/spec/active_record_consume/batch_slicer_spec.rb +12 -12
  67. data/spec/active_record_consume/mass_updater_spec.rb +137 -0
  68. data/spec/active_record_consumer_spec.rb +29 -13
  69. data/spec/active_record_producer_spec.rb +36 -26
  70. data/spec/backends/base_spec.rb +0 -23
  71. data/spec/backends/kafka_async_spec.rb +1 -3
  72. data/spec/backends/kafka_spec.rb +1 -3
  73. data/spec/backends/{db_spec.rb → outbox_spec.rb} +14 -20
  74. data/spec/batch_consumer_spec.rb +66 -116
  75. data/spec/consumer_spec.rb +53 -147
  76. data/spec/deimos_spec.rb +10 -126
  77. data/spec/kafka_source_spec.rb +19 -52
  78. data/spec/karafka/karafka.rb +69 -0
  79. data/spec/karafka_config/karafka_spec.rb +97 -0
  80. data/spec/logging_spec.rb +25 -0
  81. data/spec/message_spec.rb +9 -9
  82. data/spec/producer_spec.rb +112 -254
  83. data/spec/rake_spec.rb +1 -3
  84. data/spec/schema_backends/avro_validation_spec.rb +1 -1
  85. data/spec/schemas/com/my-namespace/MySchemaWithTitle.avsc +22 -0
  86. data/spec/snapshots/consumers-no-nest.snap +49 -0
  87. data/spec/snapshots/consumers.snap +49 -0
  88. data/spec/snapshots/consumers_and_producers-no-nest.snap +49 -0
  89. data/spec/snapshots/consumers_and_producers.snap +49 -0
  90. data/spec/snapshots/consumers_circular-no-nest.snap +49 -0
  91. data/spec/snapshots/consumers_circular.snap +49 -0
  92. data/spec/snapshots/consumers_complex_types-no-nest.snap +49 -0
  93. data/spec/snapshots/consumers_complex_types.snap +49 -0
  94. data/spec/snapshots/consumers_nested-no-nest.snap +49 -0
  95. data/spec/snapshots/consumers_nested.snap +49 -0
  96. data/spec/snapshots/namespace_folders.snap +49 -0
  97. data/spec/snapshots/namespace_map.snap +49 -0
  98. data/spec/snapshots/producers_with_key-no-nest.snap +49 -0
  99. data/spec/snapshots/producers_with_key.snap +49 -0
  100. data/spec/spec_helper.rb +61 -29
  101. data/spec/utils/db_poller_spec.rb +49 -39
  102. data/spec/utils/{db_producer_spec.rb → outbox_producer_spec.rb} +17 -184
  103. metadata +58 -67
  104. data/lib/deimos/batch_consumer.rb +0 -7
  105. data/lib/deimos/config/phobos_config.rb +0 -163
  106. data/lib/deimos/instrumentation.rb +0 -95
  107. data/lib/deimos/monkey_patches/phobos_cli.rb +0 -35
  108. data/lib/deimos/utils/inline_consumer.rb +0 -158
  109. data/lib/deimos/utils/lag_reporter.rb +0 -186
  110. data/lib/deimos/utils/schema_controller_mixin.rb +0 -129
  111. data/spec/config/configuration_spec.rb +0 -321
  112. data/spec/kafka_listener_spec.rb +0 -55
  113. data/spec/phobos.bad_db.yml +0 -73
  114. data/spec/phobos.yml +0 -77
  115. data/spec/utils/inline_consumer_spec.rb +0 -31
  116. data/spec/utils/lag_reporter_spec.rb +0 -76
  117. data/spec/utils/platform_schema_validation_spec.rb +0 -0
  118. data/spec/utils/schema_controller_mixin_spec.rb +0 -84
  119. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/migration +0 -0
  120. /data/lib/generators/deimos/{db_backend → outbox_backend}/templates/rails3_migration +0 -0
data/lib/deimos/ext/schema_route.rb
@@ -0,0 +1,70 @@
+ require "deimos/transcoder"
+ require "deimos/ext/producer_middleware"
+ require "deimos/schema_backends/plain"
+
+ module Deimos
+   class SchemaRoute < Karafka::Routing::Features::Base
+
+     module Topic
+       {
+         schema: nil,
+         namespace: nil,
+         key_config: {none: true},
+         use_schema_classes: Deimos.config.schema.use_schema_classes
+       }.each do |field, default|
+         define_method(field) do |*args|
+           @_deimos_config ||= {}
+           @_deimos_config[:schema] ||= {}
+           if args.any?
+             @_deimos_config[:schema][field] = args[0]
+             _deimos_setup_transcoders if schema && namespace
+           end
+           @_deimos_config[:schema][field] || default
+         end
+       end
+       def _deimos_setup_transcoders
+         payload = Transcoder.new(
+           schema: schema,
+           namespace: namespace,
+           use_schema_classes: use_schema_classes,
+           topic: name
+         )
+
+         key = nil
+
+         if key_config[:plain]
+           key = Transcoder.new(
+             schema: schema,
+             namespace: namespace,
+             use_schema_classes: use_schema_classes,
+             topic: name
+           )
+           key.backend = Deimos::SchemaBackends::Plain.new(schema: nil, namespace: nil)
+         elsif !key_config[:none]
+           if key_config[:field]
+             key = Transcoder.new(
+               schema: schema,
+               namespace: namespace,
+               use_schema_classes: use_schema_classes,
+               key_field: key_config[:field].to_s,
+               topic: name
+             )
+           elsif key_config[:schema]
+             key = Transcoder.new(
+               schema: key_config[:schema] || schema,
+               namespace: namespace,
+               use_schema_classes: use_schema_classes,
+               topic: self.name
+             )
+           else
+             raise 'No key config given - if you are not encoding keys, please use `key_config plain: true`'
+           end
+         end
+         deserializers.payload = payload
+         deserializers.key = key if key
+       end
+     end
+   end
+ end
+
+ Deimos::SchemaRoute.activate
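Note: these DSL fields surface in `karafka.rb` routing blocks. A minimal sketch of a route that would exercise them, assuming a standard Karafka routing setup (topic, schema, and consumer names are illustrative):

```ruby
class KarafkaApp < Karafka::App
  routes.draw do
    topic 'my.widget.events' do
      consumer WidgetConsumer        # illustrative consumer class
      schema 'WidgetEvent'           # transcoders are built once schema AND namespace are both set
      namespace 'com.my-namespace'
      key_config field: :widget_id   # key is encoded/decoded from a single payload field
    end
  end
end
```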
data/lib/deimos/kafka_message.rb
@@ -49,8 +49,7 @@ module Deimos
        end
      end
 
-     # @return [Hash]
-     def phobos_message
+     def karafka_message
        {
          payload: self.message,
          partition_key: self.partition_key,
@@ -58,5 +57,6 @@ module Deimos
          topic: self.topic
        }
      end
+
    end
  end
data/lib/deimos/kafka_source.rb
@@ -31,7 +31,7 @@ module Deimos
      return unless self.class.kafka_config[:update]
 
      producers = self.class.kafka_producers
-     fields = producers.flat_map(&:watched_attributes).uniq
+     fields = producers.flat_map { |p| p.watched_attributes(self) }.uniq
      fields -= ['updated_at']
      # Only send an event if a field we care about was changed.
      any_changes = fields.any? do |field|
@@ -71,12 +71,7 @@ module Deimos
 
      # @return [Array<Deimos::ActiveRecordProducer>] the producers to run.
      def kafka_producers
-       if self.respond_to?(:kafka_producer)
-         Deimos.config.logger.warn(message: DEPRECATION_WARNING)
-         return [self.kafka_producer]
-       end
-
-       raise NotImplementedError
+       raise MissingImplementationError
      end
 
      # This is an internal method, part of the activerecord_import gem. It's
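Note: with the `kafka_producer` fallback removed, a model including `Deimos::KafkaSource` must define `kafka_producers` itself, and producers now receive the record via `watched_attributes(record)`. A minimal sketch (model and producer names are illustrative):

```ruby
class Widget < ActiveRecord::Base
  include Deimos::KafkaSource

  # Which lifecycle events should publish to Kafka.
  def self.kafka_config
    { create: true, update: true, delete: true, import: true }
  end

  # Required in v2 - the old kafka_producer deprecation path is gone.
  def self.kafka_producers
    [WidgetProducer] # an ActiveRecordProducer subclass
  end
end
```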
data/lib/deimos/kafka_topic_info.rb
@@ -1,7 +1,7 @@
  # frozen_string_literal: true
 
  module Deimos
-   # Record that keeps track of which topics are being worked on by DbProducers.
+   # Record that keeps track of which topics are being worked on by OutboxProducers.
    class KafkaTopicInfo < ActiveRecord::Base
      self.table_name = 'kafka_topic_info'
 
data/lib/deimos/logging.rb
@@ -0,0 +1,71 @@
+ module Deimos
+   module Logging
+     class << self
+
+       def log_add(method, msg)
+         Karafka.logger.tagged('Deimos') do |logger|
+           logger.send(method, msg.to_json)
+         end
+
+       end
+
+       def log_info(*args)
+         log_add(:info, *args)
+       end
+
+       def log_debug(*args)
+         log_add(:debug, *args)
+       end
+
+       def log_error(*args)
+         log_add(:error, *args)
+       end
+
+       def log_warn(*args)
+         log_add(:warn, *args)
+       end
+
+       def metadata_log_text(metadata)
+         metadata.to_h.slice(:timestamp, :offset, :first_offset, :last_offset, :partition, :topic, :size)
+       end
+
+       def _payloads(messages)
+
+       end
+
+       def messages_log_text(payload_log, messages)
+         log_message = {}
+
+         case payload_log
+         when :keys
+           keys = messages.map do |m|
+             m.respond_to?(:payload) ? m.key || m.payload['message_id'] : m[:key] || m[:payload_key] || m[:payload]['message_id']
+           end
+           log_message.merge!(
+             payload_keys: keys
+           )
+         when :count
+           log_message.merge!(
+             payloads_count: messages.count
+           )
+         when :headers
+           log_message.merge!(
+             payload_headers: messages.map { |m| m.respond_to?(:headers) ? m.headers : m[:headers] }
+           )
+         else
+           log_message.merge!(
+             payloads: messages.map do |m|
+               {
+                 payload: m.respond_to?(:payload) ? m.payload : m[:payload],
+                 key: m.respond_to?(:payload) ? m.key : m[:key]
+               }
+             end
+           )
+         end
+
+         log_message
+       end
+
+     end
+   end
+ end
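Note: a quick sketch of how these helpers compose (the message hashes are illustrative):

```ruby
messages = [{ key: '123', payload: { 'message_id' => 'abc' } }]

# Logs {"message":"Sending","payload_keys":["123"]} as JSON under the "Deimos" tag.
Deimos::Logging.log_info(
  { message: 'Sending' }.merge(Deimos::Logging.messages_log_text(:keys, messages))
)
```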
data/lib/deimos/message.rb
@@ -17,17 +17,13 @@ module Deimos
    attr_accessor :encoded_payload
    # @return [String]
    attr_accessor :topic
-   # @return [String]
-   attr_accessor :producer_name
 
    # @param payload [Hash]
-   # @param producer [Class]
    # @param topic [String]
    # @param key [String, Integer, Hash]
    # @param partition_key [Integer]
-   def initialize(payload, producer, topic: nil, key: nil, headers: nil, partition_key: nil)
+   def initialize(payload, topic: nil, key: nil, headers: nil, partition_key: nil)
      @payload = payload&.with_indifferent_access
-     @producer_name = producer&.name
      @topic = topic
      @key = key
      @headers = headers&.with_indifferent_access
@@ -64,11 +60,7 @@ module Deimos
      key: @encoded_key,
      headers: @headers,
      partition_key: @partition_key || @encoded_key,
-     payload: @encoded_payload,
-     metadata: {
-       decoded_payload: @payload,
-       producer_name: @producer_name
-     }
+     payload: @encoded_payload
    }.delete_if { |k, v| k == :headers && v.nil? }
  end
 
@@ -82,7 +74,6 @@ module Deimos
      payload: @payload,
      metadata: {
        decoded_payload: @payload,
-       producer_name: @producer_name
      }
    }.delete_if { |k, v| k == :headers && v.nil? }
  end
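Note: the producer argument is gone from the constructor, so v2-style construction is just a payload plus keywords. A sketch (values illustrative):

```ruby
message = Deimos::Message.new({ 'test_id' => 'id1' }, topic: 'my-topic', key: 'id1')
# The encoded hash no longer carries decoded_payload/producer_name metadata.
```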
data/lib/deimos/metrics/datadog.rb
@@ -1,12 +1,15 @@
  # frozen_string_literal: true
 
  require 'deimos/metrics/provider'
+ require 'karafka/instrumentation/vendors/datadog/metrics_listener'
+ require 'waterdrop/instrumentation/vendors/datadog/metrics_listener'
 
  module Deimos
    module Metrics
      # A Metrics wrapper class for Datadog.
      class Datadog < Metrics::Provider
-       # @param config [Hash]
+
+       # @param config [Hash] a hash of both client and Karafka MetricsListener configs.
        # @param logger [Logger]
        def initialize(config, logger)
          raise 'Metrics config must specify host_ip' if config[:host_ip].nil?
@@ -14,12 +17,48 @@ module Deimos
          raise 'Metrics config must specify namespace' if config[:namespace].nil?
 
          logger.info("DatadogMetricsProvider configured with: #{config}")
+
          @client = ::Datadog::Statsd.new(
            config[:host_ip],
            config[:host_port],
            tags: config[:tags],
            namespace: config[:namespace]
          )
+         setup_karafka(config)
+         setup_waterdrop(config)
+       end
+
+       def setup_karafka(config={})
+         karafka_listener = ::Karafka::Instrumentation::Vendors::Datadog::MetricsListener.new do |karafka_config|
+           karafka_config.client = @client
+           if config[:karafka_namespace]
+             karafka_config.namespace = config[:karafka_namespace]
+           end
+           if config[:karafka_distribution_mode]
+             karafka_config.distribution_mode = config[:karafka_distribution_mode]
+           end
+           if config[:rd_kafka_metrics]
+             karafka_config.rd_kafka_metrics = config[:rd_kafka_metrics]
+           end
+         end
+         Karafka.monitor.subscribe(karafka_listener)
+       end
+
+       def setup_waterdrop(config)
+         waterdrop_listener = ::WaterDrop::Instrumentation::Vendors::Datadog::MetricsListener.new do |waterdrop_config|
+           waterdrop_config.client = @client
+           if config[:karafka_namespace]
+             waterdrop_config.namespace = config[:karafka_namespace]
+           end
+           if config[:karafka_distribution_mode]
+             waterdrop_config.distribution_mode = config[:karafka_distribution_mode]
+           end
+           if config[:rd_kafka_metrics]
+             waterdrop_config.rd_kafka_metrics = [] # rd_kafka metrics are handled by the Karafka listener
+           end
+         end
+         Karafka::Setup::Config.setup if Karafka.producer.nil?
+         Karafka.producer.monitor.subscribe(waterdrop_listener)
        end
 
        # :nodoc:
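Note: a sketch of wiring the provider up, assuming the v1-style `config.metrics` setting still applies in v2; host/namespace values and the optional `karafka_*` listener overrides are illustrative:

```ruby
Deimos.configure do |config|
  config.metrics = Deimos::Metrics::Datadog.new(
    {
      host_ip: '127.0.0.1',
      host_port: 8125,
      namespace: 'my-app',
      tags: ['env:production'],
      karafka_namespace: 'my-app.karafka',     # forwarded to both listeners
      karafka_distribution_mode: :histogram
    },
    Karafka.logger
  )
end
```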
data/lib/deimos/metrics/provider.rb
@@ -9,7 +9,7 @@ module Deimos
      # @param options [Hash] Any additional options, e.g. :tags
      # @return [void]
      def increment(metric_name, options={})
-       raise NotImplementedError
+       raise MissingImplementationError
      end
 
      # Send a gauge metric
@@ -18,7 +18,7 @@
      # @param options [Hash] Any additional options, e.g. :tags
      # @return [void]
      def gauge(metric_name, count, options={})
-       raise NotImplementedError
+       raise MissingImplementationError
      end
 
      # Send a histogram metric
@@ -27,7 +27,7 @@
      # @param options [Hash] Any additional options, e.g. :tags
      # @return [void]
      def histogram(metric_name, count, options={})
-       raise NotImplementedError
+       raise MissingImplementationError
      end
 
      # Time a yielded block, and send a timer metric
@@ -35,7 +35,7 @@
      # @param options [Hash] Any additional options, e.g. :tags
      # @return [void]
      def time(metric_name, options={})
-       raise NotImplementedError
+       raise MissingImplementationError
      end
    end
  end
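Note: a sketch of a custom provider satisfying this interface (class name and log format are illustrative):

```ruby
class LoggingMetrics < Deimos::Metrics::Provider
  def increment(metric_name, options={})
    Karafka.logger.info("metric=#{metric_name} increment tags=#{options[:tags]}")
  end

  def gauge(metric_name, count, options={})
    Karafka.logger.info("metric=#{metric_name} gauge=#{count}")
  end

  def histogram(metric_name, count, options={})
    Karafka.logger.info("metric=#{metric_name} histogram=#{count}")
  end

  def time(metric_name, options={})
    start = Time.now
    result = yield
    Karafka.logger.info("metric=#{metric_name} time=#{Time.now - start}s")
    result
  end
end
```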
data/lib/deimos/producer.rb
@@ -2,12 +2,12 @@
 
  require 'deimos/message'
  require 'deimos/shared_config'
- require 'phobos/producer'
  require 'active_support/notifications'
 
  # :nodoc:
  module Deimos
    class << self
+
      # Run a block without allowing any messages to be produced to Kafka.
      # Optionally add a list of producer classes to limit the disabling to those
      # classes.
@@ -50,6 +50,8 @@ module Deimos
      # @param producer_class [Class]
      # @return [Boolean]
      def producers_disabled?(producer_class=nil)
+       return true if Deimos.config.producers.disabled
+
        Thread.current[:frk_disable_all_producers] ||
          Thread.current[:frk_disabled_producers]&.include?(producer_class)
      end
@@ -64,26 +66,6 @@ module Deimos
 
    class << self
 
-     # @return [Hash]
-     def config
-       @config ||= {
-         encode_key: true,
-         namespace: Deimos.config.producers.schema_namespace
-       }
-     end
-
-     # Set the topic.
-     # @param topic [String]
-     # @return [String] the current topic if no argument given.
-     def topic(topic=nil)
-       if topic
-         config[:topic] = topic
-         return
-       end
-       # accessor
-       "#{Deimos.config.producers.topic_prefix}#{config[:topic]}"
-     end
-
      # Override the default partition key (which is the payload key).
      # @param _payload [Hash] the payload being passed into the produce method.
      #   Will include `payload_key` if it is part of the original payload.
@@ -98,7 +80,22 @@ module Deimos
      # @param headers [Hash] if specifying headers
      # @return [void]
      def publish(payload, topic: self.topic, headers: nil)
-       publish_list([payload], topic: topic, headers: headers)
+       produce([{payload: payload, topic: topic, headers: headers}])
+     end
+
+     # Produce a list of messages in WaterDrop message hash format.
+     # @param messages [Array<Hash>]
+     # @param backend [Class < Deimos::Backend]
+     def produce(messages, backend: determine_backend_class)
+       return if Deimos.producers_disabled?(self)
+
+       messages.each do |m|
+         m[:label] = m
+         m[:partition_key] ||= self.partition_key(m[:payload])
+       end
+       messages.in_groups_of(MAX_BATCH_SIZE, false) do |batch|
+         self.produce_batch(backend, batch)
+       end
      end
 
      # Publish a list of messages.
@@ -111,31 +108,31 @@ module Deimos
      # @param headers [Hash] if specifying headers
      # @return [void]
      def publish_list(payloads, sync: nil, force_send: false, topic: self.topic, headers: nil)
-       return if Deimos.config.kafka.seed_brokers.blank? ||
-         Deimos.config.producers.disabled ||
-         Deimos.producers_disabled?(self)
-
-       raise 'Topic not specified. Please specify the topic.' if topic.blank?
-
-       backend_class = determine_backend_class(sync, force_send)
-       Deimos.instrument(
-         'encode_messages',
-         producer: self,
-         topic: topic,
-         payloads: payloads
-       ) do
-         messages = Array(payloads).map { |p| Deimos::Message.new(p.to_h, self, headers: headers) }
-         messages.each { |m| _process_message(m, topic) }
-         messages.in_groups_of(MAX_BATCH_SIZE, false) do |batch|
-           self.produce_batch(backend_class, batch)
-         end
+       backend = determine_backend_class(sync, force_send)
+
+       messages = Array(payloads).map do |p|
+         {
+           payload: p&.to_h,
+           headers: headers,
+           topic: topic,
+           partition_key: self.partition_key(p)
+         }
        end
+       self.produce(messages, backend: backend)
+     end
+
+     def karafka_config
+       Deimos.karafka_configs.find { |topic| topic.producer_class == self }
+     end
+
+     def topic
+       karafka_config.name
      end
 
      # @param sync [Boolean]
      # @param force_send [Boolean]
      # @return [Class<Deimos::Backends::Base>]
-     def determine_backend_class(sync, force_send)
+     def determine_backend_class(sync=false, force_send=false)
        backend = if force_send
          :kafka
        else
@@ -151,86 +148,12 @@ module Deimos
 
      # Send a batch to the backend.
      # @param backend [Class<Deimos::Backends::Base>]
-     # @param batch [Array<Deimos::Message>]
+     # @param batch [Array<Hash>]
      # @return [void]
      def produce_batch(backend, batch)
        backend.publish(producer_class: self, messages: batch)
      end
 
-     # @return [Deimos::SchemaBackends::Base]
-     def encoder
-       @encoder ||= Deimos.schema_backend(schema: config[:schema],
-                                          namespace: config[:namespace])
-     end
-
-     # @return [Deimos::SchemaBackends::Base]
-     def key_encoder
-       @key_encoder ||= Deimos.schema_backend(schema: config[:key_schema],
-                                              namespace: config[:namespace])
-     end
-
-     # Override this in active record producers to add
-     # non-schema fields to check for updates
-     # @return [Array<String>] fields to check for updates
-     def watched_attributes
-       self.encoder.schema_fields.map(&:name)
-     end
-
-     private
-
-     # @param message [Message]
-     # @param topic [String]
-     def _process_message(message, topic)
-       # this violates the Law of Demeter but it has to happen in a very
-       # specific order and requires a bunch of methods on the producer
-       # to work correctly.
-       message.add_fields(encoder.schema_fields.map(&:name))
-       message.partition_key = self.partition_key(message.payload)
-       message.key = _retrieve_key(message.payload)
-       # need to do this before _coerce_fields because that might result
-       # in an empty payload which is an *error* whereas this is intended.
-       message.payload = nil if message.payload.blank?
-       message.coerce_fields(encoder)
-       message.encoded_key = _encode_key(message.key)
-       message.topic = topic
-       message.encoded_payload = if message.payload.nil?
-                                   nil
-                                 else
-                                   encoder.encode(message.payload,
-                                                  topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-value")
-                                 end
-     end
-
-     # @param key [Object]
-     # @return [String|Object]
-     def _encode_key(key)
-       if key.nil?
-         return nil if config[:no_keys] # no key is fine, otherwise it's a problem
-
-         raise 'No key given but a key is required! Use `key_config none: true` to avoid using keys.'
-       end
-       if config[:encode_key] && config[:key_field].nil? &&
-          config[:key_schema].nil?
-         raise 'No key config given - if you are not encoding keys, please use `key_config plain: true`'
-       end
-
-       if config[:key_field]
-         encoder.encode_key(config[:key_field], key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
-       elsif config[:key_schema]
-         key_encoder.encode(key, topic: "#{Deimos.config.producers.topic_prefix}#{config[:topic]}-key")
-       else
-         key
-       end
-     end
-
-     # @param payload [Hash]
-     # @return [String]
-     def _retrieve_key(payload)
-       key = payload.delete(:payload_key)
-       return key if key
-
-       config[:key_field] ? payload[config[:key_field]] : nil
-     end
    end
  end
end
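Note: `publish` and `publish_list` now funnel into the new `produce` API, which takes WaterDrop-style message hashes directly; encoding has moved out of the producer and into middleware. A sketch (producer name and payloads are illustrative; the topic defaults to the one bound in Karafka routing):

```ruby
MyProducer.produce(
  [
    { payload: { 'test_id' => 'id1', 'some_int' => 5 } },
    { payload: { 'test_id' => 'id2', 'some_int' => 6 }, partition_key: 'id2' }
  ]
)

# The older entry points still work and delegate to produce:
MyProducer.publish({ 'test_id' => 'id3', 'some_int' => 7 })
```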
data/lib/deimos/railtie.rb
@@ -2,6 +2,12 @@
 
  # Add rake task to Rails.
  class Deimos::Railtie < Rails::Railtie
+   config.before_initialize do
+     if ARGV[0] == "deimos:v2"
+       FigTree.keep_removed_configs = true
+     end
+   end
+
    rake_tasks do
      load 'tasks/deimos.rake'
    end
data/lib/deimos/schema_backends/avro_base.rb
@@ -20,15 +20,19 @@ module Deimos
 
      # @override
      def encode_key(key_id, key, topic: nil)
-       @key_schema ||= _generate_key_schema(key_id)
+       begin
+         @key_schema ||= @schema_store.find("#{@schema}_key")
+       rescue AvroTurf::SchemaNotFoundError
+         @key_schema = generate_key_schema(key_id)
+       end
        field_name = _field_name_from_schema(@key_schema)
-       payload = { field_name => key }
+       payload = key.is_a?(Hash) ? key : { field_name => key }
        encode(payload, schema: @key_schema['name'], topic: topic)
      end
 
      # @override
      def decode_key(payload, key_id)
-       @key_schema ||= _generate_key_schema(key_id)
+       @key_schema ||= generate_key_schema(key_id)
        field_name = _field_name_from_schema(@key_schema)
        decode(payload, schema: @key_schema['name'])[field_name]
      end
@@ -85,7 +89,7 @@ module Deimos
 
      # @override
      def self.mock_backend
-       :avro_validation
+       :avro_local
      end
 
      # @override
@@ -146,21 +150,8 @@ module Deimos
        end
      end
 
-     private
-
-     # @param schema [String]
-     # @return [Avro::Schema]
-     def avro_schema(schema=nil)
-       schema ||= @schema
-       @schema_store.find(schema, @namespace)
-     end
-
-     # Generate a key schema from the given value schema and key ID. This
-     # is used when encoding or decoding keys from an existing value schema.
-     # @param key_id [Symbol]
-     # @return [Hash]
-     def _generate_key_schema(key_id)
-       key_field = avro_schema.fields.find { |f| f.name == key_id.to_s }
+     def generate_key_schema(field_name)
+       key_field = avro_schema.fields.find { |f| f.name == field_name.to_s }
        name = _key_schema_name(@schema)
        key_schema = {
          'type' => 'record',
@@ -169,13 +160,22 @@ module Deimos
        'doc' => "Key for #{@namespace}.#{@schema} - autogenerated by Deimos",
        'fields' => [
          {
-           'name' => key_id,
+           'name' => field_name,
            'type' => key_field.type.type_sym.to_s
          }
        ]
      }
      @schema_store.add_schema(key_schema)
-     key_schema
+     @key_schema = key_schema
+   end
+
+   private
+
+   # @param schema [String]
+   # @return [Avro::Schema]
+   def avro_schema(schema=nil)
+     schema ||= @schema
+     @schema_store.find(schema, @namespace)
    end
  end
 
  # @param value_schema [Hash]
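Note: `encode_key` now prefers a `"<schema>_key"` schema registered in the schema store, falls back to generating one, and accepts an already-wrapped hash key. A sketch (schema and field names are illustrative):

```ruby
backend = Deimos.schema_backend(schema: 'MySchema', namespace: 'com.my-namespace')
backend.encode_key('test_id', 'id1')                  # wrapped into { 'test_id' => 'id1' }
backend.encode_key('test_id', { 'test_id' => 'id1' }) # a hash key now passes through as-is
```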
data/lib/deimos/schema_backends/avro_schema_registry.rb
@@ -1,7 +1,6 @@
  # frozen_string_literal: true
 
  require_relative 'avro_base'
- require_relative 'avro_validation'
  require 'avro_turf/messaging'
 
  module Deimos
@@ -29,7 +28,7 @@ module Deimos
          user: Deimos.config.schema.user,
          password: Deimos.config.schema.password,
          namespace: @namespace,
-         logger: Deimos.config.logger
+         logger: Karafka.logger
        )
      end
    end
data/lib/deimos/schema_backends/avro_validation.rb
@@ -9,12 +9,12 @@
    class AvroValidation < AvroBase
      # @override
      def decode_payload(payload, schema: nil)
-       payload.with_indifferent_access
+       JSON.parse(payload)
      end
 
      # @override
      def encode_payload(payload, schema: nil, topic: nil)
-       payload.with_indifferent_access
+       payload.to_h.with_indifferent_access.to_json
      end
    end
  end
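Note: `AvroValidation` now round-trips real JSON strings instead of returning the hash untouched. A sketch, assuming the usual `schema:`/`namespace:` backend constructor (schema name illustrative):

```ruby
backend = Deimos::SchemaBackends::AvroValidation.new(
  schema: 'MySchema', namespace: 'com.my-namespace'
)
encoded = backend.encode_payload({ 'test_id' => 'id1' }) # => "{\"test_id\":\"id1\"}"
backend.decode_payload(encoded)                          # => { "test_id" => "id1" }
```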