deimos-ruby 1.16.3 → 1.16.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +3 -3
  3. data/.gitignore +1 -0
  4. data/.rubocop.yml +20 -14
  5. data/.rubocop_todo.yml +364 -0
  6. data/.ruby-version +2 -1
  7. data/CHANGELOG.md +9 -0
  8. data/Gemfile +6 -0
  9. data/README.md +7 -1
  10. data/Steepfile +6 -0
  11. data/deimos-ruby.gemspec +3 -2
  12. data/lib/deimos/active_record_consume/batch_consumption.rb +7 -2
  13. data/lib/deimos/active_record_consume/batch_slicer.rb +2 -0
  14. data/lib/deimos/active_record_consume/message_consumption.rb +8 -4
  15. data/lib/deimos/active_record_consumer.rb +7 -4
  16. data/lib/deimos/active_record_producer.rb +3 -0
  17. data/lib/deimos/backends/base.rb +4 -2
  18. data/lib/deimos/backends/kafka.rb +1 -0
  19. data/lib/deimos/backends/kafka_async.rb +1 -0
  20. data/lib/deimos/config/configuration.rb +4 -0
  21. data/lib/deimos/config/phobos_config.rb +2 -1
  22. data/lib/deimos/consume/batch_consumption.rb +8 -1
  23. data/lib/deimos/consume/message_consumption.rb +4 -1
  24. data/lib/deimos/instrumentation.rb +11 -4
  25. data/lib/deimos/kafka_message.rb +1 -0
  26. data/lib/deimos/kafka_source.rb +5 -0
  27. data/lib/deimos/kafka_topic_info.rb +4 -0
  28. data/lib/deimos/message.rb +19 -2
  29. data/lib/deimos/metrics/datadog.rb +2 -1
  30. data/lib/deimos/metrics/mock.rb +2 -2
  31. data/lib/deimos/metrics/provider.rb +6 -0
  32. data/lib/deimos/monkey_patches/phobos_cli.rb +1 -1
  33. data/lib/deimos/monkey_patches/phobos_producer.rb +1 -0
  34. data/lib/deimos/producer.rb +12 -6
  35. data/lib/deimos/schema_backends/base.rb +31 -17
  36. data/lib/deimos/schema_backends/mock.rb +2 -2
  37. data/lib/deimos/schema_class/base.rb +9 -5
  38. data/lib/deimos/schema_class/enum.rb +4 -2
  39. data/lib/deimos/schema_class/record.rb +5 -5
  40. data/lib/deimos/shared_config.rb +6 -2
  41. data/lib/deimos/test_helpers.rb +21 -4
  42. data/lib/deimos/tracing/datadog.rb +1 -1
  43. data/lib/deimos/tracing/mock.rb +4 -3
  44. data/lib/deimos/tracing/provider.rb +5 -0
  45. data/lib/deimos/utils/db_poller.rb +9 -1
  46. data/lib/deimos/utils/db_producer.rb +14 -2
  47. data/lib/deimos/utils/deadlock_retry.rb +3 -0
  48. data/lib/deimos/utils/inline_consumer.rb +14 -6
  49. data/lib/deimos/utils/lag_reporter.rb +11 -0
  50. data/lib/deimos/utils/schema_controller_mixin.rb +8 -0
  51. data/lib/deimos/version.rb +1 -1
  52. data/lib/deimos.rb +3 -2
  53. data/lib/generators/deimos/active_record_generator.rb +1 -1
  54. data/lib/generators/deimos/db_backend_generator.rb +1 -0
  55. data/lib/generators/deimos/db_poller_generator.rb +1 -0
  56. data/lib/generators/deimos/schema_class/templates/schema_record.rb.tt +1 -1
  57. data/lib/generators/deimos/schema_class_generator.rb +13 -4
  58. data/rbs_collection.lock.yaml +176 -0
  59. data/rbs_collection.yaml +15 -0
  60. data/sig/avro.rbs +14 -0
  61. data/sig/defs.rbs +1867 -0
  62. data/sig/fig_tree.rbs +2 -0
  63. data/spec/consumer_spec.rb +14 -14
  64. data/spec/generators/schema_class/my_schema_spec.rb +3 -3
  65. data/spec/generators/schema_class/my_schema_with_complex_types_spec.rb +1 -1
  66. data/spec/producer_spec.rb +1 -1
  67. data/spec/schemas/my_namespace/my_schema_with_complex_type.rb +3 -3
  68. data/spec/snapshots/consumers-no-nest.snap +1 -1
  69. data/spec/snapshots/consumers.snap +1 -1
  70. data/spec/snapshots/consumers_and_producers-no-nest.snap +3 -3
  71. data/spec/snapshots/consumers_and_producers.snap +3 -3
  72. data/spec/snapshots/consumers_circular-no-nest.snap +1 -1
  73. data/spec/snapshots/consumers_circular.snap +1 -1
  74. data/spec/snapshots/consumers_complex_types-no-nest.snap +1 -1
  75. data/spec/snapshots/consumers_complex_types.snap +1 -1
  76. data/spec/snapshots/consumers_nested-no-nest.snap +1 -1
  77. data/spec/snapshots/consumers_nested.snap +1 -1
  78. data/spec/snapshots/namespace_folders.snap +3 -3
  79. data/spec/snapshots/producers_with_key-no-nest.snap +1 -1
  80. data/spec/snapshots/producers_with_key.snap +1 -1
  81. metadata +39 -21
  82. data/.gemfiles/avro_turf-0.gemfile +0 -3
  83. data/.gemfiles/avro_turf-1.gemfile +0 -3
  84. data/.ruby-gemset +0 -1

data/lib/deimos/active_record_consumer.rb

@@ -23,14 +23,16 @@ module Deimos
     include ActiveRecordConsume::BatchConsumption
 
     class << self
-      # param klass [Class < ActiveRecord::Base] the class used to save to the
+      # @param klass [Class<ActiveRecord::Base>] the class used to save to the
       # database.
+      # @return [void]
       def record_class(klass)
         config[:record_class] = klass
       end
 
-      # param val [Boolean] Turn pre-compaction of the batch on or off. If true,
+      # @param val [Boolean] Turn pre-compaction of the batch on or off. If true,
       # only the last message for each unique key in a batch is processed.
+      # @return [void]
       def compacted(val)
         config[:compacted] = val
       end
@@ -50,14 +52,15 @@ module Deimos
 
     # Override this method (with `super`) if you want to add/change the default
     # attributes set to the new/existing record.
-    # @param payload [Hash|Deimos::SchemaClass::Record]
+    # @param payload [Hash,Deimos::SchemaClass::Record]
     # @param _key [String]
+    # @return [Hash]
     def record_attributes(payload, _key=nil)
       @converter.convert(payload)
     end
 
     # Override this message to conditionally save records
-    # @param _payload [Hash|Deimos::SchemaClass::Record] The kafka message
+    # @param _payload [Hash,Deimos::SchemaClass::Record] The kafka message
     # @return [Boolean] if true, record is created/update.
     # If false, record processing is skipped but message offset is still committed.
     def process_message?(_payload)
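
Taken together, these are the hooks an app-level consumer overrides. A minimal usage sketch (the `MyConsumer` and `Widget` names are invented for illustration, not from this diff):

# Hypothetical consumer built on the documented API.
class MyConsumer < Deimos::ActiveRecordConsumer
  record_class Widget   # ActiveRecord model to persist messages into
  compacted true        # only process the last message per key in a batch

  # Skip some payloads but still commit their offsets.
  def process_message?(payload)
    payload[:status] != 'ignore'
  end
end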

data/lib/deimos/active_record_producer.rb

@@ -17,6 +17,7 @@ module Deimos
       # @param refetch [Boolean] if true, and we are given a hash instead of
       # a record object, refetch the record to pass into the `generate_payload`
       # method.
+      # @return [void]
       def record_class(klass, refetch: true)
        config[:record_class] = klass
        config[:refetch_record] = refetch
@@ -24,12 +25,14 @@ module Deimos
 
       # @param record [ActiveRecord::Base]
       # @param force_send [Boolean]
+      # @return [void]
       def send_event(record, force_send: false)
         send_events([record], force_send: force_send)
       end
 
       # @param records [Array<ActiveRecord::Base>]
       # @param force_send [Boolean]
+      # @return [void]
       def send_events(records, force_send: false)
         primary_key = config[:record_class]&.primary_key
         messages = records.map do |record|
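
For reference, a producer using this API might be wired up as in the sketch below (`WidgetProducer`, `Widget`, and the topic/schema names are assumptions for illustration):

# Hypothetical ActiveRecord-backed producer.
class WidgetProducer < Deimos::ActiveRecordProducer
  topic 'MyApp.Widgets'
  schema 'Widget'
  namespace 'com.my-company'
  key_config none: true
  record_class Widget
end

# Publish one record, or a batch of records.
WidgetProducer.send_event(Widget.last)
WidgetProducer.send_events(Widget.where('updated_at > ?', 1.day.ago))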

data/lib/deimos/backends/base.rb

@@ -5,8 +5,9 @@ module Deimos
     # Abstract class for all publish backends.
     class Base
       class << self
-        # @param producer_class [Class < Deimos::Producer]
+        # @param producer_class [Class<Deimos::Producer>]
         # @param messages [Array<Deimos::Message>]
+        # @return [void]
         def publish(producer_class:, messages:)
           Deimos.config.logger.info(
             message: 'Publishing messages',
@@ -21,8 +22,9 @@ module Deimos
           execute(producer_class: producer_class, messages: messages)
         end
 
-        # @param producer_class [Class < Deimos::Producer]
+        # @param producer_class [Class<Deimos::Producer>]
         # @param messages [Array<Deimos::Message>]
+        # @return [void]
         def execute(producer_class:, messages:)
           raise NotImplementedError
         end
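
`execute` is the hook a concrete backend implements after `publish` has logged the batch. A rough sketch of a custom backend (the `LoggingBackend` class is hypothetical, not one of the gem's shipped backends):

# Hypothetical backend that logs messages instead of delivering them.
class LoggingBackend < Deimos::Backends::Base
  class << self
    # Receives the batch after Base.publish has logged it.
    def execute(producer_class:, messages:)
      messages.each do |message|
        Deimos.config.logger.info(message: 'Would send', topic: message.topic)
      end
    end
  end
end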

data/lib/deimos/backends/kafka.rb

@@ -7,6 +7,7 @@ module Deimos
       include Phobos::Producer
 
       # Shut down the producer if necessary.
+      # @return [void]
       def self.shutdown_producer
         producer.sync_producer_shutdown if producer.respond_to?(:sync_producer_shutdown)
         producer.kafka_client&.close

data/lib/deimos/backends/kafka_async.rb

@@ -7,6 +7,7 @@ module Deimos
       include Phobos::Producer
 
       # Shut down the producer cleanly.
+      # @return [void]
       def self.shutdown_producer
         producer.async_producer_shutdown
         producer.kafka_client&.close

data/lib/deimos/config/configuration.rb

@@ -32,6 +32,7 @@ module Deimos
   end
 
   # Loads generated classes
+  # @return [void]
   def self.load_generated_schema_classes
     if Deimos.config.schema.generated_class_path.nil?
       raise 'Cannot use schema classes without schema.generated_class_path. Please provide a directory.'
@@ -43,6 +44,7 @@ module Deimos
   end
 
   # Ensure everything is set up correctly for the DB backend.
+  # @!visibility private
   def self.validate_db_backend
     begin
       require 'activerecord-import'
@@ -56,6 +58,7 @@ module Deimos
 
   # Validate that consumers are configured correctly, including their
   # delivery mode.
+  # @!visibility private
   def self.validate_consumers
     Phobos.config.listeners.each do |listener|
       handler_class = listener.handler.constantize
@@ -74,6 +77,7 @@ module Deimos
     end
   end
 
+  # @!visibility private
   # @param kafka_config [FigTree::ConfigStruct]
   def self.configure_producer_or_consumer(kafka_config)
     klass = kafka_config.class_name.constantize

data/lib/deimos/config/phobos_config.rb

@@ -20,7 +20,7 @@ module Deimos
       }.to_h
     end
 
-    # :nodoc:
+    # @return [void]
    def reset!
      super
      Phobos.configure(self.phobos_config)
@@ -115,6 +115,7 @@ module Deimos
    end
 
    # Legacy method to parse Phobos config file
+    # @!visibility private
    def phobos_config_file=(file)
      pconfig = YAML.load(ERB.new(File.read(File.expand_path(file))).result). # rubocop:disable Security/YAMLLoad
        with_indifferent_access

data/lib/deimos/consume/batch_consumption.rb

@@ -9,7 +9,9 @@ module Deimos
     extend ActiveSupport::Concern
     include Phobos::BatchHandler
 
-    # :nodoc:
+    # @param batch [Array<String>]
+    # @param metadata [Hash]
+    # @return [void]
     def around_consume_batch(batch, metadata)
       payloads = []
       _with_span do
@@ -36,12 +38,14 @@ module Deimos
     # Consume a batch of incoming messages.
     # @param _payloads [Array<Phobos::BatchMessage>]
     # @param _metadata [Hash]
+    # @return [void]
     def consume_batch(_payloads, _metadata)
       raise NotImplementedError
     end
 
     protected
 
+    # @!visibility private
     def _received_batch(payloads, metadata)
       Deimos.config.logger.info(
         message: 'Got Kafka batch event',
@@ -70,6 +74,7 @@ module Deimos
       end
     end
 
+    # @!visibility private
    # @param exception [Throwable]
    # @param payloads [Array<Hash>]
    # @param metadata [Hash]
@@ -91,6 +96,7 @@ module Deimos
       _error(exception, payloads, metadata)
     end
 
+    # @!visibility private
    # @param time_taken [Float]
    # @param payloads [Array<Hash>]
    # @param metadata [Hash]
@@ -122,6 +128,7 @@ module Deimos
       )
     end
 
+    # @!visibility private
    # Get payload identifiers (key and message_id if present) for logging.
    # @param payloads [Array<Hash>]
    # @param metadata [Hash]
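
`consume_batch` is the contract an inline-batch consumer fulfills; payloads arrive already decoded. A sketch along the lines of the gem's README (class name invented):

# Hypothetical batch consumer.
class MyBatchConsumer < Deimos::Consumer
  # metadata carries batch-level info such as :keys and :topic.
  def consume_batch(payloads, metadata)
    payloads.zip(metadata[:keys]).each do |payload, key|
      Deimos.config.logger.info(message: 'Got message', key: key, payload: payload)
    end
  end
end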

data/lib/deimos/consume/message_consumption.rb

@@ -8,7 +8,9 @@ module Deimos
     extend ActiveSupport::Concern
     include Phobos::Handler
 
-    # :nodoc:
+    # @param payload [String]
+    # @param metadata [Hash]
+    # @return [void]
     def around_consume(payload, metadata)
       decoded_payload = payload.nil? ? nil : payload.dup
       new_metadata = metadata.dup
@@ -28,6 +30,7 @@ module Deimos
     # Consume incoming messages.
     # @param _payload [String]
     # @param _metadata [Hash]
+    # @return [void]
     def consume(_payload, _metadata)
       raise NotImplementedError
     end
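
The message-by-message counterpart overrides `consume`; a minimal sketch (class name invented):

# Hypothetical one-at-a-time consumer.
class MyConsumer < Deimos::Consumer
  def consume(payload, metadata)
    Deimos.config.logger.info(
      message: 'Got message',
      key: metadata[:key],
      payload: payload
    )
  end
end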

data/lib/deimos/instrumentation.rb

@@ -8,23 +8,29 @@ module Deimos
   # Copied from Phobos instrumentation.
   module Instrumentation
     extend ActiveSupport::Concern
+
+    # @return [String]
     NAMESPACE = 'Deimos'
 
     # :nodoc:
     module ClassMethods
-      # :nodoc:
+      # @param event [String]
+      # @return [void]
       def subscribe(event)
         ActiveSupport::Notifications.subscribe("#{NAMESPACE}.#{event}") do |*args|
           yield(ActiveSupport::Notifications::Event.new(*args)) if block_given?
         end
       end
 
-      # :nodoc:
+      # @param subscriber [ActiveSupport::Subscriber]
+      # @return [void]
       def unsubscribe(subscriber)
         ActiveSupport::Notifications.unsubscribe(subscriber)
       end
 
-      # :nodoc:
+      # @param event [String]
+      # @param extra [Hash]
+      # @return [void]
       def instrument(event, extra={})
         ActiveSupport::Notifications.instrument("#{NAMESPACE}.#{event}", extra) do |extra2|
           yield(extra2) if block_given?
@@ -39,7 +45,8 @@ module Deimos
   module KafkaListener
     # Listens for any exceptions that happen during publishing and re-publishes
     # as a Deimos event.
-    # @param event [ActiveSupport::Notification]
+    # @param event [ActiveSupport::Notifications::Event]
+    # @return [void]
     def self.send_produce_error(event)
      exception = event.payload[:exception_object]
      return if !exception || !exception.respond_to?(:failed_messages)
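
These class methods back the `Deimos.subscribe` API; usage follows the gem's README, with the handler body here as a sketch:

# Listen for the re-published produce_error event and report it.
Deimos.subscribe('produce_error') do |event|
  payloads = event.payload[:payloads]
  Deimos.config.logger.error(message: "Failed to produce #{payloads.size} messages")
end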

data/lib/deimos/kafka_message.rb

@@ -10,6 +10,7 @@ module Deimos
     # Ensure it gets turned into a string, e.g. for testing purposes. It
     # should already be a string.
     # @param mess [Object]
+    # @return [void]
     def message=(mess)
       write_attribute(:message, mess ? mess.to_s : nil)
     end

data/lib/deimos/kafka_source.rb

@@ -6,6 +6,7 @@ module Deimos
   module KafkaSource
     extend ActiveSupport::Concern
 
+    # @return [String]
     DEPRECATION_WARNING = 'The kafka_producer interface will be deprecated ' \
       'in future releases. Please use kafka_producers instead.'
 
@@ -16,6 +17,7 @@ module Deimos
     end
 
     # Send the newly created model to Kafka.
+    # @return [void]
     def send_kafka_event_on_create
       return unless self.persisted?
       return unless self.class.kafka_config[:create]
@@ -24,6 +26,7 @@ module Deimos
     end
 
     # Send the newly updated model to Kafka.
+    # @return [void]
     def send_kafka_event_on_update
       return unless self.class.kafka_config[:update]
 
@@ -41,6 +44,7 @@ module Deimos
     end
 
     # Send a deletion (null payload) event to Kafka.
+    # @return [void]
     def send_kafka_event_on_destroy
       return unless self.class.kafka_config[:delete]
 
@@ -80,6 +84,7 @@ module Deimos
     # the inputs (arrays, hashes, records etc.)
     # Basically we want to first do the import, then reload the records
     # and send them to Kafka.
+    # @!visibility private
     def import_without_validations_or_callbacks(column_names,
                                                 array_of_attributes,
                                                 options={})
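
For orientation, a model mixing in `KafkaSource` defines which producers and lifecycle events apply, per the gem's README; a sketch with invented names:

# Hypothetical model that emits Kafka events on create/update/destroy.
class Widget < ActiveRecord::Base
  include Deimos::KafkaSource

  # Lifecycle events that should publish messages.
  def self.kafka_config
    { create: true, update: true, delete: true, import: false }
  end

  # Producers used to publish this model's changes.
  def self.kafka_producers
    [WidgetProducer]
  end
end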

data/lib/deimos/kafka_topic_info.rb

@@ -50,6 +50,7 @@ module Deimos
       # moves on to the next one.
       # @param topic [String]
       # @param lock_id [String]
+      # @return [void]
       def clear_lock(topic, lock_id)
         self.where(topic: topic, locked_by: lock_id).
           update_all(locked_by: nil,
@@ -66,6 +67,7 @@ module Deimos
       # was in a good state.
       # @param except_topics [Array<String>] the list of topics we've just
       # realized had messages in them, meaning all other topics were empty.
+      # @return [void]
       def ping_empty_topics(except_topics)
         records = KafkaTopicInfo.where(locked_by: nil).
           where('topic not in(?)', except_topics)
@@ -79,6 +81,7 @@ module Deimos
       # and allows the caller to continue to the next topic.
       # @param topic [String]
       # @param lock_id [String]
+      # @return [void]
       def register_error(topic, lock_id)
         record = self.where(topic: topic, locked_by: lock_id).last
         attr_hash = { locked_by: nil,
@@ -93,6 +96,7 @@ module Deimos
       # working on those messages and to continue.
       # @param topic [String]
       # @param lock_id [String]
+      # @return [void]
       def heartbeat(topic, lock_id)
         self.where(topic: topic, locked_by: lock_id).
           update_all(locked_at: Time.zone.now)

data/lib/deimos/message.rb

@@ -3,11 +3,26 @@
 module Deimos
   # Basically a struct to hold the message as it's processed.
   class Message
-    attr_accessor :payload, :key, :partition_key, :encoded_key,
-                  :encoded_payload, :topic, :producer_name
+    # @return [Hash]
+    attr_accessor :payload
+    # @return [Hash, String, Integer]
+    attr_accessor :key
+    # @return [Integer]
+    attr_accessor :partition_key
+    # @return [String]
+    attr_accessor :encoded_key
+    # @return [String]
+    attr_accessor :encoded_payload
+    # @return [String]
+    attr_accessor :topic
+    # @return [String]
+    attr_accessor :producer_name
 
     # @param payload [Hash]
     # @param producer [Class]
+    # @param topic [String]
+    # @param key [String, Integer, Hash]
+    # @param partition_key [Integer]
     def initialize(payload, producer, topic: nil, key: nil, partition_key: nil)
       @payload = payload&.with_indifferent_access
       @producer_name = producer&.name
@@ -19,6 +34,7 @@ module Deimos
     # Add message_id and timestamp default values if they are in the
     # schema and don't already have values.
     # @param fields [Array<String>] existing name fields in the schema.
+    # @return [void]
     def add_fields(fields)
       return if @payload.except(:payload_key, :partition_key).blank?
 
@@ -31,6 +47,7 @@ module Deimos
     end
 
     # @param encoder [Deimos::SchemaBackends::Base]
+    # @return [void]
     def coerce_fields(encoder)
       return if payload.nil?
 

data/lib/deimos/metrics/datadog.rb

@@ -6,7 +6,8 @@ module Deimos
   module Metrics
     # A Metrics wrapper class for Datadog.
     class Datadog < Metrics::Provider
-      # :nodoc:
+      # @param config [Hash]
+      # @param logger [Logger]
      def initialize(config, logger)
        raise 'Metrics config must specify host_ip' if config[:host_ip].nil?
        raise 'Metrics config must specify host_port' if config[:host_port].nil?

data/lib/deimos/metrics/mock.rb

@@ -5,8 +5,8 @@ require 'deimos/metrics/provider'
 module Deimos
   module Metrics
     # A mock Metrics wrapper which just logs the metrics
-    class Mock
-      # :nodoc:
+    class Mock < Provider
+      # @param logger [Logger,nil]
       def initialize(logger=nil)
         @logger = logger || Logger.new(STDOUT)
         @logger.info('MockMetricsProvider initialized')

data/lib/deimos/metrics/provider.rb

@@ -7,20 +7,25 @@ module Deimos
       # Send an counter increment metric
       # @param metric_name [String] The name of the counter metric
       # @param options [Hash] Any additional options, e.g. :tags
+      # @return [void]
       def increment(metric_name, options={})
         raise NotImplementedError
       end
 
       # Send an counter increment metric
       # @param metric_name [String] The name of the counter metric
+      # @param count [Integer]
       # @param options [Hash] Any additional options, e.g. :tags
+      # @return [void]
       def gauge(metric_name, count, options={})
         raise NotImplementedError
       end
 
       # Send an counter increment metric
       # @param metric_name [String] The name of the counter metric
+      # @param count [Integer]
       # @param options [Hash] Any additional options, e.g. :tags
+      # @return [void]
       def histogram(metric_name, count, options={})
         raise NotImplementedError
       end
@@ -28,6 +33,7 @@ module Deimos
       # Time a yielded block, and send a timer metric
       # @param metric_name [String] The name of the metric
       # @param options [Hash] Any additional options, e.g. :tags
+      # @return [void]
       def time(metric_name, options={})
         raise NotImplementedError
       end
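
A concrete provider fills in these four hooks; a minimal in-memory sketch (the class and the config wiring line are assumptions for illustration):

# Hypothetical provider that records metrics in a hash, e.g. for tests.
class CountingMetrics < Deimos::Metrics::Provider
  def initialize
    @counters = Hash.new(0)
  end

  def increment(metric_name, options={})
    @counters[metric_name] += 1
  end

  def gauge(metric_name, count, options={})
    @counters[metric_name] = count
  end

  def histogram(metric_name, count, options={})
    @counters[metric_name] = count
  end

  # Times the given block and stores the elapsed seconds.
  def time(metric_name, options={})
    start = Time.now
    result = yield
    @counters[metric_name] = Time.now - start
    result
  end
end

# Wiring it up in the fig_tree-style config block:
Deimos.configure { metrics CountingMetrics.new }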

data/lib/deimos/monkey_patches/phobos_cli.rb

@@ -2,7 +2,7 @@
 
 require 'phobos/cli/start'
 
-# :nodoc:
+#@!visibility private
 module Phobos
   # :nodoc:
   module CLI

data/lib/deimos/monkey_patches/phobos_producer.rb

@@ -2,6 +2,7 @@
 
 require 'phobos/producer'
 
+#@!visibility private
 module Phobos
   module Producer
     # :nodoc:

data/lib/deimos/producer.rb

@@ -11,7 +11,8 @@ module Deimos
     # Run a block without allowing any messages to be produced to Kafka.
     # Optionally add a list of producer classes to limit the disabling to those
     # classes.
-    # @param producer_classes [Array<Class>|Class]
+    # @param producer_classes [Array<Class>, Class]
+    # @return [void]
     def disable_producers(*producer_classes, &block)
       if producer_classes.any?
         _disable_producer_classes(producer_classes, &block)
@@ -31,7 +32,7 @@ module Deimos
       end
     end
 
-    # :nodoc:
+    # @!visibility private
    def _disable_producer_classes(producer_classes)
      Thread.current[:frk_disabled_producers] ||= Set.new
      producers_to_disable = producer_classes -
@@ -43,6 +44,7 @@ module Deimos
 
     # Are producers disabled? If a class is passed in, check only that class.
     # Otherwise check if the global disable flag is set.
+    # @param producer_class [Class]
     # @return [Boolean]
     def producers_disabled?(producer_class=nil)
       Thread.current[:frk_disable_all_producers] ||
@@ -54,6 +56,7 @@ module Deimos
   class Producer
     include SharedConfig
 
+    # @return [Integer]
     MAX_BATCH_SIZE = 500
 
     class << self
@@ -87,19 +90,21 @@ module Deimos
       end
 
       # Publish the payload to the topic.
-      # @param payload [Hash|SchemaClass::Record] with an optional payload_key hash key.
+      # @param payload [Hash, SchemaClass::Record] with an optional payload_key hash key.
       # @param topic [String] if specifying the topic
+      # @return [void]
       def publish(payload, topic: self.topic)
         publish_list([payload], topic: topic)
       end
 
       # Publish a list of messages.
-      # @param payloads [Array<Hash|SchemaClass::Record>] with optional payload_key hash key.
+      # @param payloads [Array<Hash, SchemaClass::Record>] with optional payload_key hash key.
       # @param sync [Boolean] if given, override the default setting of
       # whether to publish synchronously.
       # @param force_send [Boolean] if true, ignore the configured backend
       # and send immediately to Kafka.
       # @param topic [String] if specifying the topic
+      # @return [void]
       def publish_list(payloads, sync: nil, force_send: false, topic: self.topic)
         return if Deimos.config.kafka.seed_brokers.blank? ||
                   Deimos.config.producers.disabled ||
@@ -124,7 +129,7 @@ module Deimos
 
       # @param sync [Boolean]
       # @param force_send [Boolean]
-      # @return [Class < Deimos::Backend]
+      # @return [Class<Deimos::Backends::Base>]
       def determine_backend_class(sync, force_send)
         backend = if force_send
                     :kafka
@@ -140,8 +145,9 @@ module Deimos
       end
 
       # Send a batch to the backend.
-      # @param backend [Class < Deimos::Backend]
+      # @param backend [Class<Deimos::Backends::Base>]
       # @param batch [Array<Deimos::Message>]
+      # @return [void]
       def produce_batch(backend, batch)
         backend.publish(producer_class: self, messages: batch)
       end
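
`publish` and `publish_list` are the public producing API; usage per the gem's README, with invented names:

# Hypothetical producer publishing hashes matching its schema.
class MyProducer < Deimos::Producer
  schema 'MySchema'
  namespace 'com.my-company'
  topic 'MyApp.MyTopic'
  key_config field: :test_id
end

payload = { 'test_id' => 'id1', 'some_int' => 3 }
MyProducer.publish(payload)
MyProducer.publish_list([payload], force_send: true) # bypasses the configured backend

# Suppress publishing inside a block, e.g. during backfills:
Deimos.disable_producers(MyProducer) do
  MyProducer.publish(payload) # no-op inside this block
end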

data/lib/deimos/schema_backends/base.rb

@@ -3,11 +3,19 @@
 module Deimos
   # Represents a field in the schema.
   class SchemaField
-    attr_accessor :name, :type, :enum_values, :default
+    # @return [String]
+    attr_accessor :name
+    # @return [String]
+    attr_accessor :type
+    # @return [Array<String>]
+    attr_accessor :enum_values
+    # @return [Object]
+    attr_accessor :default
 
     # @param name [String]
     # @param type [Object]
     # @param enum_values [Array<String>]
+    # @param default [Object]
     def initialize(name, type, enum_values=[], default=:no_default)
       @name = name
       @type = type
@@ -19,9 +27,14 @@ module Deimos
   module SchemaBackends
     # Base class for encoding / decoding.
     class Base
-      attr_accessor :schema, :namespace, :key_schema
+      # @return [String]
+      attr_accessor :schema
+      # @return [String]
+      attr_accessor :namespace
+      # @return [String]
+      attr_accessor :key_schema
 
-      # @param schema [String|Symbol]
+      # @param schema [String,Symbol]
       # @param namespace [String]
       def initialize(schema:, namespace: nil)
         @schema = schema
@@ -30,7 +43,7 @@ module Deimos
 
       # Encode a payload with a schema. Public method.
       # @param payload [Hash]
-      # @param schema [Symbol|String]
+      # @param schema [String,Symbol]
       # @param topic [String]
       # @return [String]
       def encode(payload, schema: nil, topic: nil)
@@ -40,7 +53,7 @@ module Deimos
 
       # Decode a payload with a schema. Public method.
       # @param payload [String]
-      # @param schema [Symbol|String]
+      # @param schema [String,Symbol]
       # @return [Hash,nil]
       def decode(payload, schema: nil)
         return nil if payload.nil?
@@ -90,25 +103,26 @@ module Deimos
 
       # Encode a payload. To be defined by subclass.
       # @param payload [Hash]
-      # @param schema [Symbol|String]
+      # @param schema [String,Symbol]
       # @param topic [String]
       # @return [String]
-      def encode_payload(_payload, schema:, topic: nil)
+      def encode_payload(payload, schema:, topic: nil)
         raise NotImplementedError
       end
 
       # Decode a payload. To be defined by subclass.
       # @param payload [String]
-      # @param schema [String|Symbol]
+      # @param schema [String,Symbol]
       # @return [Hash]
-      def decode_payload(_payload, schema:)
+      def decode_payload(payload, schema:)
         raise NotImplementedError
       end
 
       # Validate that a payload matches the schema. To be defined by subclass.
       # @param payload [Hash]
-      # @param schema [String|Symbol]
-      def validate(_payload, schema:)
+      # @param schema [String,Symbol]
+      # @return [void]
+      def validate(payload, schema:)
         raise NotImplementedError
       end
 
@@ -124,7 +138,7 @@ module Deimos
       # @param field [SchemaField]
       # @param value [Object]
       # @return [Object]
-      def coerce_field(_field, _value)
+      def coerce_field(field, value)
         raise NotImplementedError
       end
 
@@ -140,19 +154,19 @@ module Deimos
       end
 
       # Encode a message key. To be defined by subclass.
-      # @param key [String|Hash] the value to use as the key.
-      # @param key_id [Symbol|String] the field name of the key.
+      # @param key [String,Hash] the value to use as the key.
+      # @param key_id [String,Symbol] the field name of the key.
       # @param topic [String]
       # @return [String]
-      def encode_key(_key, _key_id, topic: nil)
+      def encode_key(key, key_id, topic: nil)
         raise NotImplementedError
       end
 
       # Decode a message key. To be defined by subclass.
       # @param payload [Hash] the message itself.
-      # @param key_id [Symbol|String] the field in the message to decode.
+      # @param key_id [String,Symbol] the field in the message to decode.
       # @return [String]
-      def decode_key(_payload, _key_id)
+      def decode_key(payload, key_id)
         raise NotImplementedError
       end

data/lib/deimos/schema_backends/mock.rb

@@ -15,7 +15,7 @@ module Deimos
       end
 
       # @override
-      def validate(_payload, schema:)
+      def validate(payload, schema:)
       end
 
       # @override
@@ -24,7 +24,7 @@ module Deimos
       end
 
       # @override
-      def coerce_field(_type, value)
+      def coerce_field(field, value)
         value
       end