phobos 2.1.4 → 2.1.6

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -2,21 +2,32 @@
 
 module Phobos
   module BatchHandler
+    # @!visibility private
     def self.included(base)
      base.extend(ClassMethods)
    end
 
+    # @param _payloads [Array]
+    # @param _metadata [Hash<String, Object>]
+    # @return [void]
    def consume_batch(_payloads, _metadata)
      raise NotImplementedError
    end
 
+    # @param payloads [Array]
+    # @param metadata [Hash<String, Object>]
+    # @yield [Array, Hash<String, Object>]
+    # @return [void]
    def around_consume_batch(payloads, metadata)
      yield payloads, metadata
    end
 
    module ClassMethods
+      # @param kafka_client
+      # @return [void]
      def start(kafka_client); end
 
+      # @return [void]
      def stop; end
    end
  end
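
The annotations above only document the existing batch API; behavior is unchanged. As a rough illustration of the documented contract (the handler class name below is made up, not part of the gem), a batch handler includes Phobos::BatchHandler and overrides consume_batch, which receives an Array of Phobos::BatchMessage objects plus a metadata Hash:

  # Hypothetical handler matching the @param tags added above.
  class MyBatchHandler
    include Phobos::BatchHandler

    def consume_batch(payloads, metadata)
      payloads.each do |message|
        puts "partition=#{message.partition} offset=#{message.offset} payload=#{message.payload}"
      end
    end
  end
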
@@ -2,8 +2,23 @@
 
 module Phobos
   class BatchMessage
-    attr_accessor :key, :partition, :offset, :payload, :headers
+    # @return
+    attr_accessor :key
+    # @return [Integer]
+    attr_accessor :partition
+    # @return [Integer]
+    attr_accessor :offset
+    # @return
+    attr_accessor :payload
+    # @return
+    attr_accessor :headers
 
+    # @param key
+    # @param partition [Integer]
+    # @param offset [Integer]
+    # @param payload
+    # @param headers
+    # @return [void]
    def initialize(key:, partition:, offset:, payload:, headers:)
      @key = key
      @partition = partition
@@ -12,6 +27,8 @@ module Phobos
      @headers = headers
    end
 
+    # @param other [Phobos::BatchMessage]
+    # @return [Boolean]
    def ==(other)
      [:key, :partition, :offset, :payload, :headers].all? do |s|
        public_send(s) == other.public_send(s)
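
For reference, the value-style equality documented above compares key, partition, offset, payload and headers, so two separately constructed messages with the same attributes are equal. A throwaway sketch (all values are placeholders):

  a = Phobos::BatchMessage.new(key: '1', partition: 0, offset: 10, payload: 'hi', headers: {})
  b = Phobos::BatchMessage.new(key: '1', partition: 0, offset: 10, payload: 'hi', headers: {})
  a == b # => true, every listed attribute matches
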
data/lib/phobos/cli.rb CHANGED
@@ -4,6 +4,7 @@ require 'thor'
 require 'phobos/cli/start'
 
 module Phobos
+  # @!visibility private
   module CLI
     def self.logger
       @logger ||= Logging.logger[self].tap do |l|
@@ -11,7 +12,8 @@ module Phobos
       end
     end
 
-    class Commands < Thor
+    # @!visibility private
+    class Commands < ::Thor
      include Thor::Actions
 
      map '-v' => :version
@@ -4,6 +4,8 @@ require 'phobos/deep_struct'
 
 module Phobos
   module Configuration
+    # @param configuration
+    # @return [void]
    def configure(configuration)
      @config = fetch_configuration(configuration)
      @config.class.send(:define_method, :producer_hash) do
@@ -16,7 +18,7 @@ module Phobos
      configure_logger
    end
 
-    # :nodoc:
+    # @return [void]
    def configure_logger
      Logging.backtrace(true)
      Logging.logger.root.level = silence_log ? :fatal : config.logger.level
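
Phobos.configure (Configuration is extended into the Phobos module) is normally called once at boot. A minimal sketch, assuming a config file at the conventional config/phobos.yml path (a settings Hash can also be passed, since fetch_configuration accepts either):

  require 'phobos'

  # Loads the config, defines config.producer_hash and sets up
  # logging via configure_logger.
  Phobos.configure('config/phobos.yml')
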
@@ -2,6 +2,7 @@
 
 module Phobos
   module Handler
+    # @!visibility private
    def self.included(base)
      base.extend(ClassMethods)
    end
@@ -9,9 +9,28 @@ module Phobos
     DEFAULT_MAX_BYTES_PER_PARTITION = 1_048_576 # 1 MB
     DELIVERY_OPTS = %w[batch message inline_batch].freeze
 
-    attr_reader :group_id, :topic, :id
-    attr_reader :handler_class, :encoding, :consumer
-
+    # @return [String]
+    attr_reader :group_id
+    # @return [String]
+    attr_reader :topic
+    attr_reader :id
+    # @return [Class<BasicObject>]
+    attr_reader :handler_class
+    attr_reader :encoding, :consumer
+
+    # @param handler [Class<BasicObject>]
+    # @param group_id [String]
+    # @param topic [String]
+    # @param min_bytes [Integer]
+    # @param max_wait_time [Integer]
+    # @param start_from_beginning [Boolean]
+    # @param delivery [String]
+    # @param max_bytes_per_partition [Integer]
+    # @param session_timeout [Integer]
+    # @param offset_commit_interval [Integer]
+    # @param heartbeat_interval [Integer]
+    # @param offset_commit_threshold [Integer]
+    # @param offset_retention_time [Integer]
    # rubocop:disable Metrics/MethodLength
    def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil,
                   force_encoding: nil, start_from_beginning: true, backoff: nil,
@@ -40,6 +59,7 @@ module Phobos
    end
    # rubocop:enable Metrics/MethodLength
 
+    # @return [void]
    def start
      @signal_to_stop = false
      start_listener
@@ -60,6 +80,7 @@ module Phobos
      stop_listener
    end
 
+    # @return [void]
    def stop
      return if should_stop?
 
@@ -2,10 +2,12 @@
 
 module Phobos
   module Producer
+    # @!visibility private
    def self.included(base)
      base.extend(Phobos::Producer::ClassMethods)
    end
 
+    # @return [Phobos::Producer::PublicAPI]
    def producer
      Phobos::Producer::PublicAPI.new(self)
    end
@@ -15,6 +17,12 @@ module Phobos
        @host_obj = host_obj
      end
 
+      # @param topic [String]
+      # @param payload [String]
+      # @param key [String]
+      # @param partition_key [Integer]
+      # @param headers [Hash]
+      # @return [void]
      def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
        class_producer.publish(topic: topic,
                               payload: payload,
@@ -23,6 +31,12 @@ module Phobos
                               headers: headers)
      end
 
+      # @param topic [String]
+      # @param payload [String]
+      # @param key [String]
+      # @param partition_key [Integer]
+      # @param headers [Hash]
+      # @return [void]
      def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
        class_producer.async_publish(topic: topic,
                                     payload: payload,
@@ -31,7 +45,7 @@ module Phobos
                                     headers: headers)
      end
 
-      # @param messages [Array(Hash(:topic, :payload, :key, :headers))]
+      # @param messages [Array<Hash>]
      #   e.g.: [
      #     { topic: 'A', payload: 'message-1', key: '1', headers: { foo: 'bar' } },
      #     { topic: 'B', payload: 'message-2', key: '2', headers: { foo: 'bar' } }
@@ -41,6 +55,7 @@ module Phobos
        class_producer.publish_list(messages)
      end
 
+      # @param messages [Array<Hash>]
      def async_publish_list(messages)
        class_producer.async_publish_list(messages)
      end
@@ -53,29 +68,35 @@ module Phobos
    end
 
    module ClassMethods
+      # @return [Phobos::Producer::ClassMethods::PublicAPI]
      def producer
        Phobos::Producer::ClassMethods::PublicAPI.new
      end
 
      class PublicAPI
+        # @return [Symbol]
        NAMESPACE = :phobos_producer_store
+        # @return [Array<Symbol>]
        ASYNC_PRODUCER_PARAMS = [:max_queue_size, :delivery_threshold, :delivery_interval].freeze
+        # @return [Array<Symbol>]
        INTERNAL_PRODUCER_PARAMS = [:persistent_connections].freeze
 
        # This method configures the kafka client used with publish operations
        # performed by the host class
        #
        # @param kafka_client [Kafka::Client]
-        #
+        # @return [void]
        def configure_kafka_client(kafka_client)
          async_producer_shutdown
          producer_store[:kafka_client] = kafka_client
        end
 
+        # @return [Kafka::Client]
        def kafka_client
          producer_store[:kafka_client]
        end
 
+        # @return [Kafka::Producer]
        def create_sync_producer
          client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
          sync_producer = client.producer(**regular_configs)
@@ -85,20 +106,29 @@ module Phobos
          sync_producer
        end
 
+        # @return [Kafka::Producer]
        def sync_producer
          producer_store[:sync_producer]
        end
 
+        # @return [void]
        def sync_producer_shutdown
          sync_producer&.shutdown
          producer_store[:sync_producer] = nil
        end
 
+        # @param topic [String]
+        # @param payload [String]
+        # @param partition_key [Integer]
+        # @param headers [Hash]
+        # @return [void]
        def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
          publish_list([{ topic: topic, payload: payload, key: key,
                          partition_key: partition_key, headers: headers }])
        end
 
+        # @param messages [Array<Hash>]
+        # @return [void]
        def publish_list(messages)
          producer = sync_producer || create_sync_producer
          produce_messages(producer, messages)
@@ -107,39 +137,51 @@ module Phobos
          producer&.shutdown unless Phobos.config.producer_hash[:persistent_connections]
        end
 
+        # @return [Kafka::AsyncProducer]
        def create_async_producer
          client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
          async_producer = client.async_producer(**async_configs)
          producer_store[:async_producer] = async_producer
        end
 
+        # @return [Kafka::AsyncProducer]
        def async_producer
          producer_store[:async_producer]
        end
 
+        # @param topic [String]
+        # @param payload [String]
+        # @param partition_key [Integer]
+        # @param headers [Hash]
+        # @return [void]
        def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
          async_publish_list([{ topic: topic, payload: payload, key: key,
                                partition_key: partition_key, headers: headers }])
        end
 
+        # @param messages [Array<Hash>]
+        # @return [void]
        def async_publish_list(messages)
          producer = async_producer || create_async_producer
          produce_messages(producer, messages)
          producer.deliver_messages unless async_automatic_delivery?
        end
 
+        # @return [void]
        def async_producer_shutdown
          async_producer&.deliver_messages
          async_producer&.shutdown
          producer_store[:async_producer] = nil
        end
 
+        # @return [Hash]
        def regular_configs
          Phobos.config.producer_hash
                .reject { |k, _| ASYNC_PRODUCER_PARAMS.include?(k) }
                .reject { |k, _| INTERNAL_PRODUCER_PARAMS.include?(k) }
        end
 
+        # @return [Hash]
        def async_configs
          Phobos.config.producer_hash
                .reject { |k, _| INTERNAL_PRODUCER_PARAMS.include?(k) }
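
Since most of the new tags in this file describe the producer's public API, here is a short usage sketch (the class name, topic and payload values are placeholders): a class includes Phobos::Producer and publishes through the class-level API documented above.

  class MyProducer
    include Phobos::Producer
  end

  # Synchronous publish; key is used as the message/partition key.
  MyProducer.producer.publish(topic: 'test-topic', payload: 'hello', key: 'k1')

  # Async variants buffer through Kafka::AsyncProducer; flush and release
  # it with async_producer_shutdown before the process exits.
  MyProducer.producer.async_publish(topic: 'test-topic', payload: 'hello')
  MyProducer.producer.async_producer_shutdown
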
@@ -1,5 +1,6 @@
 # frozen_string_literal: true
 
 module Phobos
-  VERSION = '2.1.4'
+  # @return [String]
+  VERSION = '2.1.6'
 end
data/lib/phobos.rb CHANGED
@@ -34,6 +34,7 @@ Thread.abort_on_exception = true
 Logging.init :debug, :info, :warn, :error, :fatal
 
 # Monkey patch to fix this issue: https://github.com/zendesk/ruby-kafka/pull/732
+# @!visibility private
 module Logging
   # :nodoc:
   class Logger
@@ -51,14 +52,21 @@ end
 module Phobos
   extend Configuration
   class << self
-    attr_reader :config, :logger
+    # @return [Phobos::DeepStruct]
+    attr_reader :config
+    # @return [Logger]
+    attr_reader :logger
+    # @return [Boolean]
    attr_accessor :silence_log
 
+    # @param configuration [Hash<String, Object>]
+    # @return [void]
    def add_listeners(configuration)
      listeners_config = fetch_configuration(configuration)
      @config.listeners += listeners_config.listeners
    end
 
+    # @param config_key [String]
    def create_kafka_client(config_key = nil)
      kafka_config = config.kafka.to_hash.merge(logger: @ruby_kafka_logger)
 
@@ -69,6 +77,7 @@ module Phobos
      Kafka.new(**kafka_config)
    end
 
+    # @param backoff_config [Hash<Symbol, Integer>]
    def create_exponential_backoff(backoff_config = nil)
      backoff_config ||= Phobos.config.backoff.to_hash
      min = backoff_config[:min_ms] / 1000.0
@@ -76,6 +85,8 @@ module Phobos
      ExponentialBackoff.new(min, max).tap { |backoff| backoff.randomize_factor = rand }
    end
 
+    # @param message [String]
+    # @return [void]
    def deprecate(message)
      location = caller.find { |line| line !~ %r{/phobos/} }
      warn "DEPRECATION WARNING: #{message}: #{location}"
data/phobos.gemspec CHANGED
@@ -44,7 +44,7 @@ Gem::Specification.new do |spec|
   spec.metadata['allowed_push_host'] = 'https://rubygems.org'
 
   spec.files = `git ls-files -z`.split("\x0").reject do |f|
-    f.match(%r{^(test|spec|features)/})
+    f.match(%r{^(test|spec|features|sorbet)/})
  end
  spec.bindir = 'bin'
  spec.executables = spec.files.grep(%r{^bin/phobos}) { |f| File.basename(f) }
@@ -58,7 +58,11 @@ Gem::Specification.new do |spec|
  spec.add_development_dependency 'rubocop', '0.62.0'
  spec.add_development_dependency 'rubocop_rules'
  spec.add_development_dependency 'simplecov'
+  spec.add_development_dependency 'sorbet'
+  spec.add_development_dependency 'sord'
+  spec.add_development_dependency 'tapioca'
  spec.add_development_dependency 'timecop'
+  spec.add_development_dependency 'yard'
 
  spec.add_dependency 'activesupport', '>= 3.0.0'
  spec.add_dependency 'concurrent-ruby', '>= 1.0.2'