phobos 2.1.4 → 2.1.5

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 02e1b97b3d44dffefffea99da0c15393997043598697d88fe3f7c7ab83549193
-  data.tar.gz: 4e3ec45a1db997825ace585a1535ac5c1504105afabe0470c0c676f8f301c66a
+  metadata.gz: 24a11008734215b1d581b98e05eac705dd079e9fc6ccbd860c172c9d353ae967
+  data.tar.gz: ed890beddcc432d630a1f8484bd40383ca97fc0120359219012f3d30e9ee541b
 SHA512:
-  metadata.gz: b249c05f44dfe0dcb0f81b8177dd27a129e4364e0dbcbf8492295d9823473a7de66ee913c088945346a4e1943b3f2226c8106083cf5502d90341b8bb8fbac0eb
-  data.tar.gz: f6725f584a75f099bb2b4624338e424f7ba44859ace5c660bfc20d8f5681d663a1cd051381d18f832659f87bd1406d5c8aea3098112876a1f119649b56645be6
+  metadata.gz: 87a05771626c4c1aeb849cc7857769beca6648dac5e2f87e11fdb62ba798e036d6c366f848c274d7a17ff209f4754380266f21a6fe68d7c7cdcd70d369e6dea3
+  data.tar.gz: ec3e274766f29c73ba31037fb6cbce779b9d2bb4d89f7da127973046682fa3426450b006aba94c9f67ab559a062b2e4d1f28d25667dc9d9eb8de279d69eb84a1
data/CHANGELOG.md CHANGED
@@ -6,6 +6,10 @@ and this project adheres to [Semantic Versioning](http://semver.org/).
 ``
 ## UNRELEASED
 
+## [2.1.5] - 2022-09-08
+
+- Added RBS signatures.
+
 ## [2.1.4] - 2022-06-15
 
 - Re-raise consuming errors so that threads don't die forever and are retried
data/lib/phobos/batch_handler.rb CHANGED
@@ -2,21 +2,32 @@
 
 module Phobos
   module BatchHandler
+    # @!visibility private
     def self.included(base)
       base.extend(ClassMethods)
     end
 
+    # @param _payloads [Array]
+    # @param _metadata [Hash<String, Object>]
+    # @return [void]
     def consume_batch(_payloads, _metadata)
       raise NotImplementedError
     end
 
+    # @param payloads [Array]
+    # @param metadata [Hash<String, Object>]
+    # @yield [Array, Hash<String, Object>]
+    # @return [void]
     def around_consume_batch(payloads, metadata)
       yield payloads, metadata
     end
 
     module ClassMethods
+      # @param kafka_client
+      # @return [void]
       def start(kafka_client); end
 
+      # @return [void]
       def stop; end
     end
   end
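For context, the tags added above document the batch-handler contract: `consume_batch` receives an array of payloads plus a metadata hash. A minimal sketch of a handler written against that contract (the class name and the logging are illustrative, not part of the gem):

```ruby
# Hypothetical handler; only the Phobos::BatchHandler API documented above is assumed.
class MyBatchHandler
  include Phobos::BatchHandler

  # payloads is an Array and metadata a Hash, per the @param tags above
  def consume_batch(payloads, metadata)
    payloads.each { |payload| puts "consumed #{payload.inspect} (#{metadata.inspect})" }
  end
end
```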
data/lib/phobos/batch_message.rb CHANGED
@@ -2,8 +2,23 @@
 
 module Phobos
   class BatchMessage
-    attr_accessor :key, :partition, :offset, :payload, :headers
+    # @return
+    attr_accessor :key
+    # @return [Integer]
+    attr_accessor :partition
+    # @return [Integer]
+    attr_accessor :offset
+    # @return
+    attr_accessor :payload
+    # @return
+    attr_accessor :headers
 
+    # @param key
+    # @param partition [Integer]
+    # @param offset [Integer]
+    # @param payload
+    # @param headers
+    # @return [void]
     def initialize(key:, partition:, offset:, payload:, headers:)
       @key = key
       @partition = partition
@@ -12,6 +27,8 @@ module Phobos
       @headers = headers
     end
 
+    # @param other [Phobos::BatchMessage]
+    # @return [Boolean]
     def ==(other)
       [:key, :partition, :offset, :payload, :headers].all? do |s|
         public_send(s) == other.public_send(s)
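The `==` documented above compares the five listed attributes, so two messages built with the same values are equal. A small illustration (values are placeholders):

```ruby
a = Phobos::BatchMessage.new(key: '1', partition: 0, offset: 42, payload: 'hi', headers: {})
b = Phobos::BatchMessage.new(key: '1', partition: 0, offset: 42, payload: 'hi', headers: {})
a == b # => true, every compared attribute matches
```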
data/lib/phobos/cli.rb CHANGED
@@ -4,6 +4,7 @@ require 'thor'
 require 'phobos/cli/start'
 
 module Phobos
+  # @!visibility private
   module CLI
     def self.logger
       @logger ||= Logging.logger[self].tap do |l|
@@ -11,6 +12,7 @@ module Phobos
       end
     end
 
+    # @!visibility private
     class Commands < Thor
       include Thor::Actions
 
data/lib/phobos/configuration.rb CHANGED
@@ -4,6 +4,8 @@ require 'phobos/deep_struct'
 
 module Phobos
   module Configuration
+    # @param configuration
+    # @return [void]
     def configure(configuration)
       @config = fetch_configuration(configuration)
       @config.class.send(:define_method, :producer_hash) do
@@ -16,7 +18,7 @@ module Phobos
       configure_logger
     end
 
-    # :nodoc:
+    # @return [void]
     def configure_logger
       Logging.backtrace(true)
       Logging.logger.root.level = silence_log ? :fatal : config.logger.level
data/lib/phobos/handler.rb CHANGED
@@ -2,6 +2,7 @@
 
 module Phobos
   module Handler
+    # @!visibility private
     def self.included(base)
       base.extend(ClassMethods)
     end
data/lib/phobos/listener.rb CHANGED
@@ -9,9 +9,28 @@ module Phobos
     DEFAULT_MAX_BYTES_PER_PARTITION = 1_048_576 # 1 MB
     DELIVERY_OPTS = %w[batch message inline_batch].freeze
 
-    attr_reader :group_id, :topic, :id
-    attr_reader :handler_class, :encoding, :consumer
-
+    # @return [String]
+    attr_reader :group_id
+    # @return [String]
+    attr_reader :topic
+    attr_reader :id
+    # @return [Class]
+    attr_reader :handler_class
+    attr_reader :encoding, :consumer
+
+    # @param handler [Class]
+    # @param group_id [String]
+    # @param topic [String]
+    # @param min_bytes [Integer]
+    # @param max_wait_time [Integer]
+    # @param start_from_beginning [Boolean]
+    # @param delivery [String]
+    # @param max_bytes_per_partition [Integer]
+    # @param session_timeout [Integer]
+    # @param offset_commit_interval [Integer]
+    # @param heartbeat_interval [Integer]
+    # @param offset_commit_threshold [Integer]
+    # @param offset_retention_time [Integer]
     # rubocop:disable Metrics/MethodLength
     def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil,
                    force_encoding: nil, start_from_beginning: true, backoff: nil,
@@ -40,6 +59,7 @@ module Phobos
     end
     # rubocop:enable Metrics/MethodLength
 
+    # @return [void]
     def start
       @signal_to_stop = false
       start_listener
@@ -60,6 +80,7 @@ module Phobos
       stop_listener
     end
 
+    # @return [void]
     def stop
       return if should_stop?
 
data/lib/phobos/producer.rb CHANGED
@@ -2,10 +2,12 @@
 
 module Phobos
   module Producer
+    # @!visibility private
     def self.included(base)
       base.extend(Phobos::Producer::ClassMethods)
     end
 
+    # @return [Phobos::Producer::PublicAPI]
     def producer
       Phobos::Producer::PublicAPI.new(self)
     end
@@ -15,6 +17,12 @@ module Phobos
         @host_obj = host_obj
       end
 
+      # @param topic [String]
+      # @param payload [String]
+      # @param key [String]
+      # @param partition_key [Integer]
+      # @param headers [Hash]
+      # @return [void]
       def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
         class_producer.publish(topic: topic,
                                payload: payload,
@@ -23,6 +31,12 @@ module Phobos
                                headers: headers)
       end
 
+      # @param topic [String]
+      # @param payload [String]
+      # @param key [String]
+      # @param partition_key [Integer]
+      # @param headers [Hash]
+      # @return [void]
      def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
        class_producer.async_publish(topic: topic,
                                     payload: payload,
@@ -31,7 +45,7 @@ module Phobos
                                     headers: headers)
      end
 
-      # @param messages [Array(Hash(:topic, :payload, :key, :headers))]
+      # @param messages [Array<Hash>]
      # e.g.: [
      #   { topic: 'A', payload: 'message-1', key: '1', headers: { foo: 'bar' } },
      #   { topic: 'B', payload: 'message-2', key: '2', headers: { foo: 'bar' } }
@@ -41,6 +55,7 @@ module Phobos
        class_producer.publish_list(messages)
      end
 
+      # @param messages [Array<Hash>]
      def async_publish_list(messages)
        class_producer.async_publish_list(messages)
      end
@@ -53,29 +68,35 @@ module Phobos
    end
 
    module ClassMethods
+      # @return [Phobos::Producer::ClassMethods::PublicAPI]
      def producer
        Phobos::Producer::ClassMethods::PublicAPI.new
      end
 
      class PublicAPI
+        # @return [Symbol]
        NAMESPACE = :phobos_producer_store
+        # @return [Array<Symbol>]
        ASYNC_PRODUCER_PARAMS = [:max_queue_size, :delivery_threshold, :delivery_interval].freeze
+        # @return [Array<Symbol>]
        INTERNAL_PRODUCER_PARAMS = [:persistent_connections].freeze
 
        # This method configures the kafka client used with publish operations
        # performed by the host class
        #
        # @param kafka_client [Kafka::Client]
-        #
+        # @return [void]
        def configure_kafka_client(kafka_client)
          async_producer_shutdown
          producer_store[:kafka_client] = kafka_client
        end
 
+        # @return [Kafka::Client]
        def kafka_client
          producer_store[:kafka_client]
        end
 
+        # @return [Kafka::Producer]
        def create_sync_producer
          client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
          sync_producer = client.producer(**regular_configs)
@@ -85,20 +106,29 @@ module Phobos
          sync_producer
        end
 
+        # @return [Kafka::Producer]
        def sync_producer
          producer_store[:sync_producer]
        end
 
+        # @return [void]
        def sync_producer_shutdown
          sync_producer&.shutdown
          producer_store[:sync_producer] = nil
        end
 
+        # @param topic [String]
+        # @param payload [String]
+        # @param partition_key [Integer]
+        # @param headers [Hash]
+        # @return [void]
        def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
          publish_list([{ topic: topic, payload: payload, key: key,
                          partition_key: partition_key, headers: headers }])
        end
 
+        # @param messages [Array<Hash>]
+        # @return [void]
        def publish_list(messages)
          producer = sync_producer || create_sync_producer
          produce_messages(producer, messages)
@@ -107,39 +137,51 @@ module Phobos
          producer&.shutdown unless Phobos.config.producer_hash[:persistent_connections]
        end
 
+        # @return [Kafka::AsyncProducer]
        def create_async_producer
          client = kafka_client || configure_kafka_client(Phobos.create_kafka_client(:producer))
          async_producer = client.async_producer(**async_configs)
          producer_store[:async_producer] = async_producer
        end
 
+        # @return [Kafka::AsyncProducer]
        def async_producer
          producer_store[:async_producer]
        end
 
+        # @param topic [String]
+        # @param payload [String]
+        # @param partition_key [Integer]
+        # @param headers [Hash]
+        # @return [void]
        def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil)
          async_publish_list([{ topic: topic, payload: payload, key: key,
                                partition_key: partition_key, headers: headers }])
        end
 
+        # @param messages [Array<Hash>]
+        # @return [void]
        def async_publish_list(messages)
          producer = async_producer || create_async_producer
          produce_messages(producer, messages)
          producer.deliver_messages unless async_automatic_delivery?
        end
 
+        # @return [void]
        def async_producer_shutdown
          async_producer&.deliver_messages
          async_producer&.shutdown
          producer_store[:async_producer] = nil
        end
 
+        # @return [Hash]
        def regular_configs
          Phobos.config.producer_hash
                .reject { |k, _| ASYNC_PRODUCER_PARAMS.include?(k) }
                .reject { |k, _| INTERNAL_PRODUCER_PARAMS.include?(k) }
        end
 
+        # @return [Hash]
        def async_configs
          Phobos.config.producer_hash
                .reject { |k, _| INTERNAL_PRODUCER_PARAMS.include?(k) }
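The tags added above document the existing publish API rather than changing it; roughly, usage looks like the sketch below (the class, topic, and payload values are placeholders, and a configured Phobos/Kafka setup is assumed):

```ruby
# Hypothetical producer class; only the Phobos::Producer API shown above is assumed.
class MyProducer
  include Phobos::Producer
end

# Synchronous publish of a single message.
MyProducer.producer.publish(topic: 'test', payload: 'message-1', key: '1')

# Async publish of a list, matching the Array<Hash> shape documented above.
MyProducer.producer.async_publish_list([
  { topic: 'test', payload: 'message-2', key: '2', headers: { foo: 'bar' } },
  { topic: 'test', payload: 'message-3', key: '3' }
])
```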
data/lib/phobos/version.rb CHANGED
@@ -1,5 +1,6 @@
 # frozen_string_literal: true
 
 module Phobos
-  VERSION = '2.1.4'
+  # @return [String]
+  VERSION = '2.1.5'
 end
data/lib/phobos.rb CHANGED
@@ -34,6 +34,7 @@ Thread.abort_on_exception = true
 Logging.init :debug, :info, :warn, :error, :fatal
 
 # Monkey patch to fix this issue: https://github.com/zendesk/ruby-kafka/pull/732
+# @!visibility private
 module Logging
   # :nodoc:
   class Logger
@@ -51,14 +52,21 @@ end
 module Phobos
   extend Configuration
   class << self
-    attr_reader :config, :logger
+    # @return [Phobos::DeepStruct]
+    attr_reader :config
+    # @return [Logger]
+    attr_reader :logger
+    # @return [Boolean]
     attr_accessor :silence_log
 
+    # @param configuration [Hash<String, Object>]
+    # @return [void]
     def add_listeners(configuration)
       listeners_config = fetch_configuration(configuration)
       @config.listeners += listeners_config.listeners
     end
 
+    # @param config_key [String]
     def create_kafka_client(config_key = nil)
       kafka_config = config.kafka.to_hash.merge(logger: @ruby_kafka_logger)
 
@@ -69,6 +77,7 @@ module Phobos
       Kafka.new(**kafka_config)
     end
 
+    # @param backoff_config [Hash<Symbol, Integer>]
     def create_exponential_backoff(backoff_config = nil)
       backoff_config ||= Phobos.config.backoff.to_hash
       min = backoff_config[:min_ms] / 1000.0
@@ -76,6 +85,8 @@ module Phobos
       ExponentialBackoff.new(min, max).tap { |backoff| backoff.randomize_factor = rand }
     end
 
+    # @param message [String]
+    # @return [void]
     def deprecate(message)
       location = caller.find { |line| line !~ %r{/phobos/} }
       warn "DEPRECATION WARNING: #{message}: #{location}"
data/phobos.gemspec CHANGED
@@ -58,7 +58,9 @@ Gem::Specification.new do |spec|
   spec.add_development_dependency 'rubocop', '0.62.0'
   spec.add_development_dependency 'rubocop_rules'
   spec.add_development_dependency 'simplecov'
+  spec.add_development_dependency 'sord'
   spec.add_development_dependency 'timecop'
+  spec.add_development_dependency 'yard'
 
   spec.add_dependency 'activesupport', '>= 3.0.0'
   spec.add_dependency 'concurrent-ruby', '>= 1.0.2'
data/sig/defs.rbs ADDED
@@ -0,0 +1,459 @@
+# Please use this with at least the same consideration as you would when using OpenStruct.
+# Right now we only use this to parse our internal configuration files. It is not meant to
+# be used on incoming data.
+module Phobos
+  extend Phobos::Configuration
+  VERSION: String
+
+  # _@param_ `configuration`
+  def self.add_listeners: (::Hash[String, Object] configuration) -> void
+
+  # _@param_ `config_key`
+  def self.create_kafka_client: (?String? config_key) -> untyped
+
+  # _@param_ `backoff_config`
+  def self.create_exponential_backoff: (?::Hash[Symbol, Integer]? backoff_config) -> untyped
+
+  # _@param_ `message`
+  def self.deprecate: (String message) -> void
+
+  # _@param_ `configuration`
+  def self.configure: (untyped configuration) -> void
+
+  def self.configure_logger: () -> void
+
+  def self.config: () -> Phobos::DeepStruct
+
+  def self.logger: () -> Logger
+
+  def self.silence_log: () -> bool
+
+  def self.silence_log=: (bool value) -> bool
+
+  module Log
+    def log_info: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_debug: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_error: (untyped msg, untyped metadata) -> untyped
+
+    def log_warn: (untyped msg, ?untyped metadata) -> untyped
+  end
+
+  module LoggerHelper
+    def self.log: (untyped method, untyped msg, untyped metadata) -> untyped
+  end
+
+  class Error < StandardError
+  end
+
+  class AbortError < Phobos::Error
+  end
+
+  module Handler
+    def consume: (untyped _payload, untyped _metadata) -> untyped
+
+    def around_consume: (untyped payload, untyped metadata) -> untyped
+
+    module ClassMethods
+      def start: (untyped kafka_client) -> untyped
+
+      def stop: () -> untyped
+    end
+  end
+
+  class Executor
+    include Phobos::Instrumentation
+    include Phobos::Log
+
+    def initialize: () -> void
+
+    def start: () -> untyped
+
+    def stop: () -> untyped
+
+    def log_info: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_debug: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_error: (untyped msg, untyped metadata) -> untyped
+
+    def log_warn: (untyped msg, ?untyped metadata) -> untyped
+
+    def instrument: (untyped event, ?untyped extra) -> untyped
+  end
+
+  # rubocop:disable Metrics/ParameterLists, Metrics/ClassLength
+  class Listener
+    include Phobos::Instrumentation
+    include Phobos::Log
+    DEFAULT_MAX_BYTES_PER_PARTITION: untyped
+    DELIVERY_OPTS: untyped
+
+    # rubocop:disable Metrics/MethodLength
+    #
+    # _@param_ `handler`
+    #
+    # _@param_ `group_id`
+    #
+    # _@param_ `topic`
+    #
+    # _@param_ `min_bytes`
+    #
+    # _@param_ `max_wait_time`
+    #
+    # _@param_ `start_from_beginning`
+    #
+    # _@param_ `delivery`
+    #
+    # _@param_ `max_bytes_per_partition`
+    #
+    # _@param_ `session_timeout`
+    #
+    # _@param_ `offset_commit_interval`
+    #
+    # _@param_ `heartbeat_interval`
+    #
+    # _@param_ `offset_commit_threshold`
+    #
+    # _@param_ `offset_retention_time`
+    def initialize: (
+                      handler: Class,
+                      group_id: String,
+                      topic: String,
+                      ?min_bytes: Integer?,
+                      ?max_wait_time: Integer?,
+                      ?force_encoding: untyped,
+                      ?start_from_beginning: bool,
+                      ?backoff: untyped,
+                      ?delivery: String,
+                      ?max_bytes_per_partition: Integer,
+                      ?session_timeout: Integer?,
+                      ?offset_commit_interval: Integer?,
+                      ?heartbeat_interval: Integer?,
+                      ?offset_commit_threshold: Integer?,
+                      ?offset_retention_time: Integer?
+                    ) -> void
+
+    def start: () -> void
+
+    def stop: () -> void
+
+    def create_exponential_backoff: () -> untyped
+
+    def should_stop?: () -> bool
+
+    def send_heartbeat_if_necessary: () -> untyped
+
+    def log_info: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_debug: (untyped msg, ?untyped metadata) -> untyped
+
+    def log_error: (untyped msg, untyped metadata) -> untyped
+
+    def log_warn: (untyped msg, ?untyped metadata) -> untyped
+
+    def instrument: (untyped event, ?untyped extra) -> untyped
+
+    attr_reader group_id: String
+
+    attr_reader topic: String
+
+    # Returns the value of attribute id.
+    attr_reader id: untyped
+
+    attr_reader handler_class: Class
+
+    # Returns the value of attribute encoding.
+    attr_reader encoding: untyped
+
+    # Returns the value of attribute consumer.
+    attr_reader consumer: untyped
+  end
+
+  module Producer
+    def producer: () -> Phobos::Producer::PublicAPI
+
+    class PublicAPI
+      def initialize: (untyped host_obj) -> void
+
+      # _@param_ `topic`
+      #
+      # _@param_ `payload`
+      #
+      # _@param_ `key`
+      #
+      # _@param_ `partition_key`
+      #
+      # _@param_ `headers`
+      def publish: (
+                     topic: String,
+                     payload: String,
+                     ?key: String?,
+                     ?partition_key: Integer?,
+                     ?headers: ::Hash[untyped, untyped]?
+                   ) -> void
+
+      # _@param_ `topic`
+      #
+      # _@param_ `payload`
+      #
+      # _@param_ `key`
+      #
+      # _@param_ `partition_key`
+      #
+      # _@param_ `headers`
+      def async_publish: (
+                           topic: String,
+                           payload: String,
+                           ?key: String?,
+                           ?partition_key: Integer?,
+                           ?headers: ::Hash[untyped, untyped]?
+                         ) -> void
+
+      # _@param_ `messages` — e.g.: [ { topic: 'A', payload: 'message-1', key: '1', headers: { foo: 'bar' } }, { topic: 'B', payload: 'message-2', key: '2', headers: { foo: 'bar' } } ]
+      def publish_list: (::Array[::Hash[untyped, untyped]] messages) -> untyped
+
+      # _@param_ `messages`
+      def async_publish_list: (::Array[::Hash[untyped, untyped]] messages) -> untyped
+    end
+
+    module ClassMethods
+      def producer: () -> Phobos::Producer::ClassMethods::PublicAPI
+
+      class PublicAPI
+        NAMESPACE: Symbol
+        ASYNC_PRODUCER_PARAMS: ::Array[Symbol]
+        INTERNAL_PRODUCER_PARAMS: ::Array[Symbol]
+
+        # This method configures the kafka client used with publish operations
+        # performed by the host class
+        #
+        # _@param_ `kafka_client`
+        def configure_kafka_client: (Kafka::Client kafka_client) -> void
+
+        def kafka_client: () -> Kafka::Client
+
+        def create_sync_producer: () -> Kafka::Producer
+
+        def sync_producer: () -> Kafka::Producer
+
+        def sync_producer_shutdown: () -> void
+
+        # _@param_ `topic`
+        #
+        # _@param_ `payload`
+        #
+        # _@param_ `partition_key`
+        #
+        # _@param_ `headers`
+        def publish: (
+                       topic: String,
+                       payload: String,
+                       ?key: untyped,
+                       ?partition_key: Integer?,
+                       ?headers: ::Hash[untyped, untyped]?
+                     ) -> void
+
+        # _@param_ `messages`
+        def publish_list: (::Array[::Hash[untyped, untyped]] messages) -> void
+
+        def create_async_producer: () -> Kafka::AsyncProducer
+
+        def async_producer: () -> Kafka::AsyncProducer
+
+        # _@param_ `topic`
+        #
+        # _@param_ `payload`
+        #
+        # _@param_ `partition_key`
+        #
+        # _@param_ `headers`
+        def async_publish: (
+                             topic: String,
+                             payload: String,
+                             ?key: untyped,
+                             ?partition_key: Integer?,
+                             ?headers: ::Hash[untyped, untyped]?
+                           ) -> void
+
+        # _@param_ `messages`
+        def async_publish_list: (::Array[::Hash[untyped, untyped]] messages) -> void
+
+        def async_producer_shutdown: () -> void
+
+        def regular_configs: () -> ::Hash[untyped, untyped]
+
+        def async_configs: () -> ::Hash[untyped, untyped]
+      end
+    end
+  end
+
+  module Constants
+    LOG_DATE_PATTERN: untyped
+    KAFKA_CONSUMER_OPTS: untyped
+    LISTENER_OPTS: untyped
+  end
+
+  module Processor
+    include Phobos::Instrumentation
+    extend ActiveSupport::Concern
+    MAX_SLEEP_INTERVAL: untyped
+
+    def snooze: (untyped interval) -> untyped
+
+    def instrument: (untyped event, ?untyped extra) -> untyped
+  end
+
+  class DeepStruct < OpenStruct
+    # Based on
+    # https://docs.omniref.com/ruby/2.3.0/files/lib/ostruct.rb#line=88
+    def initialize: (?untyped hash) -> void
+
+    def to_h: () -> untyped
+  end
+
+  module Test
+    module Helper
+      TOPIC: untyped
+      GROUP: untyped
+
+      def process_message: (
+                             handler: untyped,
+                             payload: untyped,
+                             ?metadata: untyped,
+                             ?force_encoding: untyped
+                           ) -> untyped
+    end
+  end
+
+  class EchoHandler
+    include Phobos::Handler
+
+    def consume: (untyped message, untyped metadata) -> untyped
+
+    def around_consume: (untyped payload, untyped metadata) -> untyped
+  end
+
+  module BatchHandler
+    # _@param_ `_payloads`
+    #
+    # _@param_ `_metadata`
+    def consume_batch: (::Array[untyped] _payloads, ::Hash[String, Object] _metadata) -> void
+
+    # _@param_ `payloads`
+    #
+    # _@param_ `metadata`
+    def around_consume_batch: (::Array[untyped] payloads, ::Hash[String, Object] metadata) -> void
+
+    module ClassMethods
+      # _@param_ `kafka_client`
+      def start: (untyped kafka_client) -> void
+
+      def stop: () -> void
+    end
+  end
+
+  class BatchMessage
+    # _@param_ `key`
+    #
+    # _@param_ `partition`
+    #
+    # _@param_ `offset`
+    #
+    # _@param_ `payload`
+    #
+    # _@param_ `headers`
+    def initialize: (
+                      key: untyped,
+                      partition: Integer,
+                      offset: Integer,
+                      payload: untyped,
+                      headers: untyped
+                    ) -> void
+
+    # _@param_ `other`
+    def ==: (Phobos::BatchMessage other) -> bool
+
+    attr_accessor key: untyped
+
+    attr_accessor partition: Integer
+
+    attr_accessor offset: Integer
+
+    attr_accessor payload: untyped
+
+    attr_accessor headers: untyped
+  end
+
+  module Configuration
+    # _@param_ `configuration`
+    def configure: (untyped configuration) -> void
+
+    def configure_logger: () -> void
+  end
+
+  module Instrumentation
+    NAMESPACE: untyped
+
+    def self.subscribe: (untyped event) -> untyped
+
+    def self.unsubscribe: (untyped subscriber) -> untyped
+
+    def instrument: (untyped event, ?untyped extra) -> untyped
+  end
+
+  module Actions
+    class ProcessBatch
+      include Phobos::Instrumentation
+      include Phobos::Log
+
+      def initialize: (listener: untyped, batch: untyped, listener_metadata: untyped) -> void
+
+      def execute: () -> untyped
+
+      def log_info: (untyped msg, ?untyped metadata) -> untyped
+
+      def log_debug: (untyped msg, ?untyped metadata) -> untyped
+
+      def log_error: (untyped msg, untyped metadata) -> untyped
+
+      def log_warn: (untyped msg, ?untyped metadata) -> untyped
+
+      def instrument: (untyped event, ?untyped extra) -> untyped
+
+      # Returns the value of attribute metadata.
+      attr_reader metadata: untyped
+    end
+
+    class ProcessMessage
+      include Phobos::Processor
+
+      def initialize: (listener: untyped, message: untyped, listener_metadata: untyped) -> void
+
+      def execute: () -> untyped
+
+      def snooze: (untyped interval) -> untyped
+
+      def instrument: (untyped event, ?untyped extra) -> untyped
+
+      # Returns the value of attribute metadata.
+      attr_reader metadata: untyped
+    end
+
+    class ProcessBatchInline
+      include Phobos::Processor
+
+      def initialize: (listener: untyped, batch: untyped, metadata: untyped) -> void
+
+      def execute: () -> untyped
+
+      def snooze: (untyped interval) -> untyped
+
+      def instrument: (untyped event, ?untyped extra) -> untyped
+
+      # Returns the value of attribute metadata.
+      attr_reader metadata: untyped
+    end
+  end
+end
data/sig/kafka.rbs ADDED
@@ -0,0 +1,12 @@
+# Ruby-Kafka definitions
+
+module Kafka
+  class Client
+  end
+
+  class Producer
+  end
+
+  class AsyncProducer
+  end
+end
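These two files are what the "Added RBS signatures" changelog entry refers to: `sig/defs.rbs` is generated from the YARD tags (hence the sord and yard development dependencies above), and `sig/kafka.rbs` stubs the ruby-kafka classes the signatures reference. A minimal sketch of how a downstream project might use them with Steep, assuming Steep resolves gem-bundled `sig/` directories via its `library` directive:

```ruby
# Steepfile (hypothetical consumer project)
target :app do
  check 'app'        # this project's Ruby code
  signature 'sig'    # this project's own RBS files
  library 'phobos'   # assumed to pull in the gem's sig/defs.rbs and sig/kafka.rbs
end
```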
metadata CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: phobos
 version: !ruby/object:Gem::Version
-  version: 2.1.4
+  version: 2.1.5
 platform: ruby
 authors:
 - Túlio Ornelas
@@ -15,7 +15,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2022-06-15 00:00:00.000000000 Z
+date: 2022-09-08 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -115,6 +115,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: sord
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: timecop
   requirement: !ruby/object:Gem::Requirement
@@ -129,6 +143,20 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: yard
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: activesupport
   requirement: !ruby/object:Gem::Requirement
@@ -294,6 +322,8 @@ files:
 - lib/phobos/version.rb
 - logo.png
 - phobos.gemspec
+- sig/defs.rbs
+- sig/kafka.rbs
 - utils/create-topic.sh
 homepage: https://github.com/klarna/phobos
 licenses:
@@ -315,7 +345,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.3.5
+rubygems_version: 3.3.21
 signing_key:
 specification_version: 4
 summary: Simplifying Kafka for ruby apps