phobos 2.1.4 → 2.1.6
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.gitattributes +4 -0
- data/CHANGELOG.md +4 -0
- data/Gemfile +4 -0
- data/bin/tapioca +29 -0
- data/defs.rbi +740 -0
- data/lib/phobos/batch_handler.rb +11 -0
- data/lib/phobos/batch_message.rb +18 -1
- data/lib/phobos/cli.rb +3 -1
- data/lib/phobos/configuration.rb +3 -1
- data/lib/phobos/handler.rb +1 -0
- data/lib/phobos/listener.rb +24 -3
- data/lib/phobos/producer.rb +44 -2
- data/lib/phobos/version.rb +2 -1
- data/lib/phobos.rb +12 -1
- data/phobos.gemspec +5 -1
- data/rbi/defs.rbi +601 -0
- metadata +63 -3
data/rbi/defs.rbi
ADDED
@@ -0,0 +1,601 @@
+# typed: strong
+# Please use this with at least the same consideration as you would when using OpenStruct.
+# Right now we only use this to parse our internal configuration files. It is not meant to
+# be used on incoming data.
+module Phobos
+  extend Phobos::Configuration
+  VERSION = T.let('2.1.4', T.untyped)
+
+  class << self
+    sig { returns(Phobos::DeepStruct) }
+    attr_reader :config
+
+    sig { returns(Logger) }
+    attr_reader :logger
+
+    sig { returns(T::Boolean) }
+    attr_accessor :silence_log
+  end
+
+  # _@param_ `configuration`
+  sig { params(configuration: T::Hash[String, Object]).void }
+  def self.add_listeners(configuration); end
+
+  # _@param_ `config_key`
+  sig { params(config_key: T.nilable(String)).returns(T.untyped) }
+  def self.create_kafka_client(config_key = nil); end
+
+  # _@param_ `backoff_config`
+  sig { params(backoff_config: T.nilable(T::Hash[Symbol, Integer])).returns(T.untyped) }
+  def self.create_exponential_backoff(backoff_config = nil); end
+
+  # _@param_ `message`
+  sig { params(message: String).void }
+  def self.deprecate(message); end
+
+  # _@param_ `configuration`
+  sig { params(configuration: T.untyped).void }
+  def self.configure(configuration); end
+
+  sig { void }
+  def self.configure_logger; end
+
+  module Log
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_info(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_debug(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_error(msg, metadata); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_warn(msg, metadata = {}); end
+  end
+
+  module LoggerHelper
+    sig { params(method: T.untyped, msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def self.log(method, msg, metadata); end
+  end
+
+  class Error < StandardError
+  end
+
+  class AbortError < Phobos::Error
+  end
+
+  module Handler
+    sig { params(_payload: T.untyped, _metadata: T.untyped).returns(T.untyped) }
+    def consume(_payload, _metadata); end
+
+    sig { params(payload: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def around_consume(payload, metadata); end
+
+    module ClassMethods
+      sig { params(kafka_client: T.untyped).returns(T.untyped) }
+      def start(kafka_client); end
+
+      sig { returns(T.untyped) }
+      def stop; end
+    end
+  end
+
+  class Executor
+    include Phobos::Instrumentation
+    include Phobos::Log
+
+    sig { void }
+    def initialize; end
+
+    sig { returns(T.untyped) }
+    def start; end
+
+    sig { returns(T.untyped) }
+    def stop; end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_info(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_debug(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_error(msg, metadata); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_warn(msg, metadata = {}); end
+
+    sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+    def instrument(event, extra = {}); end
+  end
+
+  # rubocop:disable Metrics/ParameterLists, Metrics/ClassLength
+  class Listener
+    include Phobos::Instrumentation
+    include Phobos::Log
+    DEFAULT_MAX_BYTES_PER_PARTITION = T.let(1_048_576, T.untyped)
+    DELIVERY_OPTS = T.let(%w[batch message inline_batch].freeze, T.untyped)
+
+    # rubocop:disable Metrics/MethodLength
+    #
+    # _@param_ `handler`
+    #
+    # _@param_ `group_id`
+    #
+    # _@param_ `topic`
+    #
+    # _@param_ `min_bytes`
+    #
+    # _@param_ `max_wait_time`
+    #
+    # _@param_ `start_from_beginning`
+    #
+    # _@param_ `delivery`
+    #
+    # _@param_ `max_bytes_per_partition`
+    #
+    # _@param_ `session_timeout`
+    #
+    # _@param_ `offset_commit_interval`
+    #
+    # _@param_ `heartbeat_interval`
+    #
+    # _@param_ `offset_commit_threshold`
+    #
+    # _@param_ `offset_retention_time`
+    sig do
+      params(
+        handler: T.class_of(BasicObject),
+        group_id: String,
+        topic: String,
+        min_bytes: T.nilable(Integer),
+        max_wait_time: T.nilable(Integer),
+        force_encoding: T.untyped,
+        start_from_beginning: T::Boolean,
+        backoff: T.untyped,
+        delivery: String,
+        max_bytes_per_partition: Integer,
+        session_timeout: T.nilable(Integer),
+        offset_commit_interval: T.nilable(Integer),
+        heartbeat_interval: T.nilable(Integer),
+        offset_commit_threshold: T.nilable(Integer),
+        offset_retention_time: T.nilable(Integer)
+      ).void
+    end
+    def initialize(handler:, group_id:, topic:, min_bytes: nil, max_wait_time: nil, force_encoding: nil, start_from_beginning: true, backoff: nil, delivery: 'batch', max_bytes_per_partition: DEFAULT_MAX_BYTES_PER_PARTITION, session_timeout: nil, offset_commit_interval: nil, heartbeat_interval: nil, offset_commit_threshold: nil, offset_retention_time: nil); end
+
+    sig { void }
+    def start; end
+
+    sig { void }
+    def stop; end
+
+    sig { returns(T.untyped) }
+    def create_exponential_backoff; end
+
+    sig { returns(T::Boolean) }
+    def should_stop?; end
+
+    sig { returns(T.untyped) }
+    def send_heartbeat_if_necessary; end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_info(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_debug(msg, metadata = {}); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_error(msg, metadata); end
+
+    sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def log_warn(msg, metadata = {}); end
+
+    sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+    def instrument(event, extra = {}); end
+
+    sig { returns(String) }
+    attr_reader :group_id
+
+    sig { returns(String) }
+    attr_reader :topic
+
+    # Returns the value of attribute id.
+    sig { returns(T.untyped) }
+    attr_reader :id
+
+    sig { returns(T.class_of(BasicObject)) }
+    attr_reader :handler_class
+
+    # Returns the value of attribute encoding.
+    sig { returns(T.untyped) }
+    attr_reader :encoding
+
+    # Returns the value of attribute consumer.
+    sig { returns(T.untyped) }
+    attr_reader :consumer
+  end
+
+  module Producer
+    sig { returns(Phobos::Producer::PublicAPI) }
+    def producer; end
+
+    class PublicAPI
+      sig { params(host_obj: T.untyped).void }
+      def initialize(host_obj); end
+
+      # _@param_ `topic`
+      #
+      # _@param_ `payload`
+      #
+      # _@param_ `key`
+      #
+      # _@param_ `partition_key`
+      #
+      # _@param_ `headers`
+      sig do
+        params(
+          topic: String,
+          payload: String,
+          key: T.nilable(String),
+          partition_key: T.nilable(Integer),
+          headers: T.nilable(T::Hash[T.untyped, T.untyped])
+        ).void
+      end
+      def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil); end
+
+      # _@param_ `topic`
+      #
+      # _@param_ `payload`
+      #
+      # _@param_ `key`
+      #
+      # _@param_ `partition_key`
+      #
+      # _@param_ `headers`
+      sig do
+        params(
+          topic: String,
+          payload: String,
+          key: T.nilable(String),
+          partition_key: T.nilable(Integer),
+          headers: T.nilable(T::Hash[T.untyped, T.untyped])
+        ).void
+      end
+      def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil); end
+
+      # _@param_ `messages` — e.g.: [ { topic: 'A', payload: 'message-1', key: '1', headers: { foo: 'bar' } }, { topic: 'B', payload: 'message-2', key: '2', headers: { foo: 'bar' } } ]
+      sig { params(messages: T::Array[T::Hash[T.untyped, T.untyped]]).returns(T.untyped) }
+      def publish_list(messages); end
+
+      # _@param_ `messages`
+      sig { params(messages: T::Array[T::Hash[T.untyped, T.untyped]]).returns(T.untyped) }
+      def async_publish_list(messages); end
+    end
+
+    module ClassMethods
+      sig { returns(Phobos::Producer::ClassMethods::PublicAPI) }
+      def producer; end
+
+      class PublicAPI
+        NAMESPACE = T.let(:phobos_producer_store, T.untyped)
+        ASYNC_PRODUCER_PARAMS = T.let([:max_queue_size, :delivery_threshold, :delivery_interval].freeze, T.untyped)
+        INTERNAL_PRODUCER_PARAMS = T.let([:persistent_connections].freeze, T.untyped)
+
+        # This method configures the kafka client used with publish operations
+        # performed by the host class
+        #
+        # _@param_ `kafka_client`
+        sig { params(kafka_client: Kafka::Client).void }
+        def configure_kafka_client(kafka_client); end
+
+        sig { returns(Kafka::Client) }
+        def kafka_client; end
+
+        sig { returns(Kafka::Producer) }
+        def create_sync_producer; end
+
+        sig { returns(Kafka::Producer) }
+        def sync_producer; end
+
+        sig { void }
+        def sync_producer_shutdown; end
+
+        # _@param_ `topic`
+        #
+        # _@param_ `payload`
+        #
+        # _@param_ `partition_key`
+        #
+        # _@param_ `headers`
+        sig do
+          params(
+            topic: String,
+            payload: String,
+            key: T.untyped,
+            partition_key: T.nilable(Integer),
+            headers: T.nilable(T::Hash[T.untyped, T.untyped])
+          ).void
+        end
+        def publish(topic:, payload:, key: nil, partition_key: nil, headers: nil); end
+
+        # _@param_ `messages`
+        sig { params(messages: T::Array[T::Hash[T.untyped, T.untyped]]).void }
+        def publish_list(messages); end
+
+        sig { returns(Kafka::AsyncProducer) }
+        def create_async_producer; end
+
+        sig { returns(Kafka::AsyncProducer) }
+        def async_producer; end
+
+        # _@param_ `topic`
+        #
+        # _@param_ `payload`
+        #
+        # _@param_ `partition_key`
+        #
+        # _@param_ `headers`
+        sig do
+          params(
+            topic: String,
+            payload: String,
+            key: T.untyped,
+            partition_key: T.nilable(Integer),
+            headers: T.nilable(T::Hash[T.untyped, T.untyped])
+          ).void
+        end
+        def async_publish(topic:, payload:, key: nil, partition_key: nil, headers: nil); end
+
+        # _@param_ `messages`
+        sig { params(messages: T::Array[T::Hash[T.untyped, T.untyped]]).void }
+        def async_publish_list(messages); end
+
+        sig { void }
+        def async_producer_shutdown; end
+
+        sig { returns(T::Hash[T.untyped, T.untyped]) }
+        def regular_configs; end
+
+        sig { returns(T::Hash[T.untyped, T.untyped]) }
+        def async_configs; end
+      end
+    end
+  end
+
+  module Constants
+    LOG_DATE_PATTERN = T.let('%Y-%m-%dT%H:%M:%S:%L%zZ', T.untyped)
+    KAFKA_CONSUMER_OPTS = T.let([
+      :session_timeout,
+      :offset_commit_interval,
+      :offset_commit_threshold,
+      :heartbeat_interval,
+      :offset_retention_time
+    ].freeze, T.untyped)
+    LISTENER_OPTS = T.let([
+      :handler,
+      :group_id,
+      :topic,
+      :min_bytes,
+      :max_wait_time,
+      :force_encoding,
+      :start_from_beginning,
+      :max_bytes_per_partition,
+      :backoff,
+      :delivery,
+      :session_timeout,
+      :offset_commit_interval,
+      :offset_commit_threshold,
+      :heartbeat_interval,
+      :offset_retention_time
+    ].freeze, T.untyped)
+  end
+
+  module Processor
+    include Phobos::Instrumentation
+    extend ActiveSupport::Concern
+    MAX_SLEEP_INTERVAL = T.let(3, T.untyped)
+
+    sig { params(interval: T.untyped).returns(T.untyped) }
+    def snooze(interval); end
+
+    sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+    def instrument(event, extra = {}); end
+  end
+
+  class DeepStruct < OpenStruct
+    # Based on
+    # https://docs.omniref.com/ruby/2.3.0/files/lib/ostruct.rb#line=88
+    sig { params(hash: T.untyped).void }
+    def initialize(hash = nil); end
+
+    sig { returns(T.untyped) }
+    def to_h; end
+  end
+
+  module Test
+    module Helper
+      TOPIC = T.let('test-topic', T.untyped)
+      GROUP = T.let('test-group', T.untyped)
+
+      sig do
+        params(
+          handler: T.untyped,
+          payload: T.untyped,
+          metadata: T.untyped,
+          force_encoding: T.untyped
+        ).returns(T.untyped)
+      end
+      def process_message(handler:, payload:, metadata: {}, force_encoding: nil); end
+    end
+  end
+
+  class EchoHandler
+    include Phobos::Handler
+
+    sig { params(message: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def consume(message, metadata); end
+
+    sig { params(payload: T.untyped, metadata: T.untyped).returns(T.untyped) }
+    def around_consume(payload, metadata); end
+  end
+
+  module BatchHandler
+    # _@param_ `_payloads`
+    #
+    # _@param_ `_metadata`
+    sig { params(_payloads: T::Array[T.untyped], _metadata: T::Hash[String, Object]).void }
+    def consume_batch(_payloads, _metadata); end
+
+    # _@param_ `payloads`
+    #
+    # _@param_ `metadata`
+    sig { params(payloads: T::Array[T.untyped], metadata: T::Hash[String, Object]).void }
+    def around_consume_batch(payloads, metadata); end
+
+    module ClassMethods
+      # _@param_ `kafka_client`
+      sig { params(kafka_client: T.untyped).void }
+      def start(kafka_client); end
+
+      sig { void }
+      def stop; end
+    end
+  end
+
+  class BatchMessage
+    # _@param_ `key`
+    #
+    # _@param_ `partition`
+    #
+    # _@param_ `offset`
+    #
+    # _@param_ `payload`
+    #
+    # _@param_ `headers`
+    sig do
+      params(
+        key: T.untyped,
+        partition: Integer,
+        offset: Integer,
+        payload: T.untyped,
+        headers: T.untyped
+      ).void
+    end
+    def initialize(key:, partition:, offset:, payload:, headers:); end
+
+    # _@param_ `other`
+    sig { params(other: Phobos::BatchMessage).returns(T::Boolean) }
+    def ==(other); end
+
+    sig { returns(T.untyped) }
+    attr_accessor :key
+
+    sig { returns(Integer) }
+    attr_accessor :partition
+
+    sig { returns(Integer) }
+    attr_accessor :offset
+
+    sig { returns(T.untyped) }
+    attr_accessor :payload
+
+    sig { returns(T.untyped) }
+    attr_accessor :headers
+  end
+
+  module Configuration
+    # _@param_ `configuration`
+    sig { params(configuration: T.untyped).void }
+    def configure(configuration); end
+
+    sig { void }
+    def configure_logger; end
+  end
+
+  module Instrumentation
+    NAMESPACE = T.let('phobos', T.untyped)
+
+    sig { params(event: T.untyped).returns(T.untyped) }
+    def self.subscribe(event); end
+
+    sig { params(subscriber: T.untyped).returns(T.untyped) }
+    def self.unsubscribe(subscriber); end
+
+    sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+    def instrument(event, extra = {}); end
+  end
+
+  module Actions
+    class ProcessBatch
+      include Phobos::Instrumentation
+      include Phobos::Log
+
+      sig { params(listener: T.untyped, batch: T.untyped, listener_metadata: T.untyped).void }
+      def initialize(listener:, batch:, listener_metadata:); end
+
+      sig { returns(T.untyped) }
+      def execute; end
+
+      sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+      def log_info(msg, metadata = {}); end
+
+      sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+      def log_debug(msg, metadata = {}); end
+
+      sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+      def log_error(msg, metadata); end
+
+      sig { params(msg: T.untyped, metadata: T.untyped).returns(T.untyped) }
+      def log_warn(msg, metadata = {}); end
+
+      sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+      def instrument(event, extra = {}); end
+
+      # Returns the value of attribute metadata.
+      sig { returns(T.untyped) }
+      attr_reader :metadata
+    end
+
+    class ProcessMessage
+      include Phobos::Processor
+
+      sig { params(listener: T.untyped, message: T.untyped, listener_metadata: T.untyped).void }
+      def initialize(listener:, message:, listener_metadata:); end
+
+      sig { returns(T.untyped) }
+      def execute; end
+
+      sig { params(interval: T.untyped).returns(T.untyped) }
+      def snooze(interval); end
+
+      sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+      def instrument(event, extra = {}); end
+
+      # Returns the value of attribute metadata.
+      sig { returns(T.untyped) }
+      attr_reader :metadata
+    end
+
+    class ProcessBatchInline
+      include Phobos::Processor
+
+      sig { params(listener: T.untyped, batch: T.untyped, metadata: T.untyped).void }
+      def initialize(listener:, batch:, metadata:); end
+
+      sig { returns(T.untyped) }
+      def execute; end
+
+      sig { params(interval: T.untyped).returns(T.untyped) }
+      def snooze(interval); end
+
+      sig { params(event: T.untyped, extra: T.untyped).returns(T.untyped) }
+      def instrument(event, extra = {}); end
+
+      # Returns the value of attribute metadata.
+      sig { returns(T.untyped) }
+      attr_reader :metadata
+    end
+  end
+end
metadata
CHANGED
@@ -1,7 +1,7 @@
 --- !ruby/object:Gem::Specification
 name: phobos
 version: !ruby/object:Gem::Version
-  version: 2.1.4
+  version: 2.1.6
 platform: ruby
 authors:
 - Túlio Ornelas
@@ -15,7 +15,7 @@ authors:
 autorequire:
 bindir: bin
 cert_chain: []
-date:
+date: 2023-08-31 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: bundler
@@ -115,6 +115,48 @@ dependencies:
     - - ">="
       - !ruby/object:Gem::Version
         version: '0'
+- !ruby/object:Gem::Dependency
+  name: sorbet
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: sord
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+- !ruby/object:Gem::Dependency
+  name: tapioca
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: timecop
   requirement: !ruby/object:Gem::Requirement
@@ -129,6 +171,20 @@ dependencies:
     - - ">="
      - !ruby/object:Gem::Version
        version: '0'
+- !ruby/object:Gem::Dependency
+  name: yard
+  requirement: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
+  type: :development
+  prerelease: false
+  version_requirements: !ruby/object:Gem::Requirement
+    requirements:
+    - - ">="
+      - !ruby/object:Gem::Version
+        version: '0'
 - !ruby/object:Gem::Dependency
   name: activesupport
   requirement: !ruby/object:Gem::Requirement
@@ -246,6 +302,7 @@ extra_rdoc_files: []
 files:
 - ".dockerignore"
 - ".env"
+- ".gitattributes"
 - ".gitignore"
 - ".rspec"
 - ".rubocop.yml"
@@ -263,7 +320,9 @@ files:
 - bin/console
 - bin/phobos
 - bin/setup
+- bin/tapioca
 - config/phobos.yml.example
+- defs.rbi
 - docker-compose.yml
 - examples/handler_saving_events_database.rb
 - examples/handler_using_async_producer.rb
@@ -294,6 +353,7 @@ files:
 - lib/phobos/version.rb
 - logo.png
 - phobos.gemspec
+- rbi/defs.rbi
 - utils/create-topic.sh
 homepage: https://github.com/klarna/phobos
 licenses:
@@ -315,7 +375,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
   - !ruby/object:Gem::Version
     version: '0'
 requirements: []
-rubygems_version: 3.3.
+rubygems_version: 3.3.3
 signing_key:
 specification_version: 4
 summary: Simplifying Kafka for ruby apps
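
Taken together, the gemspec changes above add sorbet, sord, tapioca, and yard as development dependencies and ship the generated rbi/defs.rbi and bin/tapioca with the gem. As a final hedged sketch (not part of the diff; the class name, topic, and the prior Phobos.configure call are assumptions), publishing through the typed Phobos::Producer API declared in rbi/defs.rbi looks like:

# typed: true
# Hypothetical producer host class; assumes Phobos.configure has already run.
class EventPublisher
  include Phobos::Producer

  # Phobos::Producer::PublicAPI#publish is typed in rbi/defs.rbi:
  # topic and payload are Strings; key, partition_key, and headers are nilable.
  def self.send_event(payload)
    new.producer.publish(topic: 'events', payload: payload, key: 'event-1')
  end
end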