deimos-ruby 1.16.3 → 1.16.4

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (70)
  1. checksums.yaml +4 -4
  2. data/.gitignore +1 -0
  3. data/CHANGELOG.md +5 -0
  4. data/Gemfile +6 -0
  5. data/README.md +5 -0
  6. data/lib/deimos/active_record_consume/batch_consumption.rb +7 -2
  7. data/lib/deimos/active_record_consume/batch_slicer.rb +2 -0
  8. data/lib/deimos/active_record_consume/message_consumption.rb +8 -4
  9. data/lib/deimos/active_record_consumer.rb +7 -4
  10. data/lib/deimos/active_record_producer.rb +3 -0
  11. data/lib/deimos/backends/base.rb +4 -2
  12. data/lib/deimos/backends/kafka.rb +1 -0
  13. data/lib/deimos/backends/kafka_async.rb +1 -0
  14. data/lib/deimos/config/configuration.rb +4 -0
  15. data/lib/deimos/config/phobos_config.rb +2 -1
  16. data/lib/deimos/consume/batch_consumption.rb +8 -1
  17. data/lib/deimos/consume/message_consumption.rb +4 -1
  18. data/lib/deimos/instrumentation.rb +11 -4
  19. data/lib/deimos/kafka_message.rb +1 -0
  20. data/lib/deimos/kafka_source.rb +5 -0
  21. data/lib/deimos/kafka_topic_info.rb +4 -0
  22. data/lib/deimos/message.rb +19 -2
  23. data/lib/deimos/metrics/datadog.rb +2 -1
  24. data/lib/deimos/metrics/mock.rb +2 -2
  25. data/lib/deimos/metrics/provider.rb +6 -0
  26. data/lib/deimos/monkey_patches/phobos_cli.rb +1 -1
  27. data/lib/deimos/monkey_patches/phobos_producer.rb +1 -0
  28. data/lib/deimos/producer.rb +12 -6
  29. data/lib/deimos/schema_backends/base.rb +31 -17
  30. data/lib/deimos/schema_backends/mock.rb +2 -2
  31. data/lib/deimos/schema_class/base.rb +9 -5
  32. data/lib/deimos/schema_class/enum.rb +4 -2
  33. data/lib/deimos/schema_class/record.rb +5 -5
  34. data/lib/deimos/shared_config.rb +6 -2
  35. data/lib/deimos/test_helpers.rb +21 -4
  36. data/lib/deimos/tracing/datadog.rb +1 -1
  37. data/lib/deimos/tracing/mock.rb +4 -3
  38. data/lib/deimos/tracing/provider.rb +5 -0
  39. data/lib/deimos/utils/db_poller.rb +9 -1
  40. data/lib/deimos/utils/db_producer.rb +14 -2
  41. data/lib/deimos/utils/deadlock_retry.rb +3 -0
  42. data/lib/deimos/utils/inline_consumer.rb +14 -6
  43. data/lib/deimos/utils/lag_reporter.rb +11 -0
  44. data/lib/deimos/utils/schema_controller_mixin.rb +8 -0
  45. data/lib/deimos/version.rb +1 -1
  46. data/lib/deimos.rb +3 -2
  47. data/lib/generators/deimos/active_record_generator.rb +1 -1
  48. data/lib/generators/deimos/db_backend_generator.rb +1 -0
  49. data/lib/generators/deimos/db_poller_generator.rb +1 -0
  50. data/lib/generators/deimos/schema_class/templates/schema_record.rb.tt +1 -1
  51. data/lib/generators/deimos/schema_class_generator.rb +12 -3
  52. data/rbs_collection.lock.yaml +176 -0
  53. data/rbs_collection.yaml +15 -0
  54. data/sig/avro.rbs +14 -0
  55. data/sig/defs.rbs +1859 -0
  56. data/sig/fig_tree.rbs +2 -0
  57. data/spec/snapshots/consumers-no-nest.snap +1 -1
  58. data/spec/snapshots/consumers.snap +1 -1
  59. data/spec/snapshots/consumers_and_producers-no-nest.snap +3 -3
  60. data/spec/snapshots/consumers_and_producers.snap +3 -3
  61. data/spec/snapshots/consumers_circular-no-nest.snap +1 -1
  62. data/spec/snapshots/consumers_circular.snap +1 -1
  63. data/spec/snapshots/consumers_complex_types-no-nest.snap +1 -1
  64. data/spec/snapshots/consumers_complex_types.snap +1 -1
  65. data/spec/snapshots/consumers_nested-no-nest.snap +1 -1
  66. data/spec/snapshots/consumers_nested.snap +1 -1
  67. data/spec/snapshots/namespace_folders.snap +3 -3
  68. data/spec/snapshots/producers_with_key-no-nest.snap +1 -1
  69. data/spec/snapshots/producers_with_key.snap +1 -1
  70. metadata +7 -2
data/sig/defs.rbs ADDED
@@ -0,0 +1,1859 @@
+ # Generates a new consumer.
+ module Deimos
+   include Deimos::Instrumentation
+   include FigTree
+   VERSION: String
+
+   def self.schema_backend_class: () -> singleton(Deimos::SchemaBackends::Base)
+
+   # _@param_ `schema`
+   #
+   # _@param_ `namespace`
+   def self.schema_backend: (schema: (String | Symbol), namespace: String) -> Deimos::SchemaBackends::Base
+
+   # _@param_ `schema`
+   #
+   # _@param_ `namespace`
+   #
+   # _@param_ `payload`
+   #
+   # _@param_ `subject`
+   def self.encode: (
+     schema: String,
+     namespace: String,
+     payload: ::Hash[untyped, untyped],
+     ?subject: String?
+   ) -> String
+
+   # _@param_ `schema`
+   #
+   # _@param_ `namespace`
+   #
+   # _@param_ `payload`
+   def self.decode: (schema: String, namespace: String, payload: String) -> ::Hash[untyped, untyped]?
+
+   # Start the DB producers to send Kafka messages.
+   #
+   # _@param_ `thread_count` — the number of threads to start.
+   def self.start_db_backend!: (?thread_count: Integer) -> void
+
+   # Run a block without allowing any messages to be produced to Kafka.
+   # Optionally add a list of producer classes to limit the disabling to those
+   # classes.
+   #
+   # _@param_ `producer_classes`
+   def self.disable_producers: (*(::Array[Class] | Class) producer_classes) -> void
+
+   # Are producers disabled? If a class is passed in, check only that class.
+   # Otherwise check if the global disable flag is set.
+   #
+   # _@param_ `producer_class`
+   def self.producers_disabled?: (?Class? producer_class) -> bool
+
+   # Loads generated classes
+   def self.load_generated_schema_classes: () -> void
+
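These module-level helpers are the quickest way to exercise the configured schema backend directly. A minimal sketch, assuming a hypothetical value schema 'MySchema' under the 'com.my-namespace' namespace and a placeholder MyProducer class:

    payload = { 'test_id' => 'abc', 'some_int' => 4 }

    # Encode a hash into the configured schema backend's wire format.
    encoded = Deimos.encode(schema: 'MySchema', namespace: 'com.my-namespace', payload: payload)

    # Round-trip it back into a plain hash (returns nil for tombstones).
    decoded = Deimos.decode(schema: 'MySchema', namespace: 'com.my-namespace', payload: encoded)

    # Suppress publishing for a block, e.g. inside a backfill script.
    Deimos.disable_producers(MyProducer) do
      run_backfill! # no Kafka messages are sent from MyProducer in here
    end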
+   # Basically a struct to hold the message as it's processed.
+   class Message
+     # _@param_ `payload`
+     #
+     # _@param_ `producer`
+     #
+     # _@param_ `topic`
+     #
+     # _@param_ `key`
+     #
+     # _@param_ `partition_key`
+     def initialize: (
+       ::Hash[untyped, untyped] payload,
+       Class producer,
+       ?topic: String?,
+       ?key: (String | Integer | ::Hash[untyped, untyped])?,
+       ?partition_key: Integer?
+     ) -> void
+
+     # Add message_id and timestamp default values if they are in the
+     # schema and don't already have values.
+     #
+     # _@param_ `fields` — existing name fields in the schema.
+     def add_fields: (::Array[String] fields) -> void
+
+     # _@param_ `encoder`
+     def coerce_fields: (Deimos::SchemaBackends::Base encoder) -> void
+
+     def encoded_hash: () -> ::Hash[untyped, untyped]
+
+     def to_h: () -> ::Hash[untyped, untyped]
+
+     # _@param_ `other`
+     def ==: (Message other) -> bool
+
+     # _@return_ — True if this message is a tombstone
+     def tombstone?: () -> bool
+
+     attr_accessor payload: ::Hash[untyped, untyped]
+
+     attr_accessor key: (::Hash[untyped, untyped] | String | Integer)
+
+     attr_accessor partition_key: Integer
+
+     attr_accessor encoded_key: String
+
+     attr_accessor encoded_payload: String
+
+     attr_accessor topic: String
+
+     attr_accessor producer_name: String
+   end
+
+   # Add rake task to Rails.
+   class Railtie < Rails::Railtie
+   end
+
+   # Basic consumer class. Inherit from this class and override either consume
+   # or consume_batch, depending on the delivery mode of your listener.
+   # `consume` -> use `delivery :message` or `delivery :batch`
+   # `consume_batch` -> use `delivery :inline_batch`
+   class Consumer
+     include Deimos::Consume::MessageConsumption
+     include Deimos::Consume::BatchConsumption
+     include Deimos::SharedConfig
+
+     def self.decoder: () -> Deimos::SchemaBackends::Base
+
+     def self.key_decoder: () -> Deimos::SchemaBackends::Base
+
+     # Helper method to decode an encoded key.
+     #
+     # _@param_ `key`
+     #
+     # _@return_ — the decoded key.
+     def decode_key: (String key) -> Object
+
+     # Helper method to decode an encoded message.
+     #
+     # _@param_ `payload`
+     #
+     # _@return_ — the decoded message.
+     def decode_message: (Object payload) -> Object
+
+     # _@param_ `batch`
+     #
+     # _@param_ `metadata`
+     def around_consume_batch: (::Array[String] batch, ::Hash[untyped, untyped] metadata) -> void
+
+     # Consume a batch of incoming messages.
+     #
+     # _@param_ `_payloads`
+     #
+     # _@param_ `_metadata`
+     def consume_batch: (::Array[Phobos::BatchMessage] _payloads, ::Hash[untyped, untyped] _metadata) -> void
+
+     # _@param_ `payload`
+     #
+     # _@param_ `metadata`
+     def around_consume: (String payload, ::Hash[untyped, untyped] metadata) -> void
+
+     # Consume incoming messages.
+     #
+     # _@param_ `_payload`
+     #
+     # _@param_ `_metadata`
+     def consume: (String _payload, ::Hash[untyped, untyped] _metadata) -> void
+   end
+
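Per the signatures above, a message-by-message consumer overrides `consume`, while an `inline_batch` listener overrides `consume_batch`. A hedged sketch (schema, namespace and field names are placeholders):

    class MyConsumer < Deimos::Consumer
      schema 'MySchema'
      namespace 'com.my-namespace'
      key_config field: :test_id

      # Called once per decoded message when delivery is :message or :batch.
      def consume(payload, metadata)
        Rails.logger.info("Got #{payload.inspect} with key #{metadata[:key]}")
      end
    end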
+   # Producer to publish messages to a given kafka topic.
+   class Producer
+     include Deimos::SharedConfig
+     MAX_BATCH_SIZE: Integer
+
+     def self.config: () -> ::Hash[untyped, untyped]
+
+     # Set the topic.
+     #
+     # _@param_ `topic`
+     #
+     # _@return_ — the current topic if no argument given.
+     def self.topic: (?String? topic) -> String
+
+     # Override the default partition key (which is the payload key).
+     # Will include `payload_key` if it is part of the original payload.
+     #
+     # _@param_ `_payload` — the payload being passed into the produce method.
+     def self.partition_key: (::Hash[untyped, untyped] _payload) -> String
+
+     # Publish the payload to the topic.
+     #
+     # _@param_ `payload` — with an optional payload_key hash key.
+     #
+     # _@param_ `topic` — if specifying the topic
+     def self.publish: ((::Hash[untyped, untyped] | SchemaClass::Record) payload, ?topic: String) -> void
+
+     # Publish a list of messages.
+     #
+     # _@param_ `payloads` — with optional payload_key hash key.
+     #
+     # _@param_ `sync` — if given, override the default setting of whether to publish synchronously.
+     #
+     # _@param_ `force_send` — if true, ignore the configured backend and send immediately to Kafka.
+     #
+     # _@param_ `topic` — if specifying the topic
+     def self.publish_list: (
+       ::Array[(::Hash[untyped, untyped] | SchemaClass::Record)] payloads,
+       ?sync: bool?,
+       ?force_send: bool,
+       ?topic: String
+     ) -> void
+
+     # _@param_ `sync`
+     #
+     # _@param_ `force_send`
+     def self.determine_backend_class: (bool sync, bool force_send) -> singleton(Deimos::Backends::Base)
+
+     # Send a batch to the backend.
+     #
+     # _@param_ `backend`
+     #
+     # _@param_ `batch`
+     def self.produce_batch: (singleton(Deimos::Backends::Base) backend, ::Array[Deimos::Message] batch) -> void
+
+     def self.encoder: () -> Deimos::SchemaBackends::Base
+
+     def self.key_encoder: () -> Deimos::SchemaBackends::Base
+
+     # Override this in active record producers to add
+     # non-schema fields to check for updates
+     #
+     # _@return_ — fields to check for updates
+     def self.watched_attributes: () -> ::Array[String]
+   end
+
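Publishing through a producer subclass looks roughly like the following; `sync` and `force_send` map to the keyword arguments of `publish_list` above, and the topic/schema setup is assumed to be done on the class (see the SharedConfig sketch further down):

    MyProducer.publish({ 'test_id' => 'abc', 'some_int' => 4 })

    # Publish several payloads at once, bypassing the configured backend
    # and sending synchronously straight to Kafka.
    MyProducer.publish_list(
      [{ 'test_id' => 'abc', 'some_int' => 4 }, { 'test_id' => 'def', 'some_int' => 5 }],
      sync: true,
      force_send: true
    )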
+   # ActiveRecord class to record the last time we polled the database.
+   # For use with DbPoller.
+   class PollInfo < ActiveRecord::Base
+   end
+
+   module Backends
+     # Backend which saves messages to the database instead of immediately
+     # sending them.
+     class Db < Deimos::Backends::Base
+       # :nodoc:
+       def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+
+       # _@param_ `message`
+       #
+       # _@return_ — the partition key to use for this message
+       def self.partition_key_for: (Deimos::Message message) -> String
+     end
+
+     # Abstract class for all publish backends.
+     class Base
+       # _@param_ `producer_class`
+       #
+       # _@param_ `messages`
+       def self.publish: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+
+       # _@param_ `producer_class`
+       #
+       # _@param_ `messages`
+       def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+     end
+
+     # Backend which saves messages to an in-memory hash.
+     class Test < Deimos::Backends::Base
+       def self.sent_messages: () -> ::Array[::Hash[untyped, untyped]]
+
+       def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+     end
+
+     # Default backend to produce to Kafka.
+     class Kafka < Deimos::Backends::Base
+       include Phobos::Producer
+
+       # Shut down the producer if necessary.
+       def self.shutdown_producer: () -> void
+
+       # :nodoc:
+       def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+     end
+
+     # Backend which produces to Kafka via an async producer.
+     class KafkaAsync < Deimos::Backends::Base
+       include Phobos::Producer
+
+       # Shut down the producer cleanly.
+       def self.shutdown_producer: () -> void
+
+       # :nodoc:
+       def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
+     end
+   end
+
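Which of these backend classes `determine_backend_class` resolves to is driven by configuration. A sketch, assuming the standard `producers.backend` setting from Deimos's FigTree config (check your version's configuration docs for the exact key):

    Deimos.configure do |config|
      # One of :kafka, :kafka_async, :db or :test, matching the classes above.
      config.producers.backend = :kafka_async
    end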
+   # Represents an object which needs to inform Kafka when it is saved or
+   # bulk imported.
+   module KafkaSource
+     extend ActiveSupport::Concern
+     DEPRECATION_WARNING: String
+
+     # Send the newly created model to Kafka.
+     def send_kafka_event_on_create: () -> void
+
+     # Send the newly updated model to Kafka.
+     def send_kafka_event_on_update: () -> void
+
+     # Send a deletion (null payload) event to Kafka.
+     def send_kafka_event_on_destroy: () -> void
+
+     # Payload to send after we are destroyed.
+     def deletion_payload: () -> ::Hash[untyped, untyped]
+
+     # :nodoc:
+     module ClassMethods
+       def kafka_config: () -> ::Hash[untyped, untyped]
+
+       # _@return_ — the producers to run.
+       def kafka_producers: () -> ::Array[Deimos::ActiveRecordProducer]
+     end
+   end
+
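A model that should publish itself on create/update/destroy includes KafkaSource and names its producers. A sketch with hypothetical Widget/WidgetProducer classes:

    class Widget < ActiveRecord::Base
      include Deimos::KafkaSource

      # The ActiveRecordProducer subclasses that publish this model's changes.
      def self.kafka_producers
        [WidgetProducer]
      end
    end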
+   module Metrics
+     # A mock Metrics wrapper which just logs the metrics
+     class Mock < Deimos::Metrics::Provider
+       # _@param_ `logger`
+       def initialize: (?Logger? logger) -> void
+
+       # :nodoc:
+       def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+     end
+
+     # A Metrics wrapper class for Datadog.
+     class Datadog < Deimos::Metrics::Provider
+       # _@param_ `config`
+       #
+       # _@param_ `logger`
+       def initialize: (::Hash[untyped, untyped] config, Logger logger) -> void
+
+       # :nodoc:
+       def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # :nodoc:
+       def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+     end
+
+     # Base class for all metrics providers.
+     class Provider
+       # Send a counter increment metric
+       #
+       # _@param_ `metric_name` — The name of the counter metric
+       #
+       # _@param_ `options` — Any additional options, e.g. :tags
+       def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+
+       # Send a gauge metric
+       #
+       # _@param_ `metric_name` — The name of the gauge metric
+       #
+       # _@param_ `count`
+       #
+       # _@param_ `options` — Any additional options, e.g. :tags
+       def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # Send a histogram metric
+       #
+       # _@param_ `metric_name` — The name of the histogram metric
+       #
+       # _@param_ `count`
+       #
+       # _@param_ `options` — Any additional options, e.g. :tags
+       def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void
+
+       # Time a yielded block, and send a timer metric
+       #
+       # _@param_ `metric_name` — The name of the metric
+       #
+       # _@param_ `options` — Any additional options, e.g. :tags
+       def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
+     end
+   end
+
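Any object implementing the Provider interface above can be plugged in as the metrics sink. A sketch of a provider forwarding to a hypothetical StatsD client, wired up via `config.metrics`:

    class StatsdMetrics < Deimos::Metrics::Provider
      def increment(metric_name, options = {})
        StatsD.increment(metric_name, tags: options[:tags])
      end

      def gauge(metric_name, count, options = {})
        StatsD.gauge(metric_name, count, tags: options[:tags])
      end

      def histogram(metric_name, count, options = {})
        StatsD.histogram(metric_name, count, tags: options[:tags])
      end

      def time(metric_name, options = {})
        start = Time.now
        yield
        StatsD.measure(metric_name, Time.now - start, tags: options[:tags])
      end
    end

    Deimos.configure { |config| config.metrics = StatsdMetrics.new }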
+   # Include this module in your RSpec spec_helper
+   # to stub out external dependencies
+   # and add methods to use to test encoding/decoding.
+   module TestHelpers
+     extend ActiveSupport::Concern
+
+     # for backwards compatibility
+     def self.sent_messages: () -> ::Array[::Hash[untyped, untyped]]
+
+     # Set the config to the right settings for a unit test
+     def self.unit_test!: () -> void
+
+     # Kafka test config with avro schema registry
+     def self.full_integration_test!: () -> void
+
+     # Set the config to the right settings for a kafka test
+     def self.kafka_test!: () -> void
+
+     # Clear all sent messages - e.g. if we want to check that
+     # particular messages were sent or not sent after a point in time.
+     def clear_kafka_messages!: () -> void
+
+     # Test that a given handler will consume a given payload correctly, i.e.
+     # that the schema is correct. If a block is given, that block will be
+     # executed when `consume` is called. Otherwise it will just confirm that
+     # `consume` is called at all.
+     #
+     # _@param_ `handler_class_or_topic` — Class which inherits from Deimos::Consumer or the topic as a string
+     #
+     # _@param_ `payload` — the payload to consume
+     #
+     # _@param_ `call_original` — if true, allow the consume handler to continue as normal. Not compatible with a block.
+     #
+     # _@param_ `skip_expectation` — Set to true to not place any expectations on the consumer. Primarily used internally to Deimos.
+     #
+     # _@param_ `key` — the key to use.
+     #
+     # _@param_ `partition_key` — the partition key to use.
+     def test_consume_message: (
+       (Class | String) handler_class_or_topic,
+       ::Hash[untyped, untyped] payload,
+       ?call_original: bool,
+       ?key: Object?,
+       ?partition_key: Object?,
+       ?skip_expectation: bool
+     ) -> void
+
+     # Check to see that a given message will fail due to validation errors.
+     #
+     # _@param_ `handler_class`
+     #
+     # _@param_ `payload`
+     def test_consume_invalid_message: (Class handler_class, ::Hash[untyped, untyped] payload) -> void
+
+     # Test that a given handler will consume a given batch payload correctly,
+     # i.e. that the schema is correct. If a block is given, that block will
+     # be executed when `consume` is called. Otherwise it will just confirm
+     # that `consume` is called at all.
+     #
+     # _@param_ `handler_class_or_topic` — Class which inherits from Deimos::Consumer or the topic as a string
+     #
+     # _@param_ `payloads` — the payloads to consume
+     #
+     # _@param_ `keys`
+     #
+     # _@param_ `partition_keys`
+     #
+     # _@param_ `call_original`
+     #
+     # _@param_ `skip_expectation`
+     def test_consume_batch: (
+       (Class | String) handler_class_or_topic,
+       ::Array[::Hash[untyped, untyped]] payloads,
+       ?keys: ::Array[(::Hash[untyped, untyped] | String)],
+       ?partition_keys: ::Array[Integer],
+       ?call_original: bool,
+       ?skip_expectation: bool
+     ) -> void
+
+     # Check to see that a given message will fail due to validation errors.
+     #
+     # _@param_ `handler_class`
+     #
+     # _@param_ `payloads`
+     def test_consume_batch_invalid_message: (Class handler_class, ::Array[::Hash[untyped, untyped]] payloads) -> void
+   end
+
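In practice these helpers are mixed into RSpec roughly as follows (the consumer class and payload are illustrative):

    RSpec.configure do |config|
      config.include Deimos::TestHelpers
      config.before(:each) { Deimos::TestHelpers.unit_test! }
    end

    it 'consumes my payload' do
      test_consume_message(MyConsumer,
                           { 'test_id' => 'abc', 'some_int' => 4 },
                           call_original: true)
    end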
+   module Tracing
+     # Class that mocks out tracing functionality
+     class Mock < Deimos::Tracing::Provider
+       # _@param_ `logger`
+       def initialize: (?Logger? logger) -> void
+
+       # _@param_ `span_name`
+       #
+       # _@param_ `_options`
+       def start: (String span_name, ?::Hash[untyped, untyped] _options) -> Object
+
+       # :nodoc:
+       def finish: (Object span) -> void
+
+       # :nodoc:
+       def active_span: () -> Object
+
+       # :nodoc:
+       def set_tag: (String tag, String value, ?Object? span) -> void
+
+       # :nodoc:
+       def set_error: (Object span, Exception exception) -> void
+     end
+
+     # Tracing wrapper class for Datadog.
+     class Datadog < Deimos::Tracing::Provider
+       # _@param_ `config`
+       def initialize: (::Hash[untyped, untyped] config) -> void
+
+       # :nodoc:
+       def start: (String span_name, ?::Hash[untyped, untyped] options) -> Object
+
+       # :nodoc:
+       def finish: (Object span) -> void
+
+       # :nodoc:
+       def active_span: () -> Object
+
+       # :nodoc:
+       def set_error: (Object span, Exception exception) -> void
+
+       # :nodoc:
+       def set_tag: (String tag, String value, ?Object? span) -> void
+     end
+
+     # Base class for all tracing providers.
+     class Provider
+       # Returns a span object and starts the trace.
+       #
+       # _@param_ `span_name` — The name of the span/trace
+       #
+       # _@param_ `options` — Options for the span
+       #
+       # _@return_ — The span object
+       def start: (String span_name, ?::Hash[untyped, untyped] options) -> Object
+
+       # Finishes the trace on the span object.
+       #
+       # _@param_ `span` — The span to finish trace on
+       def finish: (Object span) -> void
+
+       # Set an error on the span.
+       #
+       # _@param_ `span` — The span to set error on
+       #
+       # _@param_ `exception` — The exception that occurred
+       def set_error: (Object span, Exception exception) -> void
+
+       # Get the currently activated span.
+       def active_span: () -> Object
+
+       # Set a tag to a span. Use the currently active span if not given.
+       #
+       # _@param_ `tag`
+       #
+       # _@param_ `value`
+       #
+       # _@param_ `span`
+       def set_tag: (String tag, String value, ?Object? span) -> void
+     end
+   end
+
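Like metrics, the tracer is injected through configuration; a sketch using the built-in mock provider (or any Provider subclass of your own), assuming the standard `config.tracer` setting:

    Deimos.configure do |config|
      config.tracer = Deimos::Tracing::Mock.new(Rails.logger)
    end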
+   # Store Kafka messages into the database.
+   class KafkaMessage < ActiveRecord::Base
+     # Ensure it gets turned into a string, e.g. for testing purposes. It
+     # should already be a string.
+     #
+     # _@param_ `mess`
+     def message=: (Object mess) -> void
+
+     # Decoded payload for this message.
+     def decoded_message: () -> ::Hash[untyped, untyped]
+
+     # Get a decoder to decode a set of messages on the given topic.
+     #
+     # _@param_ `topic`
+     def self.decoder: (String topic) -> Deimos::Consumer
+
+     # Decoded payloads for a list of messages.
+     #
+     # _@param_ `messages`
+     def self.decoded: (?::Array[Deimos::KafkaMessage] messages) -> ::Array[::Hash[untyped, untyped]]
+
+     def phobos_message: () -> ::Hash[untyped, untyped]
+   end
+
+   # Module that producers and consumers can share which sets up configuration.
+   module SharedConfig
+     extend ActiveSupport::Concern
+
+     # need to use this instead of class_methods to be backwards-compatible
+     # with Rails 3
+     module ClassMethods
+       def config: () -> ::Hash[untyped, untyped]
+
+       # Set the schema.
+       #
+       # _@param_ `schema`
+       def schema: (String schema) -> void
+
+       # Set the namespace.
+       #
+       # _@param_ `namespace`
+       def namespace: (String namespace) -> void
+
+       # Set key configuration.
+       #
+       # _@param_ `field` — the name of a field to use in the value schema as a generated key schema
+       #
+       # _@param_ `schema` — the name of a schema to use for the key
+       #
+       # _@param_ `plain` — if true, do not encode keys at all
+       #
+       # _@param_ `none` — if true, do not use keys at all
+       def key_config: (
+         ?plain: bool?,
+         ?field: Symbol?,
+         ?schema: (String | Symbol)?,
+         ?none: bool?
+       ) -> void
+
+       # _@param_ `use_schema_classes`
+       def schema_class_config: (bool use_schema_classes) -> void
+     end
+   end
+
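These class methods form the DSL used at the top of producer and consumer classes. A representative sketch (all names are placeholders):

    class MyProducer < Deimos::Producer
      topic 'MyTopic'
      schema 'MySchema'
      namespace 'com.my-namespace'
      # Alternatives: key_config none: true, key_config plain: true,
      # or key_config schema: 'MySchema_key'.
      key_config field: :test_id
    end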
+   # @deprecated Use Deimos::Consumer with `delivery: inline_batch` configured instead
+   class BatchConsumer < Deimos::Consumer
+   end
+
+   # Copied from Phobos instrumentation.
+   module Instrumentation
+     extend ActiveSupport::Concern
+     NAMESPACE: String
+
+     # :nodoc:
+     module ClassMethods
+       # _@param_ `event`
+       def subscribe: (String event) -> void
+
+       # _@param_ `subscriber`
+       def unsubscribe: (ActiveSupport::Subscriber subscriber) -> void
+
+       # _@param_ `event`
+       #
+       # _@param_ `extra`
+       def instrument: (String event, ?::Hash[untyped, untyped] extra) -> void
+     end
+   end
+
+   # This module listens to events published by RubyKafka.
+   module KafkaListener
+     # Listens for any exceptions that happen during publishing and re-publishes
+     # as a Deimos event.
+     #
+     # _@param_ `event`
+     def self.send_produce_error: (ActiveSupport::Notifications::Event event) -> void
+   end
+
+   module Utils
+     # Class which continually polls the database and sends Kafka messages.
+     class DbPoller
+       BATCH_SIZE: Integer
+
+       # Begin the DB Poller process.
+       def self.start!: () -> void
+
+       # _@param_ `config`
+       def initialize: (FigTree::ConfigStruct config) -> void
+
+       # Start the poll:
+       # 1) Grab the current PollInfo from the database indicating the last
+       # time we ran
+       # 2) On a loop, process all the recent updates between the last time
+       # we ran and now.
+       def start: () -> void
+
+       # Grab the PollInfo or create if it doesn't exist.
+       def retrieve_poll_info: () -> void
+
+       # Stop the poll.
+       def stop: () -> void
+
+       # Indicate whether this current loop should process updates. Most loops
+       # will busy-wait (sleeping 0.1 seconds) until it's ready.
+       def should_run?: () -> bool
+
+       # _@param_ `record`
+       def last_updated: (ActiveRecord::Base record) -> ActiveSupport::TimeWithZone
+
+       # Send messages for updated data.
+       def process_updates: () -> void
+
+       # _@param_ `time_from`
+       #
+       # _@param_ `time_to`
+       def fetch_results: (ActiveSupport::TimeWithZone time_from, ActiveSupport::TimeWithZone time_to) -> ActiveRecord::Relation
+
+       # _@param_ `batch`
+       def process_batch: (::Array[ActiveRecord::Base] batch) -> void
+
+       # Needed for Executor so it can identify the worker
+       attr_reader id: Integer
+     end
+
+     # Class which continually polls the kafka_messages table
+     # in the database and sends Kafka messages.
+     class DbProducer
+       include Phobos::Producer
+       BATCH_SIZE: Integer
+       DELETE_BATCH_SIZE: Integer
+       MAX_DELETE_ATTEMPTS: Integer
+
+       # _@param_ `logger`
+       def initialize: (?Logger logger) -> void
+
+       def config: () -> FigTree
+
+       # Start the poll.
+       def start: () -> void
+
+       # Stop the poll.
+       def stop: () -> void
+
+       # Complete one loop of processing all messages in the DB.
+       def process_next_messages: () -> void
+
+       def retrieve_topics: () -> ::Array[String]
+
+       # _@param_ `topic`
+       #
+       # _@return_ — the topic that was locked, or nil if none were.
+       def process_topic: (String topic) -> String?
+
+       # Process a single batch in a topic.
+       def process_topic_batch: () -> void
+
+       # _@param_ `messages`
+       def delete_messages: (::Array[Deimos::KafkaMessage] messages) -> void
+
+       def retrieve_messages: () -> ::Array[Deimos::KafkaMessage]
+
+       # _@param_ `messages`
+       def log_messages: (::Array[Deimos::KafkaMessage] messages) -> void
+
+       # Send metrics related to pending messages.
+       def send_pending_metrics: () -> void
+
+       # Shut down the sync producer if we have to. Phobos will automatically
+       # create a new one. We should call this if the producer can be in a bad
+       # state and e.g. we need to clear the buffer.
+       def shutdown_producer: () -> void
+
+       # Produce messages in batches, reducing the size 1/10 if the batch is too
+       # large. Does not retry batches of messages that have already been sent.
+       #
+       # _@param_ `batch`
+       def produce_messages: (::Array[::Hash[untyped, untyped]] batch) -> void
+
+       # _@param_ `batch`
+       def compact_messages: (::Array[Deimos::KafkaMessage] batch) -> ::Array[Deimos::KafkaMessage]
+
+       # Returns the value of attribute id.
+       attr_accessor id: untyped
+
+       # Returns the value of attribute current_topic.
+       attr_accessor current_topic: untyped
+     end
+
+     # Class that manages reporting lag.
+     class LagReporter
+       extend Mutex_m
+
+       # Reset all group information.
+       def self.reset: () -> void
+
+       # offset_lag = event.payload.fetch(:offset_lag)
+       # group_id = event.payload.fetch(:group_id)
+       # topic = event.payload.fetch(:topic)
+       # partition = event.payload.fetch(:partition)
+       #
+       # _@param_ `payload`
+       def self.message_processed: (::Hash[untyped, untyped] payload) -> void
+
+       # _@param_ `payload`
+       def self.offset_seek: (::Hash[untyped, untyped] payload) -> void
+
+       # _@param_ `payload`
+       def self.heartbeat: (::Hash[untyped, untyped] payload) -> void
+
+       # Class that has a list of topics
+       class ConsumerGroup
+         # _@param_ `id`
+         def initialize: (String id) -> void
+
+         # _@param_ `topic`
+         #
+         # _@param_ `partition`
+         def report_lag: (String topic, Integer partition) -> void
+
+         # _@param_ `topic`
+         #
+         # _@param_ `partition`
+         #
+         # _@param_ `offset`
+         def assign_current_offset: (String topic, Integer partition, Integer offset) -> void
+
+         attr_accessor topics: ::Hash[String, Topic]
+
+         attr_accessor id: String
+       end
+
+       # Topic which has a hash of partition => last known current offsets
+       class Topic
+         # _@param_ `topic_name`
+         #
+         # _@param_ `group`
+         def initialize: (String topic_name, ConsumerGroup group) -> void
+
+         # _@param_ `partition`
+         #
+         # _@param_ `offset`
+         def assign_current_offset: (Integer partition, Integer offset) -> void
+
+         # _@param_ `partition`
+         #
+         # _@param_ `offset`
+         def compute_lag: (Integer partition, Integer offset) -> Integer
+
+         # _@param_ `partition`
+         def report_lag: (Integer partition) -> void
+
+         attr_accessor topic_name: String
+
+         attr_accessor partition_current_offsets: ::Hash[Integer, Integer]
+
+         attr_accessor consumer_group: ConsumerGroup
+       end
+     end
+
+     # Class used by SchemaClassGenerator and Consumer/Producer interfaces
+     module SchemaClass
+       # _@param_ `namespace`
+       def self.modules_for: (String namespace) -> ::Array[String]
+
+       # Converts a raw payload into an instance of the Schema Class
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       #
+       # _@param_ `namespace`
+       def self.instance: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Base) payload, String schema, ?String namespace) -> Deimos::SchemaClass::Record
+
+       # _@param_ `config` — Producer or Consumer config
+       def self.use?: (::Hash[untyped, untyped] config) -> bool
+     end
+
+     # Utility class to retry a given block if a deadlock is encountered.
+     # Supports Postgres and MySQL deadlocks and lock wait timeouts.
+     class DeadlockRetry
+       RETRY_COUNT: Integer
+       DEADLOCK_MESSAGES: ::Array[String]
+
+       # Retry the given block when encountering a deadlock. For any other
+       # exceptions, they are reraised. This is used to handle cases where
+       # the database may be busy but the transaction would succeed if
+       # retried later. Note that your block should be idempotent and it will
+       # be wrapped in a transaction.
+       # Sleeps for a random number of seconds to prevent multiple transactions
+       # from retrying at the same time.
+       #
+       # _@param_ `tags` — Tags to attach when logging and reporting metrics.
+       def self.wrap: (?::Array[untyped] tags) -> void
+     end
+
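Usage is a plain block wrapper (Widget is a placeholder model); the tags feed the deadlock logging and metrics described above:

    Deimos::Utils::DeadlockRetry.wrap(['topic:my-topic']) do
      # Should be idempotent; it runs inside a transaction and is re-run
      # (up to RETRY_COUNT times) when a deadlock is detected.
      Widget.where(id: ids).update_all(processed: true)
    end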
+     # Listener that can seek to get the last X messages in a topic.
+     class SeekListener < Phobos::Listener
+       MAX_SEEK_RETRIES: Integer
+
+       def start_listener: () -> void
+
+       attr_accessor num_messages: Integer
+     end
+
+     # Class to return the messages consumed.
+     class MessageBankHandler < Deimos::Consumer
+       include Phobos::Handler
+
+       # _@param_ `klass`
+       def self.config_class=: (singleton(Deimos::Consumer) klass) -> void
+
+       # _@param_ `_kafka_client`
+       def self.start: (Kafka::Client _kafka_client) -> void
+
+       # _@param_ `payload`
+       #
+       # _@param_ `metadata`
+       def consume: (::Hash[untyped, untyped] payload, ::Hash[untyped, untyped] metadata) -> void
+     end
+
+     # Class which can process/consume messages inline.
+     class InlineConsumer
+       MAX_MESSAGE_WAIT_TIME: Integer
+       MAX_TOPIC_WAIT_TIME: Integer
+
+       # Get the last X messages from a topic. You can specify a subclass of
+       # Deimos::Consumer or Deimos::Producer, or provide the
+       # schema, namespace and key_config directly.
+       #
+       # _@param_ `topic`
+       #
+       # _@param_ `config_class`
+       #
+       # _@param_ `schema`
+       #
+       # _@param_ `namespace`
+       #
+       # _@param_ `key_config`
+       #
+       # _@param_ `num_messages`
+       def self.get_messages_for: (
+         topic: String,
+         ?schema: String?,
+         ?namespace: String?,
+         ?key_config: ::Hash[untyped, untyped]?,
+         ?config_class: (singleton(Deimos::Consumer) | singleton(Deimos::Producer))?,
+         ?num_messages: Integer
+       ) -> ::Array[::Hash[untyped, untyped]]
+
+       # Consume the last X messages from a topic.
+       #
+       # _@param_ `topic`
+       #
+       # _@param_ `frk_consumer`
+       #
+       # _@param_ `num_messages` — If this number is >= the number of messages in the topic, all messages will be consumed.
+       def self.consume: (topic: String, frk_consumer: Class, ?num_messages: Integer) -> void
+     end
+
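For ad-hoc inspection (e.g. from a Rails console), `get_messages_for` can pull the tail of a topic. A sketch with placeholder schema settings:

    messages = Deimos::Utils::InlineConsumer.get_messages_for(
      topic: 'MyTopic',
      schema: 'MySchema',
      namespace: 'com.my-namespace',
      key_config: { field: 'test_id' },
      num_messages: 5
    )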
+     # Mixin to automatically decode schema-encoded payloads when given the correct content type,
+     # and provide the `render_schema` method to encode the payload for responses.
+     module SchemaControllerMixin
+       extend ActiveSupport::Concern
+
+       def schema_format?: () -> bool
+
+       # Get the namespace from either an existing instance variable, or tease it out of the schema.
+       #
+       # _@param_ `type` — :request or :response
+       #
+       # _@return_ — the namespace and schema.
+       def parse_namespace: (Symbol _type) -> ::Array[(String | String)]
+
+       # Decode the payload with the parameters.
+       def decode_schema: () -> void
+
+       # Render a hash into a payload as specified by the configured schema and namespace.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       #
+       # _@param_ `namespace`
+       def render_schema: (::Hash[untyped, untyped] payload, ?schema: String?, ?namespace: String?) -> void
+
+       # :nodoc:
+       module ClassMethods
+         def schema_mapping: () -> ::Hash[String, ::Hash[Symbol, String]]
+
+         # Indicate which schemas should be assigned to actions.
+         #
+         # _@param_ `actions`
+         #
+         # _@param_ `kwactions`
+         #
+         # _@param_ `request`
+         #
+         # _@param_ `response`
+         def schemas: (
+           *Symbol actions,
+           ?request: String?,
+           ?response: String?,
+           **String kwactions
+         ) -> void
+
+         def namespaces: () -> ::Hash[Symbol, String]
+
+         # Set the namespace for both requests and responses.
+         #
+         # _@param_ `name`
+         def namespace: (String name) -> void
+
+         # Set the namespace for requests.
+         #
+         # _@param_ `name`
+         def request_namespace: (String name) -> void
+
+         # Set the namespace for responses.
+         #
+         # _@param_ `name`
+         def response_namespace: (String name) -> void
+       end
+     end
+   end
+
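A controller wires this up by declaring namespaces and per-action schemas; `decode_schema` then handles the request body and `render_schema` encodes the response. A hedged sketch with placeholder schema names:

    class WidgetsController < ApplicationController
      include Deimos::Utils::SchemaControllerMixin

      request_namespace 'com.my-namespace.request'
      response_namespace 'com.my-namespace.response'
      schemas :create, request: 'CreateWidgetRequest', response: 'CreateWidgetResponse'

      def create
        # decode_schema has already run for this action; how the decoded
        # request payload is exposed depends on the Deimos version.
        render_schema({ 'widget_id' => 123 })
      end
    end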
+   # Record that keeps track of which topics are being worked on by DbProducers.
+   class KafkaTopicInfo < ActiveRecord::Base
+     # Lock a topic for the given ID. Returns whether the lock was successful.
+     #
+     # _@param_ `topic`
+     #
+     # _@param_ `lock_id`
+     def self.lock: (String topic, String lock_id) -> bool
+
+     # This is called once a producer is finished working on a topic, i.e.
+     # there are no more messages to fetch. It unlocks the topic and
+     # moves on to the next one.
+     #
+     # _@param_ `topic`
+     #
+     # _@param_ `lock_id`
+     def self.clear_lock: (String topic, String lock_id) -> void
+
+     # Update all topics that aren't currently locked and have no messages
+     # waiting. It's OK if some messages get inserted in the middle of this
+     # because the point is that at least within a few milliseconds of each
+     # other, it wasn't locked and had no messages, meaning the topic
+     # was in a good state.
+     #
+     # _@param_ `except_topics` — the list of topics we've just realized had messages in them, meaning all other topics were empty.
+     def self.ping_empty_topics: (::Array[String] except_topics) -> void
+
+     # The producer calls this if it gets an error sending messages. This
+     # essentially locks down this topic for 1 minute (for all producers)
+     # and allows the caller to continue to the next topic.
+     #
+     # _@param_ `topic`
+     #
+     # _@param_ `lock_id`
+     def self.register_error: (String topic, String lock_id) -> void
+
+     # Update the locked_at timestamp to indicate that the producer is still
+     # working on those messages and to continue.
+     #
+     # _@param_ `topic`
+     #
+     # _@param_ `lock_id`
+     def self.heartbeat: (String topic, String lock_id) -> void
+   end
+
+   module SchemaClass
+     # Base Class for Schema Classes generated from Avro.
+     class Base
+       # _@param_ `_args`
+       def initialize: (*::Array[Object] _args) -> void
+
+       # Converts the object to a hash which can be used for debugging or comparing objects.
+       #
+       # _@param_ `_opts`
+       #
+       # _@return_ — a hash representation of the payload
+       def as_json: (?::Hash[untyped, untyped] _opts) -> ::Hash[untyped, untyped]
+
+       # _@param_ `key`
+       #
+       # _@param_ `val`
+       def []=: ((String | Symbol) key, Object val) -> void
+
+       # _@param_ `other`
+       def ==: (SchemaClass::Base other) -> bool
+
+       def inspect: () -> String
+
+       # Initializes this class from a given value
+       #
+       # _@param_ `value`
+       def self.initialize_from_value: (Object value) -> SchemaClass::Base
+
+       def hash: () -> Integer
+     end
+
+     # Base Class for Enum Classes generated from Avro.
+     class Enum < Deimos::SchemaClass::Base
+       # _@param_ `other`
+       def ==: (Deimos::SchemaClass::Enum other) -> bool
+
+       def to_s: () -> String
+
+       # _@param_ `value`
+       def initialize: (String value) -> void
+
+       # Returns all the valid symbols for this enum.
+       def symbols: () -> ::Array[String]
+
+       def as_json: (?::Hash[untyped, untyped] _opts) -> String
+
+       def self.initialize_from_value: (Object value) -> SchemaClass::Enum
+
+       attr_accessor value: String
+     end
+
+     # Base Class of Record Classes generated from Avro.
+     class Record < Deimos::SchemaClass::Base
+       # Converts the object attributes to a hash which can be used for Kafka
+       #
+       # _@return_ — the payload as a hash.
+       def to_h: () -> ::Hash[untyped, untyped]
+
+       # Merge a hash or an identical schema object with this one and return a new object.
+       #
+       # _@param_ `other_hash`
+       def merge: ((::Hash[untyped, untyped] | SchemaClass::Base) other_hash) -> SchemaClass::Base
+
+       # Element access method as if this Object were a hash
+       #
+       # _@param_ `key`
+       #
+       # _@return_ — The value of the attribute if exists, nil otherwise
+       def []: ((String | Symbol) key) -> Object
+
+       def with_indifferent_access: () -> SchemaClass::Record
+
+       # Returns the schema name of the inheriting class.
+       def schema: () -> String
+
+       # Returns the namespace for the schema of the inheriting class.
+       def namespace: () -> String
+
+       # Returns the full schema name of the inheriting class.
+       def full_schema: () -> String
+
+       # Returns the schema validator from the schema backend
+       def validator: () -> Deimos::SchemaBackends::Base
+
+       # _@return_ — an array of field names in the schema.
+       def schema_fields: () -> ::Array[String]
+
+       def self.initialize_from_value: (Object value) -> SchemaClass::Record
+     end
+   end
+
+   # Module to handle phobos.yml as well as outputting the configuration to save
+   # to Phobos itself.
+   module PhobosConfig
+     extend ActiveSupport::Concern
+
+     def to_h: () -> ::Hash[untyped, untyped]
+
+     def reset!: () -> void
+
+     # Create a hash representing the config that Phobos expects.
+     def phobos_config: () -> ::Hash[untyped, untyped]
+
+     # _@param_ `key`
+     def ssl_var_contents: (String key) -> String
+   end
+
+   # Represents a field in the schema.
+   class SchemaField
+     # _@param_ `name`
+     #
+     # _@param_ `type`
+     #
+     # _@param_ `enum_values`
+     #
+     # _@param_ `default`
+     def initialize: (
+       String name,
+       Object _type,
+       ?::Array[String] enum_values,
+       ?Object default
+     ) -> void
+
+     attr_accessor name: String
+
+     attr_accessor type: String
+
+     attr_accessor enum_values: ::Array[String]
+
+     attr_accessor default: Object
+   end
+
+   module SchemaBackends
+     # Base class for encoding / decoding.
+     class Base
+       # _@param_ `schema`
+       #
+       # _@param_ `namespace`
+       def initialize: (schema: (String | Symbol), ?namespace: String?) -> void
+
+       # Encode a payload with a schema. Public method.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       #
+       # _@param_ `topic`
+       def encode: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
+
+       # Decode a payload with a schema. Public method.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       def decode: (String payload, ?schema: (String | Symbol)?) -> ::Hash[untyped, untyped]?
+
+       # Given a hash, coerce its types to our schema. To be defined by subclass.
+       #
+       # _@param_ `payload`
+       def coerce: (::Hash[untyped, untyped] payload) -> ::Hash[untyped, untyped]
+
+       # Indicate a class which should act as a mocked version of this backend.
+       # This class should perform all validations but not actually do any
+       # encoding.
+       # Note that the "mock" version (e.g. avro_validation) should return
+       # its own symbol when this is called, since it may be called multiple
+       # times depending on the order of RSpec helpers.
+       def self.mock_backend: () -> Symbol
+
+       # The content type to use when encoding / decoding requests over HTTP via ActionController.
+       def self.content_type: () -> String
+
+       # Converts your schema to String form for generated YARD docs.
+       # To be defined by subclass.
+       #
+       # _@param_ `schema`
+       #
+       # _@return_ — A string representation of the Type
+       def self.field_type: (Object schema) -> String
+
+       # Encode a payload. To be defined by subclass.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       #
+       # _@param_ `topic`
+       def encode_payload: (::Hash[untyped, untyped] payload, schema: (String | Symbol), ?topic: String?) -> String
+
+       # Decode a payload. To be defined by subclass.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]
+
+       # Validate that a payload matches the schema. To be defined by subclass.
+       #
+       # _@param_ `payload`
+       #
+       # _@param_ `schema`
+       def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void
+
+       # List of field names belonging to the schema. To be defined by subclass.
+       def schema_fields: () -> ::Array[SchemaField]
+
+       # Given a value and a field definition (as defined by whatever the
+       # underlying schema library is), coerce the given value to
+       # the given field type.
+       #
+       # _@param_ `field`
+       #
+       # _@param_ `value`
+       def coerce_field: (SchemaField field, Object value) -> Object
+
+       # Given a field definition, return the SQL type that might be used in
+       # ActiveRecord table creation - e.g. for Avro, a `long` type would
+       # return `:bigint`. There are also special values that need to be returned:
+       # `:array`, `:map` and `:record`, for types representing those structures.
+       # `:enum` is also recognized.
+       #
+       # _@param_ `field`
+       def sql_type: (SchemaField field) -> Symbol
+
+       # Encode a message key. To be defined by subclass.
+       #
+       # _@param_ `key` — the value to use as the key.
+       #
+       # _@param_ `key_id` — the field name of the key.
+       #
+       # _@param_ `topic`
+       def encode_key: ((String | ::Hash[untyped, untyped]) key, (String | Symbol) key_id, ?topic: String?) -> String
+
+       # Decode a message key. To be defined by subclass.
+       #
+       # _@param_ `payload` — the message itself.
+       #
+       # _@param_ `key_id` — the field in the message to decode.
+       def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String
+
+       # Forcefully loads the schema into memory.
+       #
+       # _@return_ — The schema that is of use.
+       def load_schema: () -> Object
+
+       attr_accessor schema: String
+
+       attr_accessor namespace: String
+
+       attr_accessor key_schema: String
+     end
+
+     # Mock implementation of a schema backend that does no encoding or validation.
+     class Mock < Deimos::SchemaBackends::Base
+       def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]
+
+       def encode_payload: (::Hash[untyped, untyped] payload, schema: (String | Symbol), ?topic: String?) -> String
+
+       def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void
+
+       def schema_fields: () -> ::Array[SchemaField]
+
+       def coerce_field: (SchemaField field, Object value) -> Object
+
+       def encode_key: ((String | Symbol) key_id, (String | ::Hash[untyped, untyped]) key) -> String
+
+       def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String
+     end
+
+     # Encode / decode using Avro, either locally or via schema registry.
+     class AvroBase < Deimos::SchemaBackends::Base
+       def initialize: (schema: (String | Symbol), namespace: String) -> void
+
+       def encode_key: ((String | Symbol) key_id, (String | ::Hash[untyped, untyped]) key, ?topic: String?) -> String
+
+       def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String
+
+       # :nodoc:
+       def sql_type: (SchemaField field) -> Symbol
+
+       def coerce_field: (SchemaField field, Object value) -> Object
+
+       def schema_fields: () -> ::Array[SchemaField]
+
+       def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void
+
+       def load_schema: () -> Avro::Schema
+
+       def self.mock_backend: () -> Symbol
+
+       def self.content_type: () -> String
+
+       # _@param_ `schema` — A named schema
+       def self.schema_classname: (Avro::Schema::NamedSchema schema) -> String
+
+       # Converts Avro::Schema::NamedSchema's to String form for generated YARD docs.
+       # Recursively handles the typing for Arrays, Maps and Unions.
+       #
+       # _@param_ `avro_schema`
+       #
+       # _@return_ — A string representation of the Type of this SchemaField
+       def self.field_type: (Avro::Schema::NamedSchema avro_schema) -> String
+
+       # Returns the base type of this schema. Decodes Arrays, Maps and Unions
+       #
+       # _@param_ `schema`
+       def self.schema_base_class: (Avro::Schema::NamedSchema schema) -> Avro::Schema::NamedSchema
+
+       # Returns the value of attribute schema_store.
+       attr_accessor schema_store: untyped
+     end
+
+     # Encode / decode using local Avro encoding.
+     class AvroLocal < Deimos::SchemaBackends::AvroBase
+       def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]
+
+       def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
+     end
+
+     # Leave Ruby hashes as is but validate them against the schema.
+     # Useful for unit tests.
+     class AvroValidation < Deimos::SchemaBackends::AvroBase
+       def decode_payload: (String payload, ?schema: (String | Symbol)?) -> ::Hash[untyped, untyped]
+
+       def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
+     end
+
+     # Encode / decode using the Avro schema registry.
+     class AvroSchemaRegistry < Deimos::SchemaBackends::AvroBase
+       def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]
+
+       def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
+     end
+   end
+
1394
+ # To configure batch vs. message mode, change the delivery mode of your
1395
+ # Phobos listener.
1396
+ # Message-by-message -> use `delivery: message` or `delivery: batch`
1397
+ # Batch -> use `delivery: inline_batch`
1398
+ class ActiveRecordConsumer < Deimos::Consumer
1399
+ include Deimos::ActiveRecordConsume::MessageConsumption
1400
+ include Deimos::ActiveRecordConsume::BatchConsumption
1401
+
1402
+ # database.
1403
+ #
1404
+ # _@param_ `klass` — the class used to save to the
1405
+ def self.record_class: (singleton(ActiveRecord::Base) klass) -> void
1406
+
1407
+ # only the last message for each unique key in a batch is processed.
1408
+ #
1409
+ # _@param_ `val` — Turn pre-compaction of the batch on or off. If true,
1410
+ def self.compacted: (bool val) -> void
1411
+
1412
+ # Setup
1413
+ def initialize: () -> void
1414
+
1415
+ # Override this method (with `super`) if you want to add/change the default
1416
+ # attributes set to the new/existing record.
1417
+ #
1418
+ # _@param_ `payload`
1419
+ #
1420
+ # _@param_ `_key`
1421
+ def record_attributes: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ?String? _key) -> ::Hash[untyped, untyped]
1422
+
1423
+ # Override this message to conditionally save records
1424
+ #
1425
+ # _@param_ `_payload` — The kafka message
1426
+ #
1427
+ # _@return_ — if true, record is created/update.
1428
+ # If false, record processing is skipped but message offset is still committed.
1429
+ def process_message?: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload) -> bool
1430
+
1431
+ # Handle a batch of Kafka messages. Batches are split into "slices",
1432
+ # which are groups of independent messages that can be processed together
1433
+ # in a single database operation.
1434
+ # If two messages in a batch have the same key, we cannot process them
1435
+ # in the same operation as they would interfere with each other. Thus
1436
+ # they are split
1437
+ #
1438
+ # _@param_ `payloads` — Decoded payloads
1439
+ #
1440
+ # _@param_ `metadata` — Information about batch, including keys.
1441
+ def consume_batch: (::Array[(::Hash[untyped, untyped] | Deimos::SchemaClass::Record)] payloads, ::Hash[untyped, untyped] metadata) -> void
1442
+
1443
+ # Get the unique key for the ActiveRecord instance from the incoming key.
+ # Override this method (with super) to customize the set of attributes that
+ # uniquely identifies each record in the database.
+ #
+ # _@param_ `key` — The encoded key.
+ #
+ # _@return_ — The key attributes.
+ def record_key: (String key) -> ::Hash[untyped, untyped]
+
+ # Perform database operations for a batch of messages without compaction.
+ # All messages are split into slices containing only unique keys, and
+ # each slice is handled as its own batch.
+ #
+ # _@param_ `messages` — List of messages.
+ def uncompacted_update: (::Array[Message] messages) -> void
+
+ # Perform database operations for a group of messages.
+ # All messages with payloads are passed to upsert_records.
+ # All tombstone messages are passed to remove_records.
+ #
+ # _@param_ `messages` — List of messages.
+ def update_database: (::Array[Message] messages) -> void
+
+ # Upsert any non-deleted records.
+ #
+ # _@param_ `messages` — List of messages for a group of records to either be updated or inserted.
+ def upsert_records: (::Array[Message] messages) -> void
+
+ # Delete any records with a tombstone.
+ #
+ # _@param_ `messages` — List of messages for a group of deleted records.
+ def remove_records: (::Array[Message] messages) -> void
+
+ # Create an ActiveRecord relation that matches all of the passed
+ # records. Used for bulk deletion.
+ #
+ # _@param_ `records` — List of messages.
+ #
+ # _@return_ — Matching relation.
+ def deleted_query: (::Array[Message] records) -> ActiveRecord::Relation
+
+ # Get the set of attribute names that uniquely identify messages in the
+ # batch. Requires at least one record.
+ #
+ # _@param_ `records` — Non-empty list of messages.
+ #
+ # _@return_ — List of attribute names.
+ def key_columns: (::Array[Message] records) -> ::Array[String]
+
+ # Compact a batch of messages, taking only the last message for each
+ # unique key.
+ #
+ # _@param_ `batch` — Batch of messages.
+ #
+ # _@return_ — Compacted batch.
+ def compact_messages: (::Array[Message] batch) -> ::Array[Message]
+
+ # Find the record specified by the given payload and key.
+ # Default is to use the primary key column and the value of the first
+ # field in the key.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def fetch_record: (singleton(ActiveRecord::Base) klass, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> ActiveRecord::Base
+
+ # Assign a key to a new record.
+ #
+ # _@param_ `record`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def assign_key: (ActiveRecord::Base record, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> void
+
+ # _@param_ `payload` — Decoded payload.
+ #
+ # _@param_ `metadata` — Information about the message, including the key.
+ def consume: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # _@param_ `record`
+ def save_record: (ActiveRecord::Base record) -> void
+
+ # Destroy a record that received a null payload. Override if you need
+ # to do something other than a straight destroy (e.g. mark as archived).
+ #
+ # _@param_ `record`
+ def destroy_record: (ActiveRecord::Base record) -> void
+ end
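As a usage sketch of the override points above (the class, model, and key schema here are hypothetical, not taken from this diff): a consumer can call `super` in `record_key` to get the default decoded attributes and remap them, and override `destroy_record` to soft-delete on tombstones.

    # Hypothetical consumer; Widget is an assumed ActiveRecord model.
    class WidgetConsumer < Deimos::ActiveRecordConsumer
      record_class Widget

      # Remap the decoded key field to the model's primary key column
      # (assumes a single-field `widget_id` key schema).
      def record_key(key)
        { 'id' => super(key)['widget_id'] }
      end

      # Tombstone messages archive the row instead of deleting it.
      def destroy_record(record)
        record.update!(archived_at: Time.current)
      end
    end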
+
+ # Class which automatically produces a record when given an ActiveRecord
+ # instance or a list of them. Just call `send_events` on a list of records
+ # and they will be auto-published. You can override `generate_payload`
+ # to make changes to the payload before it's published.
+ #
+ # You can also call this with a list of hashes representing attributes.
+ # This is common when using activerecord-import.
+ class ActiveRecordProducer < Deimos::Producer
+ MAX_BATCH_SIZE: Integer
+
+ # Indicate the class this producer is working on.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `refetch` — if true, and we are given a hash instead of a record object, refetch the record to pass into the `generate_payload` method.
+ def self.record_class: (Class klass, ?refetch: bool) -> void
+
+ # _@param_ `record`
+ #
+ # _@param_ `force_send`
+ def self.send_event: (ActiveRecord::Base record, ?force_send: bool) -> void
+
+ # _@param_ `records`
+ #
+ # _@param_ `force_send`
+ def self.send_events: (::Array[ActiveRecord::Base] records, ?force_send: bool) -> void
+
+ # Generate the payload, given a list of attributes or a record.
+ # Can be overridden or added to by subclasses.
+ #
+ # _@param_ `attributes`
+ #
+ # _@param_ `_record` — May be nil if refetch_record is not set.
+ def self.generate_payload: (::Hash[untyped, untyped] attributes, ActiveRecord::Base _record) -> ::Hash[untyped, untyped]
+
+ # Query to use when polling the database with the DbPoller. Add
+ # includes, joins, or wheres as necessary, or replace entirely.
+ #
+ # _@param_ `time_from` — the time to start the query from.
+ #
+ # _@param_ `time_to` — the time to end the query.
+ #
+ # _@param_ `column_name` — the column name to look for.
+ #
+ # _@param_ `min_id` — the minimum ID (i.e. all IDs must be greater than this value).
+ def self.poll_query: (
+ time_from: Time,
+ time_to: Time,
+ ?column_name: Symbol,
+ min_id: Numeric
+ ) -> ActiveRecord::Relation
+ end
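A hedged usage sketch for this class (the `WidgetProducer` name, `Widget` model, and extra payload field are assumptions for illustration):

    # Hypothetical producer for Widget records.
    class WidgetProducer < Deimos::ActiveRecordProducer
      record_class Widget

      # Add a derived field on top of the default schema-mapped payload.
      def self.generate_payload(attributes, record)
        super.merge('label' => attributes['name'].to_s.upcase)
      end
    end

    # Publishes one message per record.
    WidgetProducer.send_events(Widget.where('updated_at > ?', 1.hour.ago))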
+
+ module Consume
+ # Helper methods used by batch consumers, i.e. those with "inline_batch"
+ # delivery. Payloads are decoded, then consumers are invoked with arrays
+ # of messages to be handled at once.
+ module BatchConsumption
+ include Phobos::BatchHandler
+ extend ActiveSupport::Concern
+
+ # _@param_ `batch`
+ #
+ # _@param_ `metadata`
+ def around_consume_batch: (::Array[String] batch, ::Hash[untyped, untyped] metadata) -> void
+
+ # Consume a batch of incoming messages.
+ #
+ # _@param_ `_payloads`
+ #
+ # _@param_ `_metadata`
+ def consume_batch: (::Array[Phobos::BatchMessage] _payloads, ::Hash[untyped, untyped] _metadata) -> void
+ end
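For illustration, a minimal inline-batch consumer (hypothetical class; assumes `inline_batch` delivery is configured for its topic, and that batch keys are exposed via `metadata[:keys]` as in the Deimos README):

    # Hypothetical batch consumer.
    class MyBatchConsumer < Deimos::Consumer
      # payloads is an array of decoded messages, one entry per Kafka message.
      def consume_batch(payloads, metadata)
        payloads.zip(metadata[:keys]).each do |payload, key|
          Rails.logger.info("key=#{key} payload=#{payload.inspect}")
        end
      end
    end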
+
+ # Methods used by message-by-message (non-batch) consumers. These consumers
+ # are invoked for every individual message.
+ module MessageConsumption
+ include Phobos::Handler
+ extend ActiveSupport::Concern
+
+ # _@param_ `payload`
+ #
+ # _@param_ `metadata`
+ def around_consume: (String payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # Consume an incoming message.
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `_metadata`
+ def consume: (String _payload, ::Hash[untyped, untyped] _metadata) -> void
+ end
+ end
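The message-by-message counterpart, equally hypothetical:

    # Hypothetical non-batch consumer; consume is called once per message.
    class MyConsumer < Deimos::Consumer
      def consume(payload, metadata)
        Rails.logger.info("key=#{metadata[:key]} payload=#{payload.inspect}")
      end
    end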
+
+ module Generators
+ # Generate the database poller migration.
+ class DbPollerGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def migration_version: () -> String
+
+ def db_migrate_path: () -> String
+
+ # Main method to create all the necessary files
+ def generate: () -> void
+ end
+
+ # Generate the database backend migration.
+ class DbBackendGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def migration_version: () -> String
+
+ def db_migrate_path: () -> String
+
+ # Main method to create all the necessary files
+ def generate: () -> void
+ end
+
+ # Generator for schema classes used for the IDE and consumer/producer interfaces
+ class SchemaClassGenerator < Rails::Generators::Base
+ SPECIAL_TYPES: ::Array[Symbol]
+ INITIALIZE_WHITESPACE: String
+ IGNORE_DEFAULTS: ::Array[String]
+ SCHEMA_CLASS_FILE: String
+ SCHEMA_RECORD_PATH: String
+ SCHEMA_ENUM_PATH: String
+
+ def generate: () -> void
+ end
+
+ # Generator for ActiveRecord model and migration.
+ class ActiveRecordGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def generate: () -> void
+ end
+ end
+
+ module ActiveRecordConsume
+ # Helper class for breaking down batches into independent groups for
+ # processing.
+ class BatchSlicer
+ # Split the batch into a series of independent slices. Each slice contains
+ # messages that can be processed in any order (i.e. they have distinct
+ # keys). Messages with the same key will be separated into different
+ # slices that maintain the correct order.
+ # E.g. given messages A1, A2, B1, C1, C2, C3, they will be sliced as:
+ # [[A1, B1, C1], [A2, C2], [C3]]
+ #
+ # _@param_ `messages`
+ def self.slice: (::Array[Message] messages) -> ::Array[::Array[Message]]
+ end
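The slicing rule is easy to restate: the nth occurrence of a key goes into the nth slice, so each slice holds at most one message per key and per-key order is preserved. An illustrative re-implementation (not the gem's code; assumes each message responds to `key`):

    def slice_messages(messages)
      slices = []
      seen = Hash.new(0) # key => occurrences so far
      messages.each do |message|
        index = seen[message.key]
        (slices[index] ||= []) << message
        seen[message.key] += 1
      end
      slices
    end

Running this on the A1..C3 example above yields [[A1, B1, C1], [A2, C2], [C3]].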
+
+ # Methods for consuming batches of messages and saving them to the database
+ # in bulk ActiveRecord operations.
+ module BatchConsumption
+ # Handle a batch of Kafka messages. Batches are split into "slices",
+ # which are groups of independent messages that can be processed together
+ # in a single database operation.
+ # If two messages in a batch have the same key, we cannot process them
+ # in the same operation as they would interfere with each other. Thus
+ # they are split into separate slices.
+ #
+ # _@param_ `payloads` — Decoded payloads.
+ #
+ # _@param_ `metadata` — Information about the batch, including keys.
+ def consume_batch: (::Array[(::Hash[untyped, untyped] | Deimos::SchemaClass::Record)] payloads, ::Hash[untyped, untyped] metadata) -> void
+
+ # Get the unique key for the ActiveRecord instance from the incoming key.
+ # Override this method (with super) to customize the set of attributes that
+ # uniquely identifies each record in the database.
+ #
+ # _@param_ `key` — The encoded key.
+ #
+ # _@return_ — The key attributes.
+ def record_key: (String key) -> ::Hash[untyped, untyped]
+
+ # Perform database operations for a batch of messages without compaction.
+ # All messages are split into slices containing only unique keys, and
+ # each slice is handled as its own batch.
+ #
+ # _@param_ `messages` — List of messages.
+ def uncompacted_update: (::Array[Message] messages) -> void
+
+ # Perform database operations for a group of messages.
+ # All messages with payloads are passed to upsert_records.
+ # All tombstone messages are passed to remove_records.
+ #
+ # _@param_ `messages` — List of messages.
+ def update_database: (::Array[Message] messages) -> void
+
+ # Upsert any non-deleted records.
+ #
+ # _@param_ `messages` — List of messages for a group of records to either be updated or inserted.
+ def upsert_records: (::Array[Message] messages) -> void
+
+ # Delete any records with a tombstone.
+ #
+ # _@param_ `messages` — List of messages for a group of deleted records.
+ def remove_records: (::Array[Message] messages) -> void
+
+ # Create an ActiveRecord relation that matches all of the passed
+ # records. Used for bulk deletion.
+ #
+ # _@param_ `records` — List of messages.
+ #
+ # _@return_ — Matching relation.
+ def deleted_query: (::Array[Message] records) -> ActiveRecord::Relation
+
+ # Get the set of attribute names that uniquely identify messages in the
+ # batch. Requires at least one record.
+ #
+ # _@param_ `records` — Non-empty list of messages.
+ #
+ # _@return_ — List of attribute names.
+ def key_columns: (::Array[Message] records) -> ::Array[String]
+
+ # Compact a batch of messages, taking only the last message for each
+ # unique key.
+ #
+ # _@param_ `batch` — Batch of messages.
+ #
+ # _@return_ — Compacted batch.
+ def compact_messages: (::Array[Message] batch) -> ::Array[Message]
+ end
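Taken together, these methods describe a last-write-wins flow: compact to the final message per key, then split into upserts and tombstones. A rough sketch of that logic (not the gem's implementation; assumes messages respond to `key` and `payload`):

    # Keep only the last message per key, preserving batch order.
    def compact(batch)
      batch.reverse.uniq(&:key).reverse
    end

    # Tombstones (nil payload) are removed; everything else is upserted.
    tombstones, upserts = compact(batch).partition { |m| m.payload.nil? }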
+
+ # Methods for consuming individual messages and saving them to the database
+ # as ActiveRecord instances.
+ module MessageConsumption
+ # Find the record specified by the given payload and key.
+ # Default is to use the primary key column and the value of the first
+ # field in the key.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def fetch_record: (singleton(ActiveRecord::Base) klass, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> ActiveRecord::Base
+
+ # Assign a key to a new record.
+ #
+ # _@param_ `record`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def assign_key: (ActiveRecord::Base record, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> void
+
+ # _@param_ `payload` — Decoded payload.
+ #
+ # _@param_ `metadata` — Information about the message, including the key.
+ def consume: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # _@param_ `record`
+ def save_record: (ActiveRecord::Base record) -> void
+
+ # Destroy a record that received a null payload. Override if you need
+ # to do something other than a straight destroy (e.g. mark as archived).
+ #
+ # _@param_ `record`
+ def destroy_record: (ActiveRecord::Base record) -> void
+ end
+
+ # Convert a message with a schema to an ActiveRecord model
+ class SchemaModelConverter
+ # Create a new converter.
+ #
+ # _@param_ `decoder` — Incoming message schema.
+ #
+ # _@param_ `klass` — Model to map to.
+ def initialize: (SchemaBackends::Base decoder, ActiveRecord::Base klass) -> void
+
+ # Convert a message from a decoded hash to a set of ActiveRecord
+ # attributes. Attributes that don't exist in the model will be ignored.
+ #
+ # _@param_ `payload` — Decoded message payload.
+ #
+ # _@return_ — Model attributes.
+ def convert: (::Hash[untyped, untyped] payload) -> ::Hash[untyped, untyped]
+ end
+ end
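A hedged usage sketch (the `Widget` model and the schema coordinates are assumptions; `Deimos.schema_backend` is used here as the gem's documented way to build a decoder):

    decoder   = Deimos.schema_backend(schema: 'MySchema', namespace: 'com.example')
    converter = Deimos::ActiveRecordConsume::SchemaModelConverter.new(decoder, Widget)

    # Fields without a matching Widget column are silently dropped.
    attrs = converter.convert('id' => 1, 'name' => 'hi', 'not_a_column' => true)
    Widget.new(attrs) # => widget with id=1, name="hi"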
+
+ # Class to coerce values in a payload to match a schema.
+ class AvroSchemaCoercer
+ # _@param_ `schema`
+ def initialize: (Avro::Schema schema) -> void
+
+ # Coerce a union value in a payload to match the schema.
+ #
+ # _@param_ `type`
+ #
+ # _@param_ `val`
+ def coerce_union: (Avro::Schema::UnionSchema _type, Object val) -> Object
+
+ # Coerce sub-records in a payload to match the schema.
+ #
+ # _@param_ `type`
+ #
+ # _@param_ `val`
+ def coerce_record: (Avro::Schema::RecordSchema _type, Object val) -> Object
+
+ # Coerce values in a payload to match the schema.
+ #
+ # _@param_ `type`
+ #
+ # _@param_ `val`
+ def coerce_type: (Avro::Schema _type, Object val) -> Object
+ end
+ end
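To make "coerce values to match the schema" concrete, here is a simplified stand-in for the primitive case of `coerce_type` (illustrative only, not the gem's implementation; the real method also dispatches to `coerce_union` and `coerce_record` for compound schemas, and `type_sym` is the avro gem's accessor for a schema's type):

    # Coerce a single primitive value to the shape its Avro type expects.
    def coerce_primitive(type, val)
      case type.type_sym
      when :int, :long     then val.to_i
      when :float, :double then val.to_f
      when :string         then val.to_s
      when :boolean        then !!val # truthiness
      else val
      end
    end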