deimos-ruby 1.16.3 → 1.16.5

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (84)
  1. checksums.yaml +4 -4
  2. data/.github/workflows/ci.yml +3 -3
  3. data/.gitignore +1 -0
  4. data/.rubocop.yml +20 -14
  5. data/.rubocop_todo.yml +364 -0
  6. data/.ruby-version +2 -1
  7. data/CHANGELOG.md +9 -0
  8. data/Gemfile +6 -0
  9. data/README.md +7 -1
  10. data/Steepfile +6 -0
  11. data/deimos-ruby.gemspec +3 -2
  12. data/lib/deimos/active_record_consume/batch_consumption.rb +7 -2
  13. data/lib/deimos/active_record_consume/batch_slicer.rb +2 -0
  14. data/lib/deimos/active_record_consume/message_consumption.rb +8 -4
  15. data/lib/deimos/active_record_consumer.rb +7 -4
  16. data/lib/deimos/active_record_producer.rb +3 -0
  17. data/lib/deimos/backends/base.rb +4 -2
  18. data/lib/deimos/backends/kafka.rb +1 -0
  19. data/lib/deimos/backends/kafka_async.rb +1 -0
  20. data/lib/deimos/config/configuration.rb +4 -0
  21. data/lib/deimos/config/phobos_config.rb +2 -1
  22. data/lib/deimos/consume/batch_consumption.rb +8 -1
  23. data/lib/deimos/consume/message_consumption.rb +4 -1
  24. data/lib/deimos/instrumentation.rb +11 -4
  25. data/lib/deimos/kafka_message.rb +1 -0
  26. data/lib/deimos/kafka_source.rb +5 -0
  27. data/lib/deimos/kafka_topic_info.rb +4 -0
  28. data/lib/deimos/message.rb +19 -2
  29. data/lib/deimos/metrics/datadog.rb +2 -1
  30. data/lib/deimos/metrics/mock.rb +2 -2
  31. data/lib/deimos/metrics/provider.rb +6 -0
  32. data/lib/deimos/monkey_patches/phobos_cli.rb +1 -1
  33. data/lib/deimos/monkey_patches/phobos_producer.rb +1 -0
  34. data/lib/deimos/producer.rb +12 -6
  35. data/lib/deimos/schema_backends/base.rb +31 -17
  36. data/lib/deimos/schema_backends/mock.rb +2 -2
  37. data/lib/deimos/schema_class/base.rb +9 -5
  38. data/lib/deimos/schema_class/enum.rb +4 -2
  39. data/lib/deimos/schema_class/record.rb +5 -5
  40. data/lib/deimos/shared_config.rb +6 -2
  41. data/lib/deimos/test_helpers.rb +21 -4
  42. data/lib/deimos/tracing/datadog.rb +1 -1
  43. data/lib/deimos/tracing/mock.rb +4 -3
  44. data/lib/deimos/tracing/provider.rb +5 -0
  45. data/lib/deimos/utils/db_poller.rb +9 -1
  46. data/lib/deimos/utils/db_producer.rb +14 -2
  47. data/lib/deimos/utils/deadlock_retry.rb +3 -0
  48. data/lib/deimos/utils/inline_consumer.rb +14 -6
  49. data/lib/deimos/utils/lag_reporter.rb +11 -0
  50. data/lib/deimos/utils/schema_controller_mixin.rb +8 -0
  51. data/lib/deimos/version.rb +1 -1
  52. data/lib/deimos.rb +3 -2
  53. data/lib/generators/deimos/active_record_generator.rb +1 -1
  54. data/lib/generators/deimos/db_backend_generator.rb +1 -0
  55. data/lib/generators/deimos/db_poller_generator.rb +1 -0
  56. data/lib/generators/deimos/schema_class/templates/schema_record.rb.tt +1 -1
  57. data/lib/generators/deimos/schema_class_generator.rb +13 -4
  58. data/rbs_collection.lock.yaml +176 -0
  59. data/rbs_collection.yaml +15 -0
  60. data/sig/avro.rbs +14 -0
  61. data/sig/defs.rbs +1867 -0
  62. data/sig/fig_tree.rbs +2 -0
  63. data/spec/consumer_spec.rb +14 -14
  64. data/spec/generators/schema_class/my_schema_spec.rb +3 -3
  65. data/spec/generators/schema_class/my_schema_with_complex_types_spec.rb +1 -1
  66. data/spec/producer_spec.rb +1 -1
  67. data/spec/schemas/my_namespace/my_schema_with_complex_type.rb +3 -3
  68. data/spec/snapshots/consumers-no-nest.snap +1 -1
  69. data/spec/snapshots/consumers.snap +1 -1
  70. data/spec/snapshots/consumers_and_producers-no-nest.snap +3 -3
  71. data/spec/snapshots/consumers_and_producers.snap +3 -3
  72. data/spec/snapshots/consumers_circular-no-nest.snap +1 -1
  73. data/spec/snapshots/consumers_circular.snap +1 -1
  74. data/spec/snapshots/consumers_complex_types-no-nest.snap +1 -1
  75. data/spec/snapshots/consumers_complex_types.snap +1 -1
  76. data/spec/snapshots/consumers_nested-no-nest.snap +1 -1
  77. data/spec/snapshots/consumers_nested.snap +1 -1
  78. data/spec/snapshots/namespace_folders.snap +3 -3
  79. data/spec/snapshots/producers_with_key-no-nest.snap +1 -1
  80. data/spec/snapshots/producers_with_key.snap +1 -1
  81. metadata +39 -21
  82. data/.gemfiles/avro_turf-0.gemfile +0 -3
  83. data/.gemfiles/avro_turf-1.gemfile +0 -3
  84. data/.ruby-gemset +0 -1
data/sig/defs.rbs ADDED
@@ -0,0 +1,1867 @@
# Generates a new consumer.
module Deimos
  include Deimos::Instrumentation
  include FigTree
  VERSION: untyped

  def self.schema_backend_class: () -> singleton(Deimos::SchemaBackends::Base)

  # _@param_ `schema`
  #
  # _@param_ `namespace`
  def self.schema_backend: (schema: (String | Symbol), namespace: String) -> Deimos::SchemaBackends::Base

  # _@param_ `schema`
  #
  # _@param_ `namespace`
  #
  # _@param_ `payload`
  #
  # _@param_ `subject`
  def self.encode: (
    schema: String,
    namespace: String,
    payload: ::Hash[untyped, untyped],
    ?subject: String?
  ) -> String

  # _@param_ `schema`
  #
  # _@param_ `namespace`
  #
  # _@param_ `payload`
  def self.decode: (schema: String, namespace: String, payload: String) -> ::Hash[untyped, untyped]?

  # Start the DB producers to send Kafka messages.
  #
  # _@param_ `thread_count` — the number of threads to start.
  def self.start_db_backend!: (?thread_count: Integer) -> void

  # Run a block without allowing any messages to be produced to Kafka.
  # Optionally add a list of producer classes to limit the disabling to those
  # classes.
  #
  # _@param_ `producer_classes`
  def self.disable_producers: (*(::Array[Class] | Class) producer_classes) -> void

  # Are producers disabled? If a class is passed in, check only that class.
  # Otherwise check if the global disable flag is set.
  #
  # _@param_ `producer_class`
  def self.producers_disabled?: (?Class? producer_class) -> bool

  # Loads generated classes
  def self.load_generated_schema_classes: () -> void

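A quick sketch of how the producer-disabling helpers above compose (MyProducer is a hypothetical Deimos::Producer subclass):

    # Nothing published by MyProducer inside the block reaches Kafka.
    Deimos.disable_producers(MyProducer) do
      MyProducer.publish('test_id' => 'id1', 'some_int' => 4)
    end
    Deimos.producers_disabled?(MyProducer) # => false again outside the block
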
  # Basically a struct to hold the message as it's processed.
  class Message
    # _@param_ `payload`
    #
    # _@param_ `producer`
    #
    # _@param_ `topic`
    #
    # _@param_ `key`
    #
    # _@param_ `partition_key`
    def initialize: (
      ::Hash[untyped, untyped] payload,
      Class producer,
      ?topic: String?,
      ?key: (String | Integer | ::Hash[untyped, untyped])?,
      ?partition_key: Integer?
    ) -> void

    # Add message_id and timestamp default values if they are in the
    # schema and don't already have values.
    #
    # _@param_ `fields` — existing field names in the schema.
    def add_fields: (::Array[String] fields) -> void

    # _@param_ `encoder`
    def coerce_fields: (Deimos::SchemaBackends::Base encoder) -> void

    def encoded_hash: () -> ::Hash[untyped, untyped]

    def to_h: () -> ::Hash[untyped, untyped]

    # _@param_ `other`
    def ==: (Message other) -> bool

    # _@return_ — True if this message is a tombstone
    def tombstone?: () -> bool

    attr_accessor payload: ::Hash[untyped, untyped]

    attr_accessor key: (::Hash[untyped, untyped] | String | Integer)

    attr_accessor partition_key: Integer

    attr_accessor encoded_key: String

    attr_accessor encoded_payload: String

    attr_accessor topic: String

    attr_accessor producer_name: String
  end

  # Add rake task to Rails.
  class Railtie < Rails::Railtie
  end

  # Basic consumer class. Inherit from this class and override either consume
  # or consume_batch, depending on the delivery mode of your listener.
  # `consume` -> use `delivery :message` or `delivery :batch`
  # `consume_batch` -> use `delivery :inline_batch`
  class Consumer
    include Deimos::Consume::MessageConsumption
    include Deimos::Consume::BatchConsumption
    include Deimos::SharedConfig

    def self.decoder: () -> Deimos::SchemaBackends::Base

    def self.key_decoder: () -> Deimos::SchemaBackends::Base

    # Helper method to decode an encoded key.
    #
    # _@param_ `key`
    #
    # _@return_ — the decoded key.
    def decode_key: (String key) -> Object

    # Helper method to decode an encoded message.
    #
    # _@param_ `payload`
    #
    # _@return_ — the decoded message.
    def decode_message: (Object payload) -> Object

    # _@param_ `batch`
    #
    # _@param_ `metadata`
    def around_consume_batch: (::Array[String] batch, ::Hash[untyped, untyped] metadata) -> void

    # Consume a batch of incoming messages.
    #
    # _@param_ `_payloads`
    #
    # _@param_ `_metadata`
    def consume_batch: (::Array[Phobos::BatchMessage] _payloads, ::Hash[untyped, untyped] _metadata) -> void

    # _@param_ `payload`
    #
    # _@param_ `metadata`
    def around_consume: (String payload, ::Hash[untyped, untyped] metadata) -> void

    # Consume incoming messages.
    #
    # _@param_ `_payload`
    #
    # _@param_ `_metadata`
    def consume: (String _payload, ::Hash[untyped, untyped] _metadata) -> void
  end

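As a hedged sketch of the override contract above (the class and schema names are hypothetical):

    class MyConsumer < Deimos::Consumer
      schema 'MySchema'
      namespace 'com.my-namespace'
      key_config field: :test_id

      # Used with `delivery :message` or `delivery :batch`.
      def consume(payload, metadata)
        puts "Got #{payload.inspect} with key #{metadata[:key]}"
      end
    end
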
  # Producer to publish messages to a given kafka topic.
  class Producer
    include Deimos::SharedConfig
    MAX_BATCH_SIZE: Integer

    def self.config: () -> ::Hash[untyped, untyped]

    # Set the topic.
    #
    # _@param_ `topic`
    #
    # _@return_ — the current topic if no argument given.
    def self.topic: (?String? topic) -> String

    # Override the default partition key (which is the payload key).
    # Will include `payload_key` if it is part of the original payload.
    #
    # _@param_ `_payload` — the payload being passed into the produce method.
    def self.partition_key: (::Hash[untyped, untyped] _payload) -> String

    # Publish the payload to the topic.
    #
    # _@param_ `payload` — with an optional payload_key hash key.
    #
    # _@param_ `topic` — if specifying the topic
    def self.publish: ((::Hash[untyped, untyped] | SchemaClass::Record) payload, ?topic: String) -> void

    # Publish a list of messages.
    #
    # _@param_ `payloads` — with optional payload_key hash key.
    #
    # _@param_ `sync` — if given, override the default setting of whether to publish synchronously.
    #
    # _@param_ `force_send` — if true, ignore the configured backend and send immediately to Kafka.
    #
    # _@param_ `topic` — if specifying the topic
    def self.publish_list: (
      ::Array[(::Hash[untyped, untyped] | SchemaClass::Record)] payloads,
      ?sync: bool?,
      ?force_send: bool,
      ?topic: String
    ) -> void

    # _@param_ `sync`
    #
    # _@param_ `force_send`
    def self.determine_backend_class: (bool sync, bool force_send) -> singleton(Deimos::Backends::Base)

    # Send a batch to the backend.
    #
    # _@param_ `backend`
    #
    # _@param_ `batch`
    def self.produce_batch: (singleton(Deimos::Backends::Base) backend, ::Array[Deimos::Message] batch) -> void

    def self.encoder: () -> Deimos::SchemaBackends::Base

    def self.key_encoder: () -> Deimos::SchemaBackends::Base

    # Override this in active record producers to add
    # non-schema fields to check for updates
    #
    # _@return_ — fields to check for updates
    def self.watched_attributes: () -> ::Array[String]
  end

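A minimal producer sketch built on the `topic`/`publish` methods above (names are hypothetical):

    class MyProducer < Deimos::Producer
      topic 'MyTopic'
      schema 'MySchema'
      namespace 'com.my-namespace'
      key_config field: :test_id

      def self.send_some_message(an_object)
        publish('test_id' => an_object.id, 'some_int' => an_object.some_int)
      end
    end
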
  # ActiveRecord class to record the last time we polled the database.
  # For use with DbPoller.
  class PollInfo < ActiveRecord::Base
  end

  module Backends
    # Backend which saves messages to the database instead of immediately
    # sending them.
    class Db < Deimos::Backends::Base
      # :nodoc:
      def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void

      # _@param_ `message`
      #
      # _@return_ — the partition key to use for this message
      def self.partition_key_for: (Deimos::Message message) -> String
    end

    # Abstract class for all publish backends.
    class Base
      # _@param_ `producer_class`
      #
      # _@param_ `messages`
      def self.publish: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void

      # _@param_ `producer_class`
      #
      # _@param_ `messages`
      def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
    end

    # Backend which saves messages to an in-memory hash.
    class Test < Deimos::Backends::Base
      def self.sent_messages: () -> ::Array[::Hash[untyped, untyped]]

      def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
    end

    # Default backend to produce to Kafka.
    class Kafka < Deimos::Backends::Base
      include Phobos::Producer

      # Shut down the producer if necessary.
      def self.shutdown_producer: () -> void

      # :nodoc:
      def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
    end

    # Backend which produces to Kafka via an async producer.
    class KafkaAsync < Deimos::Backends::Base
      include Phobos::Producer

      # Shut down the producer cleanly.
      def self.shutdown_producer: () -> void

      # :nodoc:
      def self.execute: (producer_class: singleton(Deimos::Producer), messages: ::Array[Deimos::Message]) -> void
    end
  end

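Custom backends follow the `execute` contract of Backends::Base above; a hypothetical log-only backend as a sketch:

    class LogOnlyBackend < Deimos::Backends::Base
      def self.execute(producer_class:, messages:)
        # Each element is a Deimos::Message carrying payload, key and topic.
        messages.each { |m| Deimos.config.logger.info("Would publish: #{m.to_h.inspect}") }
      end
    end
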
  # Represents an object which needs to inform Kafka when it is saved or
  # bulk imported.
  module KafkaSource
    extend ActiveSupport::Concern
    DEPRECATION_WARNING: String

    # Send the newly created model to Kafka.
    def send_kafka_event_on_create: () -> void

    # Send the newly updated model to Kafka.
    def send_kafka_event_on_update: () -> void

    # Send a deletion (null payload) event to Kafka.
    def send_kafka_event_on_destroy: () -> void

    # Payload to send after we are destroyed.
    def deletion_payload: () -> ::Hash[untyped, untyped]

    # :nodoc:
    module ClassMethods
      def kafka_config: () -> ::Hash[untyped, untyped]

      # _@return_ — the producers to run.
      def kafka_producers: () -> ::Array[Deimos::ActiveRecordProducer]
    end
  end

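A hedged sketch of wiring a model into KafkaSource (Widget and WidgetProducer are hypothetical):

    class Widget < ActiveRecord::Base
      include Deimos::KafkaSource

      # Which lifecycle events to publish, per the ClassMethods contract above.
      def self.kafka_config
        { update: true, delete: true, import: true, key: :id }
      end

      def self.kafka_producers
        [WidgetProducer]
      end
    end
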
  module Metrics
    # A mock Metrics wrapper which just logs the metrics
    class Mock < Deimos::Metrics::Provider
      # _@param_ `logger`
      def initialize: (?Logger? logger) -> void

      # :nodoc:
      def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
    end

    # A Metrics wrapper class for Datadog.
    class Datadog < Deimos::Metrics::Provider
      # _@param_ `config`
      #
      # _@param_ `logger`
      def initialize: (::Hash[untyped, untyped] config, Logger logger) -> void

      # :nodoc:
      def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # :nodoc:
      def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
    end

    # Base class for all metrics providers.
    class Provider
      # Send a counter increment metric
      #
      # _@param_ `metric_name` — The name of the counter metric
      #
      # _@param_ `options` — Any additional options, e.g. :tags
      def increment: (String metric_name, ?::Hash[untyped, untyped] options) -> void

      # Send a gauge metric
      #
      # _@param_ `metric_name` — The name of the gauge metric
      #
      # _@param_ `count`
      #
      # _@param_ `options` — Any additional options, e.g. :tags
      def gauge: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # Send a histogram metric
      #
      # _@param_ `metric_name` — The name of the histogram metric
      #
      # _@param_ `count`
      #
      # _@param_ `options` — Any additional options, e.g. :tags
      def histogram: (String metric_name, Integer count, ?::Hash[untyped, untyped] options) -> void

      # Time a yielded block, and send a timer metric
      #
      # _@param_ `metric_name` — The name of the metric
      #
      # _@param_ `options` — Any additional options, e.g. :tags
      def time: (String metric_name, ?::Hash[untyped, untyped] options) -> void
    end
  end

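Any object implementing the four Provider methods can be plugged in as a metrics backend; a hypothetical stdout provider as a sketch:

    class StdoutMetrics < Deimos::Metrics::Provider
      def increment(metric_name, options = {})
        puts "INCR #{metric_name} tags=#{options[:tags].inspect}"
      end

      def gauge(metric_name, count, options = {})
        puts "GAUGE #{metric_name}=#{count}"
      end

      def histogram(metric_name, count, options = {})
        puts "HIST #{metric_name}=#{count}"
      end

      def time(metric_name, options = {})
        start = Time.now
        result = yield
        puts "TIME #{metric_name}: #{Time.now - start}s"
        result
      end
    end
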
  # Include this module in your RSpec spec_helper
  # to stub out external dependencies
  # and add methods to use to test encoding/decoding.
  module TestHelpers
    extend ActiveSupport::Concern

    # for backwards compatibility
    def self.sent_messages: () -> ::Array[::Hash[untyped, untyped]]

    # Set the config to the right settings for a unit test
    def self.unit_test!: () -> void

    # Kafka test config with avro schema registry
    def self.full_integration_test!: () -> void

    # Set the config to the right settings for a kafka test
    def self.kafka_test!: () -> void

    # Clear all sent messages - e.g. if we want to check that
    # particular messages were sent or not sent after a point in time.
    def clear_kafka_messages!: () -> void

    # Test that a given handler will consume a given payload correctly, i.e.
    # that the schema is correct. If a block is given, that block will be
    # executed when `consume` is called. Otherwise it will just confirm that
    # `consume` is called at all.
    #
    # _@param_ `handler_class_or_topic` — Class which inherits from Deimos::Consumer or the topic as a string
    #
    # _@param_ `payload` — the payload to consume
    #
    # _@param_ `call_original` — if true, allow the consume handler to continue as normal. Not compatible with a block.
    #
    # _@param_ `skip_expectation` — Set to true to not place any expectations on the consumer. Primarily used internally to Deimos.
    #
    # _@param_ `key` — the key to use.
    #
    # _@param_ `partition_key` — the partition key to use.
    def test_consume_message: (
      (Class | String) handler_class_or_topic,
      ::Hash[untyped, untyped] payload,
      ?call_original: bool,
      ?key: Object?,
      ?partition_key: Object?,
      ?skip_expectation: bool
    ) -> void

    # Check to see that a given message will fail due to validation errors.
    #
    # _@param_ `handler_class`
    #
    # _@param_ `payload`
    def test_consume_invalid_message: (Class handler_class, ::Hash[untyped, untyped] payload) -> void

    # Test that a given handler will consume a given batch payload correctly,
    # i.e. that the schema is correct. If a block is given, that block will be
    # executed when `consume` is called. Otherwise it will just confirm that
    # `consume` is called at all.
    #
    # _@param_ `handler_class_or_topic` — Class which inherits from Deimos::Consumer or the topic as a string
    #
    # _@param_ `payloads` — the payload to consume
    #
    # _@param_ `keys`
    #
    # _@param_ `partition_keys`
    #
    # _@param_ `call_original`
    #
    # _@param_ `skip_expectation`
    def test_consume_batch: (
      (Class | String) handler_class_or_topic,
      ::Array[::Hash[untyped, untyped]] payloads,
      ?keys: ::Array[(::Hash[untyped, untyped] | String)],
      ?partition_keys: ::Array[Integer],
      ?call_original: bool,
      ?skip_expectation: bool
    ) -> void

    # Check to see that a given message will fail due to validation errors.
    #
    # _@param_ `handler_class`
    #
    # _@param_ `payloads`
    def test_consume_batch_invalid_message: (Class handler_class, ::Array[::Hash[untyped, untyped]] payloads) -> void
  end

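A hedged RSpec sketch using the helpers above (MyConsumer is the hypothetical consumer from earlier):

    RSpec.configure do |config|
      config.include Deimos::TestHelpers
      config.before(:each) { Deimos::TestHelpers.unit_test! }
    end

    it 'consumes a valid message' do
      test_consume_message(MyConsumer,
                           { 'test_id' => 'id1', 'some_int' => 3 },
                           call_original: true)
    end
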
  module Tracing
    # Class that mocks out tracing functionality
    class Mock < Deimos::Tracing::Provider
      # _@param_ `logger`
      def initialize: (?Logger? logger) -> void

      # _@param_ `span_name`
      #
      # _@param_ `_options`
      def start: (String span_name, ?::Hash[untyped, untyped] _options) -> Object

      # :nodoc:
      def finish: (Object span) -> void

      # :nodoc:
      def active_span: () -> Object

      # :nodoc:
      def set_tag: (String tag, String value, ?Object? span) -> void

      # :nodoc:
      def set_error: (Object span, Exception exception) -> void
    end

    # Tracing wrapper class for Datadog.
    class Datadog < Deimos::Tracing::Provider
      # _@param_ `config`
      def initialize: (::Hash[untyped, untyped] config) -> void

      # :nodoc:
      def start: (String span_name, ?::Hash[untyped, untyped] options) -> Object

      # :nodoc:
      def finish: (Object span) -> void

      # :nodoc:
      def active_span: () -> Object

      # :nodoc:
      def set_error: (Object span, Exception exception) -> void

      # :nodoc:
      def set_tag: (String tag, String value, ?Object? span) -> void
    end

    # Base class for all tracing providers.
    class Provider
      # Returns a span object and starts the trace.
      #
      # _@param_ `span_name` — The name of the span/trace
      #
      # _@param_ `options` — Options for the span
      #
      # _@return_ — The span object
      def start: (String span_name, ?::Hash[untyped, untyped] options) -> Object

      # Finishes the trace on the span object.
      #
      # _@param_ `span` — The span to finish trace on
      def finish: (Object span) -> void

      # Set an error on the span.
      #
      # _@param_ `span` — The span to set error on
      #
      # _@param_ `exception` — The exception that occurred
      def set_error: (Object span, Exception exception) -> void

      # Get the currently activated span.
      def active_span: () -> Object

      # Set a tag to a span. Use the currently active span if not given.
      #
      # _@param_ `tag`
      #
      # _@param_ `value`
      #
      # _@param_ `span`
      def set_tag: (String tag, String value, ?Object? span) -> void
    end
  end

  # Store Kafka messages into the database.
  class KafkaMessage < ActiveRecord::Base
    # Ensure it gets turned into a string, e.g. for testing purposes. It
    # should already be a string.
    #
    # _@param_ `mess`
    def message=: (Object mess) -> void

    # Decoded payload for this message.
    def decoded_message: () -> ::Hash[untyped, untyped]

    # Get a decoder to decode a set of messages on the given topic.
    #
    # _@param_ `topic`
    def self.decoder: (String topic) -> Deimos::Consumer

    # Decoded payloads for a list of messages.
    #
    # _@param_ `messages`
    def self.decoded: (?::Array[Deimos::KafkaMessage] messages) -> ::Array[::Hash[untyped, untyped]]

    def phobos_message: () -> ::Hash[untyped, untyped]
  end

  # Module that producers and consumers can share which sets up configuration.
  module SharedConfig
    extend ActiveSupport::Concern

    # need to use this instead of class_methods to be backwards-compatible
    # with Rails 3
    module ClassMethods
      def config: () -> ::Hash[untyped, untyped]

      # Set the schema.
      #
      # _@param_ `schema`
      def schema: (String schema) -> void

      # Set the namespace.
      #
      # _@param_ `namespace`
      def namespace: (String namespace) -> void

      # Set key configuration.
      #
      # _@param_ `field` — the name of a field to use in the value schema as a generated key schema
      #
      # _@param_ `schema` — the name of a schema to use for the key
      #
      # _@param_ `plain` — if true, do not encode keys at all
      #
      # _@param_ `none` — if true, do not use keys at all
      def key_config: (
        ?plain: bool?,
        ?field: Symbol?,
        ?schema: (String | Symbol)?,
        ?none: bool?
      ) -> void

      # _@param_ `use_schema_classes`
      def schema_class_config: (bool use_schema_classes) -> void
    end
  end

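The four `key_config` modes above are mutually exclusive; a hedged sketch of each form:

    class MyKeyedProducer < Deimos::Producer   # hypothetical
      key_config field: :test_id        # derive a key schema from one value field
      # key_config schema: 'MySchemaKey'  # or supply an explicit key schema
      # key_config plain: true            # or send keys without encoding them
      # key_config none: true             # or produce messages with no key
    end
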
  # @deprecated Use Deimos::Consumer with `delivery: inline_batch` configured instead
  class BatchConsumer < Deimos::Consumer
  end

  # Copied from Phobos instrumentation.
  module Instrumentation
    extend ActiveSupport::Concern
    NAMESPACE: String

    # :nodoc:
    module ClassMethods
      # _@param_ `event`
      def subscribe: (String event) -> void

      # _@param_ `subscriber`
      def unsubscribe: (ActiveSupport::Subscriber subscriber) -> void

      # _@param_ `event`
      #
      # _@param_ `extra`
      def instrument: (String event, ?::Hash[untyped, untyped] extra) -> void
    end
  end

  # This module listens to events published by RubyKafka.
  module KafkaListener
    # Listens for any exceptions that happen during publishing and re-publishes
    # as a Deimos event.
    #
    # _@param_ `event`
    def self.send_produce_error: (ActiveSupport::Notifications::Event event) -> void
  end

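A hedged sketch of listening for the re-published produce errors (the payload keys follow the Deimos README's produce_error example):

    Deimos.subscribe('produce_error') do |event|
      data = event.payload
      puts "Error #{data[:exception_object].message} on topic #{data[:topic]} " \
           "with payloads #{data[:payloads].inspect}"
    end
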
  module Utils
    # Class which continually polls the database and sends Kafka messages.
    class DbPoller
      BATCH_SIZE: Integer

      # Begin the DB Poller process.
      def self.start!: () -> void

      # _@param_ `config`
      def initialize: (FigTree::ConfigStruct config) -> void

      # Start the poll:
      # 1) Grab the current PollInfo from the database indicating the last
      # time we ran
      # 2) On a loop, process all the recent updates between the last time
      # we ran and now.
      def start: () -> void

      # Grab the PollInfo or create if it doesn't exist.
      def retrieve_poll_info: () -> void

      # Stop the poll.
      def stop: () -> void

      # Indicate whether this current loop should process updates. Most loops
      # will busy-wait (sleeping 0.1 seconds) until they're ready.
      def should_run?: () -> bool

      # _@param_ `record`
      def last_updated: (ActiveRecord::Base record) -> ActiveSupport::TimeWithZone

      # Send messages for updated data.
      def process_updates: () -> void

      # _@param_ `time_from`
      #
      # _@param_ `time_to`
      def fetch_results: (ActiveSupport::TimeWithZone time_from, ActiveSupport::TimeWithZone time_to) -> ActiveRecord::Relation

      # _@param_ `batch`
      def process_batch: (::Array[ActiveRecord::Base] batch) -> void

      # Needed for Executor so it can identify the worker
      attr_reader id: Integer
    end

    # Class which continually polls the kafka_messages table
    # in the database and sends Kafka messages.
    class DbProducer
      include Phobos::Producer
      BATCH_SIZE: Integer
      DELETE_BATCH_SIZE: Integer
      MAX_DELETE_ATTEMPTS: Integer

      # _@param_ `logger`
      def initialize: (?Logger logger) -> void

      def config: () -> FigTree

      # Start the poll.
      def start: () -> void

      # Stop the poll.
      def stop: () -> void

      # Complete one loop of processing all messages in the DB.
      def process_next_messages: () -> void

      def retrieve_topics: () -> ::Array[String]

      # _@param_ `topic`
      #
      # _@return_ — the topic that was locked, or nil if none were.
      def process_topic: (String topic) -> String?

      # Process a single batch in a topic.
      def process_topic_batch: () -> void

      # _@param_ `messages`
      def delete_messages: (::Array[Deimos::KafkaMessage] messages) -> void

      def retrieve_messages: () -> ::Array[Deimos::KafkaMessage]

      # _@param_ `messages`
      def log_messages: (::Array[Deimos::KafkaMessage] messages) -> void

      # Send metrics related to pending messages.
      def send_pending_metrics: () -> void

      # Shut down the sync producer if we have to. Phobos will automatically
      # create a new one. We should call this if the producer can be in a bad
      # state and e.g. we need to clear the buffer.
      def shutdown_producer: () -> void

      # Produce messages in batches, reducing the size by 1/10 if the batch is
      # too large. Does not retry batches of messages that have already been sent.
      #
      # _@param_ `batch`
      def produce_messages: (::Array[::Hash[untyped, untyped]] batch) -> void

      # _@param_ `batch`
      def compact_messages: (::Array[Deimos::KafkaMessage] batch) -> ::Array[Deimos::KafkaMessage]

      # Returns the value of attribute id.
      attr_accessor id: untyped

      # Returns the value of attribute current_topic.
      attr_accessor current_topic: untyped
    end

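Together these two classes support an outbox-style flow: producers write rows via the :db backend and a separate process drains them. A hedged configuration sketch:

    Deimos.configure do |config|
      config.producers.backend = :db
    end

    # In a separate process (e.g. a rake task), start the DB producer threads:
    Deimos.start_db_backend!(thread_count: 2)
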
    # Class that manages reporting lag.
    class LagReporter
      extend Mutex_m

      # Reset all group information.
      def self.reset: () -> void

      # offset_lag = event.payload.fetch(:offset_lag)
      # group_id = event.payload.fetch(:group_id)
      # topic = event.payload.fetch(:topic)
      # partition = event.payload.fetch(:partition)
      #
      # _@param_ `payload`
      def self.message_processed: (::Hash[untyped, untyped] payload) -> void

      # _@param_ `payload`
      def self.offset_seek: (::Hash[untyped, untyped] payload) -> void

      # _@param_ `payload`
      def self.heartbeat: (::Hash[untyped, untyped] payload) -> void

      # Class that has a list of topics
      class ConsumerGroup
        # _@param_ `id`
        def initialize: (String id) -> void

        # _@param_ `topic`
        #
        # _@param_ `partition`
        def report_lag: (String topic, Integer partition) -> void

        # _@param_ `topic`
        #
        # _@param_ `partition`
        #
        # _@param_ `offset`
        def assign_current_offset: (String topic, Integer partition, Integer offset) -> void

        attr_accessor topics: ::Hash[String, Topic]

        attr_accessor id: String
      end

      # Topic which has a hash of partition => last known current offsets
      class Topic
        # _@param_ `topic_name`
        #
        # _@param_ `group`
        def initialize: (String topic_name, ConsumerGroup group) -> void

        # _@param_ `partition`
        #
        # _@param_ `offset`
        def assign_current_offset: (Integer partition, Integer offset) -> void

        # _@param_ `partition`
        #
        # _@param_ `offset`
        def compute_lag: (Integer partition, Integer offset) -> Integer

        # _@param_ `partition`
        def report_lag: (Integer partition) -> void

        attr_accessor topic_name: String

        attr_accessor partition_current_offsets: ::Hash[Integer, Integer]

        attr_accessor consumer_group: ConsumerGroup
      end
    end

    # Class used by SchemaClassGenerator and Consumer/Producer interfaces
    module SchemaClass
      # _@param_ `namespace`
      def self.modules_for: (String namespace) -> ::Array[String]

      # Converts a raw payload into an instance of the Schema Class
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      #
      # _@param_ `namespace`
      def self.instance: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Base) payload, String schema, ?String namespace) -> Deimos::SchemaClass::Record

      # _@param_ `config` — Producer or Consumer config
      def self.use?: (::Hash[untyped, untyped] config) -> bool
    end

    # Utility class to retry a given block if a deadlock is encountered.
    # Supports Postgres and MySQL deadlocks and lock wait timeouts.
    class DeadlockRetry
      RETRY_COUNT: Integer
      DEADLOCK_MESSAGES: ::Array[String]

      # Retry the given block when encountering a deadlock. For any other
      # exceptions, they are reraised. This is used to handle cases where
      # the database may be busy but the transaction would succeed if
      # retried later. Note that your block should be idempotent and it will
      # be wrapped in a transaction.
      # Sleeps for a random number of seconds to prevent multiple transactions
      # from retrying at the same time.
      #
      # _@param_ `tags` — Tags to attach when logging and reporting metrics.
      def self.wrap: (?::Array[untyped] tags) -> void
    end

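A hedged usage sketch of DeadlockRetry.wrap; because the block is wrapped in a transaction and may run more than once, it must be idempotent (Widget and rows are hypothetical):

    Deimos::Utils::DeadlockRetry.wrap(%w(topic:my-topic)) do
      Widget.upsert_all(rows)
    end
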
    # Listener that can seek to get the last X messages in a topic.
    class SeekListener < Phobos::Listener
      MAX_SEEK_RETRIES: Integer

      def start_listener: () -> void

      attr_accessor num_messages: Integer
    end

    # Class to return the messages consumed.
    class MessageBankHandler < Deimos::Consumer
      include Phobos::Handler

      # _@param_ `klass`
      def self.config_class=: (singleton(Deimos::Consumer) klass) -> void

      # _@param_ `_kafka_client`
      def self.start: (Kafka::Client _kafka_client) -> void

      # _@param_ `payload`
      #
      # _@param_ `metadata`
      def consume: (::Hash[untyped, untyped] payload, ::Hash[untyped, untyped] metadata) -> void
    end

    # Class which can process/consume messages inline.
    class InlineConsumer
      MAX_MESSAGE_WAIT_TIME: Integer
      MAX_TOPIC_WAIT_TIME: Integer

      # Get the last X messages from a topic. You can specify a subclass of
      # Deimos::Consumer or Deimos::Producer, or provide the
      # schema, namespace and key_config directly.
      #
      # _@param_ `topic`
      #
      # _@param_ `config_class`
      #
      # _@param_ `schema`
      #
      # _@param_ `namespace`
      #
      # _@param_ `key_config`
      #
      # _@param_ `num_messages`
      def self.get_messages_for: (
        topic: String,
        ?schema: String?,
        ?namespace: String?,
        ?key_config: ::Hash[untyped, untyped]?,
        ?config_class: (singleton(Deimos::Consumer) | singleton(Deimos::Producer))?,
        ?num_messages: Integer
      ) -> ::Array[::Hash[untyped, untyped]]

      # Consume the last X messages from a topic.
      #
      # _@param_ `topic`
      #
      # _@param_ `frk_consumer`
      #
      # _@param_ `num_messages` — If this number is >= the number of messages in the topic, all messages will be consumed.
      def self.consume: (topic: String, frk_consumer: Class, ?num_messages: Integer) -> void
    end

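A hedged sketch of fetching recent messages with an explicit schema rather than a consumer class:

    messages = Deimos::Utils::InlineConsumer.get_messages_for(
      topic: 'my-topic',
      schema: 'MySchema',
      namespace: 'com.my-namespace',
      key_config: { field: 'test_id' },
      num_messages: 5
    )
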
    # Mixin to automatically decode schema-encoded payloads when given the correct content type,
    # and provide the `render_schema` method to encode the payload for responses.
    module SchemaControllerMixin
      extend ActiveSupport::Concern

      def schema_format?: () -> bool

      # Get the namespace from either an existing instance variable, or tease it out of the schema.
      #
      # _@param_ `type` — :request or :response
      #
      # _@return_ — the namespace and schema.
      def parse_namespace: (Symbol _type) -> ::Array[(String | String)]

      # Decode the payload with the parameters.
      def decode_schema: () -> void

      # Render a hash into a payload as specified by the configured schema and namespace.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      #
      # _@param_ `namespace`
      def render_schema: (::Hash[untyped, untyped] payload, ?schema: String?, ?namespace: String?) -> void

      # :nodoc:
      module ClassMethods
        def schema_mapping: () -> ::Hash[String, ::Hash[Symbol, String]]

        # Indicate which schemas should be assigned to actions.
        #
        # _@param_ `actions`
        #
        # _@param_ `kwactions`
        #
        # _@param_ `request`
        #
        # _@param_ `response`
        def schemas: (
          *Symbol actions,
          ?request: String?,
          ?response: String?,
          **String kwactions
        ) -> void

        def namespaces: () -> ::Hash[Symbol, String]

        # Set the namespace for both requests and responses.
        #
        # _@param_ `name`
        def namespace: (String name) -> void

        # Set the namespace for requests.
        #
        # _@param_ `name`
        def request_namespace: (String name) -> void

        # Set the namespace for responses.
        #
        # _@param_ `name`
        def response_namespace: (String name) -> void
      end
    end
  end

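A hedged controller sketch following the mixin's documented contract (the controller, namespaces and schema names are hypothetical):

    class WidgetsController < ApplicationController
      include Deimos::Utils::SchemaControllerMixin

      request_namespace 'com.my-namespace.requests'
      response_namespace 'com.my-namespace.responses'
      schemas :create, request: 'CreateWidget', response: 'Widget'

      def create
        # Request params are decoded via decode_schema; render_schema
        # encodes the response hash with the configured response schema.
        render_schema('id' => 1)
      end
    end
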
  # Record that keeps track of which topics are being worked on by DbProducers.
  class KafkaTopicInfo < ActiveRecord::Base
    # Lock a topic for the given ID. Returns whether the lock was successful.
    #
    # _@param_ `topic`
    #
    # _@param_ `lock_id`
    def self.lock: (String topic, String lock_id) -> bool

    # This is called once a producer is finished working on a topic, i.e.
    # there are no more messages to fetch. It unlocks the topic and
    # moves on to the next one.
    #
    # _@param_ `topic`
    #
    # _@param_ `lock_id`
    def self.clear_lock: (String topic, String lock_id) -> void

    # Update all topics that aren't currently locked and have no messages
    # waiting. It's OK if some messages get inserted in the middle of this
    # because the point is that at least within a few milliseconds of each
    # other, it wasn't locked and had no messages, meaning the topic
    # was in a good state.
    #
    # _@param_ `except_topics` — the list of topics we've just realized had messages in them, meaning all other topics were empty.
    def self.ping_empty_topics: (::Array[String] except_topics) -> void

    # The producer calls this if it gets an error sending messages. This
    # essentially locks down this topic for 1 minute (for all producers)
    # and allows the caller to continue to the next topic.
    #
    # _@param_ `topic`
    #
    # _@param_ `lock_id`
    def self.register_error: (String topic, String lock_id) -> void

    # Update the locked_at timestamp to indicate that the producer is still
    # working on those messages and to continue.
    #
    # _@param_ `topic`
    #
    # _@param_ `lock_id`
    def self.heartbeat: (String topic, String lock_id) -> void
  end

  module SchemaClass
    # Base Class for Schema Classes generated from Avro.
    class Base
      # _@param_ `_args`
      def initialize: (*::Array[Object] _args) -> void

      # Converts the object to a hash which can be used for debugging or comparing objects.
      #
      # _@param_ `_opts`
      #
      # _@return_ — a hash representation of the payload
      def as_json: (?::Hash[untyped, untyped] _opts) -> ::Hash[untyped, untyped]

      # _@param_ `key`
      #
      # _@param_ `val`
      def []=: ((String | Symbol) key, Object val) -> void

      # _@param_ `other`
      def ==: (SchemaClass::Base other) -> bool

      def inspect: () -> String

      # Initializes this class from a given value
      #
      # _@param_ `value`
      def self.initialize_from_value: (Object value) -> SchemaClass::Base

      def hash: () -> Integer
    end

    # Base Class for Enum Classes generated from Avro.
    class Enum < Deimos::SchemaClass::Base
      # _@param_ `other`
      def ==: (Deimos::SchemaClass::Enum other) -> bool

      def to_s: () -> String

      # _@param_ `value`
      def initialize: (String value) -> void

      # Returns all the valid symbols for this enum.
      def symbols: () -> ::Array[String]

      def as_json: (?::Hash[untyped, untyped] _opts) -> String

      def self.initialize_from_value: (Object value) -> SchemaClass::Enum

      attr_accessor value: String
    end

    # Base Class of Record Classes generated from Avro.
    class Record < Deimos::SchemaClass::Base
      # Converts the object attributes to a hash which can be used for Kafka
      #
      # _@return_ — the payload as a hash.
      def to_h: () -> ::Hash[untyped, untyped]

      # Merge a hash or an identical schema object with this one and return a new object.
      #
      # _@param_ `other_hash`
      def merge: ((::Hash[untyped, untyped] | SchemaClass::Base) other_hash) -> SchemaClass::Base

      # Element access method as if this Object were a hash
      #
      # _@param_ `key`
      #
      # _@return_ — The value of the attribute if exists, nil otherwise
      def []: ((String | Symbol) key) -> Object

      def with_indifferent_access: () -> SchemaClass::Record

      # Returns the schema name of the inheriting class.
      def schema: () -> String

      # Returns the namespace for the schema of the inheriting class.
      def namespace: () -> String

      # Returns the full schema name of the inheriting class.
      def full_schema: () -> String

      # Returns the schema validator from the schema backend
      def validator: () -> Deimos::SchemaBackends::Base

      # _@return_ — an array of field names in the schema.
      def schema_fields: () -> ::Array[String]

      def self.initialize_from_value: (Object value) -> SchemaClass::Record

      # Returns the value of attribute tombstone_key.
      attr_accessor tombstone_key: untyped
    end
  end

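A hedged sketch of working with a generated record class (Schemas::MySchema is hypothetical output of the schema class generator):

    record = Schemas::MySchema.new(test_id: 'id1', some_int: 3)
    record.to_h        # => { 'test_id' => 'id1', 'some_int' => 3 }
    record['test_id']  # hash-style element access, => 'id1'
    updated = record.merge('some_int' => 4)  # returns a new object
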
  # Module to handle phobos.yml as well as outputting the configuration to save
  # to Phobos itself.
  module PhobosConfig
    extend ActiveSupport::Concern

    def to_h: () -> ::Hash[untyped, untyped]

    def reset!: () -> void

    # Create a hash representing the config that Phobos expects.
    def phobos_config: () -> ::Hash[untyped, untyped]

    # _@param_ `key`
    def ssl_var_contents: (String key) -> String
  end

  # Represents a field in the schema.
  class SchemaField
    # _@param_ `name`
    #
    # _@param_ `type`
    #
    # _@param_ `enum_values`
    #
    # _@param_ `default`
    def initialize: (
      String name,
      Object _type,
      ?::Array[String] enum_values,
      ?Object default
    ) -> void

    attr_accessor name: String

    attr_accessor type: String

    attr_accessor enum_values: ::Array[String]

    attr_accessor default: Object
  end

  module SchemaBackends
    # Base class for encoding / decoding.
    class Base
      # _@param_ `schema`
      #
      # _@param_ `namespace`
      def initialize: (schema: (String | Symbol), ?namespace: String?) -> void

      # Encode a payload with a schema. Public method.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      #
      # _@param_ `topic`
      def encode: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String

      # Decode a payload with a schema. Public method.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      def decode: (String payload, ?schema: (String | Symbol)?) -> ::Hash[untyped, untyped]?

      # Given a hash, coerce its types to our schema. To be defined by subclass.
      #
      # _@param_ `payload`
      def coerce: (::Hash[untyped, untyped] payload) -> ::Hash[untyped, untyped]

      # Indicate a class which should act as a mocked version of this backend.
      # This class should perform all validations but not actually do any
      # encoding.
      # Note that the "mock" version (e.g. avro_validation) should return
      # its own symbol when this is called, since it may be called multiple
      # times depending on the order of RSpec helpers.
      def self.mock_backend: () -> Symbol

      # The content type to use when encoding / decoding requests over HTTP via ActionController.
      def self.content_type: () -> String

      # Converts your schema to String form for generated YARD docs.
      # To be defined by subclass.
      #
      # _@param_ `schema`
      #
      # _@return_ — A string representation of the Type
      def self.field_type: (Object schema) -> String

      # Encode a payload. To be defined by subclass.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      #
      # _@param_ `topic`
      def encode_payload: (::Hash[untyped, untyped] payload, schema: (String | Symbol), ?topic: String?) -> String

      # Decode a payload. To be defined by subclass.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]

      # Validate that a payload matches the schema. To be defined by subclass.
      #
      # _@param_ `payload`
      #
      # _@param_ `schema`
      def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void

      # List of field names belonging to the schema. To be defined by subclass.
      def schema_fields: () -> ::Array[SchemaField]

      # Given a value and a field definition (as defined by whatever the
      # underlying schema library is), coerce the given value to
      # the given field type.
      #
      # _@param_ `field`
      #
      # _@param_ `value`
      def coerce_field: (SchemaField field, Object value) -> Object

      # Given a field definition, return the SQL type that might be used in
      # ActiveRecord table creation - e.g. for Avro, a `long` type would
      # return `:bigint`. There are also special values that need to be returned:
      # `:array`, `:map` and `:record`, for types representing those structures.
      # `:enum` is also recognized.
      #
      # _@param_ `field`
      def sql_type: (SchemaField field) -> Symbol

      # Encode a message key. To be defined by subclass.
      #
      # _@param_ `key` — the value to use as the key.
      #
      # _@param_ `key_id` — the field name of the key.
      #
      # _@param_ `topic`
      def encode_key: ((String | ::Hash[untyped, untyped]) key, (String | Symbol) key_id, ?topic: String?) -> String

      # Decode a message key. To be defined by subclass.
      #
      # _@param_ `payload` — the message itself.
      #
      # _@param_ `key_id` — the field in the message to decode.
      def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String

      # Forcefully loads the schema into memory.
      #
      # _@return_ — The schema that is of use.
      def load_schema: () -> Object

      attr_accessor schema: String

      attr_accessor namespace: String

      attr_accessor key_schema: String
    end

    # Mock implementation of a schema backend that does no encoding or validation.
    class Mock < Deimos::SchemaBackends::Base
      def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]

      def encode_payload: (::Hash[untyped, untyped] payload, schema: (String | Symbol), ?topic: String?) -> String

      def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void

      def schema_fields: () -> ::Array[SchemaField]

      def coerce_field: (SchemaField field, Object value) -> Object

      def encode_key: ((String | Symbol) key_id, (String | ::Hash[untyped, untyped]) key) -> String

      def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String
    end

    # Encode / decode using Avro, either locally or via schema registry.
    class AvroBase < Deimos::SchemaBackends::Base
      def initialize: (schema: (String | Symbol), namespace: String) -> void

      def encode_key: ((String | Symbol) key_id, (String | ::Hash[untyped, untyped]) key, ?topic: String?) -> String

      def decode_key: (::Hash[untyped, untyped] payload, (String | Symbol) key_id) -> String

      # :nodoc:
      def sql_type: (SchemaField field) -> Symbol

      def coerce_field: (SchemaField field, Object value) -> Object

      def schema_fields: () -> ::Array[SchemaField]

      def validate: (::Hash[untyped, untyped] payload, schema: (String | Symbol)) -> void

      def load_schema: () -> Avro::Schema

      def self.mock_backend: () -> Symbol

      def self.content_type: () -> String

      # _@param_ `schema` — A named schema
      def self.schema_classname: (Avro::Schema::NamedSchema schema) -> String

      # Converts Avro::Schema::NamedSchema's to String form for generated YARD docs.
      # Recursively handles the typing for Arrays, Maps and Unions.
      #
      # _@param_ `avro_schema`
      #
      # _@return_ — A string representation of the Type of this SchemaField
      def self.field_type: (Avro::Schema::NamedSchema avro_schema) -> String

      # Returns the base type of this schema. Decodes Arrays, Maps and Unions
      #
      # _@param_ `schema`
      def self.schema_base_class: (Avro::Schema::NamedSchema schema) -> Avro::Schema::NamedSchema

      # Returns the value of attribute schema_store.
      attr_accessor schema_store: untyped
    end

    # Encode / decode using local Avro encoding.
    class AvroLocal < Deimos::SchemaBackends::AvroBase
      def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]

      def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
    end

    # Leave Ruby hashes as is but validate them against the schema.
    # Useful for unit tests.
    class AvroValidation < Deimos::SchemaBackends::AvroBase
      def decode_payload: (String payload, ?schema: (String | Symbol)?) -> ::Hash[untyped, untyped]

      def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
    end

    # Encode / decode using the Avro schema registry.
    class AvroSchemaRegistry < Deimos::SchemaBackends::AvroBase
      def decode_payload: (String payload, schema: (String | Symbol)) -> ::Hash[untyped, untyped]

      def encode_payload: (::Hash[untyped, untyped] payload, ?schema: (String | Symbol)?, ?topic: String?) -> String
    end
  end

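A hedged sketch of the round trip these backends implement, via the module-level helpers shown at the top of the file:

    encoded = Deimos.encode(schema: 'MySchema',
                            namespace: 'com.my-namespace',
                            payload: { 'test_id' => 'id1', 'some_int' => 3 })
    decoded = Deimos.decode(schema: 'MySchema',
                            namespace: 'com.my-namespace',
                            payload: encoded)
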
+ # To configure batch vs. message mode, change the delivery mode of your
1398
+ # Phobos listener.
1399
+ # Message-by-message -> use `delivery: message` or `delivery: batch`
1400
+ # Batch -> use `delivery: inline_batch`
1401
+ class ActiveRecordConsumer < Deimos::Consumer
1402
+ include Deimos::ActiveRecordConsume::MessageConsumption
1403
+ include Deimos::ActiveRecordConsume::BatchConsumption
1404
+
1405
+ # database.
1406
+ #
1407
+ # _@param_ `klass` — the class used to save to the
1408
+ def self.record_class: (singleton(ActiveRecord::Base) klass) -> void
1409
+
1410
+ # only the last message for each unique key in a batch is processed.
1411
+ #
1412
+ # _@param_ `val` — Turn pre-compaction of the batch on or off. If true,
1413
+ def self.compacted: (bool val) -> void
1414
+
1415
+ # Setup
1416
+ def initialize: () -> void
1417
+
1418
+ # Override this method (with `super`) if you want to add/change the default
1419
+ # attributes set to the new/existing record.
1420
+ #
1421
+ # _@param_ `payload`
1422
+ #
1423
+ # _@param_ `_key`
1424
+ def record_attributes: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ?String? _key) -> ::Hash[untyped, untyped]
1425
+
1426
+ # Override this message to conditionally save records
1427
+ #
1428
+ # _@param_ `_payload` — The kafka message
1429
+ #
1430
+ # _@return_ — if true, record is created/update.
1431
+ # If false, record processing is skipped but message offset is still committed.
1432
+ def process_message?: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload) -> bool
+
+ # Handle a batch of Kafka messages. Batches are split into "slices",
+ # which are groups of independent messages that can be processed together
+ # in a single database operation.
+ # If two messages in a batch have the same key, we cannot process them
+ # in the same operation as they would interfere with each other. Thus
+ # they are split into separate slices.
+ #
+ # _@param_ `payloads` — Decoded payloads.
+ #
+ # _@param_ `metadata` — Information about the batch, including keys.
+ def consume_batch: (::Array[(::Hash[untyped, untyped] | Deimos::SchemaClass::Record)] payloads, ::Hash[untyped, untyped] metadata) -> void
+
+ # Get the unique key for the ActiveRecord instance from the incoming key.
+ # Override this method (with super) to customize the set of attributes that
+ # uniquely identifies each record in the database.
+ #
+ # _@param_ `key` — The encoded key.
+ #
+ # _@return_ — The key attributes.
+ def record_key: (String key) -> ::Hash[untyped, untyped]
+
+ # Perform database operations for a batch of messages without compaction.
+ # All messages are split into slices containing only unique keys, and
+ # each slice is handled as its own batch.
+ #
+ # _@param_ `messages` — List of messages.
+ def uncompacted_update: (::Array[Message] messages) -> void
+
+ # Perform database operations for a group of messages.
+ # All messages with payloads are passed to upsert_records.
+ # All tombstone messages are passed to remove_records.
+ #
+ # _@param_ `messages` — List of messages.
+ def update_database: (::Array[Message] messages) -> void
+
+ # Upsert any non-deleted records.
+ #
+ # _@param_ `messages` — List of messages for a group of records to either be updated or inserted.
+ def upsert_records: (::Array[Message] messages) -> void
+
+ # Delete any records with a tombstone.
+ #
+ # _@param_ `messages` — List of messages for a group of deleted records.
+ def remove_records: (::Array[Message] messages) -> void
+
+ # Create an ActiveRecord relation that matches all of the passed
+ # records. Used for bulk deletion.
+ #
+ # _@param_ `records` — List of messages.
+ #
+ # _@return_ — Matching relation.
+ def deleted_query: (::Array[Message] records) -> ActiveRecord::Relation
+
+ # Get the set of attribute names that uniquely identify messages in the
+ # batch. Requires at least one record.
+ #
+ # _@param_ `records` — Non-empty list of messages.
+ #
+ # _@return_ — List of attribute names.
+ def key_columns: (::Array[Message] records) -> ::Array[String]
+
+ # Compact a batch of messages, taking only the last message for each
+ # unique key.
+ #
+ # _@param_ `batch` — Batch of messages.
+ #
+ # _@return_ — Compacted batch.
+ def compact_messages: (::Array[Message] batch) -> ::Array[Message]
+
+ # Find the record specified by the given payload and key.
+ # The default is to use the primary key column and the value of the first
+ # field in the key.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def fetch_record: (singleton(ActiveRecord::Base) klass, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> ActiveRecord::Base
+
+ # Assign a key to a new record.
+ #
+ # _@param_ `record`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def assign_key: (ActiveRecord::Base record, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> void
+
+ # _@param_ `payload` — Decoded payload.
+ #
+ # _@param_ `metadata` — Information about the message, including the key.
+ def consume: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # _@param_ `record`
+ def save_record: (ActiveRecord::Base record) -> void
+
+ # Destroy a record that received a null payload. Override if you need
+ # to do something other than a straight destroy (e.g. mark as archived).
+ #
+ # _@param_ `record`
+ def destroy_record: (ActiveRecord::Base record) -> void
+ end
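
Taken together, these hooks let a subclass persist messages with very little code. A minimal sketch, assuming a hypothetical Widget model with an archived_at column (all names here are illustrative, not part of the gem):

    class WidgetConsumer < Deimos::ActiveRecordConsumer
      record_class Widget  # hypothetical ActiveRecord model

      # Archive on tombstone messages instead of destroying the row.
      def destroy_record(record)
        record.update!(archived_at: Time.zone.now)
      end
    end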
+
+ # Class which automatically produces a record when given an ActiveRecord
+ # instance or a list of them. Just call `send_events` on a list of records
+ # and they will be auto-published. You can override `generate_payload`
+ # to make changes to the payload before it's published.
+ #
+ # You can also call this with a list of hashes representing attributes.
+ # This is common when using activerecord-import.
+ class ActiveRecordProducer < Deimos::Producer
+ MAX_BATCH_SIZE: Integer
+
+ # Indicate the class this producer is working on.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `refetch` — if true, and we are given a hash instead of a record object, refetch the record to pass into the `generate_payload` method.
+ def self.record_class: (Class klass, ?refetch: bool) -> void
+
+ # _@param_ `record`
+ #
+ # _@param_ `force_send`
+ def self.send_event: (ActiveRecord::Base record, ?force_send: bool) -> void
+
+ # _@param_ `records`
+ #
+ # _@param_ `force_send`
+ def self.send_events: (::Array[ActiveRecord::Base] records, ?force_send: bool) -> void
+
+ # Generate the payload, given a list of attributes or a record.
+ # Can be overridden or added to by subclasses.
+ #
+ # _@param_ `attributes`
+ #
+ # _@param_ `_record` — May be nil if refetch_record is not set.
+ def self.generate_payload: (::Hash[untyped, untyped] attributes, ActiveRecord::Base _record) -> ::Hash[untyped, untyped]
+
+ # Query to use when polling the database with the DbPoller. Add
+ # includes, joins, or wheres as necessary, or replace entirely.
+ #
+ # _@param_ `time_from` — the time to start the query from.
+ #
+ # _@param_ `time_to` — the time to end the query.
+ #
+ # _@param_ `column_name` — the column name to look for.
+ #
+ # _@param_ `min_id` — the minimum ID (i.e. all IDs must be greater than this value).
+ def self.poll_query: (
+ time_from: Time,
+ time_to: Time,
+ ?column_name: Symbol,
+ min_id: Numeric
+ ) -> ActiveRecord::Relation
+
+ # Post-process records after publishing.
+ #
+ # _@param_ `_records`
+ def self.post_process: (::Array[ActiveRecord::Base] _records) -> untyped
+ end
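
The producer side mirrors this. A sketch under the same hypothetical Widget model, assuming the configured schema has a display_name field; generate_payload enriches the schema-mapped attributes before encoding:

    class WidgetProducer < Deimos::ActiveRecordProducer
      record_class Widget  # hypothetical model

      # Derive an extra field from the record's attributes (assumes a "name" column).
      def self.generate_payload(attributes, record)
        super.merge('display_name' => attributes['name'].to_s.strip)
      end
    end

    # Publishes each record to the producer's configured topic.
    WidgetProducer.send_events(Widget.all.to_a)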
+
+ module Consume
+ # Helper methods used by batch consumers, i.e. those with "inline_batch"
+ # delivery. Payloads are decoded, then consumers are invoked with arrays
+ # of messages to be handled at once.
+ module BatchConsumption
+ include Phobos::BatchHandler
+ extend ActiveSupport::Concern
+
+ # _@param_ `batch`
+ #
+ # _@param_ `metadata`
+ def around_consume_batch: (::Array[String] batch, ::Hash[untyped, untyped] metadata) -> void
+
+ # Consume a batch of incoming messages.
+ #
+ # _@param_ `_payloads`
+ #
+ # _@param_ `_metadata`
+ def consume_batch: (::Array[Phobos::BatchMessage] _payloads, ::Hash[untyped, untyped] _metadata) -> void
+ end
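
These are the generic batch hooks behind consumers configured with inline_batch delivery. A sketch (handler name and logging are illustrative; metadata[:keys] is assumed to hold the decoded keys, parallel to payloads):

    class AuditConsumer < Deimos::Consumer
      # Invoked once per batch with already-decoded payloads.
      def consume_batch(payloads, metadata)
        payloads.each_with_index do |payload, i|
          Rails.logger.info("key=#{metadata[:keys][i]} payload=#{payload.inspect}")
        end
      end
    end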
+
+ # Methods used by message-by-message (non-batch) consumers. These consumers
+ # are invoked for every individual message.
+ module MessageConsumption
+ include Phobos::Handler
+ extend ActiveSupport::Concern
+
+ # _@param_ `payload`
+ #
+ # _@param_ `metadata`
+ def around_consume: (String payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # Consume an incoming message.
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `_metadata`
+ def consume: (String _payload, ::Hash[untyped, untyped] _metadata) -> void
+ end
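
And the per-message counterpart, again as a hedged sketch with hypothetical names:

    class GreetingConsumer < Deimos::Consumer
      # Invoked once per message with the decoded payload.
      def consume(payload, metadata)
        Rails.logger.info("key=#{metadata[:key]} payload=#{payload.inspect}")
      end
    end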
+ end
+
+ module Generators
+ # Generate the database poller migration.
+ class DbPollerGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def migration_version: () -> String
+
+ def db_migrate_path: () -> String
+
+ # Main method to create all the necessary files.
+ def generate: () -> void
+ end
+
+ # Generate the database backend migration.
+ class DbBackendGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def migration_version: () -> String
+
+ def db_migrate_path: () -> String
+
+ # Main method to create all the necessary files.
+ def generate: () -> void
+ end
+
+ # Generator for schema classes used for the IDE and consumer/producer interfaces.
+ class SchemaClassGenerator < Rails::Generators::Base
+ SPECIAL_TYPES: ::Array[Symbol]
+ INITIALIZE_WHITESPACE: String
+ IGNORE_DEFAULTS: ::Array[String]
+ SCHEMA_CLASS_FILE: String
+ SCHEMA_RECORD_PATH: String
+ SCHEMA_ENUM_PATH: String
+
+ def generate: () -> void
+ end
+
+ # Generator for the ActiveRecord model and migration.
+ class ActiveRecordGenerator < Rails::Generators::Base
+ include Rails::Generators::Migration
+ include ActiveRecord::Generators::Migration
+ extend ActiveRecord::Generators::Migration
+
+ def generate: () -> void
+ end
+ end
+
+ module ActiveRecordConsume
+ # Helper class for breaking down batches into independent groups for
+ # processing.
+ class BatchSlicer
+ # Split the batch into a series of independent slices. Each slice contains
+ # messages that can be processed in any order (i.e. they have distinct
+ # keys). Messages with the same key will be separated into different
+ # slices that maintain the correct order.
+ # E.g. given messages A1, A2, B1, C1, C2, C3, they will be sliced as:
+ # [[A1, B1, C1], [A2, C2], [C3]]
+ #
+ # _@param_ `messages`
+ def self.slice: (::Array[Message] messages) -> ::Array[::Array[Message]]
+ end
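
The A1/B1/C1 example in the comment can be traced through slice directly. A sketch, where message_with_key is a hypothetical helper that builds a Deimos::Message with the given key:

    batch = %w(A A B C C C).map { |key| message_with_key(key) }
    Deimos::ActiveRecordConsume::BatchSlicer.slice(batch)
    # => three slices with keys [A, B, C], then [A, C], then [C];
    #    each key appears at most once per slice, and per-key order is preserved.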
+
+ # Methods for consuming batches of messages and saving them to the database
+ # in bulk ActiveRecord operations.
+ module BatchConsumption
+ # Handle a batch of Kafka messages. Batches are split into "slices",
+ # which are groups of independent messages that can be processed together
+ # in a single database operation.
+ # If two messages in a batch have the same key, we cannot process them
+ # in the same operation as they would interfere with each other. Thus
+ # they are split into separate slices.
+ #
+ # _@param_ `payloads` — Decoded payloads.
+ #
+ # _@param_ `metadata` — Information about the batch, including keys.
+ def consume_batch: (::Array[(::Hash[untyped, untyped] | Deimos::SchemaClass::Record)] payloads, ::Hash[untyped, untyped] metadata) -> void
+
+ # Get the unique key for the ActiveRecord instance from the incoming key.
+ # Override this method (with super) to customize the set of attributes that
+ # uniquely identifies each record in the database.
+ #
+ # _@param_ `key` — The encoded key.
+ #
+ # _@return_ — The key attributes.
+ def record_key: (String key) -> ::Hash[untyped, untyped]
+
+ # Perform database operations for a batch of messages without compaction.
+ # All messages are split into slices containing only unique keys, and
+ # each slice is handled as its own batch.
+ #
+ # _@param_ `messages` — List of messages.
+ def uncompacted_update: (::Array[Message] messages) -> void
+
+ # Perform database operations for a group of messages.
+ # All messages with payloads are passed to upsert_records.
+ # All tombstone messages are passed to remove_records.
+ #
+ # _@param_ `messages` — List of messages.
+ def update_database: (::Array[Message] messages) -> void
+
+ # Upsert any non-deleted records.
+ #
+ # _@param_ `messages` — List of messages for a group of records to either be updated or inserted.
+ def upsert_records: (::Array[Message] messages) -> void
+
+ # Delete any records with a tombstone.
+ #
+ # _@param_ `messages` — List of messages for a group of deleted records.
+ def remove_records: (::Array[Message] messages) -> void
+
+ # Create an ActiveRecord relation that matches all of the passed
+ # records. Used for bulk deletion.
+ #
+ # _@param_ `records` — List of messages.
+ #
+ # _@return_ — Matching relation.
+ def deleted_query: (::Array[Message] records) -> ActiveRecord::Relation
+
+ # Get the set of attribute names that uniquely identify messages in the
+ # batch. Requires at least one record.
+ #
+ # _@param_ `records` — Non-empty list of messages.
+ #
+ # _@return_ — List of attribute names.
+ def key_columns: (::Array[Message] records) -> ::Array[String]
+
+ # Compact a batch of messages, taking only the last message for each
+ # unique key.
+ #
+ # _@param_ `batch` — Batch of messages.
+ #
+ # _@return_ — Compacted batch.
+ def compact_messages: (::Array[Message] batch) -> ::Array[Message]
+ end
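
The usual batch customization point is record_key. A sketch assuming the decoded key is a hash with string field names and rows are identified by a compound key:

    class OrderConsumer < Deimos::ActiveRecordConsumer
      record_class Order  # hypothetical model

      # Identify rows by (store_id, order_id) rather than the full decoded key.
      def record_key(key)
        super.slice('store_id', 'order_id')
      end
    end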
+
+ # Methods for consuming individual messages and saving them to the database
+ # as ActiveRecord instances.
+ module MessageConsumption
+ # Find the record specified by the given payload and key.
+ # The default is to use the primary key column and the value of the first
+ # field in the key.
+ #
+ # _@param_ `klass`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def fetch_record: (singleton(ActiveRecord::Base) klass, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> ActiveRecord::Base
+
+ # Assign a key to a new record.
+ #
+ # _@param_ `record`
+ #
+ # _@param_ `_payload`
+ #
+ # _@param_ `key`
+ def assign_key: (ActiveRecord::Base record, (::Hash[untyped, untyped] | Deimos::SchemaClass::Record) _payload, Object key) -> void
+
+ # _@param_ `payload` — Decoded payload.
+ #
+ # _@param_ `metadata` — Information about the message, including the key.
+ def consume: ((::Hash[untyped, untyped] | Deimos::SchemaClass::Record) payload, ::Hash[untyped, untyped] metadata) -> void
+
+ # _@param_ `record`
+ def save_record: (ActiveRecord::Base record) -> void
+
+ # Destroy a record that received a null payload. Override if you need
+ # to do something other than a straight destroy (e.g. mark as archived).
+ #
+ # _@param_ `record`
+ def destroy_record: (ActiveRecord::Base record) -> void
+ end
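
When rows are keyed by something other than the primary key, fetch_record is the override point. A sketch with a hypothetical external_id column:

    class WidgetConsumer < Deimos::ActiveRecordConsumer
      record_class Widget  # hypothetical model

      # Look records up by external_id instead of the primary key.
      def fetch_record(klass, payload, key)
        klass.unscoped.find_by(external_id: key)
      end
    end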
+
+ # Convert a message with a schema to an ActiveRecord model.
+ class SchemaModelConverter
+ # Create a new converter.
+ #
+ # _@param_ `decoder` — Incoming message schema.
+ #
+ # _@param_ `klass` — Model to map to.
+ def initialize: (SchemaBackends::Base decoder, ActiveRecord::Base klass) -> void
+
+ # Convert a message from a decoded hash to a set of ActiveRecord
+ # attributes. Attributes that don't exist in the model will be ignored.
+ #
+ # _@param_ `payload` — Decoded message payload.
+ #
+ # _@return_ — Model attributes.
+ def convert: (::Hash[untyped, untyped] payload) -> ::Hash[untyped, untyped]
+ end
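
A usage sketch for the converter; Deimos.schema_backend is the gem's decoder factory, while the schema, namespace, and model names are hypothetical:

    decoder = Deimos.schema_backend(schema: 'Widget', namespace: 'com.my-namespace')
    converter = Deimos::ActiveRecordConsume::SchemaModelConverter.new(decoder, Widget)
    attrs = converter.convert(decoded_payload)  # fields missing from Widget are dropped
    Widget.new(attrs)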
+ end
+
+ # Class to coerce values in a payload to match a schema.
+ class AvroSchemaCoercer
+ # _@param_ `schema`
+ def initialize: (Avro::Schema schema) -> void
+
+ # Coerce a union value in a payload to match one of the union's member schemas.
+ #
+ # _@param_ `_type`
+ #
+ # _@param_ `val`
+ def coerce_union: (Avro::Schema::UnionSchema _type, Object val) -> Object
+
+ # Coerce sub-records in a payload to match the schema.
+ #
+ # _@param_ `_type`
+ #
+ # _@param_ `val`
+ def coerce_record: (Avro::Schema::RecordSchema _type, Object val) -> Object
+
+ # Coerce values in a payload to match the schema.
+ #
+ # _@param_ `_type`
+ #
+ # _@param_ `val`
+ def coerce_type: (Avro::Schema _type, Object val) -> Object
+ end
+ end
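
Finally, a sketch of the coercer in isolation. The schema file and payload are hypothetical; Avro::Schema.parse comes from the avro gem:

    schema = Avro::Schema.parse(File.read('widget.avsc'))  # hypothetical schema file
    coercer = Deimos::AvroSchemaCoercer.new(schema)
    coercer.coerce_type(schema, { 'id' => '123', 'quantity' => '4' })
    # => the hash with values coerced toward the schema's field types
    #    (e.g. an int field would receive 4 rather than "4")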