pigeon-rb 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +343 -0
- data/lib/pigeon/active_job_integration.rb +32 -0
- data/lib/pigeon/api.rb +200 -0
- data/lib/pigeon/configuration.rb +161 -0
- data/lib/pigeon/core.rb +104 -0
- data/lib/pigeon/encryption.rb +213 -0
- data/lib/pigeon/generators/hanami/migration_generator.rb +89 -0
- data/lib/pigeon/generators/rails/install_generator.rb +32 -0
- data/lib/pigeon/generators/rails/migration_generator.rb +20 -0
- data/lib/pigeon/generators/rails/templates/create_outbox_messages.rb.erb +34 -0
- data/lib/pigeon/generators/rails/templates/initializer.rb.erb +88 -0
- data/lib/pigeon/hanami_integration.rb +78 -0
- data/lib/pigeon/health_check/kafka.rb +37 -0
- data/lib/pigeon/health_check/processor.rb +70 -0
- data/lib/pigeon/health_check/queue.rb +69 -0
- data/lib/pigeon/health_check.rb +63 -0
- data/lib/pigeon/logging/structured_logger.rb +181 -0
- data/lib/pigeon/metrics/collector.rb +200 -0
- data/lib/pigeon/mock_producer.rb +18 -0
- data/lib/pigeon/models/adapters/active_record_adapter.rb +133 -0
- data/lib/pigeon/models/adapters/rom_adapter.rb +150 -0
- data/lib/pigeon/models/outbox_message.rb +182 -0
- data/lib/pigeon/monitoring.rb +113 -0
- data/lib/pigeon/outbox.rb +61 -0
- data/lib/pigeon/processor/background_processor.rb +109 -0
- data/lib/pigeon/processor.rb +798 -0
- data/lib/pigeon/publisher.rb +524 -0
- data/lib/pigeon/railtie.rb +29 -0
- data/lib/pigeon/schema.rb +35 -0
- data/lib/pigeon/security.rb +30 -0
- data/lib/pigeon/serializer.rb +77 -0
- data/lib/pigeon/tasks/pigeon.rake +64 -0
- data/lib/pigeon/trace_api.rb +37 -0
- data/lib/pigeon/tracing/core.rb +119 -0
- data/lib/pigeon/tracing/messaging.rb +144 -0
- data/lib/pigeon/tracing.rb +107 -0
- data/lib/pigeon/version.rb +5 -0
- data/lib/pigeon.rb +52 -0
- metadata +127 -0
data/lib/pigeon/publisher.rb
@@ -0,0 +1,524 @@
# frozen_string_literal: true

require "securerandom"
require "json"
require_relative "metrics/collector"

module Pigeon
  # Publisher class for storing messages in the outbox
  class Publisher
    # Mock outbox message class for testing
    class MockOutboxMessage < Pigeon::Models::OutboxMessage
      def save?
        true
      end

      def self.create(attributes = {})
        new(attributes)
      end
    end

    # Initialize a new publisher
    def initialize
      @metrics = Pigeon.config.metrics_collector || Pigeon::Metrics::Collector.new
    end

    # Publish a message to Kafka via the outbox pattern
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Message payload
    # @param key [String, nil] Optional message key
    # @param headers [Hash, nil] Optional message headers
    # @param sync [Boolean] Whether to attempt immediate publishing
    # @param partition [Integer, nil] Optional specific partition
    # @param correlation_id [String, nil] Optional correlation ID for tracing
    # @param max_retries [Integer, nil] Optional override for max retries
    # @param schema_name [String, Symbol, nil] Optional schema name for validation
    # @param schema [Hash, String, nil] Optional schema for validation
    # @param encrypt [Boolean] Whether to encrypt the payload
    # @param encryption_key [String, nil] Optional encryption key
    # @param sensitive_fields [Array<String, Symbol>, nil] Optional sensitive fields to mask in logs
    # @return [Pigeon::Models::OutboxMessage] The created outbox message record
    def publish(topic:, payload:, **options)
      Pigeon::Tracing.trace_publish(
        topic: topic,
        payload: payload,
        key: options[:key],
        headers: options[:headers],
        correlation_id: options[:correlation_id]
      ) do |span, _context, trace_headers|
        start_time = Time.now
        correlation_id = prepare_publish_context(topic, payload, options, span)
        serialized_payload = prepare_payload(payload, options)
        merged_headers = merge_trace_headers(options[:headers], trace_headers)

        message_attributes = build_message_attributes(
          topic: topic,
          key: options[:key],
          headers: merged_headers,
          partition: options[:partition],
          payload: serialized_payload,
          correlation_id: correlation_id,
          max_retries: options[:max_retries]
        )

        outbox_message = create_outbox_message_in_transaction(message_attributes)
        handle_publish_result(outbox_message, serialized_payload, topic, start_time, span, options)

        outbox_message
      end
    end

    # Publish a message directly to Kafka without using the outbox pattern
    # This is useful for non-critical messages where at-least-once delivery is not required
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Message payload
    # @param key [String, nil] Optional message key
    # @param headers [Hash, nil] Optional message headers
    # @param partition [Integer, nil] Optional specific partition
    # @param schema_name [String, Symbol, nil] Optional schema name for validation
    # @param schema [Hash, String, nil] Optional schema for validation
    # @param encrypt [Boolean] Whether to encrypt the payload
    # @param encryption_key [String, nil] Optional encryption key
    # @param sensitive_fields [Array<String, Symbol>, nil] Optional sensitive fields to mask in logs
    # @param correlation_id [String, nil] Optional correlation ID for tracing
    # @return [Boolean] Whether the publish was successful
    def publish_direct(topic:, payload:, **options)
      Pigeon::Tracing.trace_publish(
        topic: topic,
        payload: payload,
        key: options[:key],
        headers: options[:headers],
        correlation_id: options[:correlation_id]
      ) do |span, _context, trace_headers|
        execute_direct_publish(topic, payload, options, span, trace_headers)
      end
    rescue Pigeon::Serializer::ValidationError, ArgumentError => e
      handle_validation_error(e, topic)
    rescue StandardError => e
      handle_direct_publish_error(e, topic)
    end

    private

    # Prepare publish context
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Message payload
    # @param options [Hash] Publishing options
    # @param span [OpenTelemetry::Trace::Span] Tracing span
    # @return [String] Correlation ID
    def prepare_publish_context(topic, payload, options, span)
      validate_publish_params(topic, payload)
      correlation_id = options[:correlation_id] || SecureRandom.uuid
      masked_payload = mask_sensitive_fields(payload, options[:sensitive_fields])

      span&.add_attributes("messaging.correlation_id" => correlation_id)
      log_publish_attempt(topic, masked_payload, correlation_id)
      validate_payload?(payload, options[:schema_name], options[:schema])

      correlation_id
    end

    # Prepare payload for publishing
    # @param payload [Hash, String] Message payload
    # @param options [Hash] Publishing options
    # @return [String] Prepared payload
    def prepare_payload(payload, options)
      should_encrypt = determine_encryption_setting(options[:encrypt])
      serialized_payload = serialize_payload(payload)
      serialized_payload = encrypt_payload(serialized_payload, options[:encryption_key]) if should_encrypt
      serialized_payload
    end

    # Merge trace headers with provided headers
    # @param headers [Hash, nil] Provided headers
    # @param trace_headers [Hash, nil] Trace headers
    # @return [Hash] Merged headers
    def merge_trace_headers(headers, trace_headers)
      merged_headers = headers ? headers.dup : {}
      merged_headers.merge!(trace_headers) if trace_headers
      merged_headers
    end

    # Handle publish result
    # @param outbox_message [Pigeon::Models::OutboxMessage, nil] Created message
    # @param serialized_payload [String] Serialized payload
    # @param topic [String] Kafka topic
    # @param start_time [Time] Start time
    # @param span [OpenTelemetry::Trace::Span] Tracing span
    # @param options [Hash] Publishing options
    # @return [void]
    def handle_publish_result(outbox_message, serialized_payload, topic, start_time, span, options)
      if outbox_message
        record_successful_publish_metrics(serialized_payload, topic, start_time)
        span&.add_attributes("messaging.message_id" => outbox_message.id.to_s)
        handle_sync_publishing(outbox_message, options[:sync], topic, options[:correlation_id])
      else
        record_failed_publish_metrics(topic, span)
      end
    end

    # Record successful publish metrics
    # @param serialized_payload [String] Serialized payload
    # @param topic [String] Kafka topic
    # @param start_time [Time] Start time
    # @return [void]
    def record_successful_publish_metrics(serialized_payload, topic, start_time)
      @metrics.increment(:messages_published_to_outbox_total, 1, { topic: topic })

      if serialized_payload.respond_to?(:bytesize)
        payload_size = serialized_payload.bytesize
        @metrics.histogram(:message_size_bytes, payload_size, { topic: topic })
      end

      publish_time = Time.now - start_time
      @metrics.histogram(:message_publish_duration_seconds, publish_time, { topic: topic })
    end

    # Record failed publish metrics
    # @param topic [String] Kafka topic
    # @param span [OpenTelemetry::Trace::Span] Tracing span
    # @return [void]
    def record_failed_publish_metrics(topic, span)
      @metrics.increment(:messages_publish_failures_total, 1, { topic: topic })
      span&.record_exception(StandardError.new("Failed to create outbox message"))
      span&.status = OpenTelemetry::Trace::Status.error("Failed to create outbox message") if span
    end

    # Execute direct publish
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Message payload
    # @param options [Hash] Publishing options
    # @param span [OpenTelemetry::Trace::Span] Tracing span
    # @param trace_headers [Hash] Trace headers
    # @return [Boolean] Success status
    def execute_direct_publish(topic, payload, options, span, trace_headers)
      start_time = Time.now
      prepare_publish_context(topic, payload, options, span)
      serialized_payload = prepare_payload(payload, options)
      merged_headers = merge_trace_headers(options[:headers], trace_headers)

      message_options = build_direct_message_options(
        topic: topic,
        key: options[:key],
        headers: merged_headers,
        partition: options[:partition]
      )

      record_direct_message_size(serialized_payload, topic)
      Pigeon.karafka_producer.produce_sync(serialized_payload, **message_options)
      record_direct_publish_success(topic, start_time)

      true
    end

    # Record direct message size
    # @param serialized_payload [String] Serialized payload
    # @param topic [String] Kafka topic
    # @return [void]
    def record_direct_message_size(serialized_payload, topic)
      return unless serialized_payload.respond_to?(:bytesize)

      payload_size = serialized_payload.bytesize
      @metrics.histogram(:direct_message_size_bytes, payload_size, { topic: topic })
    end

    # Record direct publish success
    # @param topic [String] Kafka topic
    # @param start_time [Time] Start time
    # @return [void]
    def record_direct_publish_success(topic, start_time)
      @metrics.increment(:messages_published_direct_total, 1, { topic: topic })
      publish_time = Time.now - start_time
      @metrics.histogram(:message_publish_direct_duration_seconds, publish_time, { topic: topic })
    end

    # Handle validation error
    # @param error [StandardError] Validation error
    # @param topic [String] Kafka topic
    # @return [void]
    def handle_validation_error(error, topic)
      @metrics.increment(:messages_validation_errors_total, 1, {
        topic: topic,
        error: error.class.name
      })
      raise error
    end

    # Handle direct publish error
    # @param error [StandardError] Publish error
    # @param topic [String] Kafka topic
    # @return [Boolean] Always false
    def handle_direct_publish_error(error, topic)
      @metrics.increment(:messages_published_direct_error_total, 1, {
        topic: topic,
        error: error.class.name
      })

      Pigeon.config.logger.error("Failed to publish message directly: #{error.message}")
      false
    end

    def validate_publish_params(topic, payload)
      raise ArgumentError, "Topic is required" if topic.nil? || topic.empty?
      raise ArgumentError, "Payload is required" if payload.nil?
    end

    def determine_encryption_setting(encrypt)
      encrypt.nil? ? Pigeon.config.encrypt_payload : encrypt
    end

    def build_message_attributes(attributes)
      {
        topic: attributes[:topic],
        key: attributes[:key],
        headers: attributes[:headers] || {},
        partition: attributes[:partition],
        payload: attributes[:payload],
        status: "pending",
        correlation_id: attributes[:correlation_id],
        max_retries: attributes[:max_retries] || Pigeon.config.max_retries
      }
    end

    def build_direct_message_options(topic:, key:, headers:, partition:)
      options = { topic: topic }
      options[:key] = key if key
      options[:headers] = headers if headers && !headers.empty?
      options[:partition] = partition if partition
      options
    end

    def handle_sync_publishing(outbox_message, sync, topic, correlation_id)
      return unless sync

      begin
        start_time = Time.now
        success = publish_to_kafka(outbox_message)

        if success
          outbox_message.mark_as_published
          log_sync_success(topic, correlation_id)

          # Record successful sync publication
          @metrics.increment(:messages_published_sync_succeeded_total, 1, { topic: topic })

          # Record sync publication latency
          publish_time = Time.now - start_time
          @metrics.histogram(:message_publish_sync_duration_seconds, publish_time, { topic: topic })
        else
          log_sync_failure(topic, correlation_id)

          # Record failed sync publication
          @metrics.increment(:messages_published_sync_failed_total, 1, { topic: topic })
        end
      rescue StandardError => e
        log_sync_error(e, correlation_id)

        # Record failed sync publication due to error
        @metrics.increment(:messages_published_sync_error_total, 1, {
          topic: topic,
          error: e.class.name
        })

        # Message remains in the outbox for async processing
      end
    end

    def log_sync_success(topic, correlation_id)
      logger = Pigeon.config.logger

      if logger.respond_to?(:with_context)
        # Create a logger with correlation ID context
        logger = logger.with_context(
          correlation_id: correlation_id,
          topic: topic
        )

        logger.info("Message published synchronously", {
          action: "publish_sync",
          status: "success"
        })
      else
        # Fallback for standard loggers
        logger.info("Message published synchronously to topic: #{topic} (correlation_id: #{correlation_id})")
      end
    end

    def log_sync_failure(topic, correlation_id)
      logger = Pigeon.config.logger

      if logger.respond_to?(:with_context)
        # Create a logger with correlation ID context
        logger = logger.with_context(
          correlation_id: correlation_id,
          topic: topic
        )

        logger.error("Failed to publish message synchronously", {
          action: "publish_sync",
          status: "failure"
        })
      else
        # Fallback for standard loggers
        logger.error("Failed to publish message synchronously to topic: #{topic} (correlation_id: #{correlation_id})")
      end
    end

    def log_sync_error(error, correlation_id)
      logger = Pigeon.config.logger

      if logger.respond_to?(:with_context)
        # Create a logger with correlation ID context
        logger = logger.with_context(
          correlation_id: correlation_id
        )

        logger.error("Failed to publish message synchronously",
                     {
                       action: "publish_sync",
                       status: "error",
                       error_class: error.class.name,
                       error_message: error.message
                     },
                     error)
      else
        # Fallback for standard loggers
        logger.error("Failed to publish message synchronously: #{error.message} (correlation_id: #{correlation_id})")
      end
    end

    # Create outbox message in a transaction
    # @param attributes [Hash] Message attributes
    # @return [Pigeon::Models::OutboxMessage, nil] The created outbox message record
    def create_outbox_message_in_transaction(attributes)
      if defined?(ActiveRecord) && ActiveRecord::Base.respond_to?(:transaction)
        # Use ActiveRecord transaction
        ActiveRecord::Base.transaction do
          Pigeon.create_outbox_message(attributes)
        end
      elsif defined?(ROM) && ROM.env.respond_to?(:transaction)
        # Use ROM transaction
        ROM.env.transaction do
          Pigeon.create_outbox_message(attributes)
        end
      elsif defined?(RSpec)
        # In test environment, use the mock implementation
        MockOutboxMessage.create(attributes)
      else
        # No transaction support, just create the message
        Pigeon.create_outbox_message(attributes)
      end
    rescue StandardError => e
      Pigeon.config.logger.error("Failed to create outbox message: #{e.message}")
      nil
    end

    # Publish a message to Kafka
    # @param message [Pigeon::Models::OutboxMessage] Outbox message to publish
    # @return [Boolean] Whether the publish was successful
    def publish_to_kafka(message)
      # Prepare message options
      message_options = { topic: message.topic }
      message_options[:key] = message.key if message.key
      message_options[:headers] = message.headers if message.headers && !message.headers.empty?
      message_options[:partition] = message.partition if message.partition

      # Use Karafka producer to send the message
      Pigeon.karafka_producer.produce_sync(message.payload, **message_options)
      true
    rescue StandardError => e
      if message.respond_to?(:error_message=)
        message.error_message = "#{e.class}: #{e.message}"
        message.save if message.respond_to?(:save)
      end
      raise e # Re-raise the error to be caught by the caller
    end

    # Validate payload against schema
    # @param payload [Hash, String] Payload to validate
    # @param schema_name [String, Symbol, nil] Schema name for validation
    # @param schema [Hash, String, nil] Schema for validation
    # @return [Boolean] Whether the payload is valid
    # @raise [Pigeon::Serializer::ValidationError] If the payload is invalid
    def validate_payload?(payload, schema_name, schema)
      return true unless Pigeon.config.schema_validation_enabled
      return true if schema_name.nil? && schema.nil?

      if schema_name
        Pigeon::Serializer.validate_with_registered_schema(payload, schema_name)
      elsif schema
        Pigeon::Serializer.validate(payload, schema)
      end

      true
    end

    # Serialize payload for publishing
    # @param payload [Hash, String] Payload to serialize
    # @return [String] Serialized payload
    def serialize_payload(payload)
      Pigeon::Serializer.serialize(payload)
    end

    # Encrypt payload for publishing
    # @param payload [String] Serialized payload
    # @param encryption_key [String, nil] Optional encryption key
    # @return [Hash, String] Encrypted payload or original payload if encryption is disabled
    def encrypt_payload(payload, encryption_key = nil)
      Pigeon::Encryption.encrypt(payload, encryption_key)
    end

    # Mask sensitive fields in the payload for logging
    # @param payload [Hash, String] Payload to mask
    # @param sensitive_fields [Array<String, Symbol>, nil] Optional sensitive fields to mask
    # @return [Hash, String] Masked payload
    def mask_sensitive_fields(payload, sensitive_fields = nil)
      # Use provided sensitive fields or get them from the configuration
      fields_to_mask = sensitive_fields || Pigeon.config.sensitive_fields
      return payload if fields_to_mask.nil? || fields_to_mask.empty?

      Pigeon::Encryption.mask_payload(payload, fields_to_mask)
    end

    # Log the publish attempt with masked payload
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Masked payload
    # @param correlation_id [String] Correlation ID
    # @return [void]
    def log_publish_attempt(topic, payload, correlation_id)
      logger = Pigeon.config.logger

      # Create a logger with correlation ID context if supported
      if logger.respond_to?(:with_context)
        logger = logger.with_context(
          correlation_id: correlation_id,
          topic: topic
        )

        # Log the basic information with context
        logger.info("Publishing message", {
          action: "publish",
          sync: false
        })

        # Log the payload if debug level is enabled
        if logger.debug?
          payload_str = payload.is_a?(String) ? payload : payload.to_json
          logger.debug("Message payload", {
            payload: payload_str
          })
        end
      else
        # Fallback for standard loggers
        logger.info("Publishing message to topic: #{topic} (correlation_id: #{correlation_id})")

        # Log the payload if debug level is enabled
        if logger.respond_to?(:debug?) && logger.debug?
          payload_str = payload.is_a?(String) ? payload : payload.to_json
          logger.debug("Message payload: #{payload_str}")
        end
      end
    end
  end
end
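The Publisher above implements the transactional-outbox write path: `publish` persists the message inside whatever transaction mechanism is available and can optionally attempt an immediate send, while `publish_direct` trades the outbox guarantee for a straight synchronous produce. A minimal usage sketch based on the documented keyword arguments; the topic names, payload contents, and field names are illustrative, not part of the gem:

# Sketch only -- topics, payload, and field names are made up.
publisher = Pigeon::Publisher.new

# Outbox publish: the row is written in a transaction; with sync: true an
# immediate Kafka send is also attempted, and on failure the row stays
# "pending" for the background processor to retry.
message = publisher.publish(
  topic: "orders.created",
  payload: { order_id: 42, total_cents: 1_999 },
  key: "42",
  sync: true,
  sensitive_fields: [:total_cents]
)

# Fire-and-forget publish that skips the outbox; returns true or false.
publisher.publish_direct(topic: "orders.audit", payload: { order_id: 42 })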
data/lib/pigeon/railtie.rb
@@ -0,0 +1,29 @@
# frozen_string_literal: true

require "rails"
require "pigeon"

module Pigeon
  # Rails integration for Pigeon
  class Railtie < Rails::Railtie
    initializer "pigeon.configure" do |app|
      # Set up default logger to use Rails logger
      Pigeon.configure do |config|
        config.logger = Rails.logger
      end

      app.config.paths["lib/generators"] ||= []
      app.config.paths["lib/generators"] << File.expand_path("generators/rails", __dir__)
    end

    # Add rake tasks
    rake_tasks do
      load "pigeon/tasks/pigeon.rake"
    end

    # Add ActiveJob integration
    config.after_initialize do
      require "pigeon/active_job_integration" if defined?(ActiveJob)
    end
  end
end
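The Railtie only sets the logger by default, but `publisher.rb` also reads `metrics_collector`, `encrypt_payload`, `max_retries`, `schema_validation_enabled`, and `sensitive_fields` off `Pigeon.config`. A hedged initializer sketch assuming each of those readers has a matching writer; the generated `initializer.rb.erb` template is the authoritative reference for the option names:

# config/initializers/pigeon.rb -- sketch; verify option names against the
# generator template shipped with the gem.
Pigeon.configure do |config|
  config.logger                    = Rails.logger
  config.max_retries               = 5
  config.encrypt_payload           = false
  config.schema_validation_enabled = true
  config.sensitive_fields          = %i[password credit_card]
end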
data/lib/pigeon/schema.rb
@@ -0,0 +1,35 @@
# frozen_string_literal: true

module Pigeon
  # Schema-related functionality for Pigeon
  module Schema
    # Register a JSON schema for validation
    # @param name [String, Symbol] Schema name
    # @param schema [Hash, String] JSON schema
    # @return [void]
    def self.register_schema(name, schema)
      Configuration.register_schema(name, schema)
    end

    # Get a registered schema
    # @param name [String, Symbol] Schema name
    # @return [Hash, String, nil] JSON schema or nil if not found
    def self.schema(name)
      Configuration.schema(name)
    end

    # Register a sensitive field for masking
    # @param field [String, Symbol] Field name
    # @return [void]
    def self.register_sensitive_field(field)
      Configuration.register_sensitive_field(field)
    end

    # Register multiple sensitive fields for masking
    # @param fields [Array<String, Symbol>] Field names
    # @return [void]
    def self.register_sensitive_fields(fields)
      Configuration.register_sensitive_fields(fields)
    end
  end
end
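`Pigeon::Schema` is a thin facade over `Configuration`; a schema registered here is what `Serializer.validate_with_registered_schema` later looks up via `Pigeon.config.schemas[name.to_sym]`. A sketch of the end-to-end flow, with an illustrative schema body:

# Register once at boot; the name is symbolized on lookup.
Pigeon::Schema.register_schema(:order_created, {
  "type" => "object",
  "required" => ["order_id"],
  "properties" => { "order_id" => { "type" => "integer" } }
})

# Publishing with schema_name: validates the payload first, provided
# Pigeon.config.schema_validation_enabled is true.
Pigeon::Publisher.new.publish(
  topic: "orders.created",
  payload: { order_id: 42 },
  schema_name: :order_created
)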
data/lib/pigeon/security.rb
@@ -0,0 +1,30 @@
# frozen_string_literal: true

module Pigeon
  # Security-related functionality for Pigeon
  module Security
    # Encrypt a payload
    # @param payload [String] Payload to encrypt
    # @param encryption_key [String, nil] Optional encryption key
    # @return [Hash] Encrypted payload with metadata
    def self.encrypt(payload, encryption_key = nil)
      Encryption.encrypt(payload, encryption_key)
    end

    # Decrypt a payload
    # @param encrypted_payload [Hash] Encrypted payload with metadata
    # @param encryption_key [String, nil] Optional encryption key
    # @return [String] Decrypted payload
    def self.decrypt(encrypted_payload, encryption_key = nil)
      Encryption.decrypt(encrypted_payload, encryption_key)
    end

    # Mask sensitive data in a payload
    # @param payload [Hash, String] Payload to mask
    # @param sensitive_fields [Array<String, Symbol>] Fields to mask
    # @return [Hash, String] Masked payload
    def self.mask_payload(payload, sensitive_fields = nil)
      Encryption.mask_payload(payload, sensitive_fields || Core.config.sensitive_fields)
    end
  end
end
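`Pigeon::Security` likewise delegates to `Pigeon::Encryption`, defined in data/lib/pigeon/encryption.rb (not shown in this excerpt). A round-trip sketch based on the YARD tags above; key resolution and defaults are assumptions handled inside Encryption:

# Per the @return tags: encrypt yields a Hash with metadata, decrypt
# restores the original string. Key handling lives in Pigeon::Encryption.
ciphertext = Pigeon::Security.encrypt('{"ssn":"123-45-6789"}')
plaintext  = Pigeon::Security.decrypt(ciphertext)

# Masking for logs; with no explicit field list, the configured
# sensitive_fields are used instead.
masked = Pigeon::Security.mask_payload({ ssn: "123-45-6789" }, [:ssn])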
data/lib/pigeon/serializer.rb
@@ -0,0 +1,77 @@
# frozen_string_literal: true

require "json"
require "json-schema" if Gem::Specification.find_all_by_name("json-schema").any?

module Pigeon
  # Serializer module for handling payload serialization and validation
  module Serializer
    # Serialize a payload to JSON
    # @param payload [Hash, Array, String] Payload to serialize
    # @return [String] Serialized JSON string
    def self.serialize(payload)
      return payload if payload.is_a?(String)

      ::JSON.generate(payload)
    rescue StandardError => e
      raise SerializationError, "Failed to serialize payload: #{e.message}"
    end

    # Deserialize a JSON string to a Ruby object
    # @param json_string [String] JSON string to deserialize
    # @param symbolize_names [Boolean] Whether to symbolize hash keys
    # @return [Hash, Array] Deserialized Ruby object
    def self.deserialize(json_string, symbolize_names: false)
      return json_string unless json_string.is_a?(String)
      return {} if json_string.empty?

      ::JSON.parse(json_string, symbolize_names: symbolize_names)
    rescue StandardError => e
      raise DeserializationError, "Failed to deserialize JSON: #{e.message}"
    end

    # Validate a payload against a JSON schema
    # @param payload [Hash, Array, String] Payload to validate
    # @param schema [Hash, String] JSON schema to validate against
    # @return [Boolean] Whether the payload is valid
    # @raise [ValidationError] If the payload is invalid
    def self.validate(payload, schema)
      # Skip validation if json-schema gem is not available
      unless defined?(::JSON::Validator)
        Pigeon.config.logger.warn("json-schema gem not available, skipping validation")
        return true
      end

      # Parse the payload if it's a string
      payload_obj = payload.is_a?(String) ? deserialize(payload) : payload

      # Parse the schema if it's a string
      schema_obj = schema.is_a?(String) ? deserialize(schema) : schema

      # Validate the payload against the schema
      ::JSON::Validator.validate!(schema_obj, payload_obj)
      true
    rescue ::JSON::Schema::ValidationError => e
      raise ValidationError, "Payload validation failed: #{e.message}"
    rescue StandardError => e
      raise ValidationError, "Schema validation error: #{e.message}"
    end

    # Validate a payload against a registered schema
    # @param payload [Hash, Array, String] Payload to validate
    # @param schema_name [String, Symbol] Name of the registered schema
    # @return [Boolean] Whether the payload is valid
    # @raise [ValidationError] If the payload is invalid or schema not found
    def self.validate_with_registered_schema(payload, schema_name)
      schema = Pigeon.config.schemas[schema_name.to_sym]
      raise ValidationError, "Schema '#{schema_name}' not found" unless schema

      validate(payload, schema)
    end

    # Custom error classes
    class SerializationError < StandardError; end
    class DeserializationError < StandardError; end
    class ValidationError < StandardError; end
  end
end
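Note that `serialize` passes String payloads through untouched, so pre-serialized or already-encrypted payloads are not double-encoded; everything else goes through `JSON.generate`. A quick sketch of the round trip and inline-schema validation (the schema body is illustrative; validation needs the optional json-schema gem, otherwise a warning is logged and the call returns true):

json = Pigeon::Serializer.serialize({ order_id: 42 }) # => '{"order_id":42}'
data = Pigeon::Serializer.deserialize(json, symbolize_names: true)
# => { order_id: 42 }

# Raises Pigeon::Serializer::ValidationError on an invalid payload.
Pigeon::Serializer.validate(json, {
  "type" => "object",
  "required" => ["order_id"]
})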