duramq 0.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/README.md +105 -0
- data/lib/pigeon/configuration.rb +65 -0
- data/lib/pigeon/generators/hanami/migration_generator.rb +87 -0
- data/lib/pigeon/generators/rails/migration_generator.rb +20 -0
- data/lib/pigeon/generators/rails/templates/create_outbox_messages.rb.erb +34 -0
- data/lib/pigeon/models/adapters/active_record_adapter.rb +118 -0
- data/lib/pigeon/models/adapters/rom_adapter.rb +135 -0
- data/lib/pigeon/models/outbox_message.rb +168 -0
- data/lib/pigeon/processor.rb +75 -0
- data/lib/pigeon/publisher.rb +63 -0
- data/lib/pigeon/version.rb +5 -0
- data/lib/pigeon.rb +143 -0
- metadata +87 -0
checksums.yaml
ADDED
@@ -0,0 +1,7 @@
---
SHA256:
  metadata.gz: c6583809e34810287dc5b0ff76a05cc1d71a951b7f9ced35c3b0e47c8757d0f8
  data.tar.gz: 8ab59d3c4db498fb127d1fdeff2315f1c96b472591064616d26894c42ec2062f
SHA512:
  metadata.gz: 9c62d951694151ca057f711000f655ebbb4ca925c8ed3c0d4b2db8dea954a25dd734dba48ca9932bab27400ffcbe963ff58b550961735d9ab134aa16e0d8d410
  data.tar.gz: ba9b70907fc44a42edf5efacbb3b92b7a729fb63f6e1d15bc0116fedb997d2790cfcfc3fbff36065570860cf3e142484461f069ccb594d767501903ab95b7e88
data/README.md
ADDED
@@ -0,0 +1,105 @@
# Pigeon

A Ruby gem that implements the outbox pattern for Kafka message publishing to ensure message durability and delivery reliability.

## Installation

Add this line to your application's Gemfile:

```ruby
gem 'pigeon'
```

And then execute:

```bash
$ bundle install
```

Or install it yourself as:

```bash
$ gem install pigeon
```

## Usage

### Configuration

Configure the gem with your Karafka and Kafka details using dry-configurable:

```ruby
Pigeon.configure do |config|
  config.client_id = "my-application"
  config.kafka_brokers = ["kafka1:9092", "kafka2:9092"]
  config.max_retries = 5
  config.retry_delay = 60 # seconds
  config.max_retry_delay = 3600 # 1 hour
  config.encrypt_payload = false
  config.retention_period = 7 # days

  # Additional Karafka-specific configuration
  config.karafka_config = {
    delivery: :async,
    kafka: {
      'bootstrap.servers': 'kafka1:9092,kafka2:9092',
      'request.required.acks': 1
    }
  }
end
```

You can also access configuration values directly:

```ruby
# Get the current configuration
client_id = Pigeon.config.client_id
max_retries = Pigeon.config.max_retries
```

### Publishing Messages

```ruby
# Simple publishing
Pigeon.publisher.publish(
  topic: "my-topic",
  payload: { user_id: 123, action: "signup" }
)

# With additional options
Pigeon.publisher.publish(
  topic: "my-topic",
  payload: { user_id: 123, action: "signup" },
  key: "user-123",
  headers: { "source" => "web-app" },
  sync: true, # attempt immediate publishing
  partition: 1
)
```

### Processing Messages

```ruby
# Process pending messages
stats = Pigeon.processor.process_pending(batch_size: 100)
puts "Processed: #{stats[:processed]}, Succeeded: #{stats[:succeeded]}, Failed: #{stats[:failed]}"

# Process a specific message
success = Pigeon.processor.process_message("message-id")

# Clean up old processed messages
cleaned = Pigeon.processor.cleanup_processed(older_than: 14) # days
puts "Cleaned up #{cleaned} messages"
```

## Development

After checking out the repo, run `bin/setup` to install dependencies. Then, run `rake spec` to run the tests. You can also run `bin/console` for an interactive prompt that will allow you to experiment.

## Contributing

Bug reports and pull requests are welcome on GitHub.

## License

The gem is available as open source under the terms of the [MIT License](https://opensource.org/licenses/MIT).
data/lib/pigeon/configuration.rb
ADDED
@@ -0,0 +1,65 @@
# frozen_string_literal: true

require "dry-configurable"
require "logger"

module Pigeon
  # Configuration class for Pigeon using dry-configurable
  class Configuration
    extend Dry::Configurable

    # Karafka client ID
    setting :client_id, default: "pigeon"

    # Kafka seed brokers configuration
    setting :kafka_brokers, default: ["localhost:9092"]

    # Maximum number of retries for failed messages
    setting :max_retries, default: 10

    # Base retry delay (will be used with exponential backoff)
    setting :retry_delay, default: 30 # seconds

    # Maximum retry delay
    setting :max_retry_delay, default: 86_400 # 24 hours

    # Whether to encrypt message payloads
    setting :encrypt_payload, default: false

    # Retention period for processed messages
    setting :retention_period, default: 7 # days

    # Logger instance
    setting :logger, default: Logger.new($stdout).tap { |l| l.level = Logger::INFO }

    # Metrics collector
    setting :metrics_collector, default: nil

    # Tracer for OpenTelemetry
    setting :tracer, default: nil

    # Karafka additional configuration
    setting :karafka_config, default: {}

    class << self
      # Reset the configuration to default values
      # @return [void]
      def reset_config
        # For dry-configurable 1.x
        if respond_to?(:settings)
          settings.each_key do |key|
            config[key] = settings[key].default
          end
        # For dry-configurable 0.x
        elsif respond_to?(:config_option_definitions)
          config_option_definitions.each_key do |key|
            config[key] = config_option_definitions[key].default_value
          end
        else
          # Simplest approach - just create a new configuration
          @config = Dry::Configurable::Config.new
        end
      end
    end
  end
end
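As a rough illustration of how the configuration class above might be exercised in a host application's test suite (a sketch, not part of the package; the RSpec wiring is an assumption), `reset_config` can be called between examples so one spec's settings do not leak into the next:

```ruby
# Hypothetical test setup: restore Pigeon defaults after every example.
RSpec.configure do |rspec|
  rspec.after(:each) do
    Pigeon::Configuration.reset_config
  end
end
```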
data/lib/pigeon/generators/hanami/migration_generator.rb
ADDED
@@ -0,0 +1,87 @@
# frozen_string_literal: true

module Pigeon
  module Generators
    module Hanami
      # Generator for creating the outbox message table migration for Hanami applications
      class MigrationGenerator
        attr_reader :app_name

        def initialize(app_name = nil)
          @app_name = app_name || detect_app_name
        end

        # Generate the migration file
        # @return [String] Path to the generated file
        def generate
          timestamp = Time.now.utc.strftime("%Y%m%d%H%M%S")
          filename = "db/migrations/#{timestamp}_create_outbox_messages.rb"

          # Create the migrations directory if it doesn't exist
          FileUtils.mkdir_p("db/migrations")

          # Write the migration file
          File.write(filename, migration_content)

          filename
        end

        private

        # Detect the Hanami application name
        # @return [String] Application name
        def detect_app_name
          if defined?(Hanami) && Hanami.respond_to?(:app)
            Hanami.app.name.to_s
          else
            "App"
          end
        end

        # Generate the migration content
        # @return [String] Migration content
        def migration_content
          <<~RUBY
            # frozen_string_literal: true

            ROM::SQL.migration do
              change do
                create_table :outbox_messages do
                  primary_key :id, type: :uuid

                  # Message metadata
                  column :topic, String, null: false
                  column :key, String
                  column :headers, :jsonb
                  column :partition, Integer

                  # Message content
                  column :payload, String, text: true, null: false

                  # Processing metadata
                  column :status, String, null: false, default: "pending"
                  column :retry_count, Integer, null: false, default: 0
                  column :max_retries, Integer
                  column :error_message, String, text: true
                  column :correlation_id, String

                  # Timestamps
                  column :created_at, DateTime, null: false
                  column :updated_at, DateTime, null: false
                  column :published_at, DateTime
                  column :next_retry_at, DateTime

                  # Indexes
                  index :status
                  index :next_retry_at
                  index :correlation_id
                  index :created_at
                end
              end
            end
          RUBY
        end
      end
    end
  end
end
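A minimal sketch of invoking the generator above from a Hanami project's console or a Rake task (the surrounding setup is an assumption, not shipped with the gem); the class writes under db/migrations and relies on FileUtils being loaded:

```ruby
require "fileutils" # the generator calls FileUtils.mkdir_p

path = Pigeon::Generators::Hanami::MigrationGenerator.new.generate
puts "Wrote outbox migration to #{path}"
```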
data/lib/pigeon/generators/rails/migration_generator.rb
ADDED
@@ -0,0 +1,20 @@
# frozen_string_literal: true

require "rails/generators"

module Pigeon
  module Generators
    module Rails
      # Generator for creating the outbox message table migration for Rails applications
      class MigrationGenerator < ::Rails::Generators::Base
        source_root File.expand_path("templates", __dir__)
        desc "Creates a migration for the outbox message table"

        def create_migration_file
          timestamp = Time.now.utc.strftime("%Y%m%d%H%M%S")
          template "create_outbox_messages.rb.erb", "db/migrate/#{timestamp}_create_outbox_messages.rb"
        end
      end
    end
  end
end
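The exact `rails generate` lookup name for this class depends on how Rails resolves its namespace, so a programmatic invocation is sketched instead (an assumption, not documented by the gem); Rails generators inherit Thor's `.start`, which runs `create_migration_file` against the current working directory:

```ruby
# Hypothetical invocation from a Rails console or script: renders the ERB
# template below into db/migrate/<timestamp>_create_outbox_messages.rb.
Pigeon::Generators::Rails::MigrationGenerator.start([])
```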
data/lib/pigeon/generators/rails/templates/create_outbox_messages.rb.erb
ADDED
@@ -0,0 +1,34 @@
# frozen_string_literal: true

class CreateOutboxMessages < ActiveRecord::Migration[<%= ::Rails::VERSION::MAJOR %>.<%= ::Rails::VERSION::MINOR %>]
  def change
    create_table :outbox_messages, id: :uuid do |t|
      # Message metadata
      t.string :topic, null: false
      t.string :key
      t.jsonb :headers
      t.integer :partition

      # Message content
      t.text :payload, null: false

      # Processing metadata
      t.string :status, null: false, default: "pending"
      t.integer :retry_count, null: false, default: 0
      t.integer :max_retries
      t.text :error_message
      t.string :correlation_id

      # Timestamps
      t.timestamps
      t.datetime :published_at
      t.datetime :next_retry_at
    end

    # Add indexes for efficient querying
    add_index :outbox_messages, :status
    add_index :outbox_messages, :next_retry_at
    add_index :outbox_messages, :correlation_id
    add_index :outbox_messages, :created_at
  end
end
data/lib/pigeon/models/adapters/active_record_adapter.rb
ADDED
@@ -0,0 +1,118 @@
# frozen_string_literal: true

module Pigeon
  module Models
    module Adapters
      # ActiveRecord adapter for OutboxMessage
      class ActiveRecordAdapter < Pigeon::Models::OutboxMessage
        # Define the ActiveRecord model
        def self.define_model
          return @model if @model

          # Define the ActiveRecord model class
          @model = Class.new(ActiveRecord::Base) do
            self.table_name = "outbox_messages"

            # Validations
            validates :topic, presence: true
            validates :payload, presence: true
            validates :status, presence: true, inclusion: { in: Pigeon::Models::OutboxMessage::STATUSES }
            validates :retry_count, presence: true, numericality: { only_integer: true, greater_than_or_equal_to: 0 }

            # Serialize headers as JSON
            serialize :headers, JSON if respond_to?(:serialize)
          end

          # Return the model class
          @model
        end

        # Get the ActiveRecord model class
        # @return [Class] ActiveRecord model class
        def self.model
          define_model
        end

        # Create a new outbox message
        # @param attributes [Hash] Message attributes
        # @return [OutboxMessage] New message instance
        def self.create(attributes = {})
          record = model.create!(prepare_attributes(attributes))
          new_from_record(record)
        end

        # Find a message by ID
        # @param id [String, Integer] Message ID
        # @return [OutboxMessage, nil] Message instance or nil if not found
        def self.find(id)
          record = model.find_by(id: id)
          record ? new_from_record(record) : nil
        end

        # Find messages by status
        # @param status [String] Message status
        # @param limit [Integer] Maximum number of messages to return
        # @return [Array<OutboxMessage>] Array of message instances
        def self.find_by_status(status, limit = 100)
          records = model.where(status: status).order(created_at: :asc).limit(limit)
          records.map { |record| new_from_record(record) }
        end

        # Find messages ready for retry
        # @param limit [Integer] Maximum number of messages to return
        # @return [Array<OutboxMessage>] Array of message instances
        def self.find_ready_for_retry(limit = 100)
          now = Time.now
          records = model.where(status: "pending")
                         .where("next_retry_at IS NULL OR next_retry_at <= ?", now)
                         .order(created_at: :asc)
                         .limit(limit)
          records.map { |record| new_from_record(record) }
        end

        # Create a new OutboxMessage instance from an ActiveRecord record
        # @param record [ActiveRecord::Base] ActiveRecord record
        # @return [OutboxMessage] New message instance
        def self.new_from_record(record)
          attributes = {}
          ATTRIBUTES.each do |attr|
            attributes[attr] = record.send(attr) if record.respond_to?(attr)
          end
          new(attributes).tap { |msg| msg.instance_variable_set(:@record, record) }
        end

        # Prepare attributes for ActiveRecord
        # @param attributes [Hash] Raw attributes
        # @return [Hash] Prepared attributes
        def self.prepare_attributes(attributes)
          attributes = attributes.dup

          # Convert Time objects to the format expected by ActiveRecord
          %i[created_at updated_at published_at next_retry_at].each do |attr|
            attributes[attr] = attributes[attr].to_time if attributes[attr].is_a?(Time)
          end

          # Ensure headers is a hash
          attributes[:headers] ||= {}

          attributes
        end

        # Save the message
        # @return [Boolean] Whether the save was successful
        def save
          if @record
            @record.attributes = self.class.prepare_attributes(@attributes)
            @record.save
          else
            @record = self.class.model.create!(self.class.prepare_attributes(@attributes))
            true
          end
        rescue ActiveRecord::RecordInvalid => e
          Pigeon.config.logger.error("Failed to save outbox message: #{e.message}")
          false
        end
      end
    end
  end
end
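A brief sketch of how the adapter above might be used, assuming ActiveRecord is connected and the outbox_messages table from the generated migration exists; the topic and payload values are illustrative only:

```ruby
adapter = Pigeon::Models::Adapters::ActiveRecordAdapter

# Enqueue an outbox row, then read rows back by status and retry readiness.
message   = adapter.create(topic: "user-events", payload: { user_id: 123 }.to_json)
pending   = adapter.find_by_status("pending", 10)
retryable = adapter.find_ready_for_retry(10)
```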
data/lib/pigeon/models/adapters/rom_adapter.rb
ADDED
@@ -0,0 +1,135 @@
# frozen_string_literal: true

module Pigeon
  module Models
    module Adapters
      # ROM adapter for OutboxMessage (for Hanami applications)
      class RomAdapter < Pigeon::Models::OutboxMessage
        # Define the ROM relation
        def self.define_relation
          return @relation if @relation

          # Get the ROM container
          container = Hanami.app["persistence.rom"]

          # Define the relation if it doesn't exist
          unless container.relations.key?(:outbox_messages)
            container.register_relation(Class.new(ROM::Relation[:sql]) do
              schema(:outbox_messages, infer: true)

              # Query methods
              def by_status(status)
                where(status: status).order(:created_at)
              end

              def ready_for_retry
                now = Time.now
                where(status: "pending")
                  .where { next_retry_at.nil? | (next_retry_at <= now) }
                  .order(:created_at)
              end
            end)
          end

          # Get the relation
          @relation = container.relations[:outbox_messages]
        end

        # Get the ROM relation
        # @return [ROM::Relation] ROM relation
        def self.relation
          define_relation
        end

        # Get the ROM repository
        # @return [ROM::Repository] ROM repository
        def self.repository
          @repository ||= Hanami.app["repositories.outbox_messages"]
        end

        # Create a new outbox message
        # @param attributes [Hash] Message attributes
        # @return [OutboxMessage] New message instance
        def self.create(attributes = {})
          attributes = prepare_attributes(attributes)
          record = repository.create(attributes)
          new_from_record(record)
        end

        # Find a message by ID
        # @param id [String, Integer] Message ID
        # @return [OutboxMessage, nil] Message instance or nil if not found
        def self.find(id)
          record = repository.find(id)
          record ? new_from_record(record) : nil
        rescue ROM::TupleCountMismatchError
          nil
        end

        # Find messages by status
        # @param status [String] Message status
        # @param limit [Integer] Maximum number of messages to return
        # @return [Array<OutboxMessage>] Array of message instances
        def self.find_by_status(status, limit = 100)
          records = relation.by_status(status).limit(limit).to_a
          records.map { |record| new_from_record(record) }
        end

        # Find messages ready for retry
        # @param limit [Integer] Maximum number of messages to return
        # @return [Array<OutboxMessage>] Array of message instances
        def self.find_ready_for_retry(limit = 100)
          records = relation.ready_for_retry.limit(limit).to_a
          records.map { |record| new_from_record(record) }
        end

        # Create a new OutboxMessage instance from a ROM record
        # @param record [ROM::Struct] ROM record
        # @return [OutboxMessage] New message instance
        def self.new_from_record(record)
          attributes = {}
          ATTRIBUTES.each do |attr|
            attributes[attr] = record.send(attr) if record.respond_to?(attr)
          end
          new(attributes).tap { |msg| msg.instance_variable_set(:@record, record) }
        end

        # Prepare attributes for ROM
        # @param attributes [Hash] Raw attributes
        # @return [Hash] Prepared attributes
        def self.prepare_attributes(attributes)
          attributes = attributes.dup

          # Convert Time objects to the format expected by ROM
          %i[created_at updated_at published_at next_retry_at].each do |attr|
            attributes[attr] = attributes[attr].to_time if attributes[attr].is_a?(Time)
          end

          # Ensure headers is a hash
          attributes[:headers] ||= {}

          attributes
        end

        # Save the message
        # @return [Boolean] Whether the save was successful
        def save
          if @record
            # Update existing record
            id = @record.id
            attributes = self.class.prepare_attributes(@attributes)
            self.class.repository.update(id, attributes)
          else
            # Create new record
            attributes = self.class.prepare_attributes(@attributes)
            @record = self.class.repository.create(attributes)
          end
          true
        rescue StandardError => e
          Pigeon.config.logger.error("Failed to save outbox message: #{e.message}")
          false
        end
      end
    end
  end
end
data/lib/pigeon/models/outbox_message.rb
ADDED
@@ -0,0 +1,168 @@
# frozen_string_literal: true

module Pigeon
  module Models
    # Base class for outbox message model
    # This is a framework-agnostic representation of the outbox message
    class OutboxMessage
      # Attributes that should be present in all framework implementations
      ATTRIBUTES = %i[
        id
        topic
        key
        headers
        partition
        payload
        status
        retry_count
        max_retries
        error_message
        correlation_id
        created_at
        updated_at
        published_at
        next_retry_at
      ].freeze

      # Valid status values
      STATUSES = %w[pending processing published failed].freeze

      # Default values for attributes
      DEFAULTS = {
        status: "pending",
        retry_count: 0,
        headers: {},
        created_at: -> { Time.now },
        updated_at: -> { Time.now }
      }.freeze

      # Create a new outbox message with the given attributes
      # @param attributes [Hash] Message attributes
      # @return [OutboxMessage] New message instance
      def self.create(attributes = {})
        new(attributes)
      end

      # Find a message by ID
      # @param id [String, Integer] Message ID
      # @return [OutboxMessage, nil] Message instance or nil if not found
      def self.find(id)
        raise NotImplementedError, "#{self.class.name}#find must be implemented by a framework adapter"
      end

      # Find messages by status
      # @param status [String] Message status
      # @param limit [Integer] Maximum number of messages to return
      # @return [Array<OutboxMessage>] Array of message instances
      def self.find_by_status(status, limit = 100)
        raise NotImplementedError, "#{self.class.name}#find_by_status must be implemented by a framework adapter"
      end

      # Find messages ready for retry
      # @param limit [Integer] Maximum number of messages to return
      # @return [Array<OutboxMessage>] Array of message instances
      def self.find_ready_for_retry(limit = 100)
        raise NotImplementedError, "#{self.class.name}#find_ready_for_retry must be implemented by a framework adapter"
      end

      # Initialize a new outbox message
      # @param attributes [Hash] Message attributes
      def initialize(attributes = {})
        @attributes = DEFAULTS.dup
        attributes.each do |key, value|
          send("#{key}=", value) if respond_to?("#{key}=")
        end
      end

      # Get an attribute value
      # @param name [Symbol] Attribute name
      # @return [Object] Attribute value
      def [](name)
        @attributes[name.to_sym]
      end

      # Set an attribute value
      # @param name [Symbol] Attribute name
      # @param value [Object] Attribute value
      def []=(name, value)
        @attributes[name.to_sym] = value
      end

      # Get all attributes
      # @return [Hash] All attributes
      def attributes
        @attributes.dup
      end

      # Save the message
      # @return [Boolean] Whether the save was successful
      def save
        raise NotImplementedError, "#{self.class.name}#save must be implemented by a framework adapter"
      end

      # Update the message attributes
      # @param attributes [Hash] New attribute values
      # @return [Boolean] Whether the update was successful
      def update(attributes = {})
        attributes.each do |key, value|
          send("#{key}=", value) if respond_to?("#{key}=")
        end
        save
      end

      # Mark the message as published
      # @return [Boolean] Whether the update was successful
      def mark_as_published
        self.status = "published"
        self.published_at = Time.now
        self.updated_at = Time.now
        save
      end

      # Mark the message as failed
      # @param error [Exception, String] Error that caused the failure
      # @return [Boolean] Whether the update was successful
      def mark_as_failed(error = nil)
        self.status = "failed"
        self.error_message = error.is_a?(Exception) ? "#{error.class}: #{error.message}" : error.to_s
        self.updated_at = Time.now
        save
      end

      # Increment the retry count and set the next retry time
      # @return [Boolean] Whether the update was successful
      def increment_retry_count
        self.retry_count += 1
        self.next_retry_at = calculate_next_retry_time
        self.updated_at = Time.now
        save
      end

      # Calculate the next retry time based on exponential backoff
      # @return [Time] Next retry time
      def calculate_next_retry_time
        base_delay = Pigeon.config.retry_delay || 30 # 30 seconds default
        max_delay = Pigeon.config.max_retry_delay || 86_400 # 24 hours default

        # Exponential backoff: delay = base_delay * (2 ^ retry_count)
        delay = base_delay * (2**retry_count)
        delay = [delay, max_delay].min # Cap at max delay

        Time.now + delay
      end

      # Check if the message has exceeded the maximum retry count
      # @return [Boolean] Whether the message has exceeded the maximum retry count
      def max_retries_exceeded?
        max_retries = self[:max_retries] || Pigeon.config.max_retries || 10
        retry_count >= max_retries
      end

      # Define attribute accessors for all attributes
      ATTRIBUTES.each do |attr|
        define_method(attr) { @attributes[attr] }
        define_method("#{attr}=") { |value| @attributes[attr] = value }
      end
    end
  end
end
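To make the backoff arithmetic in calculate_next_retry_time concrete, here is a small sketch using the configuration defaults shown earlier (retry_delay 30 seconds, max_retry_delay 86,400 seconds); the loop is illustrative, not part of the gem:

```ruby
base_delay = 30       # Pigeon.config.retry_delay default
max_delay  = 86_400   # Pigeon.config.max_retry_delay default

# delay = base_delay * 2**retry_count, capped at max_delay
(0..12).map { |retry_count| [base_delay * (2**retry_count), max_delay].min }
# => [30, 60, 120, 240, 480, 960, 1920, 3840, 7680, 15360, 30720, 61440, 86400]
```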
data/lib/pigeon/processor.rb
ADDED
@@ -0,0 +1,75 @@
# frozen_string_literal: true

require "json"

module Pigeon
  # Processor class for handling outbox messages
  class Processor
    # Process pending outbox messages
    # @param batch_size [Integer] Number of messages to process in one batch
    # @return [Hash] Processing statistics
    def process_pending(batch_size: 100)
      # This is a placeholder implementation
      # The actual implementation will be added in task 4.1
      Pigeon.config.logger.info("Processing pending messages, batch size: #{batch_size}")

      # In the actual implementation, we would:
      # 1. Fetch pending messages from the database
      # 2. Process them in batches using Karafka
      # 3. Update their status in the database

      # Return mock statistics
      {
        processed: 0,
        succeeded: 0,
        failed: 0,
        retried: 0
      }
    end

    # Process a specific outbox message
    # @param message_id [String, Integer] ID of the message to process
    # @return [Boolean] Success status
    def process_message(message_id)
      # This is a placeholder implementation
      # The actual implementation will be added in task 4.1
      Pigeon.config.logger.info("Processing message ID: #{message_id}")

      # In the actual implementation, we would:
      # 1. Fetch the message from the database
      # 2. Send it to Kafka using Karafka
      # 3. Update its status in the database

      # Mock implementation using Karafka
      begin
        # Simulate sending a message with Karafka
        Pigeon.karafka_producer.produce_async(
          { test: "data" }.to_json,
          topic: "test-topic"
        )

        # Return success
        true
      rescue StandardError => e
        Pigeon.config.logger.error("Failed to process message #{message_id}: #{e.message}")
        false
      end
    end

    # Clean up old processed messages
    # @param older_than [ActiveSupport::Duration, Integer] Age threshold for cleanup
    # @return [Integer] Number of records cleaned up
    def cleanup_processed(older_than: 7)
      # This is a placeholder implementation
      # The actual implementation will be added in task 7.2
      Pigeon.config.logger.info("Cleaning up processed messages older than #{older_than} days")

      # In the actual implementation, we would:
      # 1. Delete processed messages older than the specified threshold
      # 2. Return the number of deleted records

      # Return mock count
      0
    end
  end
end
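The processor is meant to be driven externally (the methods above are still placeholders), so a minimal polling loop is sketched below; the loop, interval, and batch size are assumptions rather than anything the gem ships:

```ruby
# Hypothetical worker loop: drain pending outbox rows, then sleep briefly.
loop do
  stats = Pigeon.processor.process_pending(batch_size: 100)
  Pigeon.config.logger.info("outbox pass: #{stats.inspect}")
  sleep 5
end
```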
data/lib/pigeon/publisher.rb
ADDED
@@ -0,0 +1,63 @@
# frozen_string_literal: true

require "securerandom"
require "json"

module Pigeon
  # Publisher class for storing messages in the outbox
  class Publisher
    # Mock outbox message class for development
    OutboxMessage = Struct.new(
      :id, :topic, :key, :headers, :message_partition, :payload, :status, :created_at,
      keyword_init: true
    )

    # Publish a message to Kafka via the outbox pattern
    # @param topic [String] Kafka topic
    # @param payload [Hash, String] Message payload
    # @param key [String, nil] Optional message key
    # @param headers [Hash, nil] Optional message headers
    # @param sync [Boolean] Whether to attempt immediate publishing
    # @param partition [Integer, nil] Optional specific partition
    # @return [OutboxMessage] The created outbox message record
    def publish(topic:, payload:, key: nil, headers: nil, sync: false, partition: nil)
      # This is a placeholder implementation
      # The actual implementation will be added in task 3.1
      Pigeon.config.logger.info("Message published to topic: #{topic}")

      # Prepare the message for Karafka
      message_payload = payload.is_a?(String) ? payload : payload.to_json

      # If sync is true, attempt to publish immediately using Karafka
      if sync
        begin
          message_options = { topic: topic }
          message_options[:key] = key if key
          message_options[:headers] = headers if headers
          message_options[:partition] = partition if partition

          # Use Karafka producer to send the message
          Pigeon.karafka_producer.produce_sync(message_payload, **message_options)

          Pigeon.config.logger.info("Message published synchronously to topic: #{topic}")
        rescue StandardError => e
          Pigeon.config.logger.error("Failed to publish message synchronously: #{e.message}")
          # Continue with storing in outbox
        end
      end

      # Return a mock outbox message for now
      # In the actual implementation, this will be stored in the database
      OutboxMessage.new(
        id: SecureRandom.uuid,
        topic: topic,
        key: key,
        headers: headers,
        message_partition: partition,
        payload: payload,
        status: sync ? "processed" : "pending",
        created_at: Time.now
      )
    end
  end
end
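As the placeholder above shows, the status of the returned struct reflects whether a synchronous delivery was attempted; a short sketch with illustrative topic and payload values:

```ruby
async_msg = Pigeon.publisher.publish(topic: "audit", payload: { event: "login" })
async_msg.status # => "pending"   (left in the outbox for later processing)

sync_msg = Pigeon.publisher.publish(topic: "audit", payload: { event: "login" }, sync: true)
sync_msg.status  # => "processed" (immediate publish attempted via the Karafka producer)
```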
data/lib/pigeon.rb
ADDED
@@ -0,0 +1,143 @@
# frozen_string_literal: true

require "pigeon/version"
require "pigeon/configuration"
require "pigeon/models/outbox_message"
require "pigeon/models/adapters/active_record_adapter"
require "pigeon/models/adapters/rom_adapter"
require "pigeon/publisher"
require "pigeon/processor"
require "karafka"

module Pigeon
  class Error < StandardError; end

  # Configure the gem
  # @yield [config] Configuration instance
  # @example
  #   Pigeon.configure do |config|
  #     config.client_id = "my-application"
  #     config.kafka_brokers = ["kafka1:9092", "kafka2:9092"]
  #     config.max_retries = 5
  #   end
  def self.configure
    yield(Configuration.config) if block_given?
    initialize_karafka if @karafka_initialized.nil?
  end

  # Get the configuration
  # @return [Dry::Configurable::Config]
  def self.config
    Configuration.config
  end

  # Initialize the Karafka producer
  # @return [Karafka::Producer]
  def self.initialize_karafka # rubocop:disable Metrics/AbcSize
    return @karafka_producer if @karafka_initialized

    # Configure Karafka
    begin
      Karafka::Setup::Config.setup do |karafka_config|
        karafka_config.client_id = config.client_id

        # Set required Kafka configuration if not provided
        if !config.karafka_config[:kafka] || config.karafka_config[:kafka].empty?
          karafka_config.kafka = {
            "bootstrap.servers": config.kafka_brokers.join(",")
          }
        end

        # Apply any additional Karafka configuration
        config.karafka_config.each do |key, value|
          karafka_config.public_send("#{key}=", value) if karafka_config.respond_to?("#{key}=")
        end
      end

      @karafka_initialized = true
      @karafka_producer = Karafka.producer
    rescue StandardError => e
      config.logger.error("Failed to initialize Karafka: #{e.message}")
      # Return a mock producer for testing
      @karafka_initialized = true
      @karafka_producer = MockProducer.new
    end

    @karafka_producer
  end

  # Mock producer for testing
  class MockProducer
    def produce_sync?(_payload, **_options)
      true
    end

    def produce_async?(_payload, **_options)
      true
    end

    # For compatibility with the real Karafka producer
    alias produce_sync produce_sync?
    alias produce_async produce_async?
  end

  # Get the Karafka producer instance
  # @return [Karafka::Producer]
  def self.karafka_producer
    initialize_karafka unless @karafka_initialized
    @karafka_producer
  end

  # Create a new publisher instance
  # @return [Pigeon::Publisher]
  def self.publisher
    Publisher.new
  end

  # Create a new processor instance
  # @return [Pigeon::Processor]
  def self.processor
    Processor.new
  end

  # Get the appropriate outbox message adapter based on the framework
  # @return [Class] Adapter class
  def self.outbox_message_adapter
    if defined?(ActiveRecord)
      Models::Adapters::ActiveRecordAdapter
    elsif defined?(ROM) && defined?(Hanami)
      Models::Adapters::RomAdapter
    else
      Models::OutboxMessage
    end
  end

  # Create a new outbox message
  # @param attributes [Hash] Message attributes
  # @return [Pigeon::Models::OutboxMessage] New message instance
  def self.create_outbox_message(attributes = {})
    outbox_message_adapter.create(attributes)
  end

  # Find an outbox message by ID
  # @param id [String, Integer] Message ID
  # @return [Pigeon::Models::OutboxMessage, nil] Message instance or nil if not found
  def self.find_outbox_message(id)
    outbox_message_adapter.find(id)
  end

  # Find outbox messages by status
  # @param status [String] Message status
  # @param limit [Integer] Maximum number of messages to return
  # @return [Array<Pigeon::Models::OutboxMessage>] Array of message instances
  def self.find_outbox_messages_by_status(status, limit = 100)
    outbox_message_adapter.find_by_status(status, limit)
  end

  # Find outbox messages ready for retry
  # @param limit [Integer] Maximum number of messages to return
  # @return [Array<Pigeon::Models::OutboxMessage>] Array of message instances
  def self.find_outbox_messages_ready_for_retry(limit = 100)
    outbox_message_adapter.find_ready_for_retry(limit)
  end
end
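A compact sketch tying the entry points above together (configure once, then let outbox_message_adapter pick the persistence layer); the client ID and broker address are placeholders:

```ruby
Pigeon.configure do |config|
  config.client_id     = "orders-service"   # placeholder values
  config.kafka_brokers = ["localhost:9092"]
end

Pigeon.outbox_message_adapter
# => ActiveRecordAdapter when ActiveRecord is loaded,
#    RomAdapter under Hanami/ROM, otherwise the in-memory base model.

message = Pigeon.create_outbox_message(topic: "orders", payload: { id: 1 }.to_json)
Pigeon.find_outbox_messages_by_status("pending", 20)
```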
metadata
ADDED
@@ -0,0 +1,87 @@
--- !ruby/object:Gem::Specification
name: duramq
version: !ruby/object:Gem::Version
  version: 0.1.0
platform: ruby
authors:
- Khai Le
autorequire:
bindir: bin
cert_chain: []
date: 2025-07-23 00:00:00.000000000 Z
dependencies:
- !ruby/object:Gem::Dependency
  name: dry-configurable
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.3'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '1.3'
- !ruby/object:Gem::Dependency
  name: karafka
  requirement: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.5'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
    requirements:
    - - "~>"
      - !ruby/object:Gem::Version
        version: '2.5'
description: A Ruby gem that implements the outbox pattern for Kafka message publishing
  to ensure message durability and delivery reliability
email:
- khaile.to@gmail.com
executables: []
extensions: []
extra_rdoc_files: []
files:
- README.md
- lib/pigeon.rb
- lib/pigeon/configuration.rb
- lib/pigeon/generators/hanami/migration_generator.rb
- lib/pigeon/generators/rails/migration_generator.rb
- lib/pigeon/generators/rails/templates/create_outbox_messages.rb.erb
- lib/pigeon/models/adapters/active_record_adapter.rb
- lib/pigeon/models/adapters/rom_adapter.rb
- lib/pigeon/models/outbox_message.rb
- lib/pigeon/processor.rb
- lib/pigeon/publisher.rb
- lib/pigeon/version.rb
homepage: https://github.com/khaile/pigeon
licenses:
- MIT
metadata:
  homepage_uri: https://github.com/khaile/pigeon
  changelog_uri: https://github.com/khaile/pigeon/blob/main/CHANGELOG.md
  rubygems_mfa_required: 'false'
post_install_message:
rdoc_options: []
require_paths:
- lib
required_ruby_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: 3.3.8
required_rubygems_version: !ruby/object:Gem::Requirement
  requirements:
  - - ">="
    - !ruby/object:Gem::Version
      version: '0'
requirements: []
rubygems_version: 3.5.22
signing_key:
specification_version: 4
summary: Kafka outbox pattern implementation for Ruby applications
test_files: []