pub_sub_model_sync 0.5.10 → 0.6.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +4 -4
- data/.github/workflows/ruby.yml +1 -1
- data/CHANGELOG.md +13 -0
- data/Dockerfile +6 -0
- data/Gemfile.lock +2 -1
- data/README.md +182 -97
- data/docker-compose.yaml +12 -0
- data/docs/notifications-diagram.png +0 -0
- data/lib/pub_sub_model_sync/base.rb +16 -3
- data/lib/pub_sub_model_sync/config.rb +1 -1
- data/lib/pub_sub_model_sync/message_processor.rb +3 -1
- data/lib/pub_sub_model_sync/message_publisher.rb +85 -18
- data/lib/pub_sub_model_sync/mock_google_service.rb +4 -0
- data/lib/pub_sub_model_sync/mock_kafka_service.rb +13 -0
- data/lib/pub_sub_model_sync/payload.rb +16 -4
- data/lib/pub_sub_model_sync/publisher.rb +23 -12
- data/lib/pub_sub_model_sync/publisher_concern.rb +37 -20
- data/lib/pub_sub_model_sync/service_base.rb +13 -4
- data/lib/pub_sub_model_sync/service_google.rb +52 -17
- data/lib/pub_sub_model_sync/service_kafka.rb +35 -12
- data/lib/pub_sub_model_sync/service_rabbit.rb +40 -33
- data/lib/pub_sub_model_sync/subscriber.rb +13 -11
- data/lib/pub_sub_model_sync/subscriber_concern.rb +8 -5
- data/lib/pub_sub_model_sync/tasks/worker.rake +11 -0
- data/lib/pub_sub_model_sync/version.rb +1 -1
- metadata +5 -2
data/lib/pub_sub_model_sync/service_base.rb

@@ -4,8 +4,6 @@ require 'pub_sub_model_sync/payload'
 module PubSubModelSync
   class ServiceBase < PubSubModelSync::Base
     SERVICE_KEY = 'service_model_sync'
-    PUBLISH_SETTINGS = {}.freeze
-    LISTEN_SETTINGS = {}.freeze

     def listen_messages
       raise 'method :listen_messages must be defined in service'
@@ -22,10 +20,20 @@ module PubSubModelSync

     private

+    # @param payload (Payload)
+    # @return (String): Json Format
+    def encode_payload(payload)
+      data = payload.to_h
+      not_important_keys = %i[ordering_key topic_name forced_ordering_key]
+      reduce_payload_size = !config.debug
+      data[:headers].except!(*not_important_keys) if reduce_payload_size
+      data.to_json
+    end
+
     # @param (String: Payload in json format)
     def process_message(payload_info)
       retries ||= 0
-      payload =
+      payload = decode_payload(payload_info)
       return payload.process unless same_app_message?(payload)

       log("Skipping message from same origin: #{[payload]}") if config.debug
@@ -45,7 +53,8 @@ module PubSubModelSync
       retries == 1
     end

-
+    # @return Payload
+    def decode_payload(payload_info)
       info = JSON.parse(payload_info).deep_symbolize_keys
       payload = ::PubSubModelSync::Payload.new(info[:data], info[:attributes], info[:headers])
       log("Received message: #{[payload]}") if config.debug
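The new encode_payload/decode_payload pair strips headers that only matter for routing (ordering_key, topic_name, forced_ordering_key) before serializing, unless config.debug keeps them for troubleshooting. A minimal plain-Ruby sketch of that trimming, with a hypothetical payload hash and plain reject standing in for ActiveSupport's except!:

    require 'json'

    # Roughly what Payload#to_h yields for a message (values are hypothetical)
    data = {
      data: { name: 'Sample' },
      attributes: { klass: 'User', action: :create },
      headers: { ordering_key: 'User/1', topic_name: 'users', uuid: 'abc-123' }
    }

    not_important_keys = %i[ordering_key topic_name forced_ordering_key]
    data[:headers] = data[:headers].reject { |k, _| not_important_keys.include?(k) }
    puts data.to_json
    # => {"data":{"name":"Sample"},"attributes":{"klass":"User","action":"create"},"headers":{"uuid":"abc-123"}}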
data/lib/pub_sub_model_sync/service_google.rb

@@ -7,50 +7,85 @@ end

 module PubSubModelSync
   class ServiceGoogle < ServiceBase
-    LISTEN_SETTINGS = {
+    LISTEN_SETTINGS = { message_ordering: true }.freeze
+    PUBLISH_SETTINGS = {}.freeze
     TOPIC_SETTINGS = {}.freeze
     SUBSCRIPTION_SETTINGS = { message_ordering: true }.freeze
-
+
+    # @!attribute topics (Hash): { key: Topic1, ... }
+    # @!attribute publish_topics (Hash): { key: Topic1, ... }
+    attr_accessor :service, :topics, :subscribers, :publish_topics

     def initialize
       @service = Google::Cloud::Pubsub.new(project: config.project,
                                            credentials: config.credentials)
-
-      service.create_topic(config.topic_name, TOPIC_SETTINGS)
-      topic.enable_message_ordering!
+      Array(config.topic_name || 'model_sync').each(&method(:init_topic))
     end

     def listen_messages
-      @subscription = subscribe_to_topic
-      @subscriber = subscription.listen(LISTEN_SETTINGS, &method(:process_message))
       log('Listener starting...')
-
+      @subscribers = subscribe_to_topics
       log('Listener started')
       sleep
-      subscriber.stop.wait!
+      subscribers.each { |subscriber| subscriber.stop.wait! }
       log('Listener stopped')
     end

+    # @param payload (PubSubModelSync::Payload)
     def publish(payload)
-
-
+      message_topics = Array(payload.headers[:topic_name] || '').map(&method(:find_topic))
+      message_topics.each do |topic|
+        topic.publish_async(encode_payload(payload), message_headers(payload)) do |res|
+          raise 'Failed to publish the message.' unless res.succeeded?
+        end
       end
     end

     def stop
       log('Listener stopping...')
-
+      subscribers.each(&:stop!)
     end

     private

-    def
-
+    def find_topic(topic_name)
+      topic_name = topic_name.to_s
+      return topics.values.first unless topic_name.present?
+
+      topics[topic_name] || publish_topics[topic_name] || init_topic(topic_name, only_publish: true)
     end

-
-
-
+    # @param only_publish (Boolean): if false is used to listen and publish messages
+    # @return (Topic): returns created or loaded topic
+    def init_topic(topic_name, only_publish: false)
+      topic_name = topic_name.to_s
+      @topics ||= {}
+      @publish_topics ||= {}
+      topic = service.topic(topic_name) || service.create_topic(topic_name, TOPIC_SETTINGS)
+      topic.enable_message_ordering!
+      publish_topics[topic_name] = topic if only_publish
+      topics[topic_name] = topic unless only_publish
+      topic
+    end
+
+    # @param payload (PubSubModelSync::Payload)
+    def message_headers(payload)
+      {
+        SERVICE_KEY => true,
+        ordering_key: payload.headers[:ordering_key]
+      }.merge(PUBLISH_SETTINGS)
+    end
+
+    # @return [Subscriber]
+    def subscribe_to_topics
+      topics.map do |key, topic|
+        subs_name = "#{config.subscription_key}_#{key}"
+        subscription = topic.subscription(subs_name) || topic.subscribe(subs_name, SUBSCRIPTION_SETTINGS)
+        subscriber = subscription.listen(LISTEN_SETTINGS, &method(:process_message))
+        subscriber.start
+        log("Subscribed to topic: #{topic.name} as: #{subs_name}")
+        subscriber
+      end
     end

     def process_message(received_message)
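With find_topic and init_topic, a single payload can now target one or several Pub/Sub topics through its topic_name header, and topics that were not configured at boot are created lazily for publishing only. A minimal sketch, assuming Google Cloud credentials are already configured; topic names and data are hypothetical, and the Payload argument order follows decode_payload above:

    payload = PubSubModelSync::Payload.new(
      { name: 'Sample user' },                                  # data
      { klass: 'User', action: :create },                       # attributes
      { topic_name: %w[users audit], ordering_key: 'User/1' }   # headers
    )

    service = PubSubModelSync::ServiceGoogle.new
    service.publish(payload) # publish_async to both topics, raising if a publish fails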
data/lib/pub_sub_model_sync/service_kafka.rb

@@ -7,14 +7,20 @@ end

 module PubSubModelSync
   class ServiceKafka < ServiceBase
+    QTY_WORKERS = 10
+    LISTEN_SETTINGS = {}.freeze
+    PUBLISH_SETTINGS = {}.freeze
+    PRODUCER_SETTINGS = { delivery_threshold: 200, delivery_interval: 30 }.freeze
     cattr_accessor :producer
-
+
+    # @!attribute topic_names (Array): ['topic 1', 'topic 2']
+    attr_accessor :service, :consumer, :topic_names

     def initialize
-      @config = PubSubModelSync::Config
       settings = config.kafka_connection
       settings[1][:client_id] ||= config.subscription_key
       @service = Kafka.new(*settings)
+      @topic_names = ensure_topics(Array(config.topic_name || 'model_sync'))
     end

     def listen_messages
@@ -28,12 +34,10 @@ module PubSubModelSync
     end

     def publish(payload)
-
-
-
-
-      producer.produce(payload.to_json, settings)
-      producer.deliver_messages
+      message_topics = Array(payload.headers[:topic_name] || topic_names.first)
+      message_topics.each do |topic_name|
+        producer.produce(encode_payload(payload), message_settings(payload, topic_name))
+      end
     end

     def stop
@@ -43,22 +47,41 @@ module PubSubModelSync

     private

+    def message_settings(payload, topic_name)
+      {
+        topic: ensure_topics(topic_name),
+        partition_key: payload.headers[:ordering_key],
+        headers: { SERVICE_KEY => true }
+      }.merge(PUBLISH_SETTINGS)
+    end
+
     def start_consumer
       @consumer = service.consumer(group_id: config.subscription_key)
-      consumer.subscribe(
+      topic_names.each { |topic_name| consumer.subscribe(topic_name) }
     end

     def producer
       return self.class.producer if self.class.producer

       at_exit { self.class.producer.shutdown }
-      self.class.producer = service.
+      self.class.producer = service.async_producer(PRODUCER_SETTINGS)
     end

     def process_message(message)
-
+      super(message.value) if message.headers[SERVICE_KEY]
+    end

-
+    # Check topic existence, create if missing topic
+    # @param names (Array<String>|String)
+    # @return (Array|String) return @param names
+    def ensure_topics(names)
+      missing_topics = Array(names) - (@known_topics || service.topics)
+      missing_topics.each do |name|
+        service.create_topic(name)
+      end
+      @known_topics ||= [] # cache service.topics to reduce verification time
+      @known_topics = (@known_topics + Array(names)).uniq
+      names
     end
   end
 end
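The Kafka service now derives its topics from config.topic_name (a single name or an array, defaulting to 'model_sync') and creates any missing topic at startup through ensure_topics. A hypothetical initializer sketch; the broker address, client id and topic names are placeholders:

    # config/initializers/pub_sub_model_sync.rb
    PubSubModelSync::Config.service_name = :kafka
    PubSubModelSync::Config.kafka_connection = [['localhost:9092'], { client_id: 'my_app' }]
    PubSubModelSync::Config.topic_name = %w[model_sync payments] # one name or an array
    PubSubModelSync::Config.subscription_key = 'my_app'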
data/lib/pub_sub_model_sync/service_rabbit.rb

@@ -7,18 +7,26 @@ end

 module PubSubModelSync
   class ServiceRabbit < ServiceBase
-
+    QUEUE_SETTINGS = { durable: true, auto_delete: false }.freeze
+    LISTEN_SETTINGS = { manual_ack: true }.freeze
+    PUBLISH_SETTINGS = {}.freeze
+
+    # @!attribute topic_names (Array): ['Topic 1', 'Topic 2']
+    # @!attribute channels (Array): [Channel1]
+    # @!attribute exchanges (Hash<key: Exchange>): {topic_name: Exchange1}
+    attr_accessor :service, :topic_names, :channels, :exchanges

     def initialize
-      @config = PubSubModelSync::Config
       @service = Bunny.new(*config.bunny_connection)
+      @topic_names = Array(config.topic_name || 'model_sync')
+      @channels = []
+      @exchanges = {}
     end

     def listen_messages
       log('Listener starting...')
-
+      subscribe_to_queues { |queue| queue.subscribe(LISTEN_SETTINGS, &method(:process_message)) }
       log('Listener started')
-      queue.subscribe(subscribe_settings, &method(:process_message))
       loop { sleep 5 }
     rescue PubSubModelSync::Runner::ShutDown
       log('Listener stopped')
@@ -40,54 +48,53 @@ module PubSubModelSync

     def stop
       log('Listener stopping...')
-
+      channels.each(&:close)
       service.close
     end

     private

-    def message_settings
+    def message_settings(payload)
       {
-        routing_key:
+        routing_key: payload.headers[:ordering_key],
         type: SERVICE_KEY,
         persistent: true
       }.merge(PUBLISH_SETTINGS)
     end

-    def queue_settings
-      { durable: true, auto_delete: false }
-    end
-
-    def subscribe_settings
-      { manual_ack: false }.merge(LISTEN_SETTINGS)
-    end
-
     def process_message(_delivery_info, meta_info, payload)
-
-
-      super(payload)
+      super(payload) if meta_info[:type] == SERVICE_KEY
     end

-    def
-
-
-
-
+    def subscribe_to_queues(&block)
+      @channels = []
+      topic_names.each do |topic_name|
+        subscribe_to_exchange(topic_name) do |channel, exchange|
+          queue = channel.queue(config.subscription_key, QUEUE_SETTINGS)
+          queue.bind(exchange)
+          @channels << channel
+          block.call(queue)
+        end
+      end
     end

-    def subscribe_to_exchange
-
-
+    def subscribe_to_exchange(topic_name, &block)
+      topic_name = topic_name.to_s
+      exchanges[topic_name] ||= begin
+        service.start
+        channel = service.create_channel
+        channel.fanout(topic_name)
+      end
+      block.call(channel, exchanges[topic_name])
     end

     def deliver_data(payload)
-
-
-
-
-
-
-      service.close
+      message_topics = Array(payload.headers[:topic_name] || topic_names.first)
+      message_topics.each do |topic_name|
+        subscribe_to_exchange(topic_name) do |_channel, exchange|
+          exchange.publish(encode_payload(payload), message_settings(payload))
+        end
+      end
     end
   end
 end
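RabbitMQ follows the same multi-topic pattern: each topic name maps to a fanout exchange, and a single queue named after subscription_key is bound to all of them, so several topics still funnel into one listener. A hypothetical initializer sketch; the AMQP URL and names are placeholders and the exact bunny_connection format should be checked against the gem README:

    # config/initializers/pub_sub_model_sync.rb
    PubSubModelSync::Config.bunny_connection = 'amqp://guest:guest@localhost'
    PubSubModelSync::Config.topic_name = %w[model_sync notifications] # one fanout exchange per name
    PubSubModelSync::Config.subscription_key = 'my_app'               # queue bound to every exchange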
data/lib/pub_sub_model_sync/subscriber.rb

@@ -5,24 +5,25 @@ module PubSubModelSync
     attr_accessor :klass, :action, :attrs, :settings, :identifiers
     attr_reader :payload

-    # @param settings: (Hash) { id: :id,
+    # @param settings: (Hash) { id: :id, mode: :model|:klass|:custom_model,
     #                           from_klass: klass, from_action: action }
     def initialize(klass, action, attrs: nil, settings: {})
-
-
+      @settings = { id: settings[:id] || :id,
+                    mode: settings[:mode] || :klass,
+                    from_klass: settings[:from_klass] || klass,
+                    from_action: settings[:from_action] || action }
       @klass = klass
       @action = action
       @attrs = attrs
-      @
-      @identifiers = Array(settings[:id]).map(&:to_sym)
+      @identifiers = Array(@settings[:id]).map(&:to_sym)
     end

     def process!(payload)
       @payload = payload
-
-
-
-
+      case settings[:mode]
+      when :klass then run_class_message
+      when :custom_model then run_model_message(crud_action: false)
+      else run_model_message
       end
     end

@@ -34,9 +35,10 @@ module PubSubModelSync
     end

     # support for: create, update, destroy
-    def run_model_message
+    def run_model_message(crud_action: true)
       model = find_model
       model.ps_processed_payload = payload
+      return model.send(action, payload.data) if ensure_sync(model) && !crud_action

       if action == :destroy
         model.destroy! if ensure_sync(model)
@@ -48,7 +50,7 @@ module PubSubModelSync

     def ensure_sync(model)
       config = PubSubModelSync::Config
-      cancelled = model.ps_before_save_sync(payload) == :cancel
+      cancelled = model.ps_before_save_sync(action, payload) == :cancel
       config.log("Cancelled sync with ps_before_save_sync: #{[payload]}") if cancelled && config.debug
       !cancelled
     end
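Since ps_before_save_sync now receives the action as well as the payload, a subscriber model can veto specific operations before the sync is applied. A sketch with hypothetical model and attribute names:

    class User < ActiveRecord::Base
      include PubSubModelSync::SubscriberConcern
      ps_subscribe(%i[name email])

      # returning :cancel skips the sync for this message
      def ps_before_save_sync(action, payload)
        :cancel if action == :destroy && payload.data[:role] == 'admin'
      end
    end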
data/lib/pub_sub_model_sync/subscriber_concern.rb

@@ -9,21 +9,24 @@ module PubSubModelSync

     # permit to apply custom actions before applying sync
     # @return (nil|:cancel): nil to continue sync OR :cancel to skip sync
-    def ps_before_save_sync(_payload); end
+    def ps_before_save_sync(_action, _payload); end

     module ClassMethods
       def ps_subscribe(attrs, actions: nil, from_klass: name, id: :id)
-        settings = { id: id, from_klass: from_klass }
+        settings = { id: id, from_klass: from_klass, mode: :model }
         actions ||= %i[create update destroy]
         actions.each do |action|
           add_ps_subscriber(action, attrs, settings)
         end
       end

+      def ps_subscribe_custom(action, from_klass: name, id: :id, from_action: nil)
+        settings = { id: id, mode: :custom_model, from_klass: from_klass, from_action: from_action }
+        add_ps_subscriber(action, nil, settings)
+      end
+
       def ps_class_subscribe(action, from_action: nil, from_klass: nil)
-        settings = {
-        settings[:from_action] = from_action if from_action
-        settings[:from_klass] = from_klass if from_klass
+        settings = { mode: :klass, from_action: from_action, from_klass: from_klass }
         add_ps_subscriber(action, nil, settings)
       end

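The new ps_subscribe_custom registers a subscription in :custom_model mode: the subscriber still locates the model by its identifier, but then calls the given instance method with payload.data instead of running a CRUD action. A sketch with hypothetical class and method names:

    class Product < ActiveRecord::Base
      include PubSubModelSync::SubscriberConcern
      ps_subscribe_custom(:update_price, from_klass: 'PaymentService', from_action: :price_changed)

      def update_price(data)
        update!(price: data[:price])
      end
    end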
data/lib/pub_sub_model_sync/tasks/worker.rake

@@ -3,6 +3,17 @@
 namespace :pub_sub_model_sync do
   desc 'Start listening syncs'
   task start: :environment do
+    # https://github.com/zendesk/ruby-kafka#consumer-groups
+    # Each consumer process will be assigned one or more partitions from each topic that the group
+    # subscribes to. In order to handle more messages, simply start more processes.
+    if PubSubModelSync::Config.service_name == :kafka
+      (PubSubModelSync::ServiceKafka::QTY_WORKERS - 1).times.each do
+        Thread.new do
+          Thread.current.abort_on_exception = true
+          PubSubModelSync::Runner.new.run
+        end
+      end
+    end
    PubSubModelSync::Runner.new.run
  end
end
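Starting the worker is unchanged (for example, bundle exec rake pub_sub_model_sync:start); when the configured service is Kafka, the task now also spawns QTY_WORKERS - 1 extra listener threads in the same process, so handling more messages beyond that means starting additional worker processes, as the linked ruby-kafka note explains.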