pub_sub_model_sync 0.5.8.1 → 0.6.0

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,8 +4,6 @@ require 'pub_sub_model_sync/payload'
 module PubSubModelSync
   class ServiceBase < PubSubModelSync::Base
     SERVICE_KEY = 'service_model_sync'
-    PUBLISH_SETTINGS = {}.freeze
-    LISTEN_SETTINGS = {}.freeze
 
     def listen_messages
       raise 'method :listen_messages must be defined in service'
@@ -22,23 +20,45 @@ module PubSubModelSync
 
     private
 
+    # @param payload (Payload)
+    # @return (String): Json Format
+    def encode_payload(payload)
+      data = payload.to_h
+      not_important_keys = %i[ordering_key topic_name forced_ordering_key]
+      reduce_payload_size = !config.debug
+      data[:headers].except!(*not_important_keys) if reduce_payload_size
+      data.to_json
+    end
+
     # @param (String: Payload in json format)
     def process_message(payload_info)
-      payload = parse_payload(payload_info)
-      log("Received message: #{[payload]}") if config.debug
-      if same_app_message?(payload)
-        log("Skip message from same origin: #{[payload]}") if config.debug
+      retries ||= 0
+      payload = decode_payload(payload_info)
+      return payload.process unless same_app_message?(payload)
+
+      log("Skipping message from same origin: #{[payload]}") if config.debug
+    rescue => e
+      retry if can_retry_process_message?(e, payload, retries += 1)
+    end
+
+    def can_retry_process_message?(error, payload, retries)
+      error_payload = [payload, error.message, error.backtrace]
+      if retries == 1
+        log("Error while starting to process message (retrying...): #{error_payload}", :error)
+        rescue_database_connection if lost_db_connection_err?(error)
       else
-        payload.process
+        log("Retried 1 time and error persists, exiting...: #{error_payload}", :error)
+        Process.exit!(true)
       end
-    rescue => e
-      error = [payload, e.message, e.backtrace]
-      log("Error parsing received message: #{error}", :error)
+      retries == 1
     end
 
-    def parse_payload(payload_info)
+    # @return Payload
+    def decode_payload(payload_info)
       info = JSON.parse(payload_info).deep_symbolize_keys
-      ::PubSubModelSync::Payload.new(info[:data], info[:attributes], info[:headers])
+      payload = ::PubSubModelSync::Payload.new(info[:data], info[:attributes], info[:headers])
+      log("Received message: #{[payload]}") if config.debug
+      payload
     end
 
     # @param payload (Payload)
@@ -46,5 +66,19 @@ module PubSubModelSync
       key = payload.headers[:app_key]
       key && key == config.subscription_key
     end
+
+    def lost_db_connection_err?(error)
+      return true if error.class.name == 'PG::UnableToSend' # rubocop:disable Style/ClassEqualityComparison
+
+      error.message.match?(/lost connection/i)
+    end
+
+    def rescue_database_connection
+      log('Lost DB connection. Attempting to reconnect...', :warn)
+      ActiveRecord::Base.connection.reconnect!
+    rescue
+      log('Cannot reconnect to database, exiting...', :error)
+      Process.exit!(true)
+    end
   end
 end
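
Note on the ServiceBase hunks above: encode_payload now drops the ordering_key, topic_name and forced_ordering_key headers unless config.debug is set, and process_message retries exactly once, attempting an ActiveRecord reconnect when the error looks like a lost database connection. A minimal sketch of the encode step, assuming a Payload built the same way decode_payload builds one (the sample data, attributes and header values are hypothetical):

  require 'pub_sub_model_sync'
  require 'active_support/core_ext/hash' # Hash#except! used by encode_payload

  # Hypothetical payload; decode_payload calls the same three-argument constructor.
  payload = PubSubModelSync::Payload.new(
    { name: 'Sample user' },                # data
    { klass: 'User', action: :create },     # attributes
    { app_key: 'my_app', ordering_key: 'User/1', topic_name: 'model_sync' }
  )

  data = payload.to_h
  # With config.debug falsy, these keys are stripped before serializing:
  data[:headers].except!(:ordering_key, :topic_name, :forced_ordering_key)
  json = data.to_json # headers now carry only :app_key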
@@ -7,50 +7,85 @@ end
 
 module PubSubModelSync
   class ServiceGoogle < ServiceBase
-    LISTEN_SETTINGS = { threads: { callback: 1 }, message_ordering: true, streams: 1 }.freeze
-    TOPIC_SETTINGS = { async: { threads: { publish: 1, callback: 1 } } }.freeze
+    LISTEN_SETTINGS = { message_ordering: true }.freeze
+    PUBLISH_SETTINGS = {}.freeze
+    TOPIC_SETTINGS = {}.freeze
     SUBSCRIPTION_SETTINGS = { message_ordering: true }.freeze
-    attr_accessor :service, :topic, :subscription, :subscriber
+
+    # @!attribute topics (Hash): { key: Topic1, ... }
+    # @!attribute publish_topics (Hash): { key: Topic1, ... }
+    attr_accessor :service, :topics, :subscribers, :publish_topics
 
     def initialize
       @service = Google::Cloud::Pubsub.new(project: config.project,
                                            credentials: config.credentials)
-      @topic = service.topic(config.topic_name) ||
-               service.create_topic(config.topic_name, TOPIC_SETTINGS)
-      topic.enable_message_ordering!
+      Array(config.topic_name || 'model_sync').each(&method(:init_topic))
     end
 
     def listen_messages
-      @subscription = subscribe_to_topic
-      @subscriber = subscription.listen(LISTEN_SETTINGS, &method(:process_message))
       log('Listener starting...')
-      subscriber.start
+      @subscribers = subscribe_to_topics
       log('Listener started')
       sleep
-      subscriber.stop.wait!
+      subscribers.each { |subscriber| subscriber.stop.wait! }
       log('Listener stopped')
     end
 
+    # @param payload (PubSubModelSync::Payload)
     def publish(payload)
-      topic.publish_async(payload.to_json, message_headers) do |res|
-        raise 'Failed to publish the message.' unless res.succeeded?
+      message_topics = Array(payload.headers[:topic_name] || '').map(&method(:find_topic))
+      message_topics.each do |topic|
+        topic.publish_async(encode_payload(payload), message_headers(payload)) do |res|
+          raise 'Failed to publish the message.' unless res.succeeded?
+        end
       end
     end
 
     def stop
       log('Listener stopping...')
-      subscriber.stop!
+      subscribers.each(&:stop!)
     end
 
     private
 
-    def message_headers
-      { SERVICE_KEY => true, ordering_key: SERVICE_KEY }.merge(PUBLISH_SETTINGS)
+    def find_topic(topic_name)
+      topic_name = topic_name.to_s
+      return topics.values.first unless topic_name.present?
+
+      topics[topic_name] || publish_topics[topic_name] || init_topic(topic_name, only_publish: true)
     end
 
-    def subscribe_to_topic
-      topic.subscription(config.subscription_key) ||
-        topic.subscribe(config.subscription_key, SUBSCRIPTION_SETTINGS)
+    # @param only_publish (Boolean): if false is used to listen and publish messages
+    # @return (Topic): returns created or loaded topic
+    def init_topic(topic_name, only_publish: false)
+      topic_name = topic_name.to_s
+      @topics ||= {}
+      @publish_topics ||= {}
+      topic = service.topic(topic_name) || service.create_topic(topic_name, TOPIC_SETTINGS)
+      topic.enable_message_ordering!
+      publish_topics[topic_name] = topic if only_publish
+      topics[topic_name] = topic unless only_publish
+      topic
+    end
+
+    # @param payload (PubSubModelSync::Payload)
+    def message_headers(payload)
+      {
+        SERVICE_KEY => true,
+        ordering_key: payload.headers[:ordering_key]
+      }.merge(PUBLISH_SETTINGS)
+    end
+
+    # @return [Subscriber]
+    def subscribe_to_topics
+      topics.map do |key, topic|
+        subs_name = "#{config.subscription_key}_#{key}"
+        subscription = topic.subscription(subs_name) || topic.subscribe(subs_name, SUBSCRIPTION_SETTINGS)
+        subscriber = subscription.listen(LISTEN_SETTINGS, &method(:process_message))
+        subscriber.start
+        log("Subscribed to topic: #{topic.name} as: #{subs_name}")
+        subscriber
+      end
     end
 
     def process_message(received_message)
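
Note on the ServiceGoogle hunk above: the service now keeps a Hash of topics instead of a single one, creates a subscription per topic named "#{subscription_key}_#{topic key}", and publish routes each payload through find_topic using payload.headers[:topic_name], falling back to the first initialized topic when that header is blank. A hedged usage sketch, assuming Config exposes writers for the accessors read in the diff (project, credentials, topic and key names are placeholders):

  require 'pub_sub_model_sync'

  PubSubModelSync::Config.project = 'my-gcp-project'           # placeholder
  PubSubModelSync::Config.credentials = 'gcp-credentials.json' # placeholder
  PubSubModelSync::Config.subscription_key = 'my_app'
  PubSubModelSync::Config.topic_name = %w[users orders]        # multiple topics now supported

  service = PubSubModelSync::ServiceGoogle.new                 # init_topic runs once per name

  payload = PubSubModelSync::Payload.new(
    { name: 'Sample' },
    { klass: 'User', action: :create },
    { ordering_key: 'User/1', topic_name: 'orders' }           # routed to the 'orders' topic only
  )
  service.publish(payload) # a blank topic_name header would fall back to the first topic ('users')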
@@ -7,14 +7,20 @@ end
 
 module PubSubModelSync
   class ServiceKafka < ServiceBase
+    QTY_WORKERS = 10
+    LISTEN_SETTINGS = {}.freeze
+    PUBLISH_SETTINGS = {}.freeze
+    PRODUCER_SETTINGS = { delivery_threshold: 200, delivery_interval: 30 }.freeze
     cattr_accessor :producer
-    attr_accessor :config, :service, :consumer
+
+    # @!attribute topic_names (Array): ['topic 1', 'topic 2']
+    attr_accessor :service, :consumer, :topic_names
 
     def initialize
-      @config = PubSubModelSync::Config
       settings = config.kafka_connection
       settings[1][:client_id] ||= config.subscription_key
       @service = Kafka.new(*settings)
+      @topic_names = ensure_topics(Array(config.topic_name || 'model_sync'))
     end
 
     def listen_messages
@@ -28,12 +34,10 @@ module PubSubModelSync
     end
 
     def publish(payload)
-      settings = {
-        topic: config.topic_name,
-        headers: { SERVICE_KEY => true }
-      }.merge(PUBLISH_SETTINGS)
-      producer.produce(payload.to_json, settings)
-      producer.deliver_messages
+      message_topics = Array(payload.headers[:topic_name] || topic_names.first)
+      message_topics.each do |topic_name|
+        producer.produce(encode_payload(payload), message_settings(payload, topic_name))
+      end
     end
 
     def stop
@@ -43,22 +47,41 @@ module PubSubModelSync
 
     private
 
+    def message_settings(payload, topic_name)
+      {
+        topic: ensure_topics(topic_name),
+        partition_key: payload.headers[:ordering_key],
+        headers: { SERVICE_KEY => true }
+      }.merge(PUBLISH_SETTINGS)
+    end
+
     def start_consumer
       @consumer = service.consumer(group_id: config.subscription_key)
-      consumer.subscribe(config.topic_name)
+      topic_names.each { |topic_name| consumer.subscribe(topic_name) }
    end
 
     def producer
       return self.class.producer if self.class.producer
 
       at_exit { self.class.producer.shutdown }
-      self.class.producer = service.producer
+      self.class.producer = service.async_producer(PRODUCER_SETTINGS)
     end
 
     def process_message(message)
-      return unless message.headers[SERVICE_KEY]
+      super(message.value) if message.headers[SERVICE_KEY]
+    end
 
-      super(message.value)
+    # Check topic existence, create if missing topic
+    # @param names (Array<String>|String)
+    # @return (Array|String) return @param names
+    def ensure_topics(names)
+      missing_topics = Array(names) - (@known_topics || service.topics)
+      missing_topics.each do |name|
+        service.create_topic(name)
+      end
+      @known_topics ||= [] # cache service.topics to reduce verification time
+      @known_topics = (@known_topics + Array(names)).uniq
+      names
     end
   end
 end
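
Note on the ServiceKafka hunks above: the producer becomes a ruby-kafka async producer that flushes every 200 messages or 30 seconds (PRODUCER_SETTINGS), topics are auto-created by ensure_topics, and each message carries the payload's ordering_key as its partition_key. A hedged sketch of the equivalent raw ruby-kafka calls (broker address and names are hypothetical):

  require 'kafka'

  kafka = Kafka.new(['localhost:9092'], client_id: 'my_app')

  # ensure_topics: create any topic that does not exist yet, then remember the known names.
  kafka.create_topic('model_sync') unless kafka.topics.include?('model_sync')

  # async_producer batches deliveries; the old code paired produce with deliver_messages.
  producer = kafka.async_producer(delivery_threshold: 200, delivery_interval: 30)
  producer.produce(
    '{"data":{}}',                              # stand-in for encode_payload output
    topic: 'model_sync',
    partition_key: 'User/1',                    # payload.headers[:ordering_key]
    headers: { 'service_model_sync' => true }   # SERVICE_KEY flag checked by process_message
  )
  producer.shutdown                             # mirrors the at_exit hook in the diff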
@@ -7,18 +7,26 @@ end
 
 module PubSubModelSync
   class ServiceRabbit < ServiceBase
-    attr_accessor :config, :service, :channel, :queue, :topic
+    QUEUE_SETTINGS = { durable: true, auto_delete: false }.freeze
+    LISTEN_SETTINGS = { manual_ack: true }.freeze
+    PUBLISH_SETTINGS = {}.freeze
+
+    # @!attribute topic_names (Array): ['Topic 1', 'Topic 2']
+    # @!attribute channels (Array): [Channel1]
+    # @!attribute exchanges (Hash<key: Exchange>): {topic_name: Exchange1}
+    attr_accessor :service, :topic_names, :channels, :exchanges
 
     def initialize
-      @config = PubSubModelSync::Config
       @service = Bunny.new(*config.bunny_connection)
+      @topic_names = Array(config.topic_name || 'model_sync')
+      @channels = []
+      @exchanges = {}
     end
 
     def listen_messages
       log('Listener starting...')
-      subscribe_to_queue
+      subscribe_to_queues { |queue| queue.subscribe(LISTEN_SETTINGS, &method(:process_message)) }
       log('Listener started')
-      queue.subscribe(subscribe_settings, &method(:process_message))
       loop { sleep 5 }
     rescue PubSubModelSync::Runner::ShutDown
       log('Listener stopped')
@@ -40,54 +48,53 @@ module PubSubModelSync
 
     def stop
       log('Listener stopping...')
-      channel&.close
+      channels.each(&:close)
       service.close
     end
 
     private
 
-    def message_settings
+    def message_settings(payload)
       {
-        routing_key: queue.name,
+        routing_key: payload.headers[:ordering_key],
         type: SERVICE_KEY,
         persistent: true
       }.merge(PUBLISH_SETTINGS)
     end
 
-    def queue_settings
-      { durable: true, auto_delete: false }
-    end
-
-    def subscribe_settings
-      { manual_ack: false }.merge(LISTEN_SETTINGS)
-    end
-
     def process_message(_delivery_info, meta_info, payload)
-      return unless meta_info[:type] == SERVICE_KEY
-
-      super(payload)
+      super(payload) if meta_info[:type] == SERVICE_KEY
     end
 
-    def subscribe_to_queue
-      service.start
-      @channel = service.create_channel
-      @queue = channel.queue(config.subscription_key, queue_settings)
-      subscribe_to_exchange
+    def subscribe_to_queues(&block)
+      @channels = []
+      topic_names.each do |topic_name|
+        subscribe_to_exchange(topic_name) do |channel, exchange|
+          queue = channel.queue(config.subscription_key, QUEUE_SETTINGS)
+          queue.bind(exchange)
+          @channels << channel
+          block.call(queue)
+        end
+      end
     end
 
-    def subscribe_to_exchange
-      @topic = channel.fanout(config.topic_name)
-      queue.bind(topic, routing_key: queue.name)
+    def subscribe_to_exchange(topic_name, &block)
+      topic_name = topic_name.to_s
+      exchanges[topic_name] ||= begin
+        service.start
+        channel = service.create_channel
+        channel.fanout(topic_name)
+      end
+      block.call(channel, exchanges[topic_name])
     end
 
     def deliver_data(payload)
-      subscribe_to_queue
-      topic.publish(payload.to_json, message_settings)
-
-      # Ugly fix: "IO timeout when reading 7 bytes"
-      # https://stackoverflow.com/questions/39039129/rabbitmq-timeouterror-io-timeout-when-reading-7-bytes
-      channel.close
-      service.close
+      message_topics = Array(payload.headers[:topic_name] || topic_names.first)
+      message_topics.each do |topic_name|
+        subscribe_to_exchange(topic_name) do |_channel, exchange|
+          exchange.publish(encode_payload(payload), message_settings(payload))
+        end
+      end
     end
   end
 end
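
Note on the ServiceRabbit hunks above: one fanout exchange is created per topic name and cached in exchanges, a durable queue named after subscription_key is bound to each, consumption switches to manual_ack, and deliver_data reuses the cached exchange instead of reopening and closing the connection per message. A hedged Bunny sketch of the same wiring (connection URL and names are hypothetical; the explicit ack is generic Bunny usage, not taken from the diff):

  require 'bunny'

  connection = Bunny.new('amqp://guest:guest@localhost').tap(&:start)
  channel = connection.create_channel
  exchange = channel.fanout('model_sync')                            # one exchange per topic name

  queue = channel.queue('my_app', durable: true, auto_delete: false) # QUEUE_SETTINGS
  queue.bind(exchange)
  queue.subscribe(manual_ack: true) do |delivery_info, properties, body| # LISTEN_SETTINGS
    puts body if properties.type == 'service_model_sync'                 # SERVICE_KEY check
    channel.ack(delivery_info.delivery_tag)
  end

  exchange.publish('{"data":{}}', routing_key: 'User/1', type: 'service_model_sync', persistent: true)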
@@ -5,24 +5,25 @@ module PubSubModelSync
     attr_accessor :klass, :action, :attrs, :settings, :identifiers
     attr_reader :payload
 
-    # @param settings: (Hash) { id: :id, direct_mode: false,
+    # @param settings: (Hash) { id: :id, mode: :model|:klass|:custom_model,
     #   from_klass: klass, from_action: action }
     def initialize(klass, action, attrs: nil, settings: {})
-      def_settings = { id: :id, direct_mode: false,
-                       from_klass: klass, from_action: action }
+      @settings = { id: settings[:id] || :id,
+                    mode: settings[:mode] || :klass,
+                    from_klass: settings[:from_klass] || klass,
+                    from_action: settings[:from_action] || action }
       @klass = klass
       @action = action
       @attrs = attrs
-      @settings = def_settings.merge(settings)
-      @identifiers = Array(settings[:id]).map(&:to_sym)
+      @identifiers = Array(@settings[:id]).map(&:to_sym)
     end
 
     def process!(payload)
       @payload = payload
-      if settings[:direct_mode]
-        run_class_message
-      else
-        run_model_message
+      case settings[:mode]
+      when :klass then run_class_message
+      when :custom_model then run_model_message(crud_action: false)
+      else run_model_message
       end
     end
 
@@ -34,20 +35,26 @@ module PubSubModelSync
     end
 
     # support for: create, update, destroy
-    def run_model_message
+    def run_model_message(crud_action: true)
       model = find_model
-      return if model.ps_before_save_sync(payload) == :cancel
+      model.ps_processed_payload = payload
+      return model.send(action, payload.data) if ensure_sync(model) && !crud_action
 
       if action == :destroy
-        model.destroy!
+        model.destroy! if ensure_sync(model)
       else
         populate_model(model)
-        return if action == :update && !model.ps_subscriber_changed?(payload.data)
-
-        model.save!
+        model.save! if ensure_sync(model)
       end
     end
 
+    def ensure_sync(model)
+      config = PubSubModelSync::Config
+      cancelled = model.ps_before_save_sync(action, payload) == :cancel
+      config.log("Cancelled sync with ps_before_save_sync: #{[payload]}") if cancelled && config.debug
+      !cancelled
+    end
+
     def find_model
       model_class = klass.constantize
       return model_class.ps_find_model(payload.data) if model_class.respond_to?(:ps_find_model)
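
Note on the subscriber hunks above (assuming they belong to PubSubModelSync::Subscriber, whose class line sits outside the hunk context): the boolean direct_mode setting is replaced by a three-way mode that defaults to :klass, and ps_before_save_sync now receives the action along with the payload. A hedged example of building a subscriber per mode (class, action and attribute names are hypothetical):

  # mode: :klass        -> run_class_message: invokes a class-level method on User
  PubSubModelSync::Subscriber.new('User', :greeting, settings: { mode: :klass })

  # any other mode (e.g. :model) -> run_model_message: find the record, populate it, save!/destroy!
  PubSubModelSync::Subscriber.new('User', :create, attrs: %i[name email],
                                  settings: { mode: :model, id: :email })

  # mode: :custom_model -> run_model_message(crud_action: false): find the record and
  # call the action as an instance method with payload.data
  PubSubModelSync::Subscriber.new('User', :deactivate, settings: { mode: :custom_model })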