pub_sub_model_sync 0.5.1 → 0.5.4.1

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA256:
- metadata.gz: 83c10bee147f91d55fa2fd1a8fc81f5864c2bb6af7476caa0de4044f9e6ad3e9
- data.tar.gz: 53e28b6855a0139df908a79e532f7138cba78e153d6e51e4a8f739063df290e7
+ metadata.gz: a06777a1f70a6efc94e7024b5b5c1f38d04f7eac8c9721a82fe84ffa042d15bb
+ data.tar.gz: f558c5116ec027c2b387dd04a16d03657d64f5fbefa244e9f075114cbd34ef21
  SHA512:
- metadata.gz: c269ccd1f3587605ff43342c3948cd03b171f0fc6b124815b98c5e4e2f1ab30d574f90f2c07087185042d20c2c5c1b3f7052b11b1679c002c6e24f7065125893
- data.tar.gz: b1702e276e58ab5d796856c5a14c975fe70959f2603a27afaad2110811c1053ebc88c0b7fd7f7466910f63e578efdab26fd943d725aa3b21412f3372e4b35ab5
+ metadata.gz: 41815c6d3390abcb71c3c4855e74e95ef7ead047e0e9315925eb8c1a56572f3458304d19bcdd38ceb51e203071f26a18071184fcbb766c47e5f617b2e0f530f4
+ data.tar.gz: 06f17b5f3ecfde95baf7c6465d371798a3257f2bbe86b288af4ab44250c8c8ff62faaa2597d4ddb520badf469a9558b2430e581d5d7c33bdfdd2d9e2ad83ae0a
@@ -1,5 +1,26 @@
  # Change Log

+ # 0.5.4.1 (January 8, 2021)
+ - fix: make message processing sequential (Google Pub/Sub)
+
+ # 0.5.4 (January 8, 2021)
+ - fix: exclude identifiers when syncing model
+ - feat: callbacks support future extra params
+ - feat: make connectors configurable
+ - feat: add :process!, :process, :publish! and :publish methods to payload
+ - feat: auto retry 2 times when the "could not obtain a database connection within 5.000 seconds..." error occurs
+
+ # 0.5.3 (December 30, 2020)
+ - fix: kafka consumes all messages from different apps
+ - style: use the correct consumer key
+
+ # 0.5.2 (December 30, 2020)
+ - fix: rabbitmq delivers messages to all subscribers
+ - fix: rabbitmq persists messages to recover them after restarting
+
+ # 0.5.1.1 (December 29, 2020)
+ - Hotfix: auto convert class name into string
+
  # 0.5.1 (December 24, 2020)
  - feat: rename publisher callbacks to be more understandable
  - feat: add callbacks to listen when processing a message (before saving sync)
@@ -1,7 +1,7 @@
  PATH
  remote: .
  specs:
- pub_sub_model_sync (0.5.1)
+ pub_sub_model_sync (0.5.4.1)
  rails

  GEM
data/README.md CHANGED
@@ -28,10 +28,9 @@ And then execute: $ bundle install
  ```ruby
  # initializers/pub_sub_config.rb
  PubSubModelSync::Config.service_name = :google
- PubSubModelSync::Config.project = 'project-id'
+ PubSubModelSync::Config.project = 'google-project-id'
  PubSubModelSync::Config.credentials = 'path-to-the-config'
  PubSubModelSync::Config.topic_name = 'sample-topic'
- PubSubModelSync::Config.subscription_name = 'p1-subscriber'
  ```
  See details here:
  https://github.com/googleapis/google-cloud-ruby/tree/master/google-cloud-pubsub
@@ -40,7 +39,7 @@ And then execute: $ bundle install
  ```ruby
  PubSubModelSync::Config.service_name = :rabbitmq
  PubSubModelSync::Config.bunny_connection = 'amqp://guest:guest@localhost'
- PubSubModelSync::Config.queue_name = ''
+ PubSubModelSync::Config.queue_name = 'model-sync'
  PubSubModelSync::Config.topic_name = 'sample-topic'
  ```
  See details here: https://github.com/ruby-amqp/bunny
@@ -48,7 +47,7 @@ And then execute: $ bundle install
  - configuration for Apache Kafka (You need kafka installed)
  ```ruby
  PubSubModelSync::Config.service_name = :kafka
- PubSubModelSync::Config.kafka_connection = [["kafka1:9092", "localhost:2121"], logger: Rails.logger]
+ PubSubModelSync::Config.kafka_connection = [["kafka1:9092", "localhost:2121"], { logger: Rails.logger }]
  PubSubModelSync::Config.topic_name = 'sample-topic'
  ```
  See details here: https://github.com/zendesk/ruby-kafka
@@ -207,10 +206,13 @@ Note: Be careful with collision of names
  * action_name: (required, :sym) Action name
  * as_klass: (optional, :string) Custom class name (Default current model name)

- - Publish a class level notification (Same as above: manual call)
+ - Payload actions
  ```ruby
  payload = PubSubModelSync::Payload.new({ title: 'hello' }, { action: :greeting, klass: 'User' })
- payload.publish!
+ payload.publish! # publishes the notification data. Raises an exception on failure and does not call the :on_error_publish callback
+ payload.publish # publishes the notification data. On failure it does not raise an exception but calls the :on_error_publish callback
+ payload.process! # processes the notification data. Raises an exception on failure and does not call the :on_error_processing callback
+ payload.process # processes the notification data. On failure it does not raise an exception but calls the :on_error_processing callback
  ```

  - Get crud publisher configured for the class
@@ -283,24 +285,26 @@ config = PubSubModelSync::Config
  config.debug = true
  ```

- - ```debug = true```
+ - ```.subscription_name = 'app-2'```
+ Permits defining a custom consumer identifier (Default: Rails application name)
+ - ```.debug = true```
  (true/false*) => show advanced log messages
- - ```logger = Rails.logger```
+ - ```.logger = Rails.logger```
  (Logger) => define custom logger
- - ```disabled_callback_publisher = ->(_model, _action) { false }```
+ - ```.disabled_callback_publisher = ->(_model, _action) { false }```
  (true/false*) => if true, does not listen model callbacks for auto sync (Create/Update/Destroy)
- - ```on_before_processing = ->(payload, subscriber) { puts payload }```
+ - ```.on_before_processing = ->(payload, {subscriber:}) { puts payload }```
  (Proc) => called before processing received message (:cancel can be returned to skip processing)
- - ```on_success_processing = ->(payload, subscriber) { puts payload }```
+ - ```.on_success_processing = ->(payload, {subscriber:}) { puts payload }```
  (Proc) => called when a message was successfully processed
- - ```on_error_processing = ->(exception, payload) { sleep 1; payload.process! }```
- (Proc) => called when a message failed when processing
- - ```on_before_publish = ->(payload) { puts payload }```
+ - ```.on_error_processing = ->(exception, {payload:, subscriber:}) { payload.delay(...).process! }```
+ (Proc) => called when processing a message failed (delayed_job or similar can be used for retrying)
+ - ```.on_before_publish = ->(payload) { puts payload }```
  (Proc) => called before publishing a message (:cancel can be returned to skip publishing)
- - ```on_after_publish = ->(payload) { puts payload }```
+ - ```.on_after_publish = ->(payload) { puts payload }```
  (Proc) => called after publishing a message
- - ```on_error_publish = ->(exception, payload) { sleep 1; payload.publish! }```
- (Proc) => called when failed publishing a message
+ - ```.on_error_publish = ->(exception, {payload:}) { payload.delay(...).publish! }```
+ (Proc) => called when publishing a message failed (delayed_job or similar can be used for retrying)

  ## TODO
  - Add alias attributes when subscribing (similar to publisher)
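Editor's note: taken together, a minimal initializer wiring the new hash-based callbacks might look like the sketch below. The option names come from the list above and from config.rb further down in this diff; the handler bodies are illustrative only, not part of the gem.

```ruby
# initializers/pub_sub_config.rb (illustrative sketch)
config = PubSubModelSync::Config
config.service_name = :google
config.topic_name   = 'sample-topic'
config.debug        = true

# Processing callbacks now receive a plain hash as their second argument
# (e.g. { subscriber: ... } or { payload: ..., subscriber: ... }).
config.on_before_processing = ->(payload, info) { Rails.logger.info("processing #{payload} via #{info[:subscriber].klass}") }
config.on_error_processing  = ->(exception, info) { Rails.logger.error("failed processing #{info[:payload]}: #{exception.message}") }

# Publishing callbacks: the error callback receives { payload: ... }.
config.on_error_publish = ->(exception, info) { Rails.logger.error("failed publishing #{info[:payload]}: #{exception.message}") }
```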
@@ -308,6 +312,14 @@ config.debug = true
  - Auto publish update only if payload has changed
  - On delete, payload must only be composed by ids

+ ## Q&A
+ - Error "could not obtain a connection from the pool within 5.000 seconds"
+ This occurs because the pub/sub dependencies (kafka, google-pubsub, rabbitmq) use multiple threads to deliver notifications, and the number of threads can exceed the number of DB pool connections ([Google pubsub info](https://github.com/googleapis/google-cloud-ruby/blob/master/google-cloud-pubsub/lib/google/cloud/pubsub/subscription.rb#L888))
+ To fix the problem, edit config/database.yml and increase the pool size, e.g. ```pool: 10```
+ - Google pubsub: How to process notifications in parallel instead of sequentially (default: 1 thread)?
+ ```ruby PubSubModelSync::ServiceGoogle::LISTEN_SETTINGS = { threads: { callback: qty_threads } } ```
+ Note: this way some notifications can be processed before others, so missing-relationship errors can appear
+

  ## Contributing

  Bug reports and pull requests are welcome on GitHub at https://github.com/owen2345/pub_sub_model_sync. This project is intended to be a safe, welcoming space for collaboration, and contributors are expected to adhere to the [Contributor Covenant](http://contributor-covenant.org) code of conduct.
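Editor's note: a concrete sketch of the Q&A advice above, written as an initializer. The thread count of 5 is an arbitrary example value; `remove_const` is used only to avoid the "already initialized constant" warning that a plain reassignment (as shown in the Q&A) would print.

```ruby
# initializers/pub_sub_config.rb (sketch, assumed values)
# If you raise the callback thread count, also raise `pool:` in
# config/database.yml to at least the same number, otherwise the
# ConnectionTimeoutError from the Q&A above can reappear.
PubSubModelSync::ServiceGoogle.send(:remove_const, :LISTEN_SETTINGS)
PubSubModelSync::ServiceGoogle::LISTEN_SETTINGS = { threads: { callback: 5 } }.freeze
```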
@@ -13,5 +13,12 @@ module PubSubModelSync
  config.log message, kind
  end
  end
+
+ def retry_error(error_klass, qty: 2, &block)
+ @retries ||= 0
+ block.call
+ rescue error_klass => _e
+ (@retries += 1) <= qty ? retry : raise
+ end
  end
  end
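Editor's note: the new `retry_error` helper is what the message processor below uses to retry on `ActiveRecord::ConnectionTimeoutError`. A minimal standalone sketch of its behaviour, assuming the helper is available to subclasses of `PubSubModelSync::Base` as that usage suggests (`RetryDemo` and `TemporaryFailure` are hypothetical):

```ruby
# Sketch: retry_error re-runs the block up to `qty` times when the given
# error class is raised, then re-raises it.
class RetryDemo < PubSubModelSync::Base
  TemporaryFailure = Class.new(StandardError)

  def call
    attempts = 0
    retry_error(TemporaryFailure, qty: 2) do
      attempts += 1
      raise TemporaryFailure if attempts < 3 # fails twice, succeeds on the third run
      "done after #{attempts} attempts"
    end
  end
end

RetryDemo.new.call # => "done after 3 attempts"
```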
@@ -10,12 +10,12 @@ module PubSubModelSync
  cattr_accessor(:debug) { false }
  cattr_accessor :logger # LoggerInst

- cattr_accessor(:on_before_processing) { ->(_payload, _subscriber) {} } # return :cancel to skip
- cattr_accessor(:on_success_processing) { ->(_payload, _subscriber) {} }
- cattr_accessor(:on_error_processing) { ->(_exception, _payload) {} }
+ cattr_accessor(:on_before_processing) { ->(_payload, _info) {} } # return :cancel to skip
+ cattr_accessor(:on_success_processing) { ->(_payload, _info) {} }
+ cattr_accessor(:on_error_processing) { ->(_exception, _info) {} }
  cattr_accessor(:on_before_publish) { ->(_payload) {} } # return :cancel to skip
  cattr_accessor(:on_after_publish) { ->(_payload) {} }
- cattr_accessor(:on_error_publish) { ->(_exception, _payload) {} }
+ cattr_accessor(:on_error_publish) { ->(_exception, _info) {} }
  cattr_accessor(:disabled_callback_publisher) { ->(_model, _action) { false } }

  # google service
@@ -2,7 +2,7 @@

  module PubSubModelSync
  class MessageProcessor < PubSubModelSync::Base
- attr_accessor :payload
+ attr_accessor :payload, :raise_error

  # @param payload (Payload): payload to be delivered
  # @Deprecated: def initialize(data, klass, action)
@@ -24,23 +24,25 @@ module PubSubModelSync
  def run_subscriber(subscriber)
  return unless processable?(subscriber)

- subscriber.process!(payload)
- config.on_success_processing.call(payload, subscriber)
- log "processed message with: #{payload}"
+ retry_error(ActiveRecord::ConnectionTimeoutError, qty: 2) do
+ subscriber.process!(payload)
+ res = config.on_success_processing.call(payload, { subscriber: subscriber })
+ log "processed message with: #{payload.inspect}" if res != :skip_log
+ end
  rescue => e
- print_subscriber_error(e)
+ raise_error ? raise : print_subscriber_error(e, subscriber)
  end

  def processable?(subscriber)
- cancel = config.on_before_processing.call(payload, subscriber) == :cancel
+ cancel = config.on_before_processing.call(payload, { subscriber: subscriber }) == :cancel
  log("process message cancelled: #{payload}") if cancel && config.debug
  !cancel
  end

  # @param error (Error)
- def print_subscriber_error(error)
+ def print_subscriber_error(error, subscriber)
  info = [payload, error.message, error.backtrace]
- res = config.on_error_processing.call(error, payload)
+ res = config.on_error_processing.call(error, { payload: payload, subscriber: subscriber })
  log("Error processing message: #{info}", :error) if res != :skip_log
  end

@@ -8,7 +8,7 @@ module PubSubModelSync
  end

  def publish_data(klass, data, action)
- payload = PubSubModelSync::Payload.new(data, { klass: klass, action: action.to_sym })
+ payload = PubSubModelSync::Payload.new(data, { klass: klass.to_s, action: action.to_sym })
  publish(payload)
  end

@@ -19,8 +19,7 @@ module PubSubModelSync
  return if model.ps_skip_sync?(action)

  publisher ||= model.class.ps_publisher(action)
- payload_info = publisher.payload(model, action)
- payload = PubSubModelSync::Payload.new(payload_info[:data], payload_info[:attrs])
+ payload = publisher.payload(model, action)
  res_before = model.ps_before_sync(action, payload.data)
  return if res_before == :cancel

@@ -28,7 +27,7 @@ module PubSubModelSync
  model.ps_after_sync(action, payload.data)
  end

- def publish(payload)
+ def publish(payload, raise_error: false)
  if config.on_before_publish.call(payload) == :cancel
  log("Publish message cancelled: #{payload}") if config.debug
  return
@@ -38,14 +37,14 @@ module PubSubModelSync
  connector.publish(payload)
  config.on_after_publish.call(payload)
  rescue => e
- notify_error(e, payload)
+ raise_error ? raise : notify_error(e, payload)
  end

  private

  def notify_error(exception, payload)
  info = [payload, exception.message, exception.backtrace]
- res = config.on_error_publish.call(exception, payload)
+ res = config.on_error_publish.call(exception, { payload: payload })
  log("Error publishing: #{info}", :error) if res != :skip_log
  end
  end
@@ -26,11 +26,23 @@ module PubSubModelSync
  end

  def process!
+ process do |publisher|
+ publisher.raise_error = true
+ end
+ end
+
+ def process
  publisher = PubSubModelSync::MessageProcessor.new(self)
+ yield(publisher) if block_given?
  publisher.process
  end

  def publish!
+ klass = PubSubModelSync::MessagePublisher
+ klass.publish(self, raise_error: true)
+ end
+
+ def publish
  klass = PubSubModelSync::MessagePublisher
  klass.publish(self)
  end
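Editor's note: the bang/non-bang pairs added above mirror the "Payload actions" README section earlier in this diff; a small usage sketch (the payload contents are arbitrary):

```ruby
payload = PubSubModelSync::Payload.new({ title: 'hello' }, { action: :greeting, klass: 'User' })

payload.publish   # swallows errors and reports them via config.on_error_publish
payload.publish!  # raises on failure (calls MessagePublisher.publish with raise_error: true)

payload.process   # swallows errors and reports them via config.on_error_processing
payload.process!  # raises on failure (sets raise_error = true on the MessageProcessor)
```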
@@ -12,7 +12,7 @@ module PubSubModelSync
  end

  def payload(model, action)
- { data: payload_data(model), attrs: payload_attrs(model, action) }
+ PubSubModelSync::Payload.new(payload_data(model), payload_attrs(model, action))
  end

  private
@@ -4,6 +4,8 @@ require 'pub_sub_model_sync/payload'
  module PubSubModelSync
  class ServiceBase < PubSubModelSync::Base
  SERVICE_KEY = 'service_model_sync'
+ PUBLISH_SETTINGS = {}.freeze
+ LISTEN_SETTINGS = {}.freeze

  def listen_messages
  raise 'method :listen_messages must be defined in service'
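Editor's note: these new constants are empty defaults that each connector below merges into its client calls (see the `.merge(PUBLISH_SETTINGS)` / `LISTEN_SETTINGS` changes further down), so they act as per-connector extension points. A hedged sketch for Kafka; defining the constant this way is an assumption based on the diff, not gem-documented API, and `partition_key` is just one example of a ruby-kafka `produce` option:

```ruby
# Sketch: extra options merged into every ServiceKafka producer.produce call,
# because ServiceKafka#publish merges PUBLISH_SETTINGS into its settings hash.
PubSubModelSync::ServiceKafka::PUBLISH_SETTINGS = { partition_key: 'model-sync' }.freeze
```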
@@ -7,6 +7,7 @@ end

  module PubSubModelSync
  class ServiceGoogle < ServiceBase
+ LISTEN_SETTINGS = { threads: { callback: 1 } }.freeze
  attr_accessor :service, :topic, :subscription, :subscriber

  def initialize
@@ -18,7 +19,7 @@ module PubSubModelSync

  def listen_messages
  @subscription = subscribe_to_topic
- @subscriber = subscription.listen(&method(:process_message))
+ @subscriber = subscription.listen(LISTEN_SETTINGS, &method(:process_message))
  log('Listener starting...')
  subscriber.start
  log('Listener started')
@@ -28,7 +29,7 @@ module PubSubModelSync
  end

  def publish(payload)
- topic.publish(payload.to_json, { SERVICE_KEY => true })
+ topic.publish(payload.to_json, { SERVICE_KEY => true }.merge(PUBLISH_SETTINGS))
  end

  def stop
@@ -39,8 +40,8 @@ module PubSubModelSync
  private

  def subscribe_to_topic
- topic.subscription(config.subscription_name) ||
- topic.subscribe(config.subscription_name)
+ topic.subscription(config.subscription_key) ||
+ topic.subscribe(config.subscription_key)
  end

  def process_message(received_message)
@@ -10,17 +10,17 @@ module PubSubModelSync
  cattr_accessor :producer
  attr_accessor :config, :service, :consumer

- CONSUMER_GROUP = 'service_model_sync'
-
  def initialize
  @config = PubSubModelSync::Config
- @service = Kafka.new(*config.kafka_connection)
+ settings = config.kafka_connection
+ settings[1][:client_id] ||= config.subscription_key
+ @service = Kafka.new(*settings)
  end

  def listen_messages
  log('Listener starting...')
  start_consumer
- consumer.each_message(&method(:process_message))
+ consumer.each_message(LISTEN_SETTINGS, &method(:process_message))
  rescue PubSubModelSync::Runner::ShutDown
  log('Listener stopped')
  rescue => e
@@ -28,7 +28,11 @@ module PubSubModelSync
  end

  def publish(payload)
- producer.produce(payload.to_json, message_settings)
+ settings = {
+ topic: config.topic_name,
+ headers: { SERVICE_KEY => true }
+ }.merge(PUBLISH_SETTINGS)
+ producer.produce(payload.to_json, settings)
  producer.deliver_messages
  end

@@ -39,12 +43,8 @@ module PubSubModelSync

  private

- def message_settings
- { topic: config.topic_name, headers: { SERVICE_KEY => true } }
- end
-
  def start_consumer
- @consumer = service.consumer(group_id: CONSUMER_GROUP)
+ @consumer = service.consumer(group_id: config.subscription_key)
  consumer.subscribe(config.topic_name)
  end

@@ -40,6 +40,7 @@ module PubSubModelSync

  def stop
  log('Listener stopping...')
+ channel&.close
  service.close
  end

@@ -48,12 +49,17 @@ module PubSubModelSync
  def message_settings
  {
  routing_key: queue.name,
- type: SERVICE_KEY
- }
+ type: SERVICE_KEY,
+ persistent: true
+ }.merge(PUBLISH_SETTINGS)
+ end
+
+ def queue_settings
+ { durable: true, auto_delete: false }
  end

  def subscribe_settings
- { manual_ack: false }
+ { manual_ack: false }.merge(LISTEN_SETTINGS)
  end

  def process_message(_delivery_info, meta_info, payload)
@@ -65,8 +71,7 @@ module PubSubModelSync
  def subscribe_to_queue
  service.start
  @channel = service.create_channel
- queue_settings = { durable: true, auto_delete: false }
- @queue = channel.queue(config.queue_name, queue_settings)
+ @queue = channel.queue(config.subscription_key, queue_settings)
  subscribe_to_exchange
  end

@@ -2,7 +2,7 @@

  module PubSubModelSync
  class Subscriber
- attr_accessor :klass, :action, :attrs, :settings
+ attr_accessor :klass, :action, :attrs, :settings, :identifiers
  attr_reader :payload

  # @param settings: (Hash) { id: :id, direct_mode: false,
@@ -14,6 +14,7 @@ module PubSubModelSync
  @action = action
  @attrs = attrs
  @settings = def_settings.merge(settings)
+ @identifiers = Array(settings[:id]).map(&:to_sym)
  end

  def process!(payload)
@@ -55,12 +56,11 @@ module PubSubModelSync
  end

  def model_identifiers
- identifiers = Array(settings[:id])
  identifiers.map { |key| [key, payload.data[key.to_sym]] }.to_h
  end

  def populate_model(model)
- values = payload.data.slice(*attrs)
+ values = payload.data.slice(*attrs).except(*identifiers)
  values.each do |attr, value|
  model.send("#{attr}=", value)
  end
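Editor's note: the net effect of the `identifiers` change above is that the keys used to look a record up are no longer overwritten when populating it. A plain-Ruby illustration of the hash logic, detached from the Subscriber class (requires Ruby 3.0+ or ActiveSupport for `Hash#except`; the data values are arbitrary):

```ruby
# settings[:id] can be a single key or an array; those keys locate the model
# and are now excluded when copying attributes onto it.
identifiers = Array(:email).map(&:to_sym)                  # => [:email]
data        = { email: 'a@b.com', name: 'Alice', age: 30 }
attrs       = %i[email name]

model_identifiers = identifiers.map { |key| [key, data[key]] }.to_h
# => { email: "a@b.com" }  (used to find or initialize the record)

values = data.slice(*attrs).except(*identifiers)
# => { name: "Alice" }     (assigned to the model; :email is left untouched)
```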
@@ -1,5 +1,5 @@
  # frozen_string_literal: true

  module PubSubModelSync
- VERSION = '0.5.1'
+ VERSION = '0.5.4.1'
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: pub_sub_model_sync
  version: !ruby/object:Gem::Version
- version: 0.5.1
+ version: 0.5.4.1
  platform: ruby
  authors:
  - Owen
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2020-12-24 00:00:00.000000000 Z
+ date: 2021-01-08 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: rails