pub_sub_model_sync 1.7.2 → 1.9.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz: 982f8035fef9e312858a0283b2d1be71280a279ed6681b5ed2487a004b313363
-  data.tar.gz: 8a227b2aa781f1a7613e44f7e907bc3f01316d46246b7cc6bb60283ed38764a7
+  metadata.gz: 450a4434b9a66faafa91e958b2c6aa42b4d9d1732cc591609e81f4d9f3d581e6
+  data.tar.gz: d9577d4757bd712fd8e8e8b8289dbce83ab57b53b92fb0d1f39f903fcca53a04
 SHA512:
-  metadata.gz: fc7648e4cd28c450fb2c8725b345a80e969b97417463b825fbe8b5663b4b62a5ae46c2849fbcca03573c54ba293ea5eed8e7ccd82264828031a7409efd07092e
-  data.tar.gz: c9ce313096c204ab367bbb8b5860c26d5170a9abe21ea25ed25caab1bb5df6a42f0969112bc3e8bd382c47d1f5edf6831beb28793301f05a2c3423585b9b5b60
+  metadata.gz: 39c90ccb110f229296fcfca67184b7b37bfe1a7c533fbbfe6b30c58330e4b46041e7775b5c7a3938c59412368dbce8cffe4684fc4ff2c15a46b56319026072cb
+  data.tar.gz: ae4ec687e8b8e1e19e89bf223b5abb12bcac57481ab2e1b5cfef2121433925f79f7c2a557709a51e8ddea8a52866db90d68f7c3cc1524c9548ddace85d1d62d3
data/Gemfile.lock CHANGED
@@ -1,7 +1,7 @@
 PATH
   remote: .
   specs:
-    pub_sub_model_sync (1.7.1)
+    pub_sub_model_sync (1.8.0)
       rails
 
 GEM
data/README.md CHANGED
@@ -83,7 +83,9 @@ And then execute: $ bundle install
   PubSubModelSync::Config.topic_name = 'sample-topic'
   PubSubModelSync::Config.subscription_name = 'my-app3'
   ```
-  See details here: https://github.com/zendesk/ruby-kafka
+  See details here: https://github.com/zendesk/ruby-kafka
+
+  *Important: The `topic_name` must be the same for all applications so that the apps connect to the same topic.*
 
 - Add publishers/subscribers to your models (See examples below)
 
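For illustration, a minimal sketch of two apps sharing one topic (the app names `my-app1`/`my-app2` are hypothetical; only `subscription_name` differs per app):

```ruby
# config/initializers/pub_sub_model_sync.rb in the first app
PubSubModelSync::Config.topic_name = 'sample-topic'   # identical in every app
PubSubModelSync::Config.subscription_name = 'my-app1' # unique per app

# config/initializers/pub_sub_model_sync.rb in the second app
PubSubModelSync::Config.topic_name = 'sample-topic'
PubSubModelSync::Config.subscription_name = 'my-app2'
```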
@@ -382,8 +384,7 @@ Note: To reduce Payload size, some header info are not delivered (Enable debug m
 
 - Manual transactions
   `PubSubModelSync::MessagePublisher::transaction(key, headers: { target_app_key: 'my_other_app_key' } &block)`
-  - `key` (String|nil) Key used as the ordering_key for all inner notifications (When nil, will use `ordering_key` of the first notification)
-  - `max_buffer:` (Integer, default: `PubSubModelSync::Config.transactions_max_buffer`) Transaction buffer size (DEPRECATED).
+  - `key` (String|nil) Key used as the ordering_key for all inner notifications (When nil, will use `ordering_key` of the first notification)
   - `headers:` (Hash) Header settings to be added to each Payload's header inside this transaction
   Sample:
   ```ruby
@@ -542,7 +543,9 @@ config.debug = true
 - ```.on_error_publish = ->(exception, {payload:}) { payload.delay(...).retry_publish! }```
   (Proc) => called when failed publishing a notification (delayed_job or similar can be used for retrying)
 - ```.skip_cache = false```
-  (true/false*) => Allow to skip payload optimization (cache settings)
+  (true/false*) => Allow to skip payload optimization (cache settings)
+ - ```.sync_mode = true```
+  (true/false*) => If `true`, messages are delivered synchronously; otherwise they are delivered asynchronously (currently only Google PubSub supports this). It can also be enabled via the env var `PUBSUB_MODEL_SYNC_MODE=true`
 - ```.transactions_max_buffer = 1``` (Integer, default 1) Controls the maximum quantity of notifications to be enqueued to the transaction-buffer before delivering them and thus adds the ability to rollback notifications if the transaction fails.
   Once this quantity of notifications is reached, then all notifications of the current transaction will immediately be delivered (can be customized per transaction).
   Note: There is no way to rollback delivered notifications if current transaction fails later.
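For example, synchronous delivery could be enabled in an initializer or through the environment (a minimal sketch; the initializer path is the conventional Rails location, not something this diff specifies):

```ruby
# config/initializers/pub_sub_model_sync.rb
PubSubModelSync::Config.sync_mode = true
# Equivalent via environment variable, read when the gem loads: PUBSUB_MODEL_SYNC_MODE=true
```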
@@ -562,6 +565,7 @@ config.debug = true
 - Add subscription liveness checker using thread without db connection to check periodically pending notifications from google pubsub
 - Unify .stop() and 'Listener stopped'
 - TODO: Publish new version 1.2.1 (improve logs)
+- Enable `async` mode for rabbitMQ and Kafka
 
 ## **Q&A**
 - I'm getting error "could not obtain a connection from the pool within 5.000 seconds"... what does this mean?
@@ -10,6 +10,7 @@ module PubSubModelSync
     cattr_accessor(:logger) { Rails.logger }
     cattr_accessor(:transactions_max_buffer) { 1 }
     cattr_accessor(:skip_cache) { false }
+    cattr_accessor(:sync_mode) { ENV['PUBSUB_MODEL_SYNC_MODE'] == 'true' }
 
     cattr_accessor(:on_before_processing) { ->(_payload, _info) {} } # return :cancel to skip
     cattr_accessor(:on_success_processing) { ->(_payload, _info) {} }
@@ -17,8 +17,7 @@ module PubSubModelSync
 
     def process!
       subscribers = filter_subscribers
-      payload_info = { klass: payload.klass, action: payload.action, mode: payload.mode }
-      log("No subscribers found for #{payload_info}", :warn) if config.debug && subscribers.empty?
+      log("No subscribers found for #{payload.uuid}", :warn) if config.debug && subscribers.empty?
       subscribers.each(&method(:run_subscriber))
     rescue => e
       print_error(e)
@@ -38,7 +37,7 @@ module PubSubModelSync
       processor = PubSubModelSync::RunSubscriber.new(subscriber, payload)
       return unless processable?(subscriber)
 
-      log("Processing message #{[subscriber, payload]}...") if config.debug
+      log("Processing message #{[subscriber, payload.uuid]}...") if config.debug
       processor.call
       res = config.on_success_processing.call(payload, { subscriber: subscriber })
       log "processed message with: #{payload.inspect}" if res != :skip_log
@@ -48,7 +47,7 @@ module PubSubModelSync
 
     def processable?(subscriber)
       cancel = config.on_before_processing.call(payload, { subscriber: subscriber }) == :cancel
-      log("process message cancelled: #{payload}") if cancel && config.debug
+      log("process message cancelled via on_before_processing: #{payload.uuid}") if cancel && config.debug
       !cancel
     end
 
@@ -94,7 +94,7 @@ module PubSubModelSync
     end
 
     def connector_publish(payload)
-      log("Publishing message #{[payload]}...") if config.debug
+      log("Publishing message #{[payload.uuid]}...") if config.debug
       connector.publish(payload)
       log("Published message: #{[payload]}")
       config.on_after_publish.call(payload)
@@ -55,6 +55,14 @@ module PubSubModelSync
       (info[:mode] || :model).to_sym
     end
 
+    def uuid
+      headers[:uuid]
+    end
+
+    def ordering_key
+      headers[:ordering_key]
+    end
+
     # Process payload data
     # (If error will raise exception and wont call on_error_processing callback)
     def process!
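Both new helpers are thin readers over `payload.headers`. A minimal sketch of using them from the existing `on_before_processing` hook for debug logging (the log message itself is hypothetical):

```ruby
PubSubModelSync::Config.on_before_processing = lambda do |payload, _info|
  # Identify the notification by its uuid/ordering_key instead of inspecting the full payload
  Rails.logger.info("processing #{payload.uuid} (ordering_key: #{payload.ordering_key})")
end
```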
@@ -88,10 +96,7 @@ module PubSubModelSync
 
     # @param attr_keys (Array<Symbol>) List of attributes to be excluded from payload
     def exclude_data_attrs(attr_keys)
-      orig_data = data.clone
-      headers[:excluded_attr_keys] = attr_keys.join(',')
       @data = data.except(*attr_keys)
-      Config.log("Empty payload after payload optimization (original data: #{[self, orig_data]})") if @data == []
     end
 
     # Attributes to always be delivered after cache optimization
@@ -32,7 +32,7 @@ module PubSubModelSync
 
     def cache_disabled?
       res = config.skip_cache || Rails.cache.nil?
-      log("Skipping cache, it was disabled: #{[payload]}") if res && debug?
+      log("Skipping cache, it was disabled: #{[payload.uuid]}") if res && debug?
       res
     end
 
@@ -44,7 +44,7 @@ module PubSubModelSync
       changed_keys = Hash[(payload.data.to_a - previous_payload_data.to_a)].keys.map(&:to_sym)
       required_keys = payload.cache_settings[:required].map(&:to_sym)
       invalid_keys = payload.data.keys - (changed_keys + required_keys)
-      log("Excluding non changed attributes: #{invalid_keys} from: #{payload.inspect}") if debug?
+      log("Excluding non changed attributes: #{invalid_keys} from: #{payload.uuid}") if debug?
       payload.exclude_data_attrs(invalid_keys)
     end
   end
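A worked example of the key arithmetic above, using plain hashes with hypothetical attribute values:

```ruby
data          = { name: 'Ann', email: 'ann@example.com', age: 30 } # current payload data
previous_data = { name: 'Ann', email: 'ann@example.com', age: 29 } # cached copy from the last publish
required_keys = [:id]                                              # payload.cache_settings[:required]

changed_keys = Hash[(data.to_a - previous_data.to_a)].keys.map(&:to_sym) # => [:age]
invalid_keys = data.keys - (changed_keys + required_keys)                # => [:name, :email]
# Only the changed keys (plus any required keys present) survive exclude_data_attrs(invalid_keys)
```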
@@ -40,11 +40,11 @@ module PubSubModelSync
       call_action(model)
     end
 
-    def ensure_sync(object)
+    def ensure_sync(object) # rubocop:disable Metrics/AbcSize
       res = true
       res = false if settings[:if] && !parse_condition(settings[:if], object)
       res = false if settings[:unless] && parse_condition(settings[:unless], object)
-      log("Cancelled save sync by subscriber condition : #{[payload]}") if !res && debug?
+      log("Cancelled save sync by subscriber condition : #{[payload.uuid]}") if !res && debug?
       res
     end
 
@@ -42,7 +42,7 @@ module PubSubModelSync
     # @return [Payload,Nil]
     def decode_payload(payload_info)
       payload = ::PubSubModelSync::Payload.from_payload_data(JSON.parse(payload_info))
-      log("Received message: #{[payload]}") if config.debug
+      log("Received message: #{payload.uuid}") if config.debug
       payload
     rescue => e
       error_payload = [payload_info, e.message, e.backtrace]
@@ -54,7 +54,7 @@ module PubSubModelSync
     def same_app_message?(payload)
       key = payload.headers[:app_key]
       res = key && key == config.subscription_key
-      log("Skipping message from same origin: #{[payload]}") if res && config.debug
+      log("Skipping message from same origin: #{payload.uuid}") if res && config.debug
       res
     end
 
@@ -54,17 +54,25 @@ module PubSubModelSync
 
     def publish_to_topic(topic, payload)
       retries ||= 0
-      topic.publish_async(encode_payload(payload), message_headers(payload)) do |res|
-        raise StandardError, "Failed to publish the message. #{res.error}" unless res.succeeded?
-      end
+      config.sync_mode ? topic.publish(*message_params(payload)) : publish_async(topic, payload)
     rescue Google::Cloud::PubSub::OrderingKeyError => e
       raise if (retries += 1) > 1
 
-      log("Resuming ordering_key and retrying OrderingKeyError for #{payload.headers[:uuid]}: #{e.message}")
-      topic.resume_publish(message_headers(payload)[:ordering_key])
+      log("Resuming ordering_key and retrying OrderingKeyError for #{payload.uuid}: #{e.message}")
+      topic.resume_publish(payload.ordering_key)
       retry
     end
 
+    def publish_async(topic, payload)
+      topic.publish_async(*message_params(payload)) do |result|
+        log "Published message: #{payload.uuid} (via async)" if result.succeeded? && config.debug
+        unless result.succeeded?
+          log("Error publishing: #{[payload, result.error]} (via async)", :error)
+          config.on_error_publish.call(StandardError.new(result.error), { payload: payload })
+        end
+      end
+    end
+
     # @param only_publish (Boolean): if false is used to listen and publish messages
     # @return (Topic): returns created or loaded topic
     def init_topic(topic_name, only_publish: false)
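With async publishing, failures surface through the result callback above and are forwarded to `config.on_error_publish`. A minimal sketch of a handler (the logging body is hypothetical; `retry_publish!` via delayed_job is the approach the README suggests):

```ruby
PubSubModelSync::Config.on_error_publish = lambda do |exception, data|
  Rails.logger.error("publish failed: #{exception.message} (uuid: #{data[:payload].uuid})")
  # a retry could be enqueued here, e.g. data[:payload].delay(...).retry_publish!
end
```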
@@ -79,11 +87,12 @@ module PubSubModelSync
     end
 
     # @param payload (PubSubModelSync::Payload)
-    def message_headers(payload)
-      {
-        SERVICE_KEY => true,
-        ordering_key: payload.headers[:ordering_key]
-      }.merge(PUBLISH_SETTINGS)
+    # @return [Array]
+    def message_params(payload)
+      [
+        encode_payload(payload),
+        { ordering_key: payload.ordering_key, SERVICE_KEY => true }.merge(PUBLISH_SETTINGS)
+      ]
     end
 
     # @return [Array<Subscriber>]
@@ -18,13 +18,9 @@ module PubSubModelSync
     end
 
     # @param payload (Payload)
+    # TODO: remove buffer (already managed by pubsub services when running with config.async = true)
     def add_payload(payload)
       payloads << payload
-      print_log = config.debug && max_buffer > 1
-      log("Payload added to current transaction: #{payload.inspect}") if print_log
-      return unless payloads.count >= max_buffer
-
-      log("Payloads buffer was filled, delivering current payloads: #{payloads.count}") if print_log
       deliver_payloads
     end
 
@@ -44,7 +40,6 @@ module PubSubModelSync
     end
 
     def rollback
-      log("Rollback #{payloads.count} notifications", :warn) if children.any? && debug?
       self.children = []
       root&.rollback
       clean_publisher
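With the internal buffer removed, `add_payload` now hands each payload straight to `deliver_payloads` instead of accumulating it. A minimal usage sketch based on the README section above (the key `'user/1'` is hypothetical):

```ruby
PubSubModelSync::MessagePublisher.transaction('user/1', headers: { target_app_key: 'my_other_app_key' }) do
  # notifications published here share the ordering_key 'user/1' and the extra headers;
  # each one is passed to deliver_payloads as soon as it is added (no buffering)
end
```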
@@ -1,5 +1,5 @@
 # frozen_string_literal: true
 
 module PubSubModelSync
-  VERSION = '1.7.2'
+  VERSION = '1.9.0'
 end
metadata CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: pub_sub_model_sync
 version: !ruby/object:Gem::Version
-  version: 1.7.2
+  version: 1.9.0
 platform: ruby
 authors:
 - Owen
 autorequire:
 bindir: exe
 cert_chain: []
-date: 2022-12-29 00:00:00.000000000 Z
+date: 2023-01-13 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: rails