redstream 0.2.0 → 0.3.0
- checksums.yaml +4 -4
- data/CHANGELOG.md +3 -0
- data/README.md +3 -1
- data/lib/redstream/model.rb +4 -12
- data/lib/redstream/producer.rb +13 -42
- data/lib/redstream/version.rb +1 -1
- data/spec/redstream/producer_spec.rb +61 -27
- metadata +2 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7c563156ded6459fa0a2c4fcd317d3d5a128a72fe8f0732b358672efec718a3b
+  data.tar.gz: 41c68a6d61e4287d5bffca7bd9277f55ef203d804b609b87de4b85cc7185560c
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 812e8cb419ea1a301e462250f20c798f92db02726b46e8eb196e169e8f6dcede85023cbecaa7b9b476d1e18e9b8902bfa68d36b49813dd4dea8f37b202fe9b5b
+  data.tar.gz: e36a3fb9130992efd3cd812ac8b1368ab7dc056cc88016208ec83cc629077650aca935bb2d556396a47cc980f428515713de3bda61fd023867970d55e470755f
data/CHANGELOG.md
CHANGED
data/README.md
CHANGED
@@ -97,7 +97,9 @@ any errors occurring in between `after_save` and `after_commit` result in
 inconsistencies between your primary and secondary datastore. By using these
 kinds of "delay" messages triggered by `after_save` and fetched after e.g. 5
 minutes, errors occurring in between `after_save` and `after_commit` can be
-fixed when the delay message get processed.
+fixed when the delay message get processed. Please note that redstream deletes
+delay messages after the messages for immediate retrieval have been
+successfully sent, such that messages will not be processed twice, usually.
 
 Any messages are fetched in batches, such that e.g. elasticsearch can be
 updated using its bulk API. For instance, depending on which elasticsearch ruby
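The paragraph added to the README matches the producer change further down: the delay message is deleted in the same Redis pipeline that writes the message for immediate retrieval. Below is a minimal sketch of that lifecycle using redis-rb directly; the stream key names are simplified placeholders, since the gem actually derives them via `Redstream.stream_key_name`.

```ruby
require "json"
require "redis"

redis = Redis.new
payload = JSON.dump(id: 1)

# after_save: write a delay message and remember its id
delay_id = redis.xadd("products.delay", { "payload" => payload })

# after_commit: write the immediate message and drop the delay copy in one
# pipeline, so the delay message only disappears once the immediate message
# has been sent
redis.pipelined do |pipe|
  pipe.xadd("products", { "payload" => payload })
  pipe.xdel("products.delay", delay_id)
end
```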
data/lib/redstream/model.rb
CHANGED
@@ -37,22 +37,14 @@ module Redstream
 
       after_commit(on: [:create, :update]) do |object|
         if object.saved_changes.present?
-          producer.queue(object)
-
-          if id = instance_variable_get(IVAR_DELAY_MESSAGE_ID)
-            producer.delete(object, id)
-            remove_instance_variable(IVAR_DELAY_MESSAGE_ID)
-          end
+          producer.queue(object, delay_message_id: instance_variable_get(IVAR_DELAY_MESSAGE_ID))
+          instance_variable_set(IVAR_DELAY_MESSAGE_ID, nil)
         end
       end
 
       after_commit(on: :destroy) do |object|
-        producer.queue(object)
-
-        if id = instance_variable_get(IVAR_DELAY_MESSAGE_ID)
-          producer.delete(object, id)
-          remove_instance_variable(IVAR_DELAY_MESSAGE_ID)
-        end
+        producer.queue(object, delay_message_id: instance_variable_get(IVAR_DELAY_MESSAGE_ID))
+        instance_variable_set(IVAR_DELAY_MESSAGE_ID, nil)
       end
     end
 
data/lib/redstream/producer.rb
CHANGED
@@ -52,12 +52,11 @@ module Redstream
     def bulk(records)
       records_array = Array(records)
 
-      message_ids = bulk_delay(records_array)
+      delay_message_ids = bulk_delay(records_array)
 
       yield
 
-      bulk_queue(records_array)
-      bulk_delete(records_array, message_ids)
+      bulk_queue(records_array, delay_message_ids: delay_message_ids)
     end
 
     # @api private
@@ -73,7 +72,7 @@ module Redstream
       Redstream.connection_pool.with do |redis|
         redis.pipelined do
           slice.each do |object|
-            redis.xadd
+            redis.xadd(Redstream.stream_key_name("#{stream_name(object)}.delay"), payload: JSON.dump(object.redstream_payload))
           end
         end
       end
@@ -86,37 +85,19 @@ module Redstream
       res
     end
 
-    # @api private
-    #
-    # Deletes delay message from a delay stream in redis.
-    #
-    # @param records [#to_a] The object/objects that have beeen updated or deleted
-    # @param ids [#to_a] The ids of the respective delay messages
-
-    def bulk_delete(records, ids)
-      records.each_with_index.each_slice(250) do |slice|
-        Redstream.connection_pool.with do |redis|
-          redis.pipelined do
-            slice.each do |object, index|
-              redis.xdel Redstream.stream_key_name("#{stream_name(object)}.delay"), ids[index]
-            end
-          end
-        end
-      end
-    end
-
     # @api private
     #
     # Writes messages to a stream in redis for immediate retrieval.
     #
     # @param records [#to_a] The object/objects that will be updated deleted
 
-    def bulk_queue(records)
-      records.each_slice(250) do |slice|
+    def bulk_queue(records, delay_message_ids:)
+      records.each_with_index.each_slice(250) do |slice|
         Redstream.connection_pool.with do |redis|
           redis.pipelined do
-            slice.each do |object|
-              redis.xadd
+            slice.each do |object, index|
+              redis.xadd(Redstream.stream_key_name(stream_name(object)), payload: JSON.dump(object.redstream_payload))
+              redis.xdel(Redstream.stream_key_name("#{stream_name(object)}.delay"), delay_message_ids[index]) if delay_message_ids
             end
           end
         end
@@ -141,28 +122,18 @@ module Redstream
       end
     end
 
-    # @api private
-    #
-    # Deletes a single delay message from a delay stream in redis.
-    #
-    # @param object The object that has been updated, deleted, ect.
-    # @param id The redis message id
-
-    def delete(object, id)
-      Redstream.connection_pool.with do |redis|
-        redis.xdel Redstream.stream_key_name("#{stream_name(object)}.delay"), id
-      end
-    end
-
     # @api private
     #
     # Writes a single message to a stream in redis for immediate retrieval.
     #
     # @param object The object hat will be updated, deleted, etc.
 
-    def queue(object)
+    def queue(object, delay_message_id:)
       Redstream.connection_pool.with do |redis|
-        redis.
+        redis.pipelined do
+          redis.xadd(Redstream.stream_key_name(stream_name(object)), payload: JSON.dump(object.redstream_payload))
+          redis.xdel(Redstream.stream_key_name("#{stream_name(object)}.delay"), delay_message_id) if delay_message_id
+        end
       end
 
       true
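Based on the updated specs below, the single-record flow with the new keyword argument looks roughly as follows. The `producer.delay` and `producer.queue` calls are taken from the specs; the `Product` model is an assumed example of a model wired up for redstream.

```ruby
producer = Redstream::Producer.new
product  = Product.first # assumes a model that provides #redstream_payload

# typically triggered by after_save: keep a copy in the "products.delay" stream
delay_message_id = producer.delay(product)

# typically triggered by after_commit: send the immediate message and delete
# the delay message in one pipeline; passing nil skips the delete
producer.queue(product, delay_message_id: delay_message_id)
```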
data/lib/redstream/version.rb
CHANGED
data/spec/redstream/producer_spec.rb
CHANGED
@@ -7,9 +7,20 @@ RSpec.describe Redstream::Producer do
 
     stream_key_name = Redstream.stream_key_name("products")
 
-    expect { Redstream::Producer.new.queue(product) }.to change { redis.xlen(stream_key_name) }.by(1)
+    expect { Redstream::Producer.new.queue(product, delay_message_id: nil) }.to change { redis.xlen(stream_key_name) }.by(1)
     expect(redis.xrange(stream_key_name, "-", "+").last[1]).to eq("payload" => JSON.dump(product.redstream_payload))
   end
+
+  it "deletes the delay message when given" do
+    product = create(:product)
+
+    producer = Redstream::Producer.new
+
+    id = producer.delay(product)
+    producer.queue(product, delay_message_id: id)
+
+    expect(redis.xlen(Redstream.stream_key_name("products.delay"))).to eq(0)
+  end
 end
 
 describe "#delay" do
@@ -31,16 +42,54 @@ RSpec.describe Redstream::Producer do
     end
   end
 
-  describe "#
-    it "
-
+  describe "#bulk" do
+    it "adds bulk delay messages for scopes" do
+      products = create_list(:product, 2)
+
+      stream_key_name = Redstream.stream_key_name("products")
+
+      expect(redis.xlen("#{stream_key_name}.delay")).to eq(0)
+
+      Redstream::Producer.new.bulk(Product.all) do
+        messages = redis.xrange("#{stream_key_name}.delay", "-", "+").last(2).map { |message| message[1] }
+
+        expect(messages).to eq([
+          { "payload" => JSON.dump(products[0].redstream_payload) },
+          { "payload" => JSON.dump(products[1].redstream_payload) }
+        ])
+      end
+    end
+
+    it "adds bulk queue messages for scopes" do
+      products = create_list(:product, 2)
+
+      stream_key_name = Redstream.stream_key_name("products")
 
+      expect do
+        Redstream::Producer.new.bulk(Product.all) do
+          # nothing
+        end
+      end.to change { redis.xlen(stream_key_name) }.by(2)
+
+      messages = redis.xrange(stream_key_name, "-", "+").last(2).map { |message| message[1] }
+
+      expect(messages).to eq([
+        { "payload" => JSON.dump(products[0].redstream_payload) },
+        { "payload" => JSON.dump(products[1].redstream_payload) }
+      ])
+    end
+
+    it "deletes the delay messages after the queue messages have been sent" do
+      products = create_list(:product, 2)
       producer = Redstream::Producer.new
 
-
-      producer.delete(product, id)
+      other_delay_message_id = producer.delay(create(:product))
 
-
+      producer.bulk(products) do
+        expect(redis.xlen(Redstream.stream_key_name("products.delay"))).to eq(3)
+      end
+
+      expect(redis.xrange(Redstream.stream_key_name("products.delay"), "-", "+").map(&:first)).to eq([other_delay_message_id])
     end
   end
 
@@ -50,7 +99,7 @@ RSpec.describe Redstream::Producer do
 
     stream_key_name = Redstream.stream_key_name("products")
 
-    expect { Redstream::Producer.new.bulk_queue(Product.all) }.to change { redis.xlen(stream_key_name) }.by(2)
+    expect { Redstream::Producer.new.bulk_queue(Product.all, delay_message_ids: nil) }.to change { redis.xlen(stream_key_name) }.by(2)
 
     messages = redis.xrange(stream_key_name, "-", "+").last(2).map { |message| message[1] }
 
@@ -64,13 +113,12 @@ RSpec.describe Redstream::Producer do
     products = create_list(:product, 2)
     producer = Redstream::Producer.new
 
-
+    delay_message_ids = producer.bulk_delay(products)
+    other_delay_message_id = producer.delay(create(:product))
 
-    producer.bulk_queue(products)
-    expect(redis.xlen(Redstream.stream_key_name("products.delay"))).to eq(2)
-  end
+    producer.bulk_queue(products, delay_message_ids: delay_message_ids)
 
-    expect(redis.xrange(Redstream.stream_key_name("products.delay"), "-", "+").map(&:first)).to eq([
+    expect(redis.xrange(Redstream.stream_key_name("products.delay"), "-", "+").map(&:first)).to eq([other_delay_message_id])
   end
 end
 
@@ -100,18 +148,4 @@ RSpec.describe Redstream::Producer do
     expect { Redstream::Producer.new(wait: 0).bulk_delay(products) }.to change { redis.xlen(stream_key_name) }.by(2)
   end
 end
-
-  describe "#bulk_delete" do
-    it "deletes delay messages for scopes" do
-      products = create_list(:product, 2)
-      producer = Redstream::Producer.new
-
-      other_id = producer.delay(create(:product))
-
-      ids = producer.bulk_delay(products)
-      producer.bulk_delete(products, ids)
-
-      expect(redis.xrange(Redstream.stream_key_name("products.delay"), "-", "+").map(&:first)).to eq([other_id])
-    end
-  end
 end
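For scopes, the specs above exercise `Producer#bulk`, which now collects the delay message ids from `bulk_delay` itself and removes them once the queue messages have been sent. A usage sketch; the scope and the `update_all` inside the block are assumed placeholders for whatever bulk write you wrap:

```ruby
producer = Redstream::Producer.new
scope    = Product.where(on_stock: true) # assumed example scope

# bulk_delay runs before the block, bulk_queue (plus the delay cleanup) runs
# after it, so a crash inside the block can still be repaired from the
# delay stream
producer.bulk(scope) do
  scope.update_all(on_stock: false)
end
```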
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: redstream
 version: !ruby/object:Gem::Version
-  version: 0.2.0
+  version: 0.3.0
 platform: ruby
 authors:
 - Benjamin Vetter
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2021-04-
+date: 2021-04-26 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: activerecord