jstreams 0.1.0.alpha
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.circleci/config.yml +70 -0
- data/.gitignore +14 -0
- data/.rspec +3 -0
- data/.rubocop.yml +15 -0
- data/.travis.yml +7 -0
- data/CHANGELOG.md +5 -0
- data/CODE_OF_CONDUCT.md +74 -0
- data/Dockerfile +13 -0
- data/Gemfile +26 -0
- data/Gemfile.lock +160 -0
- data/Guardfile +18 -0
- data/LICENSE.txt +21 -0
- data/README.md +140 -0
- data/Rakefile +8 -0
- data/bin/console +14 -0
- data/bin/rspec +29 -0
- data/bin/setup +8 -0
- data/examples/basic/docker-compose.yml +28 -0
- data/examples/basic/example_publisher.rb +18 -0
- data/examples/basic/example_subscriber.rb +24 -0
- data/jstreams.gemspec +58 -0
- data/lib/jstreams.rb +9 -0
- data/lib/jstreams/consumer_group.rb +32 -0
- data/lib/jstreams/context.rb +106 -0
- data/lib/jstreams/publisher.rb +34 -0
- data/lib/jstreams/serializer.rb +37 -0
- data/lib/jstreams/serializers/json.rb +34 -0
- data/lib/jstreams/subscriber.rb +230 -0
- data/lib/jstreams/tagged_logging.rb +89 -0
- data/lib/jstreams/version.rb +5 -0
- data/package-lock.json +21 -0
- metadata +148 -0
data/lib/jstreams/publisher.rb
ADDED
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+# :nodoc:
+module Jstreams
+  ##
+  # Publishes messages to the given stream.
+  class Publisher
+    ##
+    # @param [ConnectionPool] redis_pool Redis connection pool
+    # @param [Serializer] serializer Serializer
+    # @param [TaggedLogging] logger Logger
+    def initialize(redis_pool:, serializer:, logger:)
+      @redis_pool = redis_pool
+      @serializer = serializer
+      @logger = logger
+    end
+
+    ##
+    # Publishes a message to the given stream
+    #
+    # @param [String] stream Destination stream name
+    # @param [Hash] message Message payload
+    def publish(stream, message)
+      @logger.tagged('publisher') do
+        @redis_pool.with do |redis|
+          redis.xadd(stream, payload: @serializer.serialize(message, stream))
+        end
+        @logger.debug { "published to stream #{stream}: #{message.inspect}" }
+      end
+    end
+  end
+
+  private_constant :Publisher
+end
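Publisher is a thin wrapper over Redis XADD. The sketch below is illustrative only: it assumes the `redis` and `connection_pool` gems referenced by the code above, reopens the `Jstreams` module because `Publisher` is a private constant (applications would presumably go through `Jstreams::Context` instead), and adds a hypothetical shim so a plain `Logger` responds to the `#tagged` call the class makes.

```ruby
require 'logger'
require 'connection_pool'
require 'redis'
require 'jstreams/serializers/json'
require 'jstreams/publisher'

module Jstreams
  # Reopened only for illustration: Publisher is private to the gem.
  pool       = ConnectionPool.new(size: 5) { Redis.new }
  serializer = Serializers::JSON.new
  logger     = Logger.new($stdout)

  # Hypothetical shim: Publisher#publish calls logger.tagged('publisher'),
  # which a bare Logger does not provide.
  def logger.tagged(*_tags)
    yield self
  end

  publisher = Publisher.new(redis_pool: pool, serializer: serializer, logger: logger)

  # Serializes the hash and XADDs it as the `payload` field of the "users" stream.
  publisher.publish('users', id: 42, name: 'Ada')
end
```

The `payload:` field name and the `redis.xadd` call come straight from `#publish` above; only the wiring around them is assumed.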
data/lib/jstreams/serializer.rb
ADDED
@@ -0,0 +1,37 @@
+# frozen_string_literal: true
+
+# rubocop:disable Lint/UnusedMethodArgument
+
+module Jstreams
+  ##
+  # @abstract
+  class Serializer
+    ##
+    # Serialize a message from a hash into a string
+    #
+    # @param [Hash] message Message to serialize
+    # @param [String] stream Destination stream name
+    #
+    # @return [String] The serialized message
+    #
+    # @abstract
+    def serialize(message, stream)
+      raise NotImplementedError
+    end
+
+    ##
+    # Deserialize a message from a string into a hash
+    #
+    # @param [String] message Message to deserialize
+    # @param [String] stream Source stream name
+    #
+    # @return [Hash] The deserialized message
+    #
+    # @abstract
+    def deserialize(message, stream)
+      raise NotImplementedError
+    end
+  end
+end
+
+# rubocop:enable Lint/UnusedMethodArgument
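Since `Serializer` only defines this two-method contract, a custom format is a small subclass. The sketch below is a hypothetical Marshal-based serializer, not part of the gem, assuming `lib/` is on the load path so the file can be required as `jstreams/serializer`.

```ruby
require 'jstreams/serializer'

# Hypothetical serializer: stores messages in Ruby's Marshal format. The
# stream argument is part of the contract but unused here.
class MarshalSerializer < Jstreams::Serializer
  def serialize(message, _stream)
    Marshal.dump(message)
  end

  def deserialize(message, _stream)
    # Only safe for payloads produced by trusted publishers.
    Marshal.load(message)
  end
end

serializer = MarshalSerializer.new
blob = serializer.serialize({ id: 1 }, 'users')
serializer.deserialize(blob, 'users') # => { :id => 1 }
```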
data/lib/jstreams/serializers/json.rb
ADDED
@@ -0,0 +1,34 @@
+# frozen_string_literal: true
+
+require 'json'
+require_relative '../serializer'
+
+module Jstreams
+  module Serializers
+    ##
+    # Simple JSON serializer
+    class JSON < Serializer
+      ##
+      # Serializes the given message to a JSON string
+      #
+      # @param [Hash] message Message to serialize
+      # @param [String] _stream Destination stream name (unused)
+      #
+      # @return [String] The JSON serialized message
+      def serialize(message, _stream)
+        ::JSON.generate(message)
+      end
+
+      ##
+      # Deserializes the given JSON message to a Hash
+      #
+      # @param [String] message Message to deserialize
+      # @param [String] _stream Source stream name (unused)
+      #
+      # @return [Hash] The deserialized message
+      def deserialize(message, _stream)
+        ::JSON.parse(message)
+      end
+    end
+  end
+end
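A quick round trip through the JSON serializer, assuming `lib/` is on the load path so the file can be required directly. Because `::JSON.parse` is called without `symbolize_names`, keys come back as strings.

```ruby
require 'jstreams/serializers/json'

serializer = Jstreams::Serializers::JSON.new

json = serializer.serialize({ id: 42, name: 'Ada' }, 'users')
# => "{\"id\":42,\"name\":\"Ada\"}"

serializer.deserialize(json, 'users')
# => {"id"=>42, "name"=>"Ada"}   (string keys; the stream argument is ignored)
```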
data/lib/jstreams/subscriber.rb
ADDED
@@ -0,0 +1,230 @@
+# frozen_string_literal: true
+
+require_relative 'consumer_group'
+
+# :nodoc:
+module Jstreams
+  ##
+  # Retrieves messages from the Redis consumer group and dispatches them
+  # to the handler.
+  class Subscriber
+    ##
+    # Returns a new instance of Subscriber
+    def initialize(
+      name:,
+      key: name,
+      streams:,
+      redis_pool:,
+      serializer:,
+      handler:,
+      logger:,
+      error_handler: nil,
+      abandoned_message_check_interval: ABANDONED_MESSAGE_CHECK_INTERVAL,
+      abandoned_message_idle_timeout: ABANDONED_MESSAGE_IDLE_TIMEOUT
+    )
+      @name = name
+      @key = key
+      @streams = streams
+      @redis_pool = redis_pool
+      @serializer = serializer
+      @handler = handler
+      @error_handler = error_handler
+      @logger = logger
+      @abandoned_message_check_interval = abandoned_message_check_interval
+      @abandoned_message_idle_timeout = abandoned_message_idle_timeout
+      @need_to_check_own_pending = true
+    end
+
+    ##
+    # Starts the subscriber's message handling loop.
+    # Blocks until either a fatal error is raised or #stop is called
+    def run
+      # TODO: Mutex
+      @running = true
+      logger.tagged("subscriber:#{name}", "key:#{key}") do
+        process_messages while @running
+        logger.info 'Subscriber exiting run loop'
+      end
+    end
+
+    ##
+    # Stops the subscriber.
+    def stop
+      # TODO: Mutex
+      logger.info 'Subscriber stopping'
+      @running = false
+    end
+
+    private
+
+    READ_TIMEOUT = 0.25 # seconds
+    ABANDONED_MESSAGE_CHECK_INTERVAL = 10 # seconds
+    ABANDONED_MESSAGE_IDLE_TIMEOUT = 600 # seconds
+    ABANDONED_MESSAGE_BATCH_SIZE = 100
+
+    attr_reader :name,
+                :key,
+                :logger,
+                :handler,
+                :streams,
+                :redis_pool,
+                :redis,
+                :serializer,
+                :abandoned_message_check_interval,
+                :abandoned_message_idle_timeout
+
+    alias consumer_group name
+    alias consumer_name key
+
+    def process_messages
+      @redis_pool.with do |redis|
+        @redis = redis
+        results = read_messages
+        logger.debug 'timed out waiting for messages' if results.empty?
+        results.each do |stream, entries|
+          entries.each do |id, entry|
+            logger.tagged("stream:#{stream}", "id:#{id}") do
+              handle_entry(stream, id, entry)
+            end
+          end
+        end
+      end
+    end
+
+    def read_messages
+      logger.debug do
+        "Reading messages (time to reclaim?: #{time_to_reclaim?}, last reclaim: #{@last_reclaim_time})"
+      end
+      return read_own_pending if @need_to_check_own_pending
+      results = {}
+      results.merge!(reclaim_abandoned_messages) if time_to_reclaim?
+      results.merge!(read_group)
+      results
+    end
+
+    def read_own_pending
+      logger.debug 'Reading own pending entries'
+      results = read_group(block: nil, id: 0)
+      if results.values.any? { |entries| !entries.empty? }
+        logger.debug { "Own pending entries: #{results}" }
+      else
+        logger.debug 'No pending entries'
+        @need_to_check_own_pending = false
+      end
+      results
+    end
+
+    def reclaim_abandoned_messages
+      logger.debug 'Looking for abandoned messages to reclaim'
+      results = {}
+      streams.each do |stream|
+        results[stream] = reclaim_abandoned_messages_in_stream(stream)
+      end
+      @last_reclaim_time = Time.now
+      logger.debug do
+        "Done looking for abandoned messages to reclaim. Found: #{results
+          .inspect}"
+      end
+      results
+    rescue Redis::CommandError => e
+      raise e unless e.message =~ /NOGROUP/
+      logger.debug "Couldn't reclaim messages because group does not exist yet"
+      {}
+    end
+
+    def reclaim_abandoned_messages_in_stream(stream)
+      reclaim_ids = []
+      # TODO: pagination & configurable batch size
+      read_pending(stream, ABANDONED_MESSAGE_BATCH_SIZE).each do |pe|
+        unless pe['consumer'] != consumer_name && abandoned_pending_entry?(pe)
+          next
+        end
+        logger.info "Reclaiming abandoned message #{pe['entry_id']}" \
+                    " from consumer #{pe['consumer']}"
+        reclaim_ids << pe['entry_id']
+      end
+
+      return [] if reclaim_ids.empty?
+
+      redis.xclaim(
+        stream,
+        consumer_group,
+        consumer_name,
+        (abandoned_message_idle_timeout * 1000).round,
+        reclaim_ids
+      )
+    end
+
+    def abandoned_pending_entry?(pending_entry)
+      pending_entry['elapsed'] >= (abandoned_message_idle_timeout * 1000)
+    end
+
+    def read_pending(stream, count)
+      redis.xpending(stream, consumer_group, '-', '+', count)
+    end
+
+    def time_to_reclaim?
+      @last_reclaim_time.nil? ||
+        (Time.now - @last_reclaim_time) >= abandoned_message_check_interval
+    end
+
+    def read_group(block: READ_TIMEOUT * 1000, id: '>')
+      logger.debug 'calling xreadgroup'
+      redis.xreadgroup(
+        consumer_group,
+        consumer_name,
+        streams,
+        streams.map { id },
+        block: block
+      )
+    rescue ::Redis::CommandError => e
+      if /NOGROUP/ =~ e.message
+        create_consumer_groups
+        retry
+      end
+      raise
+    end
+
+    def handle_entry(stream, id, entry)
+      logger.debug { "received raw entry: #{entry.inspect}" }
+      begin
+        handler.call(deserialize_entry(stream, id, entry), stream, self)
+        logger.debug { "ACK message #{[stream, consumer_group, id].inspect}" }
+        redis.xack(stream, consumer_group, id)
+      rescue StandardError => e
+        logger.debug do
+          "Error processing message #{[
+            stream,
+            consumer_group,
+            id
+          ].inspect}: #{e}"
+        end
+        raise e if @error_handler.nil?
+        @error_handler.call(e, stream, id, entry)
+      end
+    end
+
+    def deserialize_entry(stream, id, entry)
+      serializer.deserialize(entry['payload'], stream)
+    rescue StandardError => e
+      # TODO: Allow subscribers to register an error handler.
+      # For now we'll just log and skip.
+      logger.error "failed to deserialize entry #{id}: #{entry
+        .inspect} - error: #{e}"
+    end
+
+    def create_consumer_groups
+      streams.each do |stream|
+        group =
+          ConsumerGroup.new(name: consumer_group, stream: stream, redis: redis)
+        if group.create_if_not_exists
+          logger.info "Created consumer group #{consumer_group} for stream #{stream}"
+        else
+          logger.info 'Consumer group already exists'
+        end
+      end
+    end
+  end
+
+  private_constant :Subscriber
+end
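The `handler:` and `error_handler:` callables are invoked in `#handle_entry` above with fixed argument shapes. The sketch below shows lambdas matching those shapes; the names and bodies are illustrative, and since `Subscriber` is a private constant they would presumably be supplied via `Jstreams::Context` rather than passed to `Subscriber.new` directly.

```ruby
# Illustrative callables matching the shapes used in #handle_entry above.
handler = lambda do |message, stream, subscriber|
  # message is the deserialized Hash, stream the Redis stream name, and
  # subscriber the Subscriber instance (useful for calling #stop).
  puts "#{stream}: #{message.inspect}"
  subscriber.stop if message['type'] == 'shutdown'
end

# Invoked instead of re-raising when error_handler: is given to #initialize.
error_handler = lambda do |error, stream, id, entry|
  warn "failed to process #{stream}/#{id}: #{error.message} (raw entry: #{entry.inspect})"
end
```

`#run` blocks in a loop until `#stop` flips `@running`, so a caller would normally run the subscriber on a dedicated thread and call `#stop` from elsewhere.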
data/lib/jstreams/tagged_logging.rb
ADDED
@@ -0,0 +1,89 @@
+# frozen_string_literal: true
+
+# rubocop:disable Style/DocumentationMethod
+require 'logger'
+
+##
+# This is ActiveSupport::TaggedLogging extracted from the activesupport gem
+# and adapted to be used in environments without activesupport's core extensions.
+module Jstreams
+  # :nodoc:
+  module TaggedLogging
+    # :nodoc:
+    module Formatter
+      def call(severity, timestamp, progname, msg)
+        super(severity, timestamp, progname, "#{tags_text}#{msg}")
+      end
+
+      def tagged(*tags)
+        new_tags = push_tags(*tags)
+        yield self
+      ensure
+        pop_tags(new_tags.size)
+      end
+
+      def push_tags(*tags)
+        tags.flatten.reject(&:nil?).reject(&:empty?).tap do |new_tags|
+          current_tags.concat new_tags
+        end
+      end
+
+      def pop_tags(size = 1)
+        current_tags.pop size
+      end
+
+      def clear_tags!
+        current_tags.clear
+      end
+
+      def current_tags
+        # We use our object ID here to avoid conflicting with other instances
+        thread_key = @thread_key ||= "jstreams_tagged_logging_tags:#{object_id}"
+        Thread.current[thread_key] ||= []
+      end
+
+      def tags_text
+        tags = current_tags
+        if tags.one?
+          "[#{tags[0]}] "
+        elsif tags.any?
+          tags.collect { |tag| "[#{tag}] " }.join
+        end
+      end
+    end
+
+    def self.new(logger)
+      logger = logger.dup
+
+      logger.formatter =
+        if logger.formatter
+          logger.formatter.dup
+        else
+          # Ensure we set a default formatter so we aren't extending nil!
+          ::Logger::Formatter.new
+        end
+
+      logger.formatter.extend Formatter
+      logger.extend(self)
+    end
+
+    %i[push_tags pop_tags clear_tags!].each do |method_name|
+      define_method(method_name) do |*args, &block|
+        formatter.send(method_name, *args, &block)
+      end
+    end
+
+    def tagged(*tags)
+      formatter.tagged(*tags) { yield self }
+    end
+
+    def flush
+      clear_tags!
+      super if defined?(super)
+    end
+  end
+
+  private_constant :TaggedLogging
+end
+
+# rubocop:enable Style/DocumentationMethod
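The wrapper's behaviour can be seen with a plain `::Logger`. This sketch reopens the `Jstreams` module because `TaggedLogging` is a private constant; beyond the prepended tags, the output format is whatever the underlying formatter produces.

```ruby
require 'logger'
require 'jstreams/tagged_logging'

module Jstreams
  # Reopened only for illustration: TaggedLogging is private to the gem.
  logger = TaggedLogging.new(Logger.new($stdout))

  logger.tagged('subscriber:users', 'key:worker-1') do
    logger.info 'started'
    # => "I, [...]  INFO -- : [subscriber:users] [key:worker-1] started"

    logger.tagged('stream:users') do
      logger.info 'handling entry' # all three tags are prepended here
    end
  end
  # Tags are pushed per thread and popped as each block exits.
end
```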
data/package-lock.json
ADDED
@@ -0,0 +1,21 @@
+{
+  "requires": true,
+  "lockfileVersion": 1,
+  "dependencies": {
+    "@prettier/plugin-ruby": {
+      "version": "0.10.0",
+      "resolved": "https://registry.npmjs.org/@prettier/plugin-ruby/-/plugin-ruby-0.10.0.tgz",
+      "integrity": "sha512-AIua0gE+sG4zoioYU2iU32AJpoRTPBC+69wAdbLYYdxoh0g3hDFeUmm8R17Ig30hMXa8pJGU/SuW4MHCWmhQcQ==",
+      "dev": true,
+      "requires": {
+        "prettier": "^1.16.4"
+      }
+    },
+    "prettier": {
+      "version": "1.17.0",
+      "resolved": "https://registry.npmjs.org/prettier/-/prettier-1.17.0.tgz",
+      "integrity": "sha512-sXe5lSt2WQlCbydGETgfm1YBShgOX4HxQkFPvbxkcwgDvGDeqVau8h+12+lmSVlP3rHPz0oavfddSZg/q+Szjw==",
+      "dev": true
+    }
+  }
+}