eventhub-processor2 1.4.1 → 1.10.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- checksums.yaml +5 -5
- data/.rspec +1 -0
- data/.ruby-version +1 -0
- data/.travis.yml +5 -4
- data/CHANGELOG.md +26 -0
- data/Gemfile +1 -1
- data/Gemfile.lock +64 -52
- data/README.md +13 -2
- data/Rakefile +13 -4
- data/docker/Dockerfile +5 -3
- data/docker/README.md +2 -2
- data/docker/definitions.json +44 -1
- data/docker/docker-compose.yml +11 -0
- data/docker/reset +5 -0
- data/eventhub-processor2.gemspec +24 -23
- data/example/crasher.rb +12 -13
- data/example/publisher.rb +41 -41
- data/example/receiver.rb +2 -2
- data/example/router.rb +3 -3
- data/lib/eventhub/actor_heartbeat.rb +22 -23
- data/lib/eventhub/actor_listener.rb +18 -23
- data/lib/eventhub/actor_publisher.rb +7 -7
- data/lib/eventhub/actor_watchdog.rb +3 -4
- data/lib/eventhub/base.rb +24 -31
- data/lib/eventhub/base_exception.rb +2 -2
- data/lib/eventhub/configuration.rb +31 -26
- data/lib/eventhub/constant.rb +24 -24
- data/lib/eventhub/hash_extensions.rb +8 -8
- data/lib/eventhub/helper.rb +7 -7
- data/lib/eventhub/logger.rb +2 -2
- data/lib/eventhub/message.rb +45 -45
- data/lib/eventhub/processor2.rb +22 -22
- data/lib/eventhub/sleeper.rb +1 -1
- data/lib/eventhub/statistics.rb +6 -9
- data/lib/eventhub/version.rb +1 -1
- metadata +35 -19
data/example/crasher.rb
CHANGED

@@ -1,17 +1,17 @@
-require
-require_relative
+require "eventhub/components"
+require_relative "../lib/eventhub/sleeper"
 
 RESTART_RANGES_IN_SECONDS = (30..600).to_a
-PROCESS_PATTERNS = [
+PROCESS_PATTERNS = ["router", "receiver"]
 
 # Module Crasher
 module Crasher
   def self.logger
     unless @logger
       @logger = ::EventHub::Components::MultiLogger.new
-      @logger.add_device(Logger.new(
+      @logger.add_device(Logger.new($stdout))
       @logger.add_device(
-        EventHub::Components::Logger.logstash(
+        EventHub::Components::Logger.logstash("crasher", "development")
       )
     end
     @logger
@@ -26,7 +26,7 @@ module Crasher
 
     def restart
       Crasher.logger.info "Sending Signal HUP to process [#{@id}/#{@name}]"
-      Process.kill(
+      Process.kill("HUP", @id)
    rescue Errno::ESRCH
    end
 
@@ -35,13 +35,13 @@ module Crasher
      PROCESS_PATTERNS.each do |name|
        data = `ps | grep #{name}.rb`
        data.lines[0..-2].each do |line|
-          a = line.split(
+          a = line.split(" ")
          next if a.size > 5
          processes << MyProcess.new(a[0].to_i, a[-1])
        end
      end
 
-      Crasher.logger.info "Found ids: #{processes.map{ |pr| pr.id}.join(
+      Crasher.logger.info "Found ids: #{processes.map { |pr| pr.id }.join(", ")}"
      processes
    end
  end
@@ -61,13 +61,12 @@ module Crasher
     end
   end
 
-
   class Application
     def initialize
       @sleeper = EventHub::Sleeper.new
       @run = true
 
-      Signal.trap(
+      Signal.trap("INT") {
        @run = false
        @sleeper.stop
      }
@@ -75,8 +74,8 @@ module Crasher
 
    def pick_process
      processes = []
-      processes << Docker.new(
-      processes << Docker.new(
+      processes << Docker.new("eventhub.rabbitmq")
+      processes << Docker.new("eventhub.rabbitmq", 0)
      processes << MyProcess.all
      processes.flatten.sample
    end
@@ -89,7 +88,7 @@ module Crasher
        @sleeper.start(to_sleep)
        next unless @run
        process = pick_process
-        process
+        process&.restart
      end
      Crasher.logger.info "Crasher has been stopped"
    end
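The crasher's main loop relies on an interruptible sleep: `Signal.trap("INT")` flips the run flag and calls `EventHub::Sleeper#stop`, which cuts the pending `#start` short. A minimal sketch of that shutdown pattern, based only on the calls visible in this diff (the work inside the loop is a placeholder):

```ruby
require_relative "../lib/eventhub/sleeper"

sleeper = EventHub::Sleeper.new
run = true

# INT flips the stop flag and wakes the sleeper so the loop ends promptly.
Signal.trap("INT") do
  run = false
  sleeper.stop
end

while run
  sleeper.start(30)                 # interruptible sleep of up to 30 seconds
  break unless run
  puts "doing one round of work"    # placeholder for the real work
end
```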
data/example/publisher.rb
CHANGED

@@ -1,9 +1,9 @@
-require
-require
-require
-require
-require
-require_relative
+require "bunny"
+require "celluloid"
+require "json"
+require "securerandom"
+require "eventhub/components"
+require_relative "../lib/eventhub/sleeper"
 
 SIGNALS_FOR_TERMINATION = [:INT, :TERM, :QUIT]
 SIGNALS_FOR_RELOAD_CONFIG = [:HUP]
@@ -12,16 +12,16 @@ PAUSE_BETWEEN_WORK = 0.05 # default is 0.05
 
 Celluloid.logger = nil
 Celluloid.exception_handler { |ex| Publisher.logger.error "Exception occured: #{ex}}" }
+Celluloid.boot
 
 # Publisher module
 module Publisher
-
   def self.logger
     unless @logger
       @logger = ::EventHub::Components::MultiLogger.new
-      @logger.add_device(Logger.new(
+      @logger.add_device(Logger.new($stdout))
      @logger.add_device(
-        EventHub::Components::Logger.logstash(
+        EventHub::Components::Logger.logstash("publisher", "development")
      )
    end
    @logger
@@ -36,11 +36,11 @@ module Publisher
       @start = Time.now
       @files_sent = 0
 
-      @filename =
+      @filename = "data/store.json"
      if File.exist?(@filename)
        cleanup
      else
-        File.write(@filename,
+        File.write(@filename, "{}")
      end
 
      every(30) { write_statistics }
@@ -48,7 +48,7 @@ module Publisher
 
    def start(name)
      store = read_store
-      store[name] = Time.now.strftime(
+      store[name] = Time.now.strftime("%Y-%m-%d %H:%M:%S.%L")
      write_store(store)
    end
 
@@ -76,19 +76,20 @@ module Publisher
 
    def write_statistics
      now = Time.now
-      rate = @files_sent / (now
-      time_spent = (now
-      Publisher.logger.info("Started @ #{@start.strftime(
+      rate = @files_sent / (now - @start)
+      time_spent = (now - @start) / 60
+      Publisher.logger.info("Started @ #{@start.strftime("%Y-%m-%d %H:%M:%S.%L")}: Files sent within #{"%0.1f" % time_spent} minutes: #{@files_sent}, #{"%0.1f" % rate} files/second")
    end
 
    private
-    def read_store
-      JSON.parse(File.read(@filename))
-    end
 
-
-
-
+    def read_store
+      JSON.parse(File.read(@filename))
+    end
+
+    def write_store(store)
+      File.write(@filename, store.to_json)
+    end
  end
 
  # Worker
@@ -106,26 +107,26 @@ module Publisher
        sleep PAUSE_BETWEEN_WORK
      end
    ensure
-      @connection
+      @connection&.close
    end
 
    private
 
    def connect
-      @connection = Bunny.new(vhost:
+      @connection = Bunny.new(vhost: "event_hub",
                              automatic_recovery: false,
-                              logger: Logger.new(
+                              logger: Logger.new("/dev/null"))
      @connection.start
      @channel = @connection.create_channel
      @channel.confirm_select
-      @exchange = @channel.direct(
+      @exchange = @channel.direct("example.outbound", durable: true)
    end
 
    def do_the_work
-      #prepare id and content
+      # prepare id and content
      id = SecureRandom.uuid
      file_name = "data/#{id}.json"
-      data = {
+      data = {body: {id: id}}.to_json
 
      # start transaction...
      Celluloid::Actor[:transaction_store].start(id)
@@ -135,10 +136,10 @@ module Publisher
      @exchange.publish(data, persistent: true)
      success = @channel.wait_for_confirms
      if success
-        Celluloid::Actor[:transaction_store]
-        Publisher
+        Celluloid::Actor[:transaction_store]&.stop(id)
+        Publisher&.logger&.info("[#{id}] - Message sent")
      else
-        Publisher
+        Publisher&.logger&.error("[#{id}] - Published message not confirmed")
      end
    end
  end
@@ -153,27 +154,27 @@ module Publisher
    def start_supervisor
      @config = Celluloid::Supervision::Configuration.define(
        [
-          {
-          {
+          {type: TransactionStore, as: :transaction_store},
+          {type: Worker, as: :worker}
        ]
      )
 
      sleeper = @sleeper
      @config.injection!(:before_restart, proc do
-        Publisher.logger.info(
+        Publisher.logger.info("Restarting in 15 seconds...")
        sleeper.start(15)
      end)
      @config.deploy
    end
 
    def start
-      Publisher.logger.info
+      Publisher.logger.info "Publisher has been started"
 
      setup_signal_handler
      start_supervisor
      main_event_loop
 
-      Publisher.logger.info
+      Publisher.logger.info "Publisher has been stopped"
    end
 
    private
@@ -181,12 +182,11 @@ module Publisher
    def main_event_loop
      loop do
        command = @command_queue.pop
-
-
-
-
-
-        sleep 0.5
+        if SIGNALS_FOR_TERMINATION.include?(command)
+          @sleeper.stop
+          break
+        else
+          sleep 0.5
        end
      end
 
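The reworked `connect`/`do_the_work` pair uses RabbitMQ publisher confirms through Bunny. A minimal, self-contained sketch of that pattern with the vhost and exchange names from the example (everything else about the broker connection is an assumption):

```ruby
require "bunny"
require "json"
require "securerandom"

# Open a confirm-enabled channel, publish one message, and block until
# the broker confirms (or rejects) it.
connection = Bunny.new(vhost: "event_hub", automatic_recovery: false)
connection.start

channel = connection.create_channel
channel.confirm_select                               # enable publisher confirms
exchange = channel.direct("example.outbound", durable: true)

payload = {body: {id: SecureRandom.uuid}}.to_json
exchange.publish(payload, persistent: true)

if channel.wait_for_confirms                         # true once the broker acked the publish
  puts "message confirmed"
else
  puts "message not confirmed"
end

connection.close
```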
data/example/receiver.rb
CHANGED

@@ -1,9 +1,9 @@
-require_relative
+require_relative "../lib/eventhub/base"
 
 module EventHub
   class Receiver < Processor2
     def handle_message(message, args = {})
-      id = message.body[
+      id = message.body["id"]
       EventHub.logger.info("[#{id}] - Received")
 
       file_name = "data/#{id}.json"
data/example/router.rb
CHANGED

@@ -1,12 +1,12 @@
-require_relative
+require_relative "../lib/eventhub/base"
 
 module EventHub
   # Demo class
   class Router < Processor2
     def handle_message(message, args = {})
-      id = message.body[
+      id = message.body["id"]
       EventHub.logger.info("Received: [#{id}]")
-      publish(message: message.to_json, exchange_name:
+      publish(message: message.to_json, exchange_name: "example.inbound")
       EventHub.logger.info("Returned: [#{id}]")
       nil
     end
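receiver.rb and router.rb both follow the same handler contract: subclass `EventHub::Processor2`, implement `handle_message`, and optionally republish. A condensed restatement of that contract; the class name is hypothetical, and the queue wiring comes from `EventHub::Configuration` rather than the class itself:

```ruby
require_relative "../lib/eventhub/base"

module EventHub
  # Hypothetical processor: log the incoming message id, forward the message
  # to another exchange, and return nil so no response messages go back to
  # the dispatcher.
  class Forwarder < Processor2
    def handle_message(message, args = {})
      id = message.body["id"]
      EventHub.logger.info("Forwarding: [#{id}]")
      publish(message: message.to_json, exchange_name: "example.inbound")
      nil
    end
  end
end
```

The queues such a processor listens on are taken from `EventHub::Configuration.processor[:listener_queues]`, as the actor_listener.rb changes below show.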
data/lib/eventhub/actor_heartbeat.rb
CHANGED

@@ -12,23 +12,22 @@ module EventHub
     end
 
     def start
-      EventHub.logger.info(
+      EventHub.logger.info("Heartbeat is starting...")
 
-      every(
+      every(60 * 60 * 24) { EventHub.logger.info("Actual actors: #{Celluloid::Actor.all.size}: #{Celluloid::Actor.all.map { |a| a.class }.join(", ")}") }
 
-      publish(heartbeat(action:
-      EventHub.logger.info(
+      publish(heartbeat(action: "started"))
+      EventHub.logger.info("Heartbeat has sent [started] beat")
       loop do
         sleep Configuration.processor[:heartbeat_cycle_in_s]
         publish(heartbeat)
-        EventHub.logger.info('Heartbeat has sent a beat')
       end
     end
 
     def cleanup
-      EventHub.logger.info(
-      publish(heartbeat(action:
-      EventHub.logger.info(
+      EventHub.logger.info("Heartbeat is cleaning up...")
+      publish(heartbeat(action: "stopped"))
+      EventHub.logger.info("Heartbeat has sent a [stopped] beat")
     end
 
     private
@@ -43,29 +42,29 @@ module EventHub
       success = channel.wait_for_confirms
 
       unless success
-        raise
-
+        raise "Published heartbeat message has "\
+          "not been confirmed by the server"
       end
     ensure
-      connection
+      connection&.close
     end
 
-    def heartbeat(args = {
+    def heartbeat(args = {action: "running"})
       message = EventHub::Message.new
-      message.origin_module_id
-      message.origin_type
-      message.origin_site_id
+      message.origin_module_id = EventHub::Configuration.name
+      message.origin_type = "processor"
+      message.origin_site_id = "global"
 
-      message.process_name
+      message.process_name = "event_hub.heartbeat"
 
       now = Time.now
 
       # message structure needs more changes
       message.body = {
         version: @processor_instance.send(:version),
-        action:
-        pid:
-        process_name:
+        action: args[:action],
+        pid: Process.pid,
+        process_name: "event_hub.heartbeat",
         heartbeat: {
           started: now_stamp(started_at),
           stamp_last_beat: now_stamp(now),
@@ -90,11 +89,11 @@ module EventHub
     end
 
     def addresses
-      interfaces = Socket.getifaddrs.select
+      interfaces = Socket.getifaddrs.select { |interface|
         !interface.addr.ipv4_loopback? && !interface.addr.ipv6_loopback?
-
+      }
 
-      interfaces.map
+      interfaces.map { |interface|
        begin
          {
            interface: interface.name,
@@ -104,7 +103,7 @@ module EventHub
        rescue
          nil # will be ignored
        end
-
+      }.compact
     end
 
     def messages_statistics
data/lib/eventhub/actor_listener.rb
CHANGED

@@ -15,14 +15,14 @@ module EventHub
     end
 
     def start
-      EventHub.logger.info(
+      EventHub.logger.info("Listener is starting...")
       EventHub::Configuration.processor[:listener_queues].each_with_index do |queue_name, index|
         async.listen(queue_name: queue_name, index: index)
       end
     end
 
     def restart
-      raise
+      raise "Listener is restarting..."
     end
 
     def listen(args = {})
@@ -30,7 +30,7 @@ module EventHub
         EventHub.logger.info("Listening to queue [#{queue_name}]")
         consumer.on_delivery do |delivery_info, metadata, payload|
           EventHub.logger.info("#{queue_name}: [#{delivery_info.delivery_tag}]"\
-
+            " delivery")
 
           @processor_instance.statistics.measure(payload.size) do
             handle_payload(payload: payload,
@@ -38,25 +38,21 @@ module EventHub
                            queue_name: queue_name,
                            content_type: metadata[:content_type],
                            priority: metadata[:priority],
-                           delivery_tag: delivery_info.delivery_tag
-            )
+                           delivery_tag: delivery_info.delivery_tag)
            channel.acknowledge(delivery_info.delivery_tag, false)
          end
 
          EventHub.logger.info("#{queue_name}: [#{delivery_info.delivery_tag}]"\
-
+            " acknowledged")
        end
        queue.subscribe_with(consumer, block: false)
      end
-
    rescue => error
      EventHub.logger.error("Unexpected exception: #{error}. It should restart now with this exception...")
      raise
    end
 
    def with_listen(args = {}, &block)
-      connection_string, connection_properties = connection_properties
-
      connection = create_bunny_connection
      connection.start
      queue_name = args[:queue_name]
@@ -65,11 +61,11 @@ module EventHub
      channel.prefetch(1)
      queue = channel.queue(queue_name, durable: true)
      consumer = EventHub::Consumer.new(channel,
-
-
-
-
-
+                                        queue,
+                                        EventHub::Configuration.name +
+                                        "-" +
+                                        args[:index].to_s,
+                                        false)
      yield connection, channel, consumer, queue, queue_name
    end
 
@@ -86,18 +82,18 @@ module EventHub
      # return invalid messages to dispatcher
      if message.invalid?
        response_messages << message
-        EventHub.logger.info("-> #{message
+        EventHub.logger.info("-> #{message} => return invalid to dispatcher")
      else
        begin
          response_messages = @processor_instance.send(:handle_message,
-
-
+                                                        message,
+                                                        pass_arguments(args))
        rescue => exception
          # this catches unexpected exceptions in handle message method
          # deadletter the message via dispatcher
          message.status_code = EventHub::STATUS_DEADLETTER
-          message.status_message = exception
-          EventHub.logger.info("-> #{message
+          message.status_message = exception.to_s
+          EventHub.logger.info("-> #{message} => return exception to dispatcher")
          response_messages << message
        end
      end
@@ -109,21 +105,20 @@ module EventHub
 
    def pass_arguments(args = {})
      keys_to_pass = [:queue_name, :content_type, :priority, :delivery_tag]
-      args.select{ |key| keys_to_pass.include?(key) }
+      args.select { |key| keys_to_pass.include?(key) }
    end
 
    def cleanup
-      EventHub.logger.info(
+      EventHub.logger.info("Listener is cleaning up...")
      # close all open connections
      return unless @connections
      @connections.values.each do |connection|
-        connection
+        connection&.close
      end
    end
 
    def publish(args)
      @actor_publisher.publish(args)
    end
-
  end
 end
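The listener keeps the same consumption discipline: `prefetch(1)`, handle the payload, then acknowledge explicitly. A stripped-down sketch of that flow in plain Bunny (the queue name and vhost are assumptions; the gem wraps this in `EventHub::Consumer` and Celluloid actors):

```ruby
require "bunny"

# One unacked message at a time; acknowledging only after handling means an
# unhandled exception leaves the message unacknowledged for redelivery.
connection = Bunny.new(vhost: "event_hub")
connection.start

channel = connection.create_channel
channel.prefetch(1)                                  # at most one unacked delivery in flight
queue = channel.queue("example.inbound", durable: true)

queue.subscribe(manual_ack: true, block: true) do |delivery_info, _metadata, payload|
  puts "received #{payload.bytesize} bytes"
  channel.acknowledge(delivery_info.delivery_tag, false)  # ack this delivery only
end
```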