eventhub-processor2 1.0.1 → 1.1.0

checksums.yaml CHANGED
@@ -1,7 +1,7 @@
  ---
  SHA1:
- metadata.gz: 6055d052611f6817ac5917b68ea7ab9ff434878b
- data.tar.gz: e181ff110ba25de25f863fcd1eec79c12d7063fc
+ metadata.gz: '037081a3ac65841f8c3efd865fc17e26eaafbe44'
+ data.tar.gz: c38fc7169a5368e381c89595292916d0f9368a1b
  SHA512:
- metadata.gz: de63d420cbbe1511af91e4661d61eb03a838866b7f8550ae1b63ccb14e84f767e3c2e3480834943a13b34520d5ed73e796d7c0cf77c26201e346be7be70d9b2d
- data.tar.gz: 192e6a6b2d95a42339568501a85522ff694483317a3d5e90c1c4b635ecfb0fd22f2229087fe0614fc77c796233e581447106a10b90a534be51601b7c5b02524d
+ metadata.gz: '081046afc2e55110f336eff3982cfbd1893849d125ff41bb3a75bfe8ad1027342964d05ffee90c25549a293d9a1a1ba6b90d76d8beea1596d4fc8b14cfc7da03'
+ data.tar.gz: f40bb241e4f25f6d12fa2715a2ab99830f56a5ee0afcc468ab8eba8de1ae8f71bfe20de0ac8918ccefa8c93a0feff454752703d6d9c7ac75f72573697425fa1b
data/.travis.yml CHANGED
@@ -9,9 +9,9 @@ os:
  - linux

  rvm:
- - 2.4.2
- - 2.3.1
- - 2.1.5
+ - 2.5.0
+ - 2.4.3
+ - 2.3.6

  env:
  - secure: R0rBtWClmnJKO+zyAncF6+1qDpZ6JQR+Z6VXzUmWw8MuxUXv732XTNYulY/tTeDW876GofAsOF9QfX/jhF+2ubV8SzM6rKTZS09feATuOWRPtECEaBOPcb0JGYouBNg/Lv4onuXb8KS/9U093sRqiJMfib82iY7QK9bnDPsYoQXtMnI7KBeQNrTV0H4nby3lvbjcpMDhmyg+anOmCP5yepZeS53Qq+C5GHguYivisvhSPmCfONrT303XxFLDo/aOkMDlceeZBDhtcpm5Px3GIwybSI9xRtAnpT73oL1Piazda6mRN7VHoT7YAHKqcJNZDB48HYuTj3Pt3oVVw0v78Cf25caBqXttXflXfB8U3qfVt+94uF0icH34NAnpW9l1B8GXHi6CTr27Hg2ZatWkQCN1rCpcCaW0eM/NoAG6snwTIwr1VvjLWpSW5wwhQgHPyMl+MRlTmCn/UM+eQSwPVhflKpMS1+ah6cbhDrQQF1jDt3dRaLqSnVE4APrafdT90SA3SCG2MGOE+pp+DX528gEsRB34UdX2V67dlwXeRu1eU5vZBR7jZrqwuwTAgSvxEF0eJWC0kgvEfouzuVHhVmmSAcNNHlg2kd0/oyQELyXTsklJghwAupcy4X/GEAyfLW9GAxItjATyV7XyfjmaBmdzHl/5JWeXicoyZ/4+8iE=
data/Gemfile.lock CHANGED
@@ -1,8 +1,8 @@
  PATH
  remote: .
  specs:
- eventhub-processor2 (1.0.1)
- bunny (~> 2.7)
+ eventhub-processor2 (1.1.0)
+ bunny (~> 2.9)
  celluloid (~> 0.17)
  eventhub-components (~> 0.2)
  uuidtools (~> 2.1)
@@ -10,9 +10,9 @@ PATH
  GEM
  remote: https://rubygems.org/
  specs:
- amq-protocol (2.2.0)
- bunny (2.7.2)
- amq-protocol (>= 2.2.0)
+ amq-protocol (2.3.0)
+ bunny (2.9.1)
+ amq-protocol (~> 2.3.0)
  celluloid (0.17.3)
  celluloid-essentials
  celluloid-extras
@@ -37,7 +37,7 @@ GEM
  hitimes (1.2.6)
  json (2.1.0)
  logstash-event (1.2.02)
- logstash-logger (0.25.1)
+ logstash-logger (0.26.0)
  logstash-event (~> 1.2)
  rake (12.3.0)
  rspec (3.7.0)
data/docker/Dockerfile CHANGED
@@ -1,4 +1,4 @@
- FROM rabbitmq:3-management
+ FROM rabbitmq:3.7.3-management
  ADD definitions.json /etc/rabbitmq/
  ADD rabbitmq.config /etc/rabbitmq/
  RUN chown rabbitmq:rabbitmq /etc/rabbitmq/rabbitmq.config /etc/rabbitmq/definitions.json
@@ -39,7 +39,14 @@
  "arguments": {}
  },
  {
- "name": "example",
+ "name": "example.inbound",
+ "vhost": "event_hub",
+ "durable": true,
+ "auto_delete": false,
+ "arguments": {}
+ },
+ {
+ "name": "example.outbound",
  "vhost": "event_hub",
  "durable": true,
  "auto_delete": false,
@@ -57,7 +64,16 @@
  "arguments": {}
  },
  {
- "name": "example",
+ "name": "example.inbound",
+ "vhost": "event_hub",
+ "type": "direct",
+ "durable": true,
+ "auto_delete": false,
+ "internal": false,
+ "arguments": {}
+ },
+ {
+ "name": "example.outbound",
  "vhost": "event_hub",
  "type": "direct",
  "durable": true,
@@ -76,9 +92,17 @@
  "arguments": {}
  },
  {
- "source": "example",
+ "source": "example.inbound",
+ "vhost": "event_hub",
+ "destination": "example.inbound",
+ "destination_type": "queue",
+ "routing_key": "",
+ "arguments": {}
+ },
+ {
+ "source": "example.outbound",
  "vhost": "event_hub",
- "destination": "example",
+ "destination": "example.outbound",
  "destination_type": "queue",
  "routing_key": "",
  "arguments": {}
@@ -22,8 +22,11 @@ Gem::Specification.new do |spec|
  spec.executables = spec.files.grep(%r{^exe/}) { |f| File.basename(f) }
  spec.require_paths = ['lib']

+ # required by celluloid and bunny (-> amq-protocol)
+ # spec.required_ruby_version = '~> 2.2.6'
+
  spec.add_dependency 'celluloid', '~> 0.17'
- spec.add_dependency 'bunny', '~> 2.7'
+ spec.add_dependency 'bunny', '~> 2.9'
  spec.add_dependency 'eventhub-components', '~> 0.2'
  spec.add_dependency 'uuidtools', '~> 2.1'

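For readers unfamiliar with pessimistic constraints, the effect of the bump from '~> 2.7' to '~> 2.9' can be checked with RubyGems directly; a quick sketch (not part of the gem):

    require 'rubygems'

    req = Gem::Requirement.new('~> 2.9')
    req.satisfied_by?(Gem::Version.new('2.9.1')) # => true,  the bunny version locked above
    req.satisfied_by?(Gem::Version.new('2.7.2')) # => false, the version used by 1.0.1
    req.satisfied_by?(Gem::Version.new('3.0.0')) # => false, '~>' stays below the next major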
data/example/README.md CHANGED
@@ -1,19 +1,30 @@
  ## Example Application

- ### Components
- * example.rb - is listening to the example queue, reads id from message and deletes file with id in data folder
- * publisher.rb - creates a file with name id.json (id=guid) and publishes a json messages with id as content to the example exchange
- * crasher.rb - randomly restarts example processes or docker container (processor-rabbitmq)
+ ### Description
+
+ The example application is a suite of small applications used to test the reliability and performance of the processor2 gem.
+
+ How does it work?
+
+ A message is passed through the following components:
+ publisher.rb => [example.outbound] => router.rb => [example.inbound] => receiver.rb
+
+ 1. publisher.rb generates a unique ID, creates a message with the ID as payload, and passes the message to the example.outbound queue.
+
+ 2. router.rb receives the message and passes it on to the example.inbound queue.
+
+ 3. receiver.rb gets the message and deletes the file with the given ID.
+
+ Goal: Whatever happens to these components (restarted, killed and restarted, stopped and started, message broker restarted), after a graceful shutdown there should be no messages left in the /data folder.
+
+ Graceful shutdown: Stop publisher.rb. Leave the other components running until all messages in the example.* queues are gone. Stop the remaining components.
+

  ### How to use
  * Make sure docker container (process-rabbitmq) is running
- * Start 1 or more example processes (I did 3)
- * Start 1 or more publisher processes ( I did 1)
- * Start one crasher.rb
-
- ### What is the goal
- * See how the components work under various conditions. Feel free to manually interact (Exp. kill -9 PID)
- * There should be no files left in the data folder when all example messages are consumed
+ * Start one or more router.rb
+ * Start one or more receiver.rb
+ * Start one or more publisher.rb
+ * Start crasher.rb if you like (or do it manually)

  ### Note
- It can happen that a file gets deleted before message is acknowledged. This message will be processed again and will just log a warning about missing file. Due to the nature of 2 independent processes it can not garanteed that both process transaction are all done or not done at all, but with message acknowledgement and and publisher confirms we can mitigate risk of lost messages.
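A small way to verify the goal above after a graceful shutdown (a sketch, not part of the example suite; it assumes it is run from the example directory):

    # count leftover message files, ignoring the publisher's own transaction store
    leftover = Dir.glob('data/*.json') - ['data/store.json']
    if leftover.empty?
      puts 'OK - no messages left in data/'
    else
      puts "WARN - #{leftover.size} message file(s) still pending"
    end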
@@ -9,10 +9,8 @@
  "tls": false
  },
  "processor": {
- "heartbeat_cycle_in_s": 300,
- "watchdog_cycle_in_s": 15,
  "listener_queues": [
- "example"
+ "example.inbound"
  ]
  }
  }
@@ -0,0 +1,17 @@
+ {
+ "development": {
+ "server": {
+ "user": "guest",
+ "password": "guest",
+ "host": "localhost",
+ "vhost": "event_hub",
+ "port": 5672,
+ "tls": false
+ },
+ "processor": {
+ "listener_queues": [
+ "example.outbound"
+ ]
+ }
+ }
+ }
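For orientation, these values are read through EventHub::Configuration at runtime; a rough sketch based on the calls visible later in this diff (it assumes the gem is loaded and the configuration file has been read via EventHub::Configuration.load!):

    EventHub::Configuration.server[:vhost]              # => "event_hub"
    EventHub::Configuration.processor[:listener_queues] # => ["example.outbound"]

    # the watchdog and listener iterate exactly this list of queues
    EventHub::Configuration.processor[:listener_queues].each do |queue_name|
      puts "listening on #{queue_name}"
    end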
data/example/crasher.rb CHANGED
@@ -1,57 +1,99 @@
- # MyProcess
- class MyProcess
- attr_reader :id
- def initialize(id)
- @id = id
- end
+ require 'eventhub/components'
+ require_relative '../lib/eventhub/sleeper'
+
+ RESTART_RANGES_IN_SECONDS = (30..600).to_a
+ PROCESS_PATTERNS = ['router', 'receiver']

- def restart
- puts "Sending Signal HUP to process [#{@id}]"
- Process.kill('HUP', @id)
- rescue Errno::ESRCH
+ # Module Crasher
+ module Crasher
+ def self.logger
+ unless @logger
+ @logger = ::EventHub::Components::MultiLogger.new
+ @logger.add_device(Logger.new(STDOUT))
+ @logger.add_device(
+ EventHub::Components::Logger.logstash('crasher', 'development')
+ )
+ end
+ @logger
  end

- def self.all
- processes = []
- data = `ps | grep example.rb | grep ruby`
- data.lines[0..-2].each do |line|
- processes << MyProcess.new(line.split(' ')[0].to_i)
+ class MyProcess
+ attr_reader :id, :name
+ def initialize(id, name)
+ @id = id
+ @name = name
  end

- puts "Found ids: #{processes.map{ |pr| pr.id}.join(', ')}"
- processes
- end
- end
+ def restart
+ Crasher.logger.info "Sending Signal HUP to process [#{@id}/#{@name}]"
+ Process.kill('HUP', @id)
+ rescue Errno::ESRCH
+ end
+
+ def self.all
+ processes = []
+ PROCESS_PATTERNS.each do |name|
+ data = `ps | grep #{name}.rb`
+ data.lines[0..-2].each do |line|
+ a = line.split(' ')
+ next if a.size > 5
+ processes << MyProcess.new(a[0].to_i, a[-1])
+ end
+ end

- # Docker
- class Docker
- def initialize(name, time = 10)
- @name = name
- @time = time
+ Crasher.logger.info "Found ids: #{processes.map{ |pr| pr.id}.join(', ')}"
+ processes
+ end
  end

- def restart
- puts "Restart (#{@time}) [#{@name}]"
- `docker restart -t #{@time} #{@name}`
+ # Docker
+ class Docker
+ attr_reader :name
+
+ def initialize(name, time = 10)
+ @name = name
+ @time = time
+ end
+
+ def restart
+ Crasher.logger.info "Restart (#{@time}) [#{@name}]"
+ `docker restart -t #{@time} #{@name}`
+ end
  end
- end

- def items
- a = []
- a << Docker.new('processor-rabbitmq')
- a << Docker.new('processor-rabbitmq', 0)
- a << MyProcess.all
- a.flatten!
- end

- run = true
- Signal.trap('INT') { run = false }
+ class Application
+ def initialize
+ @sleeper = EventHub::Sleeper.new
+ @run = true
+
+ Signal.trap('INT') {
+ @run = false
+ @sleeper.stop
+ }
+ end
+
+ def pick_process
+ processes = []
+ processes << Docker.new('processor-rabbitmq')
+ processes << Docker.new('processor-rabbitmq', 0)
+ processes << MyProcess.all
+ processes.flatten.sample
+ end

- while run
- to_sleep = rand(3600)
- puts "Waiting [#{to_sleep}]..."
- sleep to_sleep
- items.sample.restart
+ def start
+ Crasher.logger.info "Crasher has been started"
+ while @run
+ to_sleep = RESTART_RANGES_IN_SECONDS.sample
+ Crasher.logger.info "Waiting #{to_sleep} seconds..."
+ @sleeper.start(to_sleep)
+ next unless @run
+ process = pick_process
+ process.restart if process
+ end
+ Crasher.logger.info "Crasher has been stopped"
+ end
+ end
  end

- puts 'Done'
+ Crasher::Application.new.start
data/example/publisher.rb CHANGED
@@ -2,9 +2,79 @@ require 'bunny'
  require 'celluloid/current'
  require 'json'
  require 'securerandom'
+ require 'eventhub/components'
+ require_relative '../lib/eventhub/sleeper'
+
+ # Example module
+ module Example
+ def self.logger
+ unless @logger
+ @logger = ::EventHub::Components::MultiLogger.new
+ @logger.add_device(Logger.new(STDOUT))
+ @logger.add_device(
+ EventHub::Components::Logger.logstash('publisher', 'development')
+ )
+ end
+ @logger
+ end
+ end
+
+ SIGNALS_FOR_TERMINATION = [:INT, :TERM, :QUIT]
+ SIGNALS_FOR_RELOAD_CONFIG = [:HUP]
+ ALL_SIGNALS = SIGNALS_FOR_TERMINATION + SIGNALS_FOR_RELOAD_CONFIG

  Celluloid.logger = nil
- Celluloid.exception_handler { |ex| puts "Exception occured: #{ex}" }
+ Celluloid.exception_handler { |ex| Example.logger.error "Exception occured: #{ex}}" }
+
+ # Store to track pending files (files not yet confirmed to be sent)
+ class TransactionStore
+ include Celluloid
+ finalizer :cleanup
+
+ def initialize
+ @filename = 'data/store.json'
+ if File.exist?(@filename)
+ cleanup
+ else
+ File.write(@filename, '{}')
+ end
+ end
+
+ def start(name)
+ store = read_store
+ store[name] = Time.now.strftime('%Y-%m-%d %H:%M:%S.%L')
+ write_store(store)
+ end
+
+ def stop(name)
+ store = read_store
+ store.delete(name)
+ write_store(store)
+ end
+
+ def cleanup
+ # cleanup pending entries
+ Example.logger.info("Cleaning pending transactions...")
+ store = read_store
+ store.keys.each do |name|
+ name = "data/#{name}.json"
+ if File.exist?(name)
+ File.delete(name)
+ Example.logger.info("Deleted: #{name}")
+ end
+ end
+ write_store({})
+ end
+
+ private
+ def read_store
+ JSON.parse(File.read(@filename))
+ end
+
+ def write_store(store)
+ File.write(@filename, store.to_json)
+ end
+ end

  # Publisher
  class Publisher
@@ -16,14 +86,9 @@ class Publisher

  def start
  connect
- count = 1
  loop do
  do_the_work
-
- sleep 0.001
- print '.'
- puts '' if (count % 80).zero?
- count += 1
+ sleep 0.050
  end
  ensure
  @connection.close if @connection
@@ -38,57 +103,91 @@ class Publisher
  @connection.start
  @channel = @connection.create_channel
  @channel.confirm_select
- @exchange = @channel.direct('example', durable: true)
+ @exchange = @channel.direct('example.outbound', durable: true)
  end

  def do_the_work
+ #prepare id and content
  id = SecureRandom.uuid
+ file_name = "data/#{id}.json"
  data = { body: { id: id } }.to_json

- file = File.open("data/#{id}.json", 'w')
- file.write(data)
- file.close
+ # start transaction...
+ Celluloid::Actor[:transaction_store].start(id)
+ File.write(file_name, data)
+ Example.logger.info("[#{id}] - Message/File created")

  @exchange.publish(data, persistent: true)
  success = @channel.wait_for_confirms
-
- raise 'Published message not confirmed' unless success
+ if success
+ Celluloid::Actor[:transaction_store].stop(id) if Celluloid::Actor[:transaction_store]
+ Example.logger.info("[#{id}] - Message sent")
+ else
+ Example.logger.error("[#{id}] - Published message not confirmed")
+ end
  end
  end

  # Application
  class Application
  def initialize
- @run = true
+ @sleeper = EventHub::Sleeper.new
+ @command_queue = []
+ end
+
+ def start_supervisor
  @config = Celluloid::Supervision::Configuration.define(
  [
+ { type: TransactionStore, as: :transaction_store },
  { type: Publisher, as: :publisher }
  ]
  )

+ sleeper = @sleeper
  @config.injection!(:before_restart, proc do
- puts 'Restarting in 5 seconds...'
- sleep 5
+ Example.logger.info('Restarting in 15 seconds...')
+ sleeper.start(15)
  end)
+ @config.deploy
  end

  def start
- puts 'Publisher has been started'
- @config.deploy
+ Example.logger.info 'Publisher has been started'
+
+ setup_signal_handler
+ start_supervisor
  main_event_loop
- cleanup
- puts 'Publisher has been stopped'
+
+ Example.logger.info 'Publisher has been stopped'
  end

  private

  def main_event_loop
- Signal.trap(:INT) { @run = false }
- sleep 0.5 while @run
- end
+ loop do
+ command = @command_queue.pop
+ case
+ when SIGNALS_FOR_TERMINATION.include?(command)
+ @sleeper.stop
+ break
+ else
+ sleep 0.5
+ end
+ end

- def cleanup
  Celluloid.shutdown
+ # make sure all actors are gone
+ while Celluloid.running?
+ sleep 0.1
+ end
+ end
+
+ def setup_signal_handler
+ # have a re-entrant signal handler by just using a simple array
+ # https://www.sitepoint.com/the-self-pipe-trick-explained/
+ ALL_SIGNALS.each do |signal|
+ Signal.trap(signal) { @command_queue << signal }
+ end
  end
  end

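The signal handling above keeps the trap handlers trivial (they only append to an array) and does the real work in the main loop, in the spirit of the self-pipe trick linked in the comment. A minimal standalone sketch of the same pattern (illustrative only, not code from the gem):

    command_queue = []
    [:INT, :TERM].each do |signal|
      # traps must stay re-entrant, so they only record the signal
      Signal.trap(signal) { command_queue << signal }
    end

    loop do
      command = command_queue.pop        # nil while no signal is pending
      break if [:INT, :TERM].include?(command)
      sleep 0.5
    end
    puts 'shut down cleanly'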
@@ -0,0 +1,22 @@
+ require_relative '../lib/eventhub/base'
+
+ module EventHub
+ class Receiver < Processor2
+ def handle_message(message, args = {})
+ id = message.body['id']
+ EventHub.logger.info("[#{id}] - Received")
+
+ file_name = "data/#{id}.json"
+ begin
+ File.delete(file_name)
+ EventHub.logger.info("[#{id}] - File has been deleted")
+ rescue => error
+ EventHub.logger.error("[#{id}] - Unable to delete File: #{error}")
+ end
+
+ []
+ end
+ end
+ end
+
+ EventHub::Receiver.new.start
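The receiver returns an empty array because nothing has to be sent back. A processor that does answer can hand a response body back from handle_message, as the removed example.rb (at the end of this diff) did; a hedged sketch of that variant (the class name is illustrative, not part of the gem):

    module EventHub
      class Responder < Processor2
        def handle_message(message, args = {})
          id = message.body['id']
          # mirrors the deleted example.rb: return a response body instead of []
          { body: { id: id, message: 'has been done' } }
        end
      end
    end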
data/example/router.rb ADDED
@@ -0,0 +1,16 @@
+ require_relative '../lib/eventhub/base'
+
+ module EventHub
+ # Demo class
+ class Router < Processor2
+ def handle_message(message, args = {})
+ id = message.body['id']
+ EventHub.logger.info("Received: [#{id}]")
+ publish(message: message.to_json, exchange_name: 'example.inbound')
+ EventHub.logger.info("Returned: [#{id}]")
+ []
+ end
+ end
+ end
+
+ EventHub::Router.new.start
@@ -14,25 +14,27 @@ module EventHub
  def start
  EventHub.logger.info('Heartbeat is starting...')

- every(60) { EventHub.logger.info("Running actors: #{Celluloid::Actor.all.size}: #{Celluloid::Actor.all.map{ |a| a.class }.join(', ') }") }
+ every(300) { EventHub.logger.info("Actual actors: #{Celluloid::Actor.all.size}: #{Celluloid::Actor.all.map{ |a| a.class }.join(', ') }") }

  publish(heartbeat(action: 'started'))
+ EventHub.logger.info('Heartbeat has sent [started] beat')
  loop do
  sleep Configuration.processor[:heartbeat_cycle_in_s]
- EventHub.logger.info('Running heartbeat...')
  publish(heartbeat)
+ EventHub.logger.info('Heartbeat has sent a beat')
  end
  end

  def cleanup
  EventHub.logger.info('Heartbeat is cleanig up...')
  publish(heartbeat(action: 'stopped'))
+ EventHub.logger.info('Heartbeat has sent a [stopped] beat')
  end

  private

  def publish(message)
- connection = Bunny.new(bunny_connection_properties)
+ connection = create_bunny_connection
  connection.start
  channel = connection.create_channel
  channel.confirm_select
@@ -7,8 +7,9 @@ module EventHub
  finalizer :cleanup

  def initialize(processor_instance)
+ @actor_publisher = ActorPublisher.new_link
  @actor_watchdog = ActorWatchdog.new_link
- @connections= {}
+ @connections = {}
  @processor_instance = processor_instance
  start
  end
@@ -31,9 +32,6 @@ module EventHub
  EventHub.logger.info("#{queue_name}: [#{delivery_info.delivery_tag}]"\
  ' delivery')

- # EventHub::logger.debug("delivery_info: #{delivery_info.inspect}")
- # EventHub::logger.debug("metadata: #{metadata.inspect}")
-
  @processor_instance.statistics.measure(payload.size) do
  handle_payload(payload: payload,
  connection: connection,
@@ -50,10 +48,16 @@
  end
  queue.subscribe_with(consumer, block: false)
  end
+
+ rescue => error
+ EventHub.logger.error("Unexpected exception: #{error}. It should restart now with this exception...")
+ raise
  end

  def with_listen(args = {}, &block)
- connection = Bunny.new(bunny_connection_properties)
+ connection_string, connection_properties = connection_properties
+
+ connection = create_bunny_connection
  connection.start
  queue_name = args[:queue_name]
  @connections[queue_name] = connection
@@ -93,7 +97,7 @@
  # deadletter the message via dispatcher
  message.status_code = EventHub::STATUS_DEADLETTER
  message.status_message = exception
- EventHub.logger.info("-> #{message.to_s} => return exception to diaptcher")
+ EventHub.logger.info("-> #{message.to_s} => return exception to dispatcher")
  response_messages << message
  end
  end
@@ -108,54 +112,18 @@
  args.select{ |key| keys_to_pass.include?(key) }
  end

- def publish(args = {})
- with_publish(args) do |connection, exchange_name, message|
- begin
- channel = connection.create_channel
- channel.confirm_select
- exchange = channel.direct(exchange_name, durable: true)
- exchange.publish(message, persistent: true)
-
- success = channel.wait_for_confirms
-
- unless success
- raise 'Published message from Listener actor '\
- 'has not been confirmed by the server'
- end
- ensure
- channel.close if channel
- end
- end
- end
-
-
- def with_publish(args = {}, &block)
- message = args[:message]
- return if message.nil?
-
- need_to_close = false
- connection = args[:connection]
- if connection.nil?
- connection = Bunny.new(bunny_connection_properties)
- connection.start
- need_to_close = true
- end
-
- exchange_name = args[:exchange_name] || EH_X_INBOUND
-
- yield connection, exchange_name, message
- ensure
- connection.close if connection && need_to_close
- end
-
-
  def cleanup
  EventHub.logger.info('Listener is cleanig up...')
  # close all open connections
+ return unless @connections
  @connections.values.each do |connection|
  connection.close if connection
  end
  end

+ def publish(args)
+ @actor_publisher.publish(args)
+ end
+

  end
@@ -0,0 +1,46 @@
+ # EventHub module
+ module EventHub
+ # Heartbeat class
+ class ActorPublisher
+ include Celluloid
+ include Helper
+ finalizer :cleanup
+
+ def initialize
+ EventHub.logger.info('Publisher is starting...')
+ @connection = nil
+ end
+
+ def publish(args = {})
+ # keep connection once established
+ unless @connection
+ @connection = create_bunny_connection
+ @connection.start
+ end
+
+ message = args[:message]
+ return if message.nil?
+
+ exchange_name = args[:exchange_name] || EH_X_INBOUND
+
+ channel = @connection.create_channel
+ channel.confirm_select
+ exchange = channel.direct(exchange_name, durable: true)
+
+ exchange.publish(message, persistent: true)
+ success = channel.wait_for_confirms
+
+ unless success
+ raise 'Published message from Listener actor '\
+ 'has not been confirmed by the server'
+ end
+ ensure
+ channel.close if channel
+ end
+
+ def cleanup
+ EventHub.logger.info('Publisher is cleanig up...')
+ @connection.close if @connection
+ end
+ end
+ end
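Design note: with this new actor, the publish path that previously lived inside the Listener (see the removed publish/with_publish methods above) is delegated to a dedicated, linked actor that keeps a single connection open across publishes instead of opening one per call. Publisher confirms are still used (confirm_select plus wait_for_confirms), so an unconfirmed publish raises and lets the Celluloid supervision restart the actor.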
@@ -7,6 +7,7 @@ module EventHub
  finalizer :cleanup

  def initialize
+ EventHub.logger.info('Watchdog is starting...')
  async.start
  end

@@ -25,7 +26,7 @@ module EventHub
  private

  def watch
- connection = Bunny.new(bunny_connection_properties)
+ connection = create_bunny_connection
  connection.start

  EventHub::Configuration.processor[:listener_queues].each do |queue_name|
data/lib/eventhub/base.rb CHANGED
@@ -5,12 +5,20 @@ require 'base64'
  require 'eventhub/components'
  require 'logstash-logger'
  require 'bunny'
- require 'celluloid/current'
+
+ # Maybe needs refactoring in future versions ?!
+ if ENV['RSPEC_PROCESSOR2']
+ require 'celluloid'
+ else
+ # celluloid is booting automatically
+ require 'celluloid/current'
+ end

  require_relative 'version'
  require_relative 'constant'
  require_relative 'logger'
  require_relative 'helper'
+ require_relative 'sleeper'
  require_relative 'hash_extensions'
  require_relative 'configuration'
  require_relative 'message'
@@ -18,8 +26,9 @@ require_relative 'statistics'
  require_relative 'consumer'
  require_relative 'actor_heartbeat'
  require_relative 'actor_watchdog'
+ require_relative 'actor_publisher'
  require_relative 'actor_listener'
  require_relative 'processor2'

  Celluloid.logger = nil
- Celluloid.exception_handler { |ex| EventHub.logger.info "Exception occured: #{ex}" }
+ Celluloid.exception_handler { |ex| EventHub.logger.error "Exception occured: #{ex}" }
@@ -101,11 +101,17 @@ module EventHub
  host: 'localhost',
  vhost: 'event_hub',
  port: 5672,
- tls: false
+ tls: false,
+ tls_cert: nil,
+ tls_key: nil,
+ tls_ca_certificates: [],
+ verify_peer: false,
+ show_bunny_logs: false
  },
  processor: {
  heartbeat_cycle_in_s: 300,
  watchdog_cycle_in_s: 15,
+ restart_in_s: 15,
  listener_queues: [@name]
  }
  }
@@ -19,31 +19,36 @@ module EventHub
  end.compact.join('.')
  end

- def bunny_connection_properties
+ def create_bunny_connection
  server = EventHub::Configuration.server

- if Configuration.server[:tls]
- {
- user: server[:user],
- password: server[:password],
- host: server[:host],
- vhost: server[:vhost],
- port: server[:port],
- tls: server[:tls],
- logger: Logger.new('/dev/null'), # logs from Bunny not required
- network_recovery_interval: 15
- }
- else
- {
- user: server[:user],
- password: server[:password],
- host: server[:host],
- vhost: server[:vhost],
- port: server[:port],
- logger: Logger.new('/dev/null'), # logs from Bunny not required
- network_recovery_interval: 15
- }
+ protocol = 'amqp'
+ connection_properties = {}
+ connection_properties[:user] = server[:user]
+ connection_properties[:pass] = server[:password]
+ connection_properties[:vhost] = server[:vhost]
+
+ # inject bunny logs on request
+ unless server[:show_bunny_logs]
+ connection_properties[:logger] = Logger.new('/dev/null')
+ end
+
+ # we don't need it since reactors can deal with it
+ connection_properties[:automatically_recover] = false
+
+ # do we do tls?
+ if server[:tls]
+ protocol = "amqps"
+ connection_properties[:tls] = server[:tls]
+ connection_properties[:tls_cert] = server[:tls_cert]
+ connection_properties[:tls_key] = server[:tls_key]
+ connection_properties[:tls_ca_certificates] = server[:tls_ca_certificates]
+ connection_properties[:verify_peer] = server[:verify_peer]
  end
+
+ connection_string = "#{protocol}://#{server[:host]}:#{server[:port]}"
+
+ Bunny.new(connection_string, connection_properties)
  end

  # Formats stamp into UTC format
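With the default development settings shown earlier in this diff, the rewritten helper effectively hands Bunny a URI plus explicit properties; a sketch of the non-TLS case (values depend entirely on the loaded configuration):

    require 'bunny'
    require 'logger'

    conn = Bunny.new('amqp://localhost:5672',
                     user: 'guest',
                     pass: 'guest',
                     vhost: 'event_hub',
                     logger: Logger.new('/dev/null'),
                     automatically_recover: false)
    # with server[:tls] set, the scheme becomes amqps and the tls_cert / tls_key /
    # tls_ca_certificates / verify_peer values from the configuration are added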
@@ -25,6 +25,7 @@ module EventHub

  @command_queue = []

+ @sleeper = EventHub::Sleeper.new
  @started_at = Time.now
  @statistics = EventHub::Statistics.new
  end
@@ -56,10 +57,13 @@
  raise 'need to be implemented in derived class'
  end

- # pass message: '{ "header": ... , "body": { .. }}'
+ # pass message as string like: '{ "header": ... , "body": { .. }}'
  # and optionally exchange_name: 'your exchange name'
  def publish(args = {})
  Celluloid::Actor[:actor_listener].publish(args)
+ rescue => error
+ EventHub.logger.error("Unexpected exeption while publish: #{error}")
+ raise
  end

  def before_start
@@ -86,9 +90,11 @@
  {type: ActorListener, as: :actor_listener, args: [ self ]}
  ])

+ sleeper = @sleeper
  @config.injection!(:before_restart, proc do
- EventHub.logger.info('Restarting in 10 seconds...')
- sleep 10
+ restart_in_s = Configuration.processor[:restart_in_s]
+ EventHub.logger.info("Restarting in #{restart_in_s} seconds...")
+ sleeper.start(restart_in_s)
  end )

  @config.deploy
@@ -103,11 +109,18 @@
  case
  when SIGNALS_FOR_TERMINATION.include?(command)
  EventHub.logger.info("Command [#{command}] received")
+ @sleeper.stop
  break
  when SIGNALS_FOR_RELOAD_CONFIG.include?(command)
  EventHub::Configuration.load!
  EventHub.logger.info('Configuration file reloaded')
- Celluloid::Actor[:actor_listener].async.restart
+
+ # restart listener when actor is known
+ if Celluloid::Actor[:actor_listener]
+ Celluloid::Actor[:actor_listener].async.restart
+ else
+ EventHub.logger.info('Was unable to get a valid listener actor to restart... check!!!')
+ end
  else
  sleep 0.5
  end
@@ -0,0 +1,14 @@
+ # EventHub module
+ module EventHub
+ # Sleep Class which can interrupt running sleep
+ class Sleeper
+ def start(seconds)
+ @reader, @writer = IO.pipe
+ IO.select([@reader], nil, nil, seconds)
+ end
+
+ def stop
+ @writer.close if @writer and !@writer.closed?
+ end
+ end
+ end
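Sleeper blocks in IO.select on the read end of a pipe; closing the write end makes the select return early, which is how the crasher and the processor main loop cancel a pending wait from a signal trap. A minimal usage sketch (assuming the class above is loaded):

    sleeper = EventHub::Sleeper.new

    Signal.trap('INT') { sleeper.stop }  # closing the pipe wakes the sleeper early

    puts 'waiting up to 60 seconds (Ctrl-C to skip)...'
    sleeper.start(60)                    # returns after 60s, or immediately after stop
    puts 'done'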
@@ -1,3 +1,3 @@
  module EventHub
- VERSION = '1.0.1'.freeze
+ VERSION = '1.1.0'.freeze
  end
metadata CHANGED
@@ -1,14 +1,14 @@
  --- !ruby/object:Gem::Specification
  name: eventhub-processor2
  version: !ruby/object:Gem::Version
- version: 1.0.1
+ version: 1.1.0
  platform: ruby
  authors:
  - Steiner, Thomas
  autorequire:
  bindir: exe
  cert_chain: []
- date: 2017-12-08 00:00:00.000000000 Z
+ date: 2018-02-09 00:00:00.000000000 Z
  dependencies:
  - !ruby/object:Gem::Dependency
  name: celluloid
@@ -30,14 +30,14 @@ dependencies:
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.7'
+ version: '2.9'
  type: :runtime
  prerelease: false
  version_requirements: !ruby/object:Gem::Requirement
  requirements:
  - - "~>"
  - !ruby/object:Gem::Version
- version: '2.7'
+ version: '2.9'
  - !ruby/object:Gem::Dependency
  name: eventhub-components
  requirement: !ruby/object:Gem::Requirement
@@ -145,12 +145,15 @@ files:
  - docker/rabbitmq.config
  - eventhub-processor2.gemspec
  - example/README.md
- - example/config/example.json
+ - example/config/receiver.json
+ - example/config/router.json
  - example/crasher.rb
- - example/example.rb
  - example/publisher.rb
+ - example/receiver.rb
+ - example/router.rb
  - lib/eventhub/actor_heartbeat.rb
  - lib/eventhub/actor_listener.rb
+ - lib/eventhub/actor_publisher.rb
  - lib/eventhub/actor_watchdog.rb
  - lib/eventhub/base.rb
  - lib/eventhub/configuration.rb
@@ -161,6 +164,7 @@ files:
  - lib/eventhub/logger.rb
  - lib/eventhub/message.rb
  - lib/eventhub/processor2.rb
+ - lib/eventhub/sleeper.rb
  - lib/eventhub/statistics.rb
  - lib/eventhub/version.rb
  homepage: https://github.com/thomis/eventhub-processor2
@@ -183,7 +187,7 @@ required_rubygems_version: !ruby/object:Gem::Requirement
  version: '0'
  requirements: []
  rubyforge_project:
- rubygems_version: 2.6.12
+ rubygems_version: 2.6.11
  signing_key:
  specification_version: 4
  summary: Next generation gem to build ruby based eventhub processor
data/example/example.rb DELETED
@@ -1,25 +0,0 @@
- require_relative '../lib/eventhub'
-
- module EventHub
- # Demo class
- class Example < Processor2
- def handle_message(message, args = {})
- id = message.body['id']
- name = "data/#{id}.json"
-
- begin
- File.delete(name)
- rescue => ex
- EventHub.logger.warn("File [#{name}]: #{ex}")
- end
-
- { body: { id: id, message: 'has been done'}}
- end
-
- def version
- '10.1.1'
- end
- end
- end
-
- EventHub::Example.new.start