eventhub-processor2 1.4.0 → 1.9.0

data/example/publisher.rb CHANGED
@@ -1,9 +1,9 @@
- require 'bunny'
- require 'celluloid/current'
- require 'json'
- require 'securerandom'
- require 'eventhub/components'
- require_relative '../lib/eventhub/sleeper'
+ require "bunny"
+ require "celluloid"
+ require "json"
+ require "securerandom"
+ require "eventhub/components"
+ require_relative "../lib/eventhub/sleeper"

  SIGNALS_FOR_TERMINATION = [:INT, :TERM, :QUIT]
  SIGNALS_FOR_RELOAD_CONFIG = [:HUP]
@@ -12,16 +12,16 @@ PAUSE_BETWEEN_WORK = 0.05 # default is 0.05

  Celluloid.logger = nil
  Celluloid.exception_handler { |ex| Publisher.logger.error "Exception occured: #{ex}}" }
+ Celluloid.boot

  # Publisher module
  module Publisher
-
  def self.logger
  unless @logger
  @logger = ::EventHub::Components::MultiLogger.new
- @logger.add_device(Logger.new(STDOUT))
+ @logger.add_device(Logger.new($stdout))
  @logger.add_device(
- EventHub::Components::Logger.logstash('publisher', 'development')
+ EventHub::Components::Logger.logstash("publisher", "development")
  )
  end
  @logger
@@ -36,11 +36,11 @@ module Publisher
  @start = Time.now
  @files_sent = 0

- @filename = 'data/store.json'
+ @filename = "data/store.json"
  if File.exist?(@filename)
  cleanup
  else
- File.write(@filename, '{}')
+ File.write(@filename, "{}")
  end

  every(30) { write_statistics }
@@ -48,7 +48,7 @@ module Publisher

  def start(name)
  store = read_store
- store[name] = Time.now.strftime('%Y-%m-%d %H:%M:%S.%L')
+ store[name] = Time.now.strftime("%Y-%m-%d %H:%M:%S.%L")
  write_store(store)
  end

@@ -76,19 +76,20 @@ module Publisher

  def write_statistics
  now = Time.now
- rate = @files_sent / (now-@start)
- time_spent = (now-@start)/60
- Publisher.logger.info("Started @ #{@start.strftime('%Y-%m-%d %H:%M:%S.%L')}: Files sent within #{'%0.1f' % time_spent} minutes: #{@files_sent}, #{ '%0.1f' % rate} files/second")
+ rate = @files_sent / (now - @start)
+ time_spent = (now - @start) / 60
+ Publisher.logger.info("Started @ #{@start.strftime("%Y-%m-%d %H:%M:%S.%L")}: Files sent within #{"%0.1f" % time_spent} minutes: #{@files_sent}, #{"%0.1f" % rate} files/second")
  end

  private
- def read_store
- JSON.parse(File.read(@filename))
- end

- def write_store(store)
- File.write(@filename, store.to_json)
- end
+ def read_store
+ JSON.parse(File.read(@filename))
+ end
+
+ def write_store(store)
+ File.write(@filename, store.to_json)
+ end
  end

  # Worker
@@ -106,26 +107,26 @@ module Publisher
  sleep PAUSE_BETWEEN_WORK
  end
  ensure
- @connection.close if @connection
+ @connection&.close
  end

  private

  def connect
- @connection = Bunny.new(vhost: 'event_hub',
+ @connection = Bunny.new(vhost: "event_hub",
  automatic_recovery: false,
- logger: Logger.new('/dev/null'))
+ logger: Logger.new("/dev/null"))
  @connection.start
  @channel = @connection.create_channel
  @channel.confirm_select
- @exchange = @channel.direct('example.outbound', durable: true)
+ @exchange = @channel.direct("example.outbound", durable: true)
  end

  def do_the_work
- #prepare id and content
+ # prepare id and content
  id = SecureRandom.uuid
  file_name = "data/#{id}.json"
- data = { body: { id: id } }.to_json
+ data = {body: {id: id}}.to_json

  # start transaction...
  Celluloid::Actor[:transaction_store].start(id)
@@ -135,10 +136,10 @@ module Publisher
  @exchange.publish(data, persistent: true)
  success = @channel.wait_for_confirms
  if success
- Celluloid::Actor[:transaction_store].stop(id) if Celluloid::Actor[:transaction_store]
- Publisher.logger.info("[#{id}] - Message sent")
+ Celluloid::Actor[:transaction_store]&.stop(id)
+ Publisher&.logger&.info("[#{id}] - Message sent")
  else
- Publisher.logger.error("[#{id}] - Published message not confirmed")
+ Publisher&.logger&.error("[#{id}] - Published message not confirmed")
  end
  end
  end
@@ -153,27 +154,27 @@ module Publisher
  def start_supervisor
  @config = Celluloid::Supervision::Configuration.define(
  [
- { type: TransactionStore, as: :transaction_store },
- { type: Worker, as: :worker }
+ {type: TransactionStore, as: :transaction_store},
+ {type: Worker, as: :worker}
  ]
  )

  sleeper = @sleeper
  @config.injection!(:before_restart, proc do
- Publisher.logger.info('Restarting in 15 seconds...')
+ Publisher.logger.info("Restarting in 15 seconds...")
  sleeper.start(15)
  end)
  @config.deploy
  end

  def start
- Publisher.logger.info 'Publisher has been started'
+ Publisher.logger.info "Publisher has been started"

  setup_signal_handler
  start_supervisor
  main_event_loop

- Publisher.logger.info 'Publisher has been stopped'
+ Publisher.logger.info "Publisher has been stopped"
  end

  private
@@ -181,12 +182,11 @@ module Publisher
  def main_event_loop
  loop do
  command = @command_queue.pop
- case
- when SIGNALS_FOR_TERMINATION.include?(command)
- @sleeper.stop
- break
- else
- sleep 0.5
+ if SIGNALS_FOR_TERMINATION.include?(command)
+ @sleeper.stop
+ break
+ else
+ sleep 0.5
  end
  end

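Two changes recur throughout the example publisher above and in the files below: single-quoted strings and spaced hash literals are rewritten in the double-quoted, compact style consistent with the standard (standardrb) formatter, and `object.close if object` guards become Ruby's safe navigation operator `&.` (Ruby 2.3+). A minimal sketch of the `&.` equivalence, not taken from the gem:

    connection = nil

    # Both guards behave the same: nothing is called when the receiver is nil.
    connection.close if connection  # old guard style
    connection&.close               # new safe-navigation style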
data/example/receiver.rb CHANGED
@@ -1,9 +1,9 @@
- require_relative '../lib/eventhub/base'
+ require_relative "../lib/eventhub/base"

  module EventHub
  class Receiver < Processor2
  def handle_message(message, args = {})
- id = message.body['id']
+ id = message.body["id"]
  EventHub.logger.info("[#{id}] - Received")

  file_name = "data/#{id}.json"
data/example/router.rb CHANGED
@@ -1,12 +1,12 @@
- require_relative '../lib/eventhub/base'
+ require_relative "../lib/eventhub/base"

  module EventHub
  # Demo class
  class Router < Processor2
  def handle_message(message, args = {})
- id = message.body['id']
+ id = message.body["id"]
  EventHub.logger.info("Received: [#{id}]")
- publish(message: message.to_json, exchange_name: 'example.inbound')
+ publish(message: message.to_json, exchange_name: "example.inbound")
  EventHub.logger.info("Returned: [#{id}]")
  nil
  end
@@ -12,23 +12,23 @@ module EventHub
  end

  def start
- EventHub.logger.info('Heartbeat is starting...')
+ EventHub.logger.info("Heartbeat is starting...")

- every(300) { EventHub.logger.info("Actual actors: #{Celluloid::Actor.all.size}: #{Celluloid::Actor.all.map{ |a| a.class }.join(', ') }") }
+ every(60 * 60 * 24) { EventHub.logger.info("Actual actors: #{Celluloid::Actor.all.size}: #{Celluloid::Actor.all.map { |a| a.class }.join(", ")}") }

- publish(heartbeat(action: 'started'))
- EventHub.logger.info('Heartbeat has sent [started] beat')
+ publish(heartbeat(action: "started"))
+ EventHub.logger.info("Heartbeat has sent [started] beat")
  loop do
  sleep Configuration.processor[:heartbeat_cycle_in_s]
  publish(heartbeat)
- EventHub.logger.info('Heartbeat has sent a beat')
+ EventHub.logger.info("Heartbeat has sent a beat")
  end
  end

  def cleanup
- EventHub.logger.info('Heartbeat is cleaning up...')
- publish(heartbeat(action: 'stopped'))
- EventHub.logger.info('Heartbeat has sent a [stopped] beat')
+ EventHub.logger.info("Heartbeat is cleaning up...")
+ publish(heartbeat(action: "stopped"))
+ EventHub.logger.info("Heartbeat has sent a [stopped] beat")
  end

  private
@@ -43,29 +43,29 @@ module EventHub
  success = channel.wait_for_confirms

  unless success
- raise 'Published heartbeat message has '\
- 'not been confirmed by the server'
+ raise "Published heartbeat message has "\
+ "not been confirmed by the server"
  end
  ensure
- connection.close if connection
+ connection&.close
  end

- def heartbeat(args = { action: 'running' })
+ def heartbeat(args = {action: "running"})
  message = EventHub::Message.new
- message.origin_module_id = EventHub::Configuration.name
- message.origin_type = 'processor'
- message.origin_site_id = 'global'
+ message.origin_module_id = EventHub::Configuration.name
+ message.origin_type = "processor"
+ message.origin_site_id = "global"

- message.process_name = 'event_hub.heartbeat'
+ message.process_name = "event_hub.heartbeat"

  now = Time.now

  # message structure needs more changes
  message.body = {
  version: @processor_instance.send(:version),
- action: args[:action],
- pid: Process.pid,
- process_name: 'event_hub.heartbeat',
+ action: args[:action],
+ pid: Process.pid,
+ process_name: "event_hub.heartbeat",
  heartbeat: {
  started: now_stamp(started_at),
  stamp_last_beat: now_stamp(now),
@@ -90,11 +90,11 @@ module EventHub
  end

  def addresses
- interfaces = Socket.getifaddrs.select do |interface|
+ interfaces = Socket.getifaddrs.select { |interface|
  !interface.addr.ipv4_loopback? && !interface.addr.ipv6_loopback?
- end
+ }

- interfaces.map do |interface|
+ interfaces.map { |interface|
  begin
  {
  interface: interface.name,
@@ -104,7 +104,7 @@ module EventHub
  rescue
  nil # will be ignored
  end
- end.compact
+ }.compact
  end

  def messages_statistics
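One behavioural change is easy to miss among the quoting updates above: the periodic actor-census log line in the heartbeat actor moves from `every(300)` to `every(60 * 60 * 24)`, i.e. from every five minutes to once per day:

    old_interval = 300          # seconds => every 5 minutes
    new_interval = 60 * 60 * 24 # seconds => 86_400, once per day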
@@ -15,14 +15,14 @@ module EventHub
  end

  def start
- EventHub.logger.info('Listener is starting...')
+ EventHub.logger.info("Listener is starting...")
  EventHub::Configuration.processor[:listener_queues].each_with_index do |queue_name, index|
  async.listen(queue_name: queue_name, index: index)
  end
  end

  def restart
- raise 'Listener is restarting...'
+ raise "Listener is restarting..."
  end

  def listen(args = {})
@@ -30,7 +30,7 @@ module EventHub
  EventHub.logger.info("Listening to queue [#{queue_name}]")
  consumer.on_delivery do |delivery_info, metadata, payload|
  EventHub.logger.info("#{queue_name}: [#{delivery_info.delivery_tag}]"\
- ' delivery')
+ " delivery")

  @processor_instance.statistics.measure(payload.size) do
  handle_payload(payload: payload,
@@ -38,25 +38,21 @@ module EventHub
  queue_name: queue_name,
  content_type: metadata[:content_type],
  priority: metadata[:priority],
- delivery_tag: delivery_info.delivery_tag
- )
+ delivery_tag: delivery_info.delivery_tag)
  channel.acknowledge(delivery_info.delivery_tag, false)
  end

  EventHub.logger.info("#{queue_name}: [#{delivery_info.delivery_tag}]"\
- ' acknowledged')
+ " acknowledged")
  end
  queue.subscribe_with(consumer, block: false)
  end
-
  rescue => error
  EventHub.logger.error("Unexpected exception: #{error}. It should restart now with this exception...")
  raise
  end

  def with_listen(args = {}, &block)
- connection_string, connection_properties = connection_properties
-
  connection = create_bunny_connection
  connection.start
  queue_name = args[:queue_name]
@@ -65,11 +61,11 @@ module EventHub
  channel.prefetch(1)
  queue = channel.queue(queue_name, durable: true)
  consumer = EventHub::Consumer.new(channel,
- queue,
- EventHub::Configuration.name +
- '-' +
- args[:index].to_s,
- false)
+ queue,
+ EventHub::Configuration.name +
+ "-" +
+ args[:index].to_s,
+ false)
  yield connection, channel, consumer, queue, queue_name
  end

@@ -86,18 +82,18 @@ module EventHub
  # return invalid messages to dispatcher
  if message.invalid?
  response_messages << message
- EventHub.logger.info("-> #{message.to_s} => return invalid to dispatcher")
+ EventHub.logger.info("-> #{message} => return invalid to dispatcher")
  else
  begin
  response_messages = @processor_instance.send(:handle_message,
- message,
- pass_arguments(args))
+ message,
+ pass_arguments(args))
  rescue => exception
  # this catches unexpected exceptions in handle message method
  # deadletter the message via dispatcher
  message.status_code = EventHub::STATUS_DEADLETTER
- message.status_message = exception
- EventHub.logger.info("-> #{message.to_s} => return exception to dispatcher")
+ message.status_message = exception.to_s
+ EventHub.logger.info("-> #{message} => return exception to dispatcher")
  response_messages << message
  end
  end
@@ -109,21 +105,20 @@ module EventHub

  def pass_arguments(args = {})
  keys_to_pass = [:queue_name, :content_type, :priority, :delivery_tag]
- args.select{ |key| keys_to_pass.include?(key) }
+ args.select { |key| keys_to_pass.include?(key) }
  end

  def cleanup
- EventHub.logger.info('Listener is cleaning up...')
+ EventHub.logger.info("Listener is cleaning up...")
  # close all open connections
  return unless @connections
  @connections.values.each do |connection|
- connection.close if connection
+ connection&.close
  end
  end

  def publish(args)
  @actor_publisher.publish(args)
  end
-
  end
  end
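In the listener's dead-letter path above, `message.status_message` now receives `exception.to_s` instead of the exception object itself, so the field stays a plain string when the message is returned to the dispatcher. A minimal illustration (variable names only, not the gem's API):

    exception = RuntimeError.new("connection refused")

    exception.class        # => RuntimeError (what used to be assigned)
    exception.to_s         # => "connection refused" (what is assigned now)
    exception.to_s.class   # => String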
@@ -7,7 +7,7 @@ module EventHub
  finalizer :cleanup

  def initialize
- EventHub.logger.info('Publisher is starting...')
+ EventHub.logger.info("Publisher is starting...")
  @connection = nil
  end

@@ -31,16 +31,16 @@ module EventHub
  success = channel.wait_for_confirms

  unless success
- raise 'Published message from Listener actor '\
- 'has not been confirmed by the server'
+ raise "Published message from Listener actor "\
+ "has not been confirmed by the server"
  end
- ensure
- channel.close if channel
+ ensure
+ channel&.close
  end

  def cleanup
- EventHub.logger.info('Publisher is cleaning up...')
- @connection.close if @connection
+ EventHub.logger.info("Publisher is cleaning up...")
+ @connection&.close
  end
  end
  end
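Both the example publisher and the internal actors above rely on Bunny's publisher confirms: `confirm_select` on the channel, then `wait_for_confirms` after publishing, and an error is raised or logged when the broker does not confirm. A condensed, standalone sketch of that pattern, reusing the vhost and exchange names from the example code and assuming a reachable RabbitMQ broker:

    require "bunny"
    require "json"
    require "securerandom"

    connection = Bunny.new(vhost: "event_hub", automatic_recovery: false)
    connection.start

    channel = connection.create_channel
    channel.confirm_select  # enable publisher confirms on this channel
    exchange = channel.direct("example.outbound", durable: true)

    data = {body: {id: SecureRandom.uuid}}.to_json
    exchange.publish(data, persistent: true)

    if channel.wait_for_confirms  # blocks until the broker acks (or nacks) the publish
      puts "Message confirmed"
    else
      puts "Published message not confirmed"
    end

    connection.close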