gilmour 0.3.4 → 0.4.1

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -4,13 +4,12 @@ require 'json'
 require 'logger'
 require_relative './waiter'
 
-# Top level module
 module Gilmour
+
   # The Responder module that provides the request and respond
   # DSL
   # The public methods in this class are available to be called
   # from the body of the handlers directly
-
   class Request
     attr_reader :topic, :body
 
@@ -20,6 +19,10 @@ module Gilmour
       end
     end
 
+  # Every request handler is executed in the context of a Responder
+  # object.
+  # This class contains methods to respond to requests as well as
+  # proxy methods for carrying out gilmour actions inside the handlers.
   class Responder
     LOG_SEPERATOR = '%%'
     LOG_PREFIX = "#{LOG_SEPERATOR}gilmour#{LOG_SEPERATOR}"
@@ -27,20 +30,21 @@ module Gilmour
     attr_reader :logger
     attr_reader :request
 
-    def fork_logger(io_writer)
+    def child_logger(writer) #:nodoc:
       logger = Logger.new(STDERR)
       loglevel = ENV["LOG_LEVEL"] ? ENV["LOG_LEVEL"].to_sym : :warn
       logger.level = Gilmour::LoggerLevels[loglevel] || Logger::WARN
       logger.formatter = proc do |severity, datetime, progname, msg|
-        data = "#{LOG_PREFIX}#{severity}#{LOG_SEPERATOR}#{msg}"
-        io_writer.write(data+"\n")
-        io_writer.flush
+        data = JSON.generate(severity: severity, msg: msg)
+        # data = "#{LOG_PREFIX}#{severity}#{LOG_SEPERATOR}#{msg}"
+        writer.write(data+"\n")
+        writer.flush
         nil
       end
       logger
     end
 
-    def make_logger
+    def make_logger #:nodoc:
       logger = Logger.new(STDERR)
       loglevel = ENV["LOG_LEVEL"] ? ENV["LOG_LEVEL"].to_sym : :warn
       logger.level = Gilmour::LoggerLevels[loglevel] || Logger::WARN
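
With this change the forked handler's logger serialises each record as a single JSON line ({"severity", "msg"}) on the pipe, instead of the old LOG_PREFIX/LOG_SEPERATOR string framing. A minimal standalone sketch of the new framing, using only the Ruby standard library (no Gilmour objects involved; the pipe and the sample message are illustrative):

    require 'json'
    require 'logger'

    reader, writer = IO.pipe

    # Child side: roughly what child_logger's formatter writes per log call.
    writer.write(JSON.generate(severity: 'WARN', msg: 'disk almost full') + "\n")
    writer.flush
    writer.close

    # Parent side: roughly what logger_relay does with each line it reads.
    parent_logger = Logger.new(STDERR)
    record = JSON.parse(reader.readline.chomp)
    parent_logger.send(record['severity'].downcase.to_sym, record['msg'])
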
@@ -51,34 +55,38 @@ module Gilmour
       logger
     end
 
-    def initialize(sender, topic, data, backend, timeout=600, forked=false)
+    def initialize(sender, topic, data, backend, opts={}) #:nodoc:
       @sender = sender
       @request = Request.new(topic, data)
       @response = { data: nil, code: nil }
       @backend = backend
-      @timeout = timeout || 600
-      @multi_process = forked || false
-      @pipe = IO.pipe
-      @publish_pipe = IO.pipe
+      @timeout = opts[:timeout] || 600
+      @multi_process = opts[:fork] || false
+      @respond = opts[:respond]
+      @response_pipe = IO.pipe
+      @logger_pipe = IO.pipe
+      @command_pipe = IO.pipe
       @logger = make_logger()
+      @delayed_response = false
     end
 
-    def receive_data(data)
-      sender, res_data, res_code = JSON.parse(data)
+    def receive_data(data) #:nodoc:
+      sender, res_data, res_code, opts = JSON.parse(data)
+      res_code ||= 200 if @respond
       write_response(sender, res_data, res_code) if sender && res_code
     end
 
     # Called by parent
-    def write_response(sender, data, code)
+    def write_response(sender, data, code) #:nodoc:
+      return unless @respond
       if code >= 300 && @backend.report_errors?
         emit_error data, code
       end
-
       @backend.send_response(sender, data, code)
     end
 
-    # Adds a dynamic listener for _topic_
-    def add_listener(topic, &handler)
+    # proxy to base add_listener
+    def add_listener(topic, opts={}, &handler)
       if @multi_process
         GLogger.error "Dynamic listeners using add_listener not supported \
           in forked responder. Ignoring!"
@@ -87,6 +95,27 @@ module Gilmour
       @backend.add_listener(topic, &handler)
     end
 
+    # Proxy to register slot (see Backend#slot for details)
+    def slot(topic, opts={}, &handler)
+      if @multi_process
+        GLogger.error "Dynamic listeners using add_listener not supported \
+          in forked responder. Ignoring!"
+      end
+
+      @backend.slot(topic, opts, &handler)
+    end
+
+    # Proxy to register reply listener (see Backend#reply_to for details)
+    def reply_to(topic, opts={}, &handler)
+      if @multi_process
+        GLogger.error "Dynamic listeners using add_listener not supported \
+          in forked responder. Ignoring!"
+      end
+
+      @backend.reply_to(topic, opts, &handler)
+    end
+
+
     # Sends a response with _body_ and _code_
     # If +opts[:now]+ is true, the response is sent immediately,
     # else it is defered until the handler finishes executing
@@ -99,149 +128,140 @@ module Gilmour
       end
     end
 
-    def pub_relay(waiter)
-      Thread.new {
-        waiter.add 1
-        loop {
+    # This prohibits sending a response from a reply handler even after
+    # the request execution has finished. This is useful for sending
+    # a response from inside a closure if the handler has to make further
+    # gilmour requests. To send a response later, call respond with the
+    # option "now" as true.
+    def delay_response
+      @delayed_response = true
+    end
+
+    def delayed_response? #:nodoc:
+      @delayed_response
+    end
+
+    def command_relay(reader, waiter) #:nodoc:
+      waiter.add
+      pub_mutex = Mutex.new
+
+      Thread.new do
+        loop do
           begin
-            data = @read_publish_pipe.readline
-            destination, message = JSON.parse(data)
-            @backend.publish(message, destination)
+            data = reader.readline
+            pub_mutex.synchronize do
+              method, args = JSON.parse(data)
+              @backend.send(method.to_sym, *args)
+            end
          rescue EOFError
            waiter.done
+            break
          rescue Exception => e
            GLogger.debug e.message
            GLogger.debug e.backtrace
          end
-        }
-      }
+        end
+      end
     end
 
     # All logs in forked mode are relayed chr
-    def child_io_relay(io_reader, waiter, parent_logger)
-      Thread.new {
-        waiter.add 1
-        loop {
+    def logger_relay(read_logger_pipe, waiter, parent_logger) #:nodoc:
+      waiter.add 1
+      Thread.new do
+        loop do
          begin
-            data = io_reader.readline.chomp
-            if data.start_with?(LOG_PREFIX)
-              data.split(LOG_PREFIX).each do |msg|
-                unless msg.empty?
-                  msg_grp = msg.split(LOG_SEPERATOR, 2)
-                  if msg_grp.length > 1
-                    data = msg_grp[1]
-                    case msg_grp[0]
-                    when 'INFO'
-                      parent_logger.info data
-                    when 'UNKNOWN'
-                      parent_logger.unknown data
-                    when 'WARN'
-                      parent_logger.warn data
-                    when 'ERROR'
-                      parent_logger.error data
-                    when 'FATAL'
-                      parent_logger.fatal data
-                    else
-                      parent_logger.debug data
-                    end
-                  else
-                    parent_logger.debug msg
-                  end
-                end
-              end
-              next
-            end
-
-            parent_logger.debug data
+            data = read_logger_pipe.readline.chomp
+            logdata = JSON.parse(data)
+            meth = logdata['severity'].downcase.to_sym
+            parent_logger.send(meth, logdata['msg'])
+          rescue JSON::ParserError
+            parent_logger.info data
+            next
          rescue EOFError
            waiter.done
+            break
          rescue Exception => e
            GLogger.error e.message
            GLogger.error e.backtrace
          end
-        }
-      }
+        end #loop
+      end
     end
 
     # Called by parent
-    # :nodoc:
-    def execute(handler)
-      if @multi_process
-        GLogger.debug "Executing #{@sender} in forked moode"
-
-        @read_pipe, @write_pipe = @pipe
-        @read_publish_pipe, @write_publish_pipe = @publish_pipe
-
-        io_reader, io_writer = IO.pipe
-
-        wg = Gilmour::Waiter.new
-        io_threads = []
-        io_threads << child_io_relay(io_reader, wg, @logger)
-        io_threads << pub_relay(wg)
+    def execute(handler) #:nodoc:
+      if !@multi_process
+        _execute(handler)
+        return
+      end
+      GLogger.debug "Executing #{@sender} in forked moode"
 
-        pid = Process.fork do
-          @backend.stop
-          EventMachine.stop_event_loop
+      # Create pipes for child communication
+      @read_pipe, @write_pipe = @response_pipe
+      @read_command_pipe, @write_command_pipe = @command_pipe
+      @read_logger_pipe, @write_logger_pipe = @logger_pipe = IO.pipe
 
-          #Close the parent channels in forked process
-          @read_pipe.close
-          @read_publish_pipe.close
-          io_reader.close unless io_reader.closed?
+      # setup relay threads
+      wg = Gilmour::Waiter.new
+      relay_threads = []
+      relay_threads << logger_relay(@read_logger_pipe, wg, @logger)
+      relay_threads << command_relay(@read_command_pipe, wg)
 
-          @response_sent = false
+      pid = Process.fork do
+        @backend.stop
+        EventMachine.stop_event_loop
 
-          @logger = fork_logger(io_writer)
-          _execute(handler)
-          io_writer.close
-        end
+        #Close the parent channels in forked process
+        @read_pipe.close
+        @read_command_pipe.close
+        @read_logger_pipe.close
 
-        # Cleanup the writers in Parent process.
-        io_writer.close
-        @write_pipe.close
-        @write_publish_pipe.close
+        @response_sent = false
 
-        begin
-          receive_data(@read_pipe.readline)
-        rescue EOFError => e
-          logger.debug e.message
-        end
+        # override the logger for the child
+        @logger = child_logger(@write_logger_pipe)
+        _execute(handler)
+        @write_logger_pipe.close
+      end
 
-        pid, status = Process.waitpid2(pid)
-        if !status
-          msg = "Child Process #{pid} crashed without status."
-          logger.error msg
-          # Set the multi-process mode as false, the child has died anyway.
-          @multi_process = false
-          write_response(@sender, msg, 500)
-        elsif status.exitstatus > 0
-          msg = "Child Process #{pid} exited with status #{status.exitstatus}"
-          logger.error msg
-          # Set the multi-process mode as false, the child has died anyway.
-          @multi_process = false
-          write_response(@sender, msg, 500)
-        end
+      # Cleanup the writers in Parent process.
+      @write_logger_pipe.close
+      @write_pipe.close
+      @write_command_pipe.close
 
-        @read_pipe.close
+      begin
+        receive_data(@read_pipe.readline)
+      rescue EOFError => e
+        logger.debug e.message
+      end
 
-        wg.wait do
-          io_threads.each { |th|
-            th.kill
-          }
-        end
+      pid, status = Process.waitpid2(pid)
+      if !status || status.exitstatus > 0
+        msg = if !status
+                "Child Process #{pid} crashed without status."
+              else
+                "Child Process #{pid} exited with status #{status.exitstatus}"
+              end
+        logger.error msg
+        # Set the multi-process mode as false, the child has died anyway.
+        @multi_process = false
+        write_response(@sender, msg, 500)
+      end
 
-        # Cleanup.
-        @read_publish_pipe.close
-        io_reader.close unless io_reader.closed?
+      @read_pipe.close
 
-      else
-        _execute(handler)
+      # relay cleanup.
+      wg.wait do
+        relay_threads.each { |th| th.kill }
       end
+      @read_command_pipe.close
+      @read_logger_pipe.close
     end
 
     # Publish all errors on gilmour.error
     # This may or may not have a listener based on the configuration
     # supplied at setup.
-    def emit_error(message, code = 500, extra = {})
+    def emit_error(message, code = 500, extra = {}) #:nodoc:
       opts = {
         topic: @request.topic,
         request_data: @request.body,
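
In 0.4.1 the single-purpose publish pipe becomes a generic command pipe: the forked child serialises any backend call as a ["method", [args...]] JSON line, and command_relay replays it on the parent's backend. A standalone sketch of that framing; FakeBackend and the topic name below are stand-ins for illustration, not Gilmour classes:

    require 'json'

    # Stand-in for the parent's backend object, only for this sketch.
    class FakeBackend
      def publish(message, destination, opts = {}, code = nil)
        puts "publish #{message.inspect} -> #{destination}"
      end
    end

    reader, writer = IO.pipe

    # Child side: roughly what call_parent_backend_method writes.
    writer.write(JSON.generate(['publish', [{ 'ok' => true }, 'example.topic', {}, nil]]) + "\n")
    writer.flush
    writer.close

    # Parent side: the core of the command_relay loop.
    backend = FakeBackend.new
    until reader.eof?
      method, args = JSON.parse(reader.readline)
      backend.send(method.to_sym, *args)
    end
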
@@ -257,11 +277,11 @@ module Gilmour
     end
 
     # Called by child
-    # :nodoc:
-    def _execute(handler)
+    def _execute(handler) #:nodoc:
+      ret = nil
       begin
         Timeout.timeout(@timeout) do
-          instance_eval(&handler)
+          ret = instance_eval(&handler)
         end
       rescue Timeout::Error => e
         logger.error e.message
@@ -274,32 +294,58 @@ module Gilmour
         @response[:code] = 500
         @response[:data] = e.message
       end
-
+      @response[:code] ||= 200 if @respond && !delayed_response?
       send_response if @response[:code]
     end
 
-    # Publishes a message. See Backend::publish
-    def publish(message, destination, opts = {}, code=nil)
+    def call_parent_backend_method(method, *args) #:nodoc:
+      msg = JSON.generate([method, args])
+      @write_command_pipe.write(msg+"\n")
+      @write_command_pipe.flush
+    end
+
+    # Proxy to publish method. See Backend#publish
+    def publish(message, destination, opts = {}, code=nil, &blk)
       if @multi_process
         if block_given?
           GLogger.error "Publish callback not supported in forked responder. Ignoring!"
-          # raise Exception.new("Publish Callback is not supported in forked mode.")
         end
-
-        msg = JSON.generate([destination, message, code])
-        @write_publish_pipe.write(msg+"\n")
-        @write_publish_pipe.flush
+        call_parent_backend_method('publish', message, destination, opts, code)
+        # method = opts[:method] || 'publish'
+        # msg = JSON.generate([method, [message, destination, opts, code]])
+        # @write_command_pipe.write(msg+"\n")
+        # @write_command_pipe.flush
       elsif block_given?
-        blk = Proc.new
         @backend.publish(message, destination, opts, &blk)
       else
         @backend.publish(message, destination, opts)
       end
     end
 
+    # Proxy to request! method. See Backend#request!
+    def request!(message, destination, opts={}, &blk)
+      if @multi_process
+        if block_given?
+          GLogger.error "Publish callback not supported in forked responder. Ignoring!"
+        end
+        call_parent_backend_method('request!', message, destination, opts)
+      else
+        @backend.request!(message, destination, opts, &blk)
+      end
+    end
+
+    # Proxy to signal! method. See Backend#signal!
+    def signal!(message, destination, opts={})
+      if @multi_process
+        call_parent_backend_method('signal!', message, destination, opts)
+      else
+        @backend.signal!(message, destination, opts)
+      end
+    end
+
+
     # Called by child
-    # :nodoc:
-    def send_response
+    def send_response #:nodoc:
       return if @response_sent
       @response_sent = true
 
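A hypothetical handler showing how these proxies and delay_response are meant to combine. Everything here is an assumption for illustration: the `backend` object, the topic names, the |data, code| block arity of the request! callback, and the respond(body, code, now: true) call shape; it also presumes the non-forked mode, since callbacks are ignored in forked responders.

    # Hypothetical reply handler; the block runs with the Responder as its
    # context, so delay_response, request! and respond are the proxies above.
    backend.reply_to 'example.orders.lookup' do
      # Don't auto-respond when this block returns; respond from the callback.
      delay_response
      request!(request.body, 'example.inventory.check') do |data, code|
        respond({ 'inventory' => data }, code, now: true)
      end
    end
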
@@ -14,7 +14,7 @@ module Gilmour
     def done
       synchronize do
         @count -= 1
-        if @count == 0
+        if @count <= 0
           @done = true
           @waiter_c.broadcast
         end
@@ -26,6 +26,10 @@ module Gilmour
     end
 
     def signal
+      if @count != 0
+        raise 'Cannot use signal alongside add/done'
+      end
+
       synchronize do
         @done = true
         @count = 0
@@ -34,31 +38,8 @@ module Gilmour
     end
 
     def wait(timeout=nil)
-      synchronize do
-        while !@done
-          @waiter_c.wait(@waiter_m, timeout)
-        end
-      end
+      synchronize { @waiter_c.wait(@waiter_m, timeout) unless @done }
       yield if block_given?
     end
   end
 end
-
-def test
-  wg = Gilmour::Waiter.new
-  wg.add 3
-
-  3.times do
-    Thread.new {
-      t = rand(10000) / 10000.0
-      sleep(t)
-      puts "done\n"
-      wg.done
-    }
-  end
-
-  wg.wait do
-    puts "All jobs done"
-  end
-
-end
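
The inline test driver removed here doubled as the only usage example for Gilmour::Waiter's add/done/wait pattern. A roughly equivalent standalone sketch, assuming the installed gem makes gilmour/waiter requirable:

    require 'gilmour/waiter'

    wg = Gilmour::Waiter.new
    wg.add 3

    # Three workers, each signalling completion via done.
    threads = 3.times.map do
      Thread.new do
        sleep(rand(10_000) / 10_000.0)
        puts "done"
        wg.done
      end
    end

    # Blocks until the count reaches zero, then runs the block.
    wg.wait do
      puts "All jobs done"
    end
    threads.each(&:join)
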
@@ -1,26 +1,6 @@
+require '../lib/gilmour/waiter'
 
-class Waiter
-  def initialize
-    @done = false
-    @waiter_m = Mutex.new
-    @waiter_c = ConditionVariable.new
-  end
-
-  def synchronize(&blk)
-    @waiter_m.synchronize(&blk)
-  end
-
-  def signal
-    synchronize do
-      @done = true
-      @waiter_c.signal
-    end
-  end
-
-  def wait(timeout=nil)
-    synchronize { @waiter_c.wait(@waiter_m, timeout) unless @done }
-  end
-end
+Waiter = Gilmour::Waiter
 
 RSpec.configure do |config|
   config.expect_with :rspec do |c|
@@ -25,12 +25,11 @@ def redis_wildcard_options
 end
 
 def redis_publish_async(options, message, key)
-  operation = proc do
-    redis = EM::Hiredis.connect
+  redis = EM::Hiredis.connect
+  EM.defer do
     payload, _ = Gilmour::Protocol.create_request(message)
     redis.publish(key, payload)
   end
-  EM.defer(operation)
 end
 
 def redis_send_and_recv(options, message, key)