gilmour 0.3.2 → 0.3.3

Sign up to get free protection for your applications and to get access to all the features.
checksums.yaml CHANGED
@@ -1,7 +1,7 @@
1
1
  ---
2
2
  SHA1:
3
- metadata.gz: b164c237e9423f1bbcc3e91cf3dc94c376c7163e
4
- data.tar.gz: 87aec87330f766c0e5428fa301637e077dc61a41
3
+ metadata.gz: 7927c5fb1c94d36f7ae85c605d16b3e715cc215a
4
+ data.tar.gz: b57d2ca7464355623376c35faa04e2e0adbc133b
5
5
  SHA512:
6
- metadata.gz: 9227f1a16c983556caaa99b9a712e3641d0f208f517eab8eba0fb3001f543146c57284ea8b1cda44982099ad69a40b54ddaa31069384b5c165a5ec5292c778ff
7
- data.tar.gz: ec09fe7036f7829e1d5478b6f8ffcdd97216a09b94822b5c29bb5803af4f5f23b477f8bfea1d19da9f9ac5ee4b83dbe1478ea0373df2041a94c5c406e368029f
6
+ metadata.gz: 088587c3aea578730a2926698cd952d76e101abd6e1c5e99e57459116407bc5cc27d68d91b9ceaec64fb0412718da5ad4e620859b81bd5f638af102faa82ad29
7
+ data.tar.gz: 80fe912c98305251b4871be44e10f4c8ee9551e185cfb7c4c31b9bff86289cbdbae905a03309754bff0f53a58a22d3842aa280a7cb4fc2bca25a59e4ffd9440b
@@ -0,0 +1,97 @@
1
+ # encoding: utf-8
2
+ require_relative '../lib/gilmour'
3
+
4
+ class EventServer
5
+ include Gilmour::Base
6
+
7
+ def initialize
8
+ backend = 'redis'
9
+ enable_backend(backend, { })
10
+ registered_subscribers.each do |sub|
11
+ sub.backend = backend
12
+ end
13
+ $stderr.puts "Starting server. To see messaging in action run clients."
14
+ start(true)
15
+ end
16
+ end
17
+
18
+ class EchoSubscriber < EventServer
19
+ class << self
20
+ def capture_output(pipe)
21
+ streams = [$stdout, $stderr]
22
+
23
+ # Save the streams to be reassigned later.
24
+ # Actually it doesn't matter because the child process would be killed
25
+ # anyway after the work is done.
26
+ saved = streams.collect { |stream| stream.dup }
27
+
28
+ begin
29
+ streams.each_with_index do |stream, ix|
30
+ # Probably I should not use IX, otherwise stdout and stderr can arrive
31
+ # out of order, which they should?
32
+ # If I reopen both of them on the same PIPE, they are guaranteed to
33
+ # arrive in order.
34
+ stream.reopen(pipe)
35
+ #stream.sync = true
36
+ end
37
+ yield
38
+ ensure
39
+ # This is sort of meaningless, just makes sense aesthetically.
40
+ # To return what was borrowed.
41
+ streams.each_with_index do |stream, i|
42
+ stream.reopen(saved[i])
43
+ end
44
+ pipe.close unless pipe.closed?
45
+ end
46
+ end
47
+
48
+ def ds_respond(topic, opts={}, &blk)
49
+ options = { exclusive: true, fork: true }.merge(opts)
50
+ listen_to topic, options do
51
+ logger.error "Capturing output before execution"
52
+
53
+ waiter = Gilmour::Waiter.new
54
+ waiter.add 1
55
+ read_pipe, write_pipe = IO.pipe
56
+
57
+ th = Thread.new {
58
+ loop {
59
+ begin
60
+ result = read_pipe.readline.chomp
61
+ logger.debug result
62
+ rescue EOFError
63
+ waiter.done
64
+ rescue Exception => e
65
+ logger.error "Error: #{e.message}"
66
+ logger.error "Traceback: #{e.backtrace}"
67
+ end
68
+ }
69
+ }
70
+
71
+ EchoSubscriber::capture_output(write_pipe) do
72
+ instance_eval(&blk)
73
+ end
74
+
75
+ waiter.wait do
76
+ th.kill
77
+ end
78
+
79
+ end
80
+ end
81
+ end
82
+
83
+ # Passing second parameter as true makes only one instance of this handler handle a request
84
+ EchoSubscriber::ds_respond 'echo.*' do
85
+ if request.body == 'Palmolive'
86
+ respond nil
87
+ else
88
+ logger.error "logger: #{request.body}"
89
+ $stderr.puts "stderr.puts: #{request.body}"
90
+ puts "stdout.puts #{request.body}"
91
+ respond "#{request.topic}"
92
+ end
93
+ end
94
+
95
+ end
96
+
97
+ EventServer.new
@@ -42,9 +42,6 @@ module Gilmour
42
42
  @report_errors = opts["broadcast_errors"] || opts[:broadcast_errors]
43
43
  @report_errors = true if @report_errors != false
44
44
 
45
- @capture_stdout = opts["capture_stdout"] || opts[:capture_stdout]
46
- @capture_stdout = false if @capture_stdout != true
47
-
48
45
  @ident = generate_ident
49
46
  end
50
47
 
@@ -56,10 +53,6 @@ module Gilmour
56
53
  "#{Socket.gethostname}-pid-#{Process.pid}-uuid-#{SecureRandom.uuid}"
57
54
  end
58
55
 
59
- def capture_stdout?
60
- @capture_stdout
61
- end
62
-
63
56
  def report_health?
64
57
  @report_health
65
58
  end
@@ -2,7 +2,6 @@
2
2
 
3
3
  require 'json'
4
4
  require 'logger'
5
- require_relative './stdhijack'
6
5
  require_relative './waiter'
7
6
 
8
7
  # Top level module
@@ -28,12 +27,16 @@ module Gilmour
28
27
  attr_reader :logger
29
28
  attr_reader :request
30
29
 
31
- def fork_logger
30
+ def fork_logger(io_writer)
32
31
  logger = Logger.new(STDERR)
33
32
  loglevel = ENV["LOG_LEVEL"] ? ENV["LOG_LEVEL"].to_sym : :warn
34
33
  logger.level = Gilmour::LoggerLevels[loglevel] || Logger::WARN
34
+ logger.datetime_format = "%Y-%m-%d %H:%M:%S"
35
35
  logger.formatter = proc do |severity, datetime, progname, msg|
36
- "#{LOG_PREFIX}#{severity}#{LOG_SEPERATOR}#{msg}"
36
+ data = "#{LOG_PREFIX}#{severity}#{LOG_SEPERATOR}#{msg}"
37
+ io_writer.write(data+"\n")
38
+ io_writer.flush
39
+ nil
37
40
  end
38
41
  logger
39
42
  end
@@ -43,6 +46,7 @@ module Gilmour
43
46
  original_formatter = Logger::Formatter.new
44
47
  loglevel = ENV["LOG_LEVEL"] ? ENV["LOG_LEVEL"].to_sym : :warn
45
48
  logger.level = Gilmour::LoggerLevels[loglevel] || Logger::WARN
49
+ logger.datetime_format = "%Y-%m-%d %H:%M:%S"
46
50
  logger.formatter = proc do |severity, datetime, progname, msg|
47
51
  original_formatter.call(severity, datetime, @sender, msg)
48
52
  end
@@ -59,7 +63,6 @@ module Gilmour
59
63
  @pipe = IO.pipe
60
64
  @publish_pipe = IO.pipe
61
65
  @logger = make_logger()
62
- @capture_stdout = @backend.capture_stdout? || false
63
66
  end
64
67
 
65
68
  def receive_data(data)
@@ -116,59 +119,50 @@ module Gilmour
116
119
  }
117
120
  end
118
121
 
119
- def io_readers(parent_io, waiter)
120
- io_threads = []
121
-
122
- parent_io.each do |reader|
123
- io_threads << Thread.new {
124
- waiter.add 1
125
- loop {
126
- begin
127
- data = reader.readline.chomp
128
- if data.empty?
129
- next
130
- end
131
-
132
- if data.start_with?(LOG_PREFIX)
133
- data.split(LOG_PREFIX).each do |msg|
122
+ # All logs in forked mode are relayed through this reader thread
123
+ def child_io_relay(io_reader, waiter, parent_logger)
124
+ Thread.new {
125
+ waiter.add 1
126
+ loop {
127
+ begin
128
+ data = io_reader.readline.chomp
129
+ if data.start_with?(LOG_PREFIX)
130
+ data.split(LOG_PREFIX).each do |msg|
131
+ unless msg.empty?
134
132
  msg_grp = msg.split(LOG_SEPERATOR, 2)
135
-
136
133
  if msg_grp.length > 1
137
134
  data = msg_grp[1]
138
135
  case msg_grp[0]
139
136
  when 'INFO'
140
- logger.info data
137
+ parent_logger.info data
141
138
  when 'UNKNOWN'
142
- logger.unknown data
139
+ parent_logger.unknown data
143
140
  when 'WARN'
144
- logger.warn data
141
+ parent_logger.warn data
145
142
  when 'ERROR'
146
- logger.error data
143
+ parent_logger.error data
147
144
  when 'FATAL'
148
- logger.fatal data
145
+ parent_logger.fatal data
149
146
  else
150
- logger.debug data
147
+ parent_logger.debug data
151
148
  end
152
149
  else
153
- logger.debug msg
150
+ parent_logger.debug msg
154
151
  end
155
-
156
152
  end
157
- next
158
153
  end
159
-
160
- logger.debug data
161
- rescue EOFError
162
- waiter.done
163
- rescue Exception => e
164
- GLogger.error e.message
165
- GLogger.error e.backtrace
154
+ next
166
155
  end
167
- }
168
- }
169
- end
170
156
 
171
- io_threads
157
+ parent_logger.debug data
158
+ rescue EOFError
159
+ waiter.done
160
+ rescue Exception => e
161
+ GLogger.error e.message
162
+ GLogger.error e.backtrace
163
+ end
164
+ }
165
+ }
172
166
  end
173
167
 
174
168
  # Called by parent
@@ -180,15 +174,12 @@ module Gilmour
180
174
  @read_pipe, @write_pipe = @pipe
181
175
  @read_publish_pipe, @write_publish_pipe = @publish_pipe
182
176
 
183
- out_r, out_w = IO.pipe
184
- parent_io = [out_r]
185
- child_io = [out_w]
177
+ io_reader, io_writer = IO.pipe
186
178
 
187
- if @capture_stdout == true
188
- err_r, err_w = IO.pipe
189
- child_io << err_w
190
- parent_io << err_r
191
- end
179
+ wg = Gilmour::Waiter.new
180
+ io_threads = []
181
+ io_threads << child_io_relay(io_reader, wg, @logger)
182
+ io_threads << pub_relay(wg)
192
183
 
193
184
  pid = Process.fork do
194
185
  @backend.stop
@@ -197,26 +188,20 @@ module Gilmour
197
188
  #Close the parent channels in forked process
198
189
  @read_pipe.close
199
190
  @read_publish_pipe.close
200
- parent_io.each{|io| io.close}
191
+ io_reader.close unless io_reader.closed?
201
192
 
202
193
  @response_sent = false
203
- @logger = fork_logger
204
194
 
205
- capture_output(child_io, @capture_stdout) {
206
- _execute(handler)
207
- }
195
+ @logger = fork_logger(io_writer)
196
+ _execute(handler)
197
+ io_writer.close
208
198
  end
209
199
 
210
200
  # Cleanup the writers in Parent process.
211
- child_io.each {|io| io.close }
212
-
201
+ io_writer.close
213
202
  @write_pipe.close
214
203
  @write_publish_pipe.close
215
204
 
216
- wg = Gilmour::Waiter.new
217
- io_threads = io_readers(parent_io, wg)
218
- io_threads << pub_relay(wg)
219
-
220
205
  begin
221
206
  receive_data(@read_pipe.readline)
222
207
  rescue EOFError => e
@@ -248,7 +233,7 @@ module Gilmour
248
233
 
249
234
  # Cleanup.
250
235
  @read_publish_pipe.close
251
- parent_io.each{|io| io.close unless io.closed?}
236
+ io_reader.close unless io_reader.closed?
252
237
 
253
238
  else
254
239
  _execute(handler)
@@ -13,9 +13,10 @@ module Gilmour
13
13
 
14
14
  def done
15
15
  synchronize do
16
- @count -= n
16
+ @count -= 1
17
17
  if @count == 0
18
- signal
18
+ @done = true
19
+ @waiter_c.broadcast
19
20
  end
20
21
  end
21
22
  end
@@ -33,7 +34,11 @@ module Gilmour
33
34
  end
34
35
 
35
36
  def wait(timeout=nil)
36
- synchronize { @waiter_c.wait(@waiter_m, timeout) unless @done }
37
+ synchronize do
38
+ while !@done
39
+ @waiter_c.wait(@waiter_m, timeout)
40
+ end
41
+ end
37
42
  yield if block_given?
38
43
  end
39
44
  end
data/version.rb CHANGED
@@ -1,3 +1,3 @@
1
1
  module Gilmour
2
- VERSION = '0.3.2'
2
+ VERSION = '0.3.3'
3
3
  end
metadata CHANGED
@@ -1,7 +1,7 @@
1
1
  --- !ruby/object:Gem::Specification
2
2
  name: gilmour
3
3
  version: !ruby/object:Gem::Version
4
- version: 0.3.2
4
+ version: 0.3.3
5
5
  platform: ruby
6
6
  authors:
7
7
  - Aditya Godbole
@@ -9,7 +9,7 @@ authors:
9
9
  autorequire:
10
10
  bindir: bin
11
11
  cert_chain: []
12
- date: 2015-07-29 00:00:00.000000000 Z
12
+ date: 2015-07-31 00:00:00.000000000 Z
13
13
  dependencies:
14
14
  - !ruby/object:Gem::Dependency
15
15
  name: rspec
@@ -110,6 +110,7 @@ files:
110
110
  - LICENSE
111
111
  - README.md
112
112
  - examples/echoclient.rb
113
+ - examples/fork_log_server.rb
113
114
  - examples/server.rb
114
115
  - examples/thread_example.rb
115
116
  - gilmour.gemspec
@@ -119,7 +120,6 @@ files:
119
120
  - lib/gilmour/base.rb
120
121
  - lib/gilmour/protocol.rb
121
122
  - lib/gilmour/responder.rb
122
- - lib/gilmour/stdhijack.rb
123
123
  - lib/gilmour/waiter.rb
124
124
  - test/spec/helpers/common.rb
125
125
  - test/spec/helpers/connection.rb
@@ -1,49 +0,0 @@
1
- # StringIO also do as IO, but IO#reopen fails.
2
- # The problem is that a StringIO cannot exist in the O/S's file descriptor
3
- # table. STDERR.reopen(...) at the low level does a dup() or dup2() to
4
- # copy one file descriptor to another.
5
- #
6
- # I have two options:
7
- #
8
- # (1) $stderr = StringIO.new
9
- # Then any program which writes to $stderr will be fine. But anything
10
- # which writes to STDERR will still go to file descriptor 2.
11
- #
12
- # (2) reopen STDERR with something which exists in the O/S file descriptor
13
- # table: e.g. a file or a pipe.
14
- #
15
- # I canot use a file, hence a Pipe.
16
-
17
- def capture_output(pipes, capture_stdout=false)
18
- streams = []
19
-
20
- if capture_stdout == true
21
- streams << STDOUT
22
- end
23
-
24
- streams << STDERR
25
-
26
- # Save the streams to be reassigned later.
27
- # Actually it doesn't matter because the child process would be killed
28
- # anyway after the work is done.
29
- saved = streams.each do |stream|
30
- stream.dup
31
- end
32
-
33
- begin
34
- streams.each_with_index do |stream, ix|
35
- # Probably I should not use IX, otherwise stdout and stderr can arrive
36
- # out of order, which they should?
37
- # If I reopen both of them on the same PIPE, they are guaranteed to
38
- # arrive in order.
39
- stream.reopen(pipes[ix])
40
- end
41
- yield
42
- ensure
43
- # This is sort of meaningless, just makes sense aesthetically.
44
- # To return what was borrowed.
45
- streams.each_with_index do |stream, i|
46
- stream.reopen(saved[i])
47
- end
48
- end
49
- end