hayabusa 0.0.11 → 0.0.12
- data/Gemfile.lock +2 -2
- data/VERSION +1 -1
- data/hayabusa.gemspec +12 -20
- data/lib/hayabusa.rb +23 -3
- data/lib/hayabusa_cgi_session.rb +5 -91
- data/lib/hayabusa_cgi_tools.rb +6 -3
- data/lib/hayabusa_client_session.rb +151 -0
- data/lib/hayabusa_ext/threadding.rb +19 -2
- data/lib/hayabusa_fcgi.rb +2 -1
- data/lib/hayabusa_http_server.rb +22 -16
- data/lib/hayabusa_http_session.rb +4 -122
- data/lib/hayabusa_http_session_request.rb +1 -1
- data/lib/hayabusa_http_session_response.rb +2 -1
- data/pages/config_cgi.rb +19 -0
- data/pages/config_fcgi.rb +19 -0
- data/{tests/fcgi_test/multiple_threads.rhtml → pages/spec_multiple_threads.rhtml} +0 -0
- data/{tests/fcgi_test/sleeper.rhtml → pages/spec_sleeper.rhtml} +0 -0
- data/{tests/cgi_test/vars_get_test.rhtml → pages/spec_vars_get.rhtml} +0 -0
- data/{tests/cgi_test/vars_header_test.rhtml → pages/spec_vars_header.rhtml} +0 -0
- data/{tests/cgi_test/vars_post_test.rhtml → pages/spec_vars_post.rhtml} +0 -0
- data/{tests/cgi_test → pages}/threadded_content_test.rhtml +0 -0
- data/spec/fcgi_multiple_processes_spec.rb +3 -3
- data/spec/hayabusa_spec.rb +156 -47
- metadata +12 -20
- data/pages/spec_post.rhtml +0 -3
- data/spec/cgi_spec.rb +0 -49
- data/spec/custom_urls_spec.rb +0 -35
- data/spec/fcgi_spec.rb +0 -71
- data/tests/cgi_test/config_cgi.rb +0 -6
- data/tests/fcgi_test/config_fcgi.rb +0 -6
- data/tests/fcgi_test/index.rhtml +0 -3
- data/tests/fcgi_test/threadded_content_test.rhtml +0 -23
- data/tests/fcgi_test/vars_get_test.rhtml +0 -4
- data/tests/fcgi_test/vars_header_test.rhtml +0 -3
- data/tests/fcgi_test/vars_post_test.rhtml +0 -4
data/lib/hayabusa_fcgi.rb
CHANGED
@@ -119,7 +119,7 @@ class Hayabusa::Fcgi
       else
         self.handle_fcgi_request(:cgi => cgi)
       end
-    rescue Errno::ECONNABORTED
+    rescue Errno::ECONNABORTED, Errno::ECONNREFUSED
      $stderr.puts "[hayabusa] Connection to server was interrupted - trying again."
      @fcgi_proxy = nil #Force re-evaluate if this process should be host or proxy.
      retry
@@ -141,6 +141,7 @@ class Hayabusa::Fcgi
       end
     end
   ensure
+    $stderr.puts "[hayabusa] FCGI-loop stopped."
    @hayabusa.stop if @hayabusa
   end
 end
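The first hunk widens the rescue so a refused connection is handled like an aborted one: the cached proxy decision is cleared and the loop retries instead of letting the FCGI worker die. Below is a minimal, self-contained sketch of that retry shape; the helper name, host/port arguments and attempt limit are illustrative only and not part of the gem.

require "socket"

#Illustrative retry loop: a transient connection error resets state and retries.
def fetch_with_retry(host, port, max_attempts = 3)
  attempts = 0
  begin
    TCPSocket.open(host, port) { |sock| sock.gets }
  rescue Errno::ECONNABORTED, Errno::ECONNREFUSED
    attempts += 1
    raise if attempts >= max_attempts
    $stderr.puts "[example] Connection to server was interrupted - trying again."
    sleep 0.1
    retry
  end
end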
data/lib/hayabusa_http_server.rb
CHANGED
@@ -23,17 +23,17 @@ class Hayabusa::Http_server
     loop do
       begin
         if !@server or @server.closed?
-
+          @hb.log_puts "Starting TCPServer." if @debug
           @server = TCPServer.new(@hb.config[:host], @hb.config[:port])
         end

-
+        @hb.log_puts "Trying to spawn new HTTP-session from socket-accept." if @debug
         self.spawn_httpsession(@server.accept)
-
+        @hb.log_puts "Starting new HTTP-request." if @debug
       rescue Exception => e
         if @debug
-
-
+          @hb.log_puts Knj::Errors.error_str(e)
+          @hb.log_puts "Could not accept HTTP-request - waiting 1 sec and then trying again."
         end

         raise e if e.is_a?(SystemExit) or e.is_a?(Interrupt)
@@ -44,23 +44,29 @@ class Hayabusa::Http_server
   end

   def stop
-
+    while @working_count > 0
+      @hb.log_puts "Waiting until no HTTP-sessions are running." if @debug
+      sleep 0.1
+    end
+
+
+    @hb.log_puts "Stopping accept-thread." if @debug
     @thread_accept.kill if @thread_accept and @thread_accept.alive?
     @thread_restart.kill if @thread_restart and @thread_restart.alive?

-
-    if @http_sessions
-
-
-
-    end
+    #@hb.log_puts "Stopping all HTTP sessions." if @debug
+    #if @http_sessions
+    #  @http_sessions.each do |httpsession|
+    #    httpsession.destruct
+    #  end
+    #end

     begin
-
+      @hb.log_puts "Stopping TCPServer." if @debug
       @server.close if @server and !@server.closed?
-
+      @hb.log_puts "TCPServer was closed." if @debug
     rescue Timeout::Error
-      raise "Could not close TCPserver
+      raise "Could not close TCPserver."
     rescue IOError => e
       if e.message == "closed stream"
         #ignore - it should be closed.
@@ -78,7 +84,7 @@ class Hayabusa::Http_server
   end

   def spawn_httpsession(socket)
-
+    @hb.log_puts "Starting new HTTP-session." if @debug
     @http_sessions << Hayabusa::Http_session.new(self, socket)
   end

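The new Http_server#stop waits for in-flight requests to drain before killing the accept thread and closing the listener, instead of tearing sessions down immediately. A rough sketch of that drain-then-close pattern follows, assuming a simple counter guarded by a mutex; the class and method names are illustrative and not the gem's API.

require "socket"
require "thread"

class TinyServer
  def initialize(port)
    @server = TCPServer.new("127.0.0.1", port)
    @working_count = 0
    @lock = Mutex.new
    @accept_thread = Thread.new do
      loop do
        client = @server.accept
        Thread.new do
          @lock.synchronize { @working_count += 1 }
          begin
            client.puts "hello"
          ensure
            client.close
            @lock.synchronize { @working_count -= 1 }
          end
        end
      end
    end
  end

  def stop
    sleep 0.1 while @working_count > 0           #drain in-flight requests first
    @accept_thread.kill if @accept_thread.alive? #then stop accepting new ones
    @server.close unless @server.closed?         #finally release the port
  end
end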
data/lib/hayabusa_http_session.rb
CHANGED
@@ -1,8 +1,5 @@
 #This class handels the HTTP-sessions.
-class Hayabusa::Http_session
-  attr_accessor :data, :alert_sent
-  attr_reader :cookie, :get, :headers, :ip, :session, :session_id, :session_hash, :hb, :active, :out, :eruby, :browser, :debug, :resp, :page_path, :post, :cgroup, :meta, :httpsession_var, :handler, :working
-
+class Hayabusa::Http_session < Hayabusa::Client_session
   #Autoloader for subclasses.
   def self.const_missing(name)
     require "#{File.dirname(__FILE__)}/hayabusa_http_session_#{name.to_s.downcase}.rb"
@@ -48,7 +45,7 @@ class Hayabusa::Http_session
       "SERVER_PORT" => addr_peer[1]
     }

-    @resp = Hayabusa::Http_session::Response.new(:socket => @socket)
+    @resp = Hayabusa::Http_session::Response.new(:hb => @hb, :socket => @socket)
     @handler = Hayabusa::Http_session::Request.new(:hb => @hb, :httpsession => self)
     @cgroup = Hayabusa::Http_session::Contentgroup.new(:socket => @socket, :hb => @hb, :resp => @resp, :httpsession => self)
     @resp.cgroup = @cgroup
@@ -134,53 +131,6 @@ class Hayabusa::Http_session
     end
   end

-  #Creates a new Hayabusa::Binding-object and returns the binding for that object.
-  def create_binding
-    binding_obj = Hayabusa::Http_session::Page_environment.new(:httpsession => self, :hb => @hb)
-    return binding_obj.get_binding
-  end
-
-  #Is called when content is added and begings to write the output if it goes above the limit.
-  def add_size(size)
-    @written_size += size
-    @cgroup.write_output if @written_size >= @size_send
-  end
-
-  def threadded_content(block)
-    raise "No block was given." if !block
-    cgroup = Thread.current[:hayabusa][:contentgroup].new_thread
-
-    Thread.new do
-      begin
-        self.init_thread
-        cgroup.register_thread
-
-        @hb.db_handler.get_and_register_thread if @hb and @hb.db_handler.opts[:threadsafe]
-        @hb.ob.db.get_and_register_thread if @hb and @hb.ob.db.opts[:threadsafe]
-
-        block.call
-      rescue Exception => e
-        Thread.current[:hayabusa][:contentgroup].write Knj::Errors.error_str(e, {:html => true})
-        _hb.handle_error(e)
-      ensure
-        Thread.current[:hayabusa][:contentgroup].mark_done
-        @hb.ob.db.free_thread if @hb and @hb.ob.db.opts[:threadsafe]
-        @hb.db_handler.free_thread if @hb and @hb.db_handler.opts[:threadsafe]
-      end
-    end
-  end
-
-  def init_thread
-    Thread.current[:hayabusa] = {} if !Thread.current[:hayabusa]
-    Thread.current[:hayabusa][:hb] = @hb
-    Thread.current[:hayabusa][:httpsession] = self
-    Thread.current[:hayabusa][:session] = @session
-    Thread.current[:hayabusa][:get] = @get
-    Thread.current[:hayabusa][:post] = @post
-    Thread.current[:hayabusa][:meta] = @meta
-    Thread.current[:hayabusa][:cookie] = @cookie
-  end
-
   def self.finalize(id)
     @hb.log_puts "Http_session finalize #{id}." if @debug
   end
@@ -202,11 +152,6 @@ class Hayabusa::Http_session
     @thread_request.kill if @thread_request.alive?
   end

-  #Forces the content to be the input - nothing else can be added after calling this.
-  def force_content(newcont)
-    @cgroup.force_content(newcont)
-  end
-
   def serve
     @hb.log_puts "Generating meta, cookie, get, post and headers." if @debug
     @meta = @handler.meta.merge(@socket_meta)
@@ -287,70 +232,7 @@ class Hayabusa::Http_session
       Thread.current[:hayabusa][:contentgroup] = @cgroup
       time_start = Time.now.to_f if @debug

-
-
-
-      Timeout.timeout(@hb.config[:timeout]) do
-        if @handlers_cache.key?(@ext)
-          @hb.log_puts "Calling handler." if @debug
-          @handlers_cache[@ext].call(self)
-        else
-          #check if we should use a handler for this request.
-          @config[:handlers].each do |handler_info|
-            if handler_info.key?(:file_ext) and handler_info[:file_ext] == @ext
-              return handler_info[:callback].call(self)
-            elsif handler_info.key?(:path) and handler_info[:mount] and @meta["SCRIPT_NAME"].slice(0, handler_info[:path].length) == handler_info[:path]
-              @page_path = "#{handler_info[:mount]}#{@meta["SCRIPT_NAME"].slice(handler_info[:path].length, @meta["SCRIPT_NAME"].length)}"
-              break
-            end
-          end
-
-          if !File.exists?(@page_path)
-            @resp.status = 404
-            @resp.header("Content-Type", "text/html")
-            @cgroup.write("File you are looking for was not found: '#{@meta["REQUEST_URI"]}'.")
-          else
-            if @headers["cache-control"] and @headers["cache-control"][0]
-              cache_control = {}
-              @headers["cache-control"][0].scan(/(.+)=(.+)/) do |match|
-                cache_control[match[1]] = match[2]
-              end
-            end
-
-            cache_dont = true if cache_control and cache_control.key?("max-age") and cache_control["max-age"].to_i <= 0
-            lastmod = File.mtime(@page_path)
-
-            @resp.header("Last-Modified", lastmod.httpdate)
-            @resp.header("Expires", (Time.now + 86400).httpdate) #next day.
-
-            if !cache_dont and @headers["if-modified-since"] and @headers["if-modified-since"][0]
-              request_mod = Datet.in(@headers["if-modified-since"].first).time
-
-              if request_mod == lastmod
-                @resp.status = 304
-                return nil
-              end
-            end
-
-            @cgroup.new_io(:type => :file, :path => @page_path)
-          end
-        end
-      end
-    rescue SystemExit
-      #do nothing - ignore.
-    rescue Timeout::Error
-      @resp.status = 500
-      print "The request timed out."
-    end
-
-    @cgroup.mark_done
-    @cgroup.write_output
-    @hb.log_puts "#{__id__} - Served '#{@meta["REQUEST_URI"]}' in #{Time.now.to_f - time_start} secs (#{@resp.status})." if @debug
-    @cgroup.join
-
-    @hb.events.call(:request_done, {
-      :httpsession => self
-    }) if @hb.events
-    @httpsession_var = {}
+      self.execute_page
+      self.execute_done
     end
   end
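These hunks are the visible half of a larger refactoring: Http_session now inherits from the new Hayabusa::Client_session (data/lib/hayabusa_client_session.rb, added in this release), and the per-request logic that used to be inlined here (thread bookkeeping, page execution, finishing the content group) moves into that shared base class so the CGI and HTTP session types can reuse it, leaving #serve to call execute_page and execute_done. The sketch below only illustrates that extract-to-base-class shape with hypothetical names; it is not the gem's code.

#Hypothetical base class holding behaviour shared by all client-session types.
class ClientSessionSketch
  def serve
    init_thread
    execute_page
    execute_done
  end

  private

  def init_thread
    Thread.current[:session] = self  #expose the session to code run in this thread
  end

  def execute_page
    @output = "rendered page"        #protocol-specific subclasses override this
  end

  def execute_done
    puts @output                     #flush output, run any done-callbacks, etc.
  end
end

#Hypothetical HTTP-specific subclass: only the transport-specific part differs.
class HttpSessionSketch < ClientSessionSketch
  def execute_page
    @output = "rendered page, written to a TCP socket"
  end
end

HttpSessionSketch.new.serve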
data/lib/hayabusa_http_session_request.rb
CHANGED
@@ -70,7 +70,7 @@ class Hayabusa::Http_session::Request
     uri_raw = match[2]
     uri_raw = "index.rhtml" if uri_raw == ""

-    uri = Knj::Web.parse_uri(match[2])
+    uri = Knj::Web.parse_uri(match[2]) rescue {:path => match[2], :query => ""}

     page_filepath = Knj::Web.urldec(uri[:path])
     if page_filepath.empty? or page_filepath == "/" or File.directory?("#{@hb.config[:doc_root]}/#{page_filepath}")
data/lib/hayabusa_http_session_response.rb
CHANGED
@@ -28,6 +28,7 @@ class Hayabusa::Http_session::Response
   def initialize(args)
     @chunked = false
     @socket = args[:socket]
+    @hb = args[:hb]
   end

   def reset(args)
@@ -142,7 +143,7 @@ class Hayabusa::Http_session::Response
     end

     if @close and @mode != :cgi
-
+      @hb.log_puts("Closing socket.")
       @socket.close
     end
   end
data/pages/config_cgi.rb
ADDED
@@ -0,0 +1,19 @@
+Hayabusa::CGI_CONF = {
+  :hayabusa => {
+    :title => "Cgi_test",
+    :doc_root => File.realpath(File.dirname(__FILE__)),
+    :handlers_extra => [{
+      :regex => /^\/Kasper$/,
+      :callback => proc{|data|
+        data[:httpsession].page_path = nil
+
+        eruby = data[:httpsession].eruby
+        eruby.connect(:on_error) do |e|
+          _hb.handle_error(e)
+        end
+
+        eruby.import("#{File.dirname(__FILE__)}/../pages/spec.rhtml")
+      }
+    }]
+  }
+}
data/pages/config_fcgi.rb
ADDED
@@ -0,0 +1,19 @@
+Hayabusa::FCGI_CONF = {
+  :hayabusa => {
+    :title => "Fcgi_test",
+    :doc_root => File.realpath(File.dirname(__FILE__)),
+    :handlers_extra => [{
+      :regex => /^\/Kasper$/,
+      :callback => proc{|data|
+        data[:httpsession].page_path = nil
+
+        eruby = data[:httpsession].eruby
+        eruby.connect(:on_error) do |e|
+          _hb.handle_error(e)
+        end
+
+        eruby.import("#{File.dirname(__FILE__)}/../pages/spec.rhtml")
+      }
+    }]
+  }
+}
data/spec/fcgi_multiple_processes_spec.rb
CHANGED
@@ -24,7 +24,7 @@ describe "Hayabusa" do

     Http2.new(:host => "localhost", :user_agent => "Client#{t_i}", :debug => false) do |http|
       1.upto(25) do |request_i|
-        res = http.get(:url => "hayabusa_fcgi_test/
+        res = http.get(:url => "hayabusa_fcgi_test/spec_multiple_threads.rhtml")

         begin
           data_json = JSON.parse(res.body)
@@ -84,11 +84,11 @@ describe "Hayabusa" do
     res2 = nil

     t1 = Thread.new do
-      res1 = http1.get(:url => "hayabusa_fcgi_test/
+      res1 = http1.get(:url => "hayabusa_fcgi_test/spec_sleeper.rhtml")
     end

     t2 = Thread.new do
-      res2 = http2.get(:url => "hayabusa_fcgi_test/
+      res2 = http2.get(:url => "hayabusa_fcgi_test/spec_sleeper.rhtml")
     end

     t1.join
data/spec/hayabusa_spec.rb
CHANGED
@@ -4,8 +4,10 @@ describe "Hayabusa" do
   it "should be able to start a sample-server" do
     require "rubygems"
     require "hayabusa"
-    require "knjrbfw"
+    require "/home/kaspernj/Dev/Ruby/Gems/knjrbfw/lib/knjrbfw.rb"
     require "sqlite3" if RUBY_ENGINE != "jruby"
+    require "json"
+    Knj.gem_require(:Http2, "http2")

     db_path = "#{Knj::Os.tmpdir}/hayabusa_rspec.sqlite3"
     File.unlink(db_path) if File.exists?(db_path)
@@ -29,45 +31,82 @@ describe "Hayabusa" do
       }
     )

+    $appserver.config[:handlers] << {
+      :regex => /^\/Kasper$/,
+      :callback => proc{|data|
+        data[:httpsession].page_path = nil
+
+        eruby = data[:httpsession].eruby
+        eruby.connect(:on_error) do |e|
+          _hb.handle_error(e)
+        end
+
+        eruby.import("#{File.dirname(__FILE__)}/../pages/spec.rhtml")
+      }
+    }
+
     $appserver.vars[:test] = "kasper"
     $appserver.define_magic_var(:_testvar1, "Kasper")
     $appserver.define_magic_var(:_testvar2, "Johansen")
     $appserver.start

     raise "Expected thread-pool-priority to be '-3' but it wasnt: '#{$appserver.threadpool.args[:priority]}'." if $appserver.threadpool.args[:priority] != -3
+
+    http = Http2.new(:host => "localhost", :port => 80)
+
+    $testmodes = [{
+      :name => :standalone,
+      :path_pre => "",
+      :http => Http2.new(:host => "localhost", :port => 1515)
+    },{
+      :name => :cgi,
+      :path_pre => "hayabusa_cgi_test/",
+      :http => http
+    },{
+      :name => :fcgi,
+      :path_pre => "hayabusa_fcgi_test/",
+      :http => http
+    }]
   end

+  #it "should be able to handle custom urls" do
+  #  $testmodes.each do |tdata|
+  #    res = tdata[:http].get("#{tdata[:path_pre]}Kasper")
+  #    raise "Expected data to be 'Test' in mode '#{tdata[:name]}' but it wasnt: '#{res.body}'." if res.body != "Test"
+  #  end
+  #end
+
   it "should be able to handle a GET-request." do
-
-
-
-
-
-
-
-
-    res = $http.get("spec.rhtml?choice=check_get_parse&value=#{Knj::Web.urlenc("gfx/nopic.png")}")
-    raise "Unexpected HTML: '#{res.body}'." if res.body.to_s != "gfx/nopic.png"
+    $testmodes.each do |tdata|
+      res = tdata[:http].get("#{tdata[:path_pre]}spec.rhtml")
+      raise "Unexpected HTML: '#{res.body}'." if res.body.to_s != "Test"
+
+      #Check that URL-decoding are being done.
+      res = tdata[:http].get("#{tdata[:path_pre]}spec.rhtml?choice=check_get_parse&value=#{Knj::Web.urlenc("gfx/nopic.png")}")
+      raise "Unexpected HTML: '#{res.body}'." if res.body.to_s != "gfx/nopic.png"
+    end
   end

   it "should be able to handle a HEAD-request." do
     #Http2 doesnt support head?
-    #res = $http.head("
+    #res = $http.head("spec.rhtml")
     #raise "HEAD-request returned content - it shouldnt?" if res.body.to_s.length > 0
   end

   it "should be able to handle a POST-request." do
-
-
-
-
-
-
-
-
-
-
-
+    $testmodes.each do |tdata|
+      res = tdata[:http].post(:url => "#{tdata[:path_pre]}spec.rhtml", :post => {
+        "postdata" => "Test post"
+      })
+      raise "POST-request did not return expected data: '#{res.body}' for '#{tdata[:name]}'." if res.body.to_s.strip != "Test post"
+
+      res = tdata[:http].post(:url => "#{tdata[:path_pre]}spec.rhtml?choice=dopostconvert", :post => {
+        "postdata" => "Test post",
+        "array" => ["a", "b", "d"]
+      })
+      data = JSON.parse(res.body)
+      raise "Expected posted data restored but it wasnt: '#{data}'." if data["array"]["0"] != "a" or data["array"]["1"] != "b" or data["array"]["2"] != "d"
+    end
   end

   it "should be able to join the server so other tests can be made manually." do
@@ -82,30 +121,31 @@ describe "Hayabusa" do
   end

   it "should be able to use the header-methods." do
-
-
-
+    $testmodes.each do |tdata|
+      res = tdata[:http].get("#{tdata[:path_pre]}spec.rhtml")
+      raise "Normal header data could not be detected." if res.header("testheader") != "NormalHeader"
+      raise "Raw header data could not be detected." if res.header("testraw") != "RawHeader"
+    end
   end

   it "should be able to set and get multiple cookies at the same time." do
-
-
-
-
-
-
-
-
-
-
-
+    $testmodes.each do |tdata|
+      res = tdata[:http].get("#{tdata[:path_pre]}spec.rhtml?choice=test_cookie")
+      raise res.body if res.body.to_s.length > 0
+
+      res = tdata[:http].get("#{tdata[:path_pre]}spec.rhtml?choice=get_cookies")
+      parsed = JSON.parse(res.body)
+
+      raise "Unexpected value for 'TestCookie': '#{parsed["TestCookie"]}'." if parsed["TestCookie"] != "TestValue"
+      raise "Unexpected value for 'TestCookie2': '#{parsed["TestCookie2"]}'." if parsed["TestCookie2"] != "TestValue2"
+      raise "Unexpected value for 'TestCookie3': '#{parsed["TestCookie3"]}'." if parsed["TestCookie3"] != "TestValue 3 "
+    end
   end

   it "should be able to run the rspec_threadded_content test correctly." do
-
-
-
-    raise res.body.to_s
+    $testmodes.each do |tdata|
+      res = tdata[:http].get("#{tdata[:path_pre]}spec_threadded_content.rhtml")
+      raise "Expected body to be '12345678910' for mode '#{tdata[:name]}' but it wasnt: '#{res.body.to_s}'." if res.body.to_s != "12345678910"
     end
   end

@@ -145,15 +185,20 @@ describe "Hayabusa" do
   end

   it "should be able to join threads tarted from _hb.thread." do
-
-
+    $testmodes.each do |tdata|
+      res = tdata[:http].get("#{tdata[:path_pre]}spec_thread_joins.rhtml")
+      raise res.body if res.body.to_s != "12345"
+    end
   end

   it "should be able to properly parse special characters in post-requests." do
-
-    "
-
-
+    $testmodes.each do |tdata|
+      res = tdata[:http].post(:url => "#{tdata[:path_pre]}spec_vars_post.rhtml", :post => {
+        "test" => "123+456%789%20"
+      })
+      data = JSON.parse(res.body)
+      raise res.body if data["test"] != "123+456%789%20"
+    end
   end

   it "should be able to do logging" do
@@ -193,6 +238,70 @@ describe "Hayabusa" do
     raise "Expected count to be 0 but got: #{logs.length}" if logs.length != 0
   end

+  it "should handle multi-threadding well" do
+    ts = []
+    es = []
+
+    #Execute multiple threads to test FCGI-proxy and thread-safety.
+    1.upto(5) do
+      $testmodes.each do |tdata|
+        ts << Thread.new do
+          begin
+            res = tdata[:http].post(:url => "#{tdata[:path_pre]}spec_vars_post.rhtml", :post => {
+              "test_special_chars" => "1%23+-456",
+              "var" => {
+                0 => 1,
+                1 => 2,
+                3 => {
+                  "kasper" => 5,
+                  "arr" => ["a", "b", "c"]
+                }
+              }
+            })
+
+            begin
+              data = JSON.parse(res.body)
+            rescue JSON::GeneratorError
+              raise "Could not parse JSON from result: '#{res.body}'."
+            end
+
+            begin
+              raise "Expected hash to be a certain way: '#{data}'." if data["var"]["0"] != "1" or data["var"]["1"] != "2" or data["var"]["3"]["kasper"] != "5" or data["var"]["3"]["arr"]["0"] != "a" or data["var"]["3"]["arr"]["1"] != "b"
+            rescue => e
+              raise "Error when parsing result: '#{data}'."
+            end
+
+            raise "Expected 'test_special_chars' to be '1%23+-456' but it wasnt: '#{data["test_special_chars"]}'." if data["test_special_chars"] != "1%23+-456"
+
+            res = tdata[:http].get("#{tdata[:path_pre]}spec_threadded_content.rhtml")
+            raise "Expected body to be '12345678910' but it was: '#{res.body}'." if res.body != "12345678910"
+
+            res = tdata[:http].get("#{tdata[:path_pre]}spec_vars_get.rhtml?var[]=1&var[]=2&var[]=3&var[3][kasper]=5")
+            data = JSON.parse(res.body)
+            raise "Expected hash to be a certain way: '#{data}'." if data["var"]["0"] != "1" or data["var"]["1"] != "2" or data["var"]["3"]["kasper"] != "5"
+
+
+
+            res = tdata[:http].get("#{tdata[:path_pre]}spec_vars_header.rhtml")
+            raise "Expected header 'testheader' to be 'TestValue' but it wasnt: '#{res.header("testheader")}'." if res.header("testheader") != "TestValue"
+          rescue => e
+            es << e
+            puts e.inspect
+            puts e.backtrace
+          end
+        end
+      end
+    end
+
+    ts.each do |t|
+      t.join
+    end
+
+    es.each do |e|
+      raise e
+    end
+  end
+
   it "should be able to stop." do
     $appserver.stop
   end