hatetepe 0.4.1 → 0.5.0.pre

@@ -1,32 +0,0 @@
- class Hatetepe::Client
-   class KeepAlive
-     attr_reader :app
-
-     def initialize(app)
-       @app = app
-     end
-
-     # XXX should we be explicit about Connection: keep-alive?
-     # i think it doesn't matter if we send it as we don't wait
-     # for the first response to see if we're talking to an HTTP/1.1
-     # server. we're sending more requests anyway.
-     def call(request)
-       req, conn = request, request.connection
-
-       single = req.headers.delete("X-Hatetepe-Single")
-       req.headers["Connection"] = "close" if single
-
-       req.headers["Connection"] ||= "keep-alive"
-       close = req.headers["Connection"] == "close"
-
-       conn.processing_enabled = false if close
-
-       app.call(request).tap do |res|
-         if !single && (close || res.headers["Connection"] == "close")
-           conn.processing_enabled = false
-           conn.stop
-         end
-       end
-     end
-   end
- end
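
The removed Hatetepe::Client::KeepAlive middleware decides per request whether the connection may be reused: a synthetic X-Hatetepe-Single header is rewritten into Connection: close and further processing on the connection is disabled, otherwise Connection defaults to keep-alive, and for ordinary requests the connection is stopped as soon as either side sends Connection: close. The following is a rough usage sketch against the 0.4.x client, built only from calls that appear in the removed code (Client.start, #<<, EM::Synchrony.sync, #stop); the host, port, and the three-argument Request.new form are assumptions, not a documented API.

    require "em-synchrony"
    require "hatetepe/client"
    require "hatetepe/request"

    EM.synchrony do
      client = Hatetepe::Client.start(:host => "127.0.0.1", :port => 3000)

      # X-Hatetepe-Single asks the KeepAlive layer for a one-shot connection:
      # it gets rewritten into "Connection: close" and further processing on
      # this connection is disabled.
      request = Hatetepe::Request.new("GET", "/", "X-Hatetepe-Single" => "1")
      client << request
      EM::Synchrony.sync request

      puts request.response.headers.inspect
      client.stop
      EM.stop
    end
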
@@ -1,19 +0,0 @@
- require "em-synchrony"
-
- class Hatetepe::Client
-   class Pipeline
-     attr_reader :app
-
-     def initialize(app)
-       @app = app
-     end
-
-     def call(request)
-       previous = request.connection.requests[-2]
-       lock = request.connection.pending_transmission[previous.object_id]
-       EM::Synchrony.sync lock if previous != request && lock
-
-       app.call request
-     end
-   end
- end
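
Hatetepe::Client::Pipeline keeps pipelined requests in order: before a request goes down the stack it waits on the transmission lock of the request queued just before it, so requests hit the wire strictly one after another. The primitive it relies on is EM::Synchrony.sync parking the current fiber on a deferrable until it succeeds; a minimal, self-contained sketch of that mechanism (the timer and messages below are made up for illustration):

    require "em-synchrony"

    EM.synchrony do
      lock = EM::DefaultDeferrable.new

      # Stand-in for "the previous request finished being written to the socket".
      EM.add_timer(0.5) { lock.succeed }

      EM::Synchrony.sync(lock)  # the fiber parks here until #succeed fires
      puts "previous request transmitted; safe to send the next one"
      EM.stop
    end
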
@@ -1,85 +0,0 @@
- require "async-rack"
- require "rack"
-
- Rack::STREAMING = "Rack::STREAMING"
-
- class Hatetepe::Server
-   ASYNC_RESPONSE = [-1, {}, []].freeze
-
-   ERROR_RESPONSE = [500, {"Content-Type" => "text/html"},
-                     ["Internal Server Error"]].freeze
-
-   # Interface between Rack-compatible applications and Hatetepe's server.
-   # Provides support for both synchronous and asynchronous responses.
-   class App
-     attr_reader :app
-
-     # Initializes a new App object.
-     #
-     # @param [#call] app
-     #   The Rack app.
-     #
-     def initialize(app)
-       @app = app
-     end
-
-     # Processes the request.
-     #
-     # Will call #postprocess with the Rack app's response. Catches :async
-     # as an additional indicator for an asynchronous response. Uses a standard
-     # 500 response if the Rack app raises an error.
-     #
-     # @param [Hash] env
-     #   The Rack environment.
-     #
-     def call(env)
-       env["async.callback"] = proc do |response|
-         postprocess env, response
-       end
-
-       response = ASYNC_RESPONSE
-       catch :async do
-         response = begin
-           app.call env
-         rescue => ex
-           raise ex if ENV["RACK_ENV"] == "testing"
-           ERROR_RESPONSE
-         end
-       end
-
-       postprocess env, response
-     end
-
-     # Sends the response.
-     #
-     # Does nothing if response status is indicating an asynchronous response.
-     # This is the case if the response +Array+'s first element equals -1.
-     # Otherwise it will start sending the response (status and headers).
-     #
-     # If the body indicates streaming it will return after sending the status
-     # and headers. This happens if the body equals +Rack::STREAMING+ or isn't
-     # set. Otherwise it sends each body chunk and then closes the response
-     # stream.
-     #
-     # Sending an empty body is as simple as passing an object that responds to
-     # +each+ but doesn't actually yield anything.
-     #
-     # @param [Hash] env
-     #   The Rack environment.
-     # @param [Array] response
-     #   An array of 1..3 length containing the status, headers, body.
-     #
-     def postprocess(env, response)
-       return if response[0] == ASYNC_RESPONSE[0]
-
-       env["stream.start"].call response[0..1]
-       return if !response[2] || response[2] == Rack::STREAMING
-
-       begin
-         response[2].each &env["stream.send"]
-       ensure
-         env["stream.close"].call
-       end
-     end
-   end
- end
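
Hatetepe::Server::App adapts Rack applications to the server: a plain [status, headers, body] triple is sent via env["stream.start"], env["stream.send"] and env["stream.close"], while a status of -1 (or throw :async) defers the response until something calls env["async.callback"]; a body of Rack::STREAMING leaves the stream open after the headers. A sketch of a Rack app written against that contract; the path, timer, and class name are illustrative only, not part of hatetepe:

    class DemoApp
      ASYNC = [-1, {}, []].freeze  # mirrors Hatetepe::Server::ASYNC_RESPONSE

      def call(env)
        if env["PATH_INFO"] == "/slow"
          # Answer later from the reactor; App#postprocess ignores the -1 marker
          # and the real response is delivered through env["async.callback"].
          EM.add_timer(1) do
            env["async.callback"].call [200, {"Content-Type" => "text/plain"}, ["late\n"]]
          end
          ASYNC
        else
          [200, {"Content-Type" => "text/plain"}, ["hello\n"]]
        end
      end
    end
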
@@ -1,48 +0,0 @@
- require "hatetepe/client"
- require "hatetepe/request"
- require "uri"
-
- class Hatetepe::Server
-   class Proxy
-     attr_reader :app
-
-     def initialize(app)
-       @app = app
-     end
-
-     def call(env)
-       env["proxy.start"] = proc do |target, client = nil|
-         start env, target, client
-       end
-       app.call env
-     end
-
-     def start(env, target, client)
-       target = URI.parse(target)
-       env.delete "proxy.start"
-
-       env["proxy.callback"] ||= env["async.callback"]
-
-       cl = client || Hatetepe::Client.start(:host => target.host,
-                                             :port => target.port)
-       build_request(env, target).tap do |req|
-         cl << req
-         EM::Synchrony.sync req
-         req.response.body.callback { cl.stop } unless client
-         env["proxy.callback"].call req.response
-       end
-     end
-
-     # TODO use only +env+ to build the request
-     def build_request(env, target)
-       unless base = env["hatetepe.request"]
-         raise ArgumentError, "Proxying requires env[hatetepe.request] to be set"
-       end
-
-       uri = target.path + base.uri
-       host = "#{target.host}:#{target.port}"
-       headers = base.headers.merge("Host" => host)
-       Hatetepe::Request.new base.verb, uri, headers, base.body
-     end
-   end
- end
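
Hatetepe::Server::Proxy installs env["proxy.start"], a proc taking a target URL and an optional existing Hatetepe::Client. Calling it rebuilds the current request (which requires env["hatetepe.request"]) against the target, forwards it, and relays the upstream response through env["proxy.callback"], which defaults to env["async.callback"]. A sketch of an application using that hook; the /api prefix, backend address, and class name are placeholders, and returning the -1 marker afterwards is an assumption to keep the outer App adapter from sending a second response:

    class ProxyingApp
      def call(env)
        if env["PATH_INFO"].start_with?("/api")
          # Proxy#start builds a new Hatetepe::Request against the target,
          # forwards it, and feeds the upstream response to env["proxy.callback"].
          env["proxy.start"].call "http://127.0.0.1:4000"
          [-1, {}, []]  # the response has already been handed off asynchronously
        else
          [404, {"Content-Type" => "text/plain"}, ["not proxied\n"]]
        end
      end
    end
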
@@ -1,125 +0,0 @@
- require "spec_helper"
- require "hatetepe/server"
-
- describe Hatetepe::Server::App do
-   let(:inner_app) { stub "inner app", :call => response }
-   let(:app) { Hatetepe::Server::App.new inner_app }
-   let(:env) {
-     {
-       "stream.start" => proc {},
-       "stream.send" => proc {},
-       "stream.close" => proc {}
-     }
-   }
-
-   let(:status) { 123 }
-   let(:headers) { stub "headers" }
-   let(:body) { [stub("chunk#1"), stub("chunk#2")] }
-   let(:response) { [status, headers, body] }
-
-   context "#initialize(inner_app)" do
-     it "keeps the inner app" do
-       Hatetepe::Server::App.new(inner_app).app.should equal(inner_app)
-     end
-   end
-
-   context "#call(env)" do
-     it "sets env[async.callback] before #call'ing inner_app" do
-       app.call env
-
-       app.should_receive(:postprocess) {|e, res|
-         e.should equal(env)
-         res.should equal(response)
-       }
-       env["async.callback"].call response
-     end
-
-     it "calls #postprocess with the return of inner_app#call(env)" do
-       inner_app.stub :call => response
-       app.should_receive(:postprocess) {|e, res|
-         e.should equal(env)
-         res.should equal(response)
-       }
-
-       app.call env
-     end
-
-     let(:error_response) {
-       [500, {"Content-Type" => "text/html"}, ["Internal Server Error"]]
-     }
-
-     it "responds with 500 when catching an error" do
-       ENV.delete "RACK_ENV"
-
-       inner_app.stub(:call) { raise }
-       app.should_receive(:postprocess) {|e, res|
-         res.should == error_response
-       }
-
-       app.call env
-     end
-
-     describe "if server's :env option is testing" do
-       let(:error) { StandardError.new }
-
-       it "doesn't catch errors" do
-         inner_app.stub(:call) { raise error }
-         expect { app.call env }.to raise_error(error)
-       end
-     end
-
-     let(:async_response) { [-1, {}, []] }
-
-     it "catches :async for Thin compatibility" do
-       inner_app.stub(:call) { throw :async }
-       app.should_receive(:postprocess) {|e, res|
-         res.should == async_response
-       }
-
-       app.call env
-     end
-   end
-
-   context "#postprocess(env, response)" do
-     it "does nothing if the response status is lighter than 0" do
-       env["stream.start"].should_not_receive :call
-       app.postprocess env, [-1]
-     end
-
-     it "starts the response stream" do
-       env["stream.start"].should_receive(:call).with([status, headers])
-       app.postprocess env, [status, headers, []]
-     end
-
-     it "streams the body" do
-       body.should_receive :each do |&blk|
-         blk.should equal(env["stream.send"])
-       end
-       app.postprocess env, [status, headers, body]
-     end
-
-     it "doesn't stream the body if it equals Rack::STREAMING" do
-       body.should_not_receive :each
-       app.postprocess env, [status, headers, Rack::STREAMING]
-     end
-
-     it "doesn't try to stream a body that isn't set" do
-       body.should_not_receive :each
-       app.postprocess env, [status, headers]
-     end
-
-     it "closes the response stream after streaming the body" do
-       env["stream.close"].should_receive :call
-       app.postprocess env, [status, headers, body]
-     end
-
-     it "closes the response even if streaming the body fails" do
-       body.should_receive(:each).and_raise
-       env["stream.close"].should_receive :call
-
-       proc {
-         app.postprocess env, [status, headers, body]
-       }.should raise_error
-     end
-   end
- end
@@ -1,40 +0,0 @@
- require "spec_helper"
- require "hatetepe/client"
-
- describe Hatetepe::Client::Pipeline do
-   let(:app) { stub "app", :call => nil }
-   let(:pipeline) { Hatetepe::Client::Pipeline.new app }
-
-   describe "#initialize(app)" do
-     it "sets the app" do
-       pipeline.app.should equal(app)
-     end
-   end
-
-   let(:requests) {
-     [stub("previous_request"), stub("request")]
-   }
-   let(:lock) { stub "lock" }
-   let(:pending) { {requests.first.object_id => lock} }
-   let(:client) do
-     stub "client", :requests => requests, :pending_transmission => pending
-   end
-   let(:response) { stub "response" }
-
-   before do
-     requests.last.stub :connection => client
-     EM::Synchrony.stub :sync
-   end
-
-   describe "#call(request)" do
-     it "waits until the previous request has been transmitted" do
-       EM::Synchrony.should_receive(:sync).with lock
-       pipeline.call requests.last
-     end
-
-     it "calls the app" do
-       app.should_receive(:call).with(requests.last) { response }
-       pipeline.call(requests.last).should equal(response)
-     end
-   end
- end
@@ -1,145 +0,0 @@
- require "spec_helper"
- require "hatetepe/server"
-
- describe Hatetepe::Server::Proxy do
-   let(:app) { stub "app" }
-
-   describe "#initialize(app)" do
-     it "sets the app" do
-       Hatetepe::Server::Proxy.new(app).app.should equal(app)
-     end
-   end
-
-   let(:proxy) { Hatetepe::Server::Proxy.new app }
-   let(:target) { stub "target" }
-   let(:env) { {} }
-   let(:client) { stub "client", :<< => nil }
-
-   describe "#call(env)" do
-     it "sets env[proxy.start]" do
-       app.stub :call do |env|
-         env["proxy.start"].should respond_to(:call)
-       end
-       proxy.call env
-     end
-
-     let(:response) { stub "response" }
-
-     it "calls the app" do
-       app.should_receive(:call).with(env) { response }
-       proxy.call(env).should equal(response)
-     end
-
-     describe "env[proxy.start]" do
-       it "forwards to #start" do
-         proxy.should_receive(:start).with(env, target, client)
-         app.stub :call do |env|
-           env["proxy.start"].call target, client
-         end
-         proxy.call env
-       end
-     end
-   end
-
-   describe "#start(env, target, client)" do
-     let(:request) { stub "request" }
-     let(:response) { stub "response" }
-     let(:callback) { stub "async.callback", :call => nil }
-
-     let(:host) { stub "host" }
-     let(:port) { stub "port" }
-     let(:target) { stub "target", :host => host, :port => port }
-
-     before do
-       URI.stub :parse => target
-       proxy.stub :build_request => request
-
-       request.stub :dup => request, :response => response
-       request.extend EM::Deferrable
-       env["async.callback"] = callback
-     end
-
-     it "deletes env[proxy.start] from the env hash" do
-       env.should_receive(:delete).with "proxy.start"
-       Fiber.new { proxy.start env, target, client }.resume
-     end
-
-     it "defaults env[proxy.callback] to env[async.callback]" do
-       Fiber.new { proxy.start env, target, client }.resume
-       env["proxy.callback"].should equal(env["async.callback"])
-     end
-
-     let(:new_client) { stub "new client" }
-
-     it "starts a client if none was passed" do
-       Hatetepe::Client.stub :start do |config|
-         config[:host].should equal(host)
-         config[:port].should equal(port)
-         new_client
-       end
-       new_client.should_receive(:<<).with request
-       Fiber.new { proxy.start env, target, nil }.resume
-     end
-
-     it "doesn't stop a client that was passed" do
-       client.should_not_receive :stop
-       Fiber.new { proxy.start env, target, client }.resume
-       request.succeed
-     end
-
-     it "passes the request to the client" do
-       proxy.should_receive :build_request do |e, t|
-         env.should equal(e)
-         target.should equal(t)
-         request
-       end
-       client.should_receive(:<<).with request
-       Fiber.new { proxy.start env, target, client }.resume
-     end
-
-     it "passes the response to env[async.callback]" do
-       callback.should_receive(:call).with response
-       Fiber.new { proxy.start env, target, client }.resume
-       request.succeed
-     end
-
-     it "waits for the request to succeed" do
-       succeeded = false
-       callback.stub(:call) {|response| succeeded = true }
-
-       Fiber.new { proxy.start env, target, client }.resume
-       succeeded.should be_false
-
-       request.succeed
-       succeeded.should be_true
-     end
-   end
-
-   describe "#build_request(env, target)" do
-     let(:target) { URI.parse "http://localhost:3000/bar" }
-     let(:base_request) { Hatetepe::Request.new "GET", "/foo" }
-
-     before do
-       env["hatetepe.request"] = base_request
-       env["REMOTE_ADDR"] = "123.234.123.234"
-     end
-
-     it "fails if env[hatetepe.request] isn't set" do
-       env.delete "hatetepe.request"
-       proc { proxy.build_request env, target }.should raise_error(ArgumentError)
-     end
-
-     it "combines the original URI with the target URI" do
-       proxy.build_request(env, target).uri.should == "/bar/foo"
-     end
-
-     it "builds a new request" do
-       proxy.build_request(env, target).should_not equal(base_request)
-     end
-
-     it "sets version to HTTP/1.1" do
-       base_request.http_version = "1.0"
-       proxy.build_request(env, target).http_version.should == "1.1"
-     end
-   end
- end