rails-threaded-proxy 0.4.1 → 0.5.0
- checksums.yaml +4 -4
- data/VERSION +1 -1
- data/lib/threaded_proxy/client.rb +19 -7
- data/lib/threaded_proxy/controller.rb +39 -17
- data/lib/threaded_proxy/http.rb +20 -2
- data/lib/threaded_proxy/socket_responder.rb +65 -0
- data/rails-threaded-proxy.gemspec +6 -4
- data/spec/threaded_proxy/client_spec.rb +197 -4
- data/spec/threaded_proxy/controller_spec.rb +50 -0
- metadata +4 -2
checksums.yaml
CHANGED
@@ -1,7 +1,7 @@
 ---
 SHA256:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 8d77c14092a49dc2e231f233da21a934600a5159aabbc6bbb40294404c8e49d7
+  data.tar.gz: 8590d4359939c617098a036435c340623de538fcb5a65bbd15fe1b72726be86b
 SHA512:
-  metadata.gz:
-  data.tar.gz:
+  metadata.gz: 7704567ef587afce79dbf21ce20f67add2e04c87cbc1efb7fbb3491cc7a95e01e05a8b7881917a64073caca7f4769c07814d22038f06fce045aa80b07f00b501
+  data.tar.gz: 025e8d6eda736b0ffa5c8f8e97d47941f64b06081b4a3a2164c6288a337b50d7325d64971c89fab35622f9c383dad0941584f1e1e4bf80d2a9c99bc4be20501b
data/VERSION
CHANGED
@@ -1 +1 @@
-0.4.1
+0.5.0
data/lib/threaded_proxy/client.rb
CHANGED
@@ -2,10 +2,15 @@
 
 require 'addressable/uri'
 require 'active_support/notifications'
+require 'action_dispatch'
 require 'net/http'
+
 require_relative 'http'
+require_relative 'socket_responder'
 
 module ThreadedProxy
+  class ResponseBodyAlreadyConsumedError < StandardError; end
+
   class Client
     DISALLOWED_RESPONSE_HEADERS = %w[keep-alive].freeze
 
@@ -22,7 +27,6 @@ module ThreadedProxy
     CALLBACK_METHODS = %i[
       on_response
      on_headers
-      on_body
      on_complete
      on_error
     ].freeze
@@ -42,11 +46,13 @@ module ThreadedProxy
     def initialize(origin_url, options = {})
       @origin_url = Addressable::URI.parse(origin_url)
       @options = DEFAULT_OPTIONS.merge(options)
+      @wrote_headers = false
 
       @callbacks = {}
-      CALLBACK_METHODS.each do |method_name|
+      (CALLBACK_METHODS - [:on_error]).each do |method_name|
        @callbacks[method_name] = proc {}
      end
+      @callbacks[:on_error] = proc { |e| raise e }
 
       yield(self) if block_given?
     end
@@ -67,6 +73,8 @@ module ThreadedProxy
         http_request.body = @options[:body]
       end
 
+      socket_responder = SocketResponder.new(socket)
+
       ActiveSupport::Notifications.instrument('threaded_proxy.fetch', method: request_method, url: @origin_url.to_s,
                                                                       headers: request_headers) do
         http = HTTP.new(@origin_url.host, @origin_url.port || default_port(@origin_url))
@@ -76,15 +84,14 @@ module ThreadedProxy
 
         http.start do
           http.request(http_request) do |client_response|
-            @callbacks[:on_response].call(client_response,
+            @callbacks[:on_response].call(client_response, socket_responder)
            break if socket.closed?
 
            log('Writing response status and headers')
            write_headers(client_response, socket)
            break if socket.closed?
 
-
-            break if socket.closed?
+            raise ResponseBodyAlreadyConsumedError if client_response.read?
 
            # There may have been some existing data in client_response's read buffer, flush it out
            # before we manually connect the raw sockets
@@ -97,9 +104,13 @@ module ThreadedProxy
 
            @callbacks[:on_complete].call(client_response)
          end
-        rescue StandardError => e
-          @callbacks[:on_error].call(e) or raise
        end
+      rescue StandardError => e
+        @callbacks[:on_error].call(e, socket_responder)
+        # Default to 500 if the error callback didn't write a response
+        socket_responder.render(status: 500, text: 'Internal Server Error') unless socket.closed? || @wrote_headers
+
+        socket.close unless socket.closed?
      end
    end
 
@@ -118,6 +129,7 @@ module ThreadedProxy
 
      # Done with headers
      socket.write "\r\n"
+      @wrote_headers = true
    end
 
    def default_port(uri)
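
Net effect of these client.rb changes: the on_body callback is gone, on_response and on_error now receive a SocketResponder alongside the upstream response or error, and the default on_error re-raises instead of swallowing the exception. A minimal sketch of the new callback wiring (the origin URL and handler bodies are illustrative, not from the gem):

require 'rails-threaded-proxy'

client = ThreadedProxy::Client.new('http://upstream.internal:8080/report') do |config|
  # on_response can short-circuit proxying by rendering its own reply;
  # Client#start stops once the hijacked socket is closed.
  config.on_response do |client_response, responder|
    responder.render(status: 502, text: 'Upstream busy') if client_response.code == '503'
  end

  # on_error also receives the responder, so a failure can become a custom
  # reply instead of the default 500 rendered by Client#start.
  config.on_error do |error, responder|
    responder.render(status: 504, json: { error: error.message })
  end
end
# client.start(hijacked_socket) then streams the upstream response onto the socket.
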
data/lib/threaded_proxy/controller.rb
CHANGED
@@ -4,36 +4,58 @@ require_relative 'client'
 
 module ThreadedProxy
   module Controller
+    # Proxies a fetch request to the specified origin URL, allowing for hijacking
+    # the controller response outside of the Rack request/response cycle.
+    #
+    # @param origin_url [String] The URL to which the request will be proxied.
+    # @param options [Hash] Optional parameters for the request.
+    # @option options [Symbol] :body The body of the request. If set to :rack, the request body stream will be used.
+    # @option options [Hash] :headers Additional headers to include in the request.
+    # @yield [Client] Optional block to configure the client.
+    #
+    # @raise [RuntimeError] If a non-chunked POST request is made without a content-length header.
+    #
+    # @return [void]
+    #
+    # @example
+    #   proxy_fetch('http://example.com', body: :rack, headers: { 'Custom-Header' => 'value' }) do |client|
+    #     client.on_headers { |client_response| client_response['x-foo'] = 'bar' }
+    #     client.on_error { |e| Rails.logger.error(e) }
+    #   end
     def proxy_fetch(origin_url, options = {}, &block)
      # hijack the response so we can take it outside of the rack request/response cycle
      request.env['rack.hijack'].call
      socket = request.env['rack.hijack_io']
 
-
-      if options[:body] == :rack
-        options[:headers] ||= {}
-        options[:body] = request.body_stream
-
-        if request.env['HTTP_TRANSFER_ENCODING'] == 'chunked'
-          options[:headers]['Transfer-Encoding'] = 'chunked'
-        elsif request.env['CONTENT_LENGTH']
-          options[:headers]['content-length'] = request.env['CONTENT_LENGTH'].to_s
-        else
-          raise 'Cannot proxy a non-chunked POST request without content-length'
-        end
-
-        options[:headers]['Content-Type'] = request.env['CONTENT_TYPE'] if request.env['CONTENT_TYPE']
-      end
+      options.deep_merge!(proxy_options_from_request) if options[:body] == :rack
 
+      Thread.new do
        client = Client.new(origin_url, options, &block)
        client.start(socket)
-      rescue Errno::EPIPE
-        # client disconnected before request finished; not an error
      ensure
        socket.close unless socket.closed?
      end
 
      head :ok
    end
+
+    protected
+
+    def proxy_options_from_request
+      options = {}
+      options[:headers] ||= {}
+      options[:body] = request.body_stream
+
+      if request.env['HTTP_TRANSFER_ENCODING'] == 'chunked'
+        options[:headers]['Transfer-Encoding'] = 'chunked'
+      elsif request.env['CONTENT_LENGTH']
+        options[:headers]['content-length'] = request.env['CONTENT_LENGTH'].to_s
+      else
+        raise 'Cannot proxy a non-chunked POST request without content-length'
+      end
+
+      options[:headers]['Content-Type'] = request.env['CONTENT_TYPE'] if request.env['CONTENT_TYPE']
+      options
+    end
  end
 end
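
The public entry point is still proxy_fetch; a sketch of calling it from a Rails controller after this refactor (the controller, action, and upstream URL are illustrative, not from the gem):

class UploadsController < ApplicationController
  include ThreadedProxy::Controller

  # body: :rack forwards the Rack request body stream via proxy_options_from_request
  # (shown above); the actual proxying runs on a background thread over the hijacked socket.
  def create
    proxy_fetch('http://uploads.internal/files', body: :rack) do |client|
      client.on_headers { |client_response| client_response['x-proxied-by'] = 'rails-threaded-proxy' }
      client.on_error { |e, responder| responder.render(status: 502, text: 'Upstream unavailable') }
    end
  end
end
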
data/lib/threaded_proxy/http.rb
CHANGED
@@ -20,7 +20,14 @@ module ThreadedProxy
 
     def request(*args)
       if block_given?
-        super
+        super do |res|
+          access_read(res)
+          yield(res).tap do
+            # In the block case, the response is hijacked _after_ the block is called
+            # to allow the block to read the response body if it wants
+            hijack_response(res)
+          end
+        end
       else
         hijack_response(super)
       end
@@ -30,8 +37,19 @@ module ThreadedProxy
 
     # We read the response ourselves; don't need net/http to try to read it again
     def hijack_response(res)
-      res.
+      access_read(res) unless res.respond_to?(:read?)
+      res.read = true
       res
     end
+
+    def access_read(res)
+      res.singleton_class.class_eval do
+        attr_writer :read
+
+        def read?
+          @read
+        end
+      end
+    end
   end
 end
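
access_read defines a read?/read= pair on just that one response instance via its singleton class; Client#start then uses read? (see the ResponseBodyAlreadyConsumedError raise above) to notice when a callback already consumed the body. A standalone sketch of the same singleton-class pattern on a plain object (names here are illustrative, not part of the gem):

# Mark one specific object as "consumed" without touching its class.
def track_read(obj)
  obj.singleton_class.class_eval do
    attr_writer :read

    def read?
      @read
    end
  end
end

response = Object.new
track_read(response)
response.read?     # => nil (falsy: not consumed yet)
response.read = true
response.read?     # => true
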
data/lib/threaded_proxy/socket_responder.rb
ADDED
@@ -0,0 +1,65 @@
+module ThreadedProxy
+  class SocketResponder
+    def initialize(socket)
+      @socket = socket
+    end
+
+    def render(options = {})
+      return false if @socket.closed?
+
+      status = options[:status] || 200
+      headers = options[:headers] || {}
+      body = options[:body]
+      json = options[:json]
+      text = options[:text]
+
+      if json
+        body = json.to_json
+        headers['Content-Type'] ||= 'application/json; charset=utf-8'
+      elsif text
+        body = text
+        headers['Content-Type'] ||= 'text/plain; charset=utf-8'
+      else
+        body ||= ''
+      end
+
+      response = ActionDispatch::Response.new(status, headers, [])
+      response.prepare!
+
+      # Build the HTTP response
+      response_str = "HTTP/1.1 #{response.status} #{response.message}\r\n"
+      response.headers.each do |key, value|
+        Array(value).each do |v|
+          response_str += "#{key}: #{v}\r\n"
+        end
+      end
+      response_str += "\r\n"
+
+      write(response_str)
+
+      if body.respond_to?(:read)
+        IO.copy_stream(body, @socket)
+      else
+        write(body)
+      end
+
+      close
+    end
+
+    def redirect_to(url)
+      render(status: 302, headers: { 'Location' => url })
+    end
+
+    def write(data)
+      @socket.write(data) unless @socket.closed?
+    end
+
+    def close
+      @socket.close unless @socket.closed?
+    end
+
+    def closed?
+      @socket.closed?
+    end
+  end
+end
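
SocketResponder is the object callbacks receive as their second argument: it writes a complete HTTP/1.1 response straight onto the hijacked socket and then closes it. A small sketch against a StringIO, mirroring how the specs below exercise it (the body and status are illustrative):

require 'stringio'
require 'rails-threaded-proxy' # assumes the gem is installed; loads SocketResponder

socket = StringIO.new
responder = ThreadedProxy::SocketResponder.new(socket)

# :json serializes the body and sets Content-Type; :text and :body behave as in the class above.
responder.render(status: 404, json: { error: 'not found' })

# render wrote a full response and closed the socket; StringIO#string still returns it.
socket.string # => "HTTP/1.1 404 Not Found\r\n...headers...\r\n\r\n{\"error\":\"not found\"}"
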
data/rails-threaded-proxy.gemspec
CHANGED
@@ -2,16 +2,16 @@
 # DO NOT EDIT THIS FILE DIRECTLY
 # Instead, edit Jeweler::Tasks in Rakefile, and run 'rake gemspec'
 # -*- encoding: utf-8 -*-
-# stub: rails-threaded-proxy 0.4.1 ruby lib
+# stub: rails-threaded-proxy 0.5.0 ruby lib
 
 Gem::Specification.new do |s|
   s.name = "rails-threaded-proxy".freeze
-  s.version = "0.4.1".freeze
+  s.version = "0.5.0".freeze
 
   s.required_rubygems_version = Gem::Requirement.new(">= 0".freeze) if s.respond_to? :required_rubygems_version=
   s.require_paths = ["lib".freeze]
   s.authors = ["Michael Nutt".freeze]
-  s.date = "2024-10-
+  s.date = "2024-10-18"
   s.description = "Threaded reverse proxy for Ruby on Rails".freeze
   s.email = "michael@nuttnet.net".freeze
   s.extra_rdoc_files = [
@@ -35,9 +35,11 @@ Gem::Specification.new do |s|
     "lib/threaded_proxy/client.rb",
     "lib/threaded_proxy/controller.rb",
     "lib/threaded_proxy/http.rb",
+    "lib/threaded_proxy/socket_responder.rb",
     "rails-threaded-proxy.gemspec",
     "spec/spec_helper.rb",
-    "spec/threaded_proxy/client_spec.rb"
+    "spec/threaded_proxy/client_spec.rb",
+    "spec/threaded_proxy/controller_spec.rb"
   ]
   s.homepage = "http://github.com/mnutt/rails-threaded-proxy".freeze
   s.licenses = ["MIT".freeze]
data/spec/threaded_proxy/client_spec.rb
CHANGED
@@ -5,6 +5,15 @@ require 'json'
 
 BACKEND_STUB_PORT = 38_293
 
+def parse_raw_response(raw_response)
+  status, rest = raw_response.split("\r\n", 2)
+  headers, body = rest.split("\r\n\r\n", 2)
+
+  parsed_headers = headers.split("\r\n").map { |h| h.split(': ', 2) }.to_h
+
+  [status, parsed_headers, body]
+end
+
 RSpec.describe ThreadedProxy::Client do
   before(:all) do
     @backend_server = WEBrick::HTTPServer.new(Port: BACKEND_STUB_PORT,
@@ -50,15 +59,199 @@ RSpec.describe ThreadedProxy::Client do
                                            body: 'hello world')
     client.start(socket)
 
-    status,
-    headers, body = rest.split("\r\n\r\n", 2)
+    status, headers, body = parse_raw_response(socket.string)
 
     parsed_body = JSON.parse(body)
-    parsed_headers = headers.split("\r\n").map { |h| h.split(': ', 2) }.to_h
 
     expect(status).to eq('HTTP/1.1 200 OK')
-    expect(
+    expect(headers['content-type']).to eq('application/json')
     expect(parsed_body['path']).to eq('/post')
     expect(parsed_body['headers']['content-length']).to eq(['11'])
   end
+
+  describe 'callbacks' do
+    describe 'on_headers' do
+      it 'proxies a request and modifies the response headers' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_headers do |response|
+            response['X-Test'] = 'test'
+          end
+        end
+        client.start(socket)
+
+        status, headers, body = parse_raw_response(socket.string)
+
+        expect(status).to eq('HTTP/1.1 200 OK')
+        expect(headers['x-test']).to eq('test')
+        expect(headers['connection']).to eq('close')
+        expect(body).to eq('Received request: /get')
+      end
+    end
+
+    describe 'on_complete' do
+      it 'fires when the request is successful' do
+        socket = StringIO.new
+        received_client_response = nil
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_complete do |client_response|
+            received_client_response = client_response
+          end
+        end
+        client.start(socket)
+
+        expect(received_client_response.code).to eq('200')
+      end
+    end
+
+    describe 'on_error' do
+      it 'fires when the request is unsuccessful' do
+        socket = StringIO.new
+        received_error = nil
+
+        client = ThreadedProxy::Client.new('http://localhost:9999') do |config|
+          config.on_error do |e|
+            received_error = e
+          end
+        end
+        client.start(socket)
+
+        expect(received_error).to be_a_kind_of(Errno::ECONNREFUSED)
+
+        status, headers, body = parse_raw_response(socket.string)
+        expect(status).to eq('HTTP/1.1 500 Internal Server Error')
+        expect(headers['Content-Type']).to eq('text/plain; charset=utf-8')
+        expect(body).to eq('Internal Server Error')
+      end
+
+      it 'returns custom response on error' do
+        socket = StringIO.new
+        received_error = nil
+
+        client = ThreadedProxy::Client.new('http://localhost:9999') do |config|
+          config.on_error do |e, response|
+            response.render status: 404, text: 'Custom error'
+            received_error = e
+          end
+        end
+        client.start(socket)
+
+        status, headers, body = parse_raw_response(socket.string)
+        expect(status).to eq('HTTP/1.1 404 Not Found')
+        expect(headers['Content-Type']).to eq('text/plain; charset=utf-8')
+        expect(body).to eq('Custom error')
+        expect(received_error).to be_a_kind_of(Errno::ECONNREFUSED)
+      end
+    end
+
+    describe 'on_response' do
+      it 'proxies a request and lets caller send response' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |client_response, response|
+            response.render status: 200, json: { body: client_response.body }, headers: { 'x-passed': 'yes' }
+          end
+        end
+        client.start(socket)
+
+        status, headers, body = parse_raw_response(socket.string)
+
+        parsed_body = JSON.parse(body)
+
+        expect(status).to eq('HTTP/1.1 200 OK')
+        expect(headers['Content-Type']).to eq('application/json; charset=utf-8')
+        expect(headers['x-passed']).to eq('yes')
+        expect(parsed_body['body']).to eq('Received request: /get')
+      end
+
+      it 'accepts IO objects as the body' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |_client_response, response|
+            response.render status: 200, body: StringIO.new('this is IO')
+          end
+        end
+        client.start(socket)
+
+        status, _headers, body = parse_raw_response(socket.string)
+        expect(status).to eq('HTTP/1.1 200 OK')
+        expect(body).to eq('this is IO')
+      end
+
+      it 'accepts json body' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |_client_response, response|
+            response.render status: 200, json: { key: 'value' }
+          end
+        end
+        client.start(socket)
+
+        status, headers, body = parse_raw_response(socket.string)
+
+        parsed_body = JSON.parse(body)
+
+        expect(status).to eq('HTTP/1.1 200 OK')
+        expect(headers['Content-Type']).to eq('application/json; charset=utf-8')
+        expect(parsed_body['key']).to eq('value')
+      end
+
+      it 'redirects to a URL' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |_client_response, response|
+            response.redirect_to('http://example.com')
+          end
+        end
+        client.start(socket)
+
+        status, headers, _body = parse_raw_response(socket.string)
+
+        expect(status).to eq('HTTP/1.1 302 Found')
+        expect(headers['Location']).to eq('http://example.com')
+      end
+
+      it 'handles errors in on_response' do
+        socket = StringIO.new
+        received_error = nil
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |_client_response, _response|
+            raise 'error in on_response'
+          end
+
+          config.on_error do |e|
+            received_error = e
+          end
+        end
+
+        client.start(socket)
+
+        status, headers, body = parse_raw_response(socket.string)
+
+        expect(status).to eq('HTTP/1.1 500 Internal Server Error')
+        expect(headers['Content-Type']).to eq('text/plain; charset=utf-8')
+        expect(body).to eq('Internal Server Error')
+        expect(received_error.message).to eq('error in on_response')
+      end
+
+      it 'errors if on_response reads the body but does not render a response' do
+        socket = StringIO.new
+
+        client = ThreadedProxy::Client.new("http://localhost:#{BACKEND_STUB_PORT}/get") do |config|
+          config.on_response do |client_response, _response|
+            client_response.body
+          end
+        end
+
+        expect { client.start(socket) }.to raise_error(ThreadedProxy::ResponseBodyAlreadyConsumedError)
+      end
+    end
+  end
 end
data/spec/threaded_proxy/controller_spec.rb
ADDED
@@ -0,0 +1,50 @@
+# frozen_string_literal: true
+
+require 'rails-threaded-proxy'
+
+class TestController
+  include ThreadedProxy::Controller
+
+  attr_accessor :request
+end
+
+RSpec.describe ThreadedProxy::Controller do
+  let(:request) { double(env: {}) }
+  let(:controller) do
+    TestController.new.tap do |controller|
+      controller.request = request
+    end
+  end
+
+  describe '#proxy_options_from_request' do
+    subject { controller.send(:proxy_options_from_request) }
+    let(:body_stream) { StringIO.new('HELLO') }
+
+    describe 'when the request is chunked' do
+      let(:request) { double(body_stream:, env: { 'HTTP_TRANSFER_ENCODING' => 'chunked' }) }
+
+      it 'sets the Transfer-Encoding header' do
+        expect(subject).to include(headers: { 'Transfer-Encoding' => 'chunked' },
+                                   body: body_stream)
+      end
+    end
+
+    describe 'when the request is not chunked' do
+      let(:request) { double(body_stream:, env: { 'CONTENT_LENGTH' => '5', 'CONTENT_TYPE' => 'application/json' }) }
+
+      it 'sets the Content-Length header' do
+        expect(subject).to include(headers: { 'content-length' => '5',
+                                              'Content-Type' => 'application/json' },
+                                   body: body_stream)
+      end
+    end
+
+    describe 'when the request is not chunked and has no content-length' do
+      let(:request) { double(body_stream:, env: {}) }
+
+      it 'raises an error' do
+        expect { subject }.to raise_error('Cannot proxy a non-chunked POST request without content-length')
+      end
+    end
+  end
+end
metadata
CHANGED
@@ -1,14 +1,14 @@
 --- !ruby/object:Gem::Specification
 name: rails-threaded-proxy
 version: !ruby/object:Gem::Version
-  version: 0.4.1
+  version: 0.5.0
 platform: ruby
 authors:
 - Michael Nutt
 autorequire:
 bindir: bin
 cert_chain: []
-date: 2024-10-
+date: 2024-10-18 00:00:00.000000000 Z
 dependencies:
 - !ruby/object:Gem::Dependency
   name: actionpack
@@ -160,9 +160,11 @@ files:
 - lib/threaded_proxy/client.rb
 - lib/threaded_proxy/controller.rb
 - lib/threaded_proxy/http.rb
+- lib/threaded_proxy/socket_responder.rb
 - rails-threaded-proxy.gemspec
 - spec/spec_helper.rb
 - spec/threaded_proxy/client_spec.rb
+- spec/threaded_proxy/controller_spec.rb
 homepage: http://github.com/mnutt/rails-threaded-proxy
 licenses:
 - MIT