unicorn-rupcio 6.1.0
This diff represents the content of publicly available package versions that have been released to one of the supported registries. It is provided for informational purposes only and reflects the changes between package versions as they appear in their respective public registries.
- checksums.yaml +7 -0
- data/.CHANGELOG.old +25 -0
- data/.document +28 -0
- data/.gitattributes +5 -0
- data/.gitignore +25 -0
- data/.mailmap +26 -0
- data/.manifest +144 -0
- data/.olddoc.yml +25 -0
- data/Application_Timeouts +77 -0
- data/CONTRIBUTORS +39 -0
- data/COPYING +674 -0
- data/DESIGN +99 -0
- data/Documentation/.gitignore +3 -0
- data/Documentation/unicorn.1 +222 -0
- data/Documentation/unicorn_rails.1 +207 -0
- data/FAQ +70 -0
- data/GIT-VERSION-FILE +1 -0
- data/GIT-VERSION-GEN +39 -0
- data/GNUmakefile +318 -0
- data/HACKING +117 -0
- data/ISSUES +102 -0
- data/KNOWN_ISSUES +79 -0
- data/LICENSE +67 -0
- data/Links +58 -0
- data/PHILOSOPHY +139 -0
- data/README +165 -0
- data/Rakefile +17 -0
- data/SIGNALS +123 -0
- data/Sandbox +104 -0
- data/TODO +1 -0
- data/TUNING +119 -0
- data/archive/.gitignore +3 -0
- data/archive/slrnpull.conf +4 -0
- data/bin/unicorn +129 -0
- data/bin/unicorn_rails +210 -0
- data/examples/big_app_gc.rb +3 -0
- data/examples/echo.ru +27 -0
- data/examples/init.sh +102 -0
- data/examples/logger_mp_safe.rb +26 -0
- data/examples/logrotate.conf +44 -0
- data/examples/nginx.conf +156 -0
- data/examples/unicorn.conf.minimal.rb +14 -0
- data/examples/unicorn.conf.rb +111 -0
- data/examples/unicorn.socket +11 -0
- data/examples/unicorn@.service +40 -0
- data/ext/unicorn_http/CFLAGS +13 -0
- data/ext/unicorn_http/c_util.h +115 -0
- data/ext/unicorn_http/common_field_optimization.h +128 -0
- data/ext/unicorn_http/epollexclusive.h +128 -0
- data/ext/unicorn_http/ext_help.h +38 -0
- data/ext/unicorn_http/extconf.rb +40 -0
- data/ext/unicorn_http/global_variables.h +97 -0
- data/ext/unicorn_http/httpdate.c +91 -0
- data/ext/unicorn_http/unicorn_http.c +4348 -0
- data/ext/unicorn_http/unicorn_http.rl +1054 -0
- data/ext/unicorn_http/unicorn_http_common.rl +76 -0
- data/lib/unicorn/app/old_rails/static.rb +60 -0
- data/lib/unicorn/app/old_rails.rb +36 -0
- data/lib/unicorn/cgi_wrapper.rb +148 -0
- data/lib/unicorn/configurator.rb +749 -0
- data/lib/unicorn/const.rb +22 -0
- data/lib/unicorn/http_request.rb +180 -0
- data/lib/unicorn/http_response.rb +95 -0
- data/lib/unicorn/http_server.rb +860 -0
- data/lib/unicorn/launcher.rb +63 -0
- data/lib/unicorn/oob_gc.rb +82 -0
- data/lib/unicorn/preread_input.rb +34 -0
- data/lib/unicorn/select_waiter.rb +7 -0
- data/lib/unicorn/socket_helper.rb +186 -0
- data/lib/unicorn/stream_input.rb +152 -0
- data/lib/unicorn/tee_input.rb +132 -0
- data/lib/unicorn/tmpio.rb +34 -0
- data/lib/unicorn/util.rb +91 -0
- data/lib/unicorn/version.rb +1 -0
- data/lib/unicorn/worker.rb +166 -0
- data/lib/unicorn.rb +137 -0
- data/man/man1/unicorn.1 +222 -0
- data/man/man1/unicorn_rails.1 +207 -0
- data/setup.rb +1587 -0
- data/t/.gitignore +4 -0
- data/t/GNUmakefile +5 -0
- data/t/README +49 -0
- data/t/active-unix-socket.t +110 -0
- data/t/back-out-of-upgrade.t +44 -0
- data/t/bin/unused_listen +40 -0
- data/t/client_body_buffer_size.ru +15 -0
- data/t/client_body_buffer_size.t +79 -0
- data/t/detach.ru +12 -0
- data/t/env.ru +4 -0
- data/t/fails-rack-lint.ru +6 -0
- data/t/heartbeat-timeout.ru +13 -0
- data/t/heartbeat-timeout.t +60 -0
- data/t/integration.ru +129 -0
- data/t/integration.t +509 -0
- data/t/lib.perl +309 -0
- data/t/listener_names.ru +5 -0
- data/t/my-tap-lib.sh +201 -0
- data/t/oob_gc.ru +18 -0
- data/t/oob_gc_path.ru +18 -0
- data/t/pid.ru +4 -0
- data/t/preread_input.ru +23 -0
- data/t/reload-bad-config.t +49 -0
- data/t/reopen-logs.ru +14 -0
- data/t/reopen-logs.t +36 -0
- data/t/t0010-reap-logging.sh +55 -0
- data/t/t0012-reload-empty-config.sh +86 -0
- data/t/t0013-rewindable-input-false.sh +24 -0
- data/t/t0013.ru +13 -0
- data/t/t0014-rewindable-input-true.sh +24 -0
- data/t/t0014.ru +13 -0
- data/t/t0015-configurator-internals.sh +25 -0
- data/t/t0020-at_exit-handler.sh +49 -0
- data/t/t0021-process_detach.sh +29 -0
- data/t/t0022-listener_names-preload_app.sh +32 -0
- data/t/t0300-no-default-middleware.sh +20 -0
- data/t/t0301-no-default-middleware-ignored-in-config.sh +25 -0
- data/t/t0301.ru +14 -0
- data/t/t9001-oob_gc.sh +47 -0
- data/t/t9002-oob_gc-path.sh +75 -0
- data/t/test-lib.sh +125 -0
- data/t/winch_ttin.t +64 -0
- data/t/working_directory.t +86 -0
- data/test/aggregate.rb +16 -0
- data/test/benchmark/README +60 -0
- data/test/benchmark/dd.ru +19 -0
- data/test/benchmark/ddstream.ru +51 -0
- data/test/benchmark/readinput.ru +41 -0
- data/test/benchmark/stack.ru +9 -0
- data/test/benchmark/uconnect.perl +66 -0
- data/test/exec/README +5 -0
- data/test/exec/test_exec.rb +1030 -0
- data/test/test_helper.rb +307 -0
- data/test/unit/test_configurator.rb +176 -0
- data/test/unit/test_droplet.rb +29 -0
- data/test/unit/test_http_parser.rb +885 -0
- data/test/unit/test_http_parser_ng.rb +715 -0
- data/test/unit/test_server.rb +245 -0
- data/test/unit/test_signals.rb +189 -0
- data/test/unit/test_socket_helper.rb +160 -0
- data/test/unit/test_stream_input.rb +211 -0
- data/test/unit/test_tee_input.rb +304 -0
- data/test/unit/test_util.rb +132 -0
- data/test/unit/test_waiter.rb +35 -0
- data/unicorn.gemspec +49 -0
- metadata +266 -0
@@ -0,0 +1,885 @@
+# -*- encoding: binary -*-
+# frozen_string_literal: false
+
+# Copyright (c) 2005 Zed A. Shaw
+# You can redistribute it and/or modify it under the same terms as Ruby 1.8 or
+# the GPLv2+ (GPLv3+ preferred)
+#
+# Additional work donated by contributors. See git history
+# for more information.
+
+require './test/test_helper'
+
+include Unicorn
+
+class HttpParserTest < Test::Unit::TestCase
+
+  def test_parse_simple
+    parser = HttpParser.new
+    req = parser.env
+    http = parser.buf
+    http << "GET / HTTP/1.1\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '', http
+
+    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
+    assert_equal '/', req['REQUEST_PATH']
+    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
+    assert_equal '/', req['REQUEST_URI']
+    assert_equal 'GET', req['REQUEST_METHOD']
+    assert_nil req['FRAGMENT']
+    assert_equal '', req['QUERY_STRING']
+
+    assert parser.keepalive?
+    parser.clear
+    req.clear
+
+    http << "G"
+    assert_nil parser.parse
+    assert_equal "G", http
+    assert req.empty?
+
+    # try parsing again to ensure we were reset correctly
+    http << "ET /hello-world HTTP/1.1\r\n\r\n"
+    assert parser.parse
+
+    assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
+    assert_equal '/hello-world', req['REQUEST_PATH']
+    assert_equal 'HTTP/1.1', req['HTTP_VERSION']
+    assert_equal '/hello-world', req['REQUEST_URI']
+    assert_equal 'GET', req['REQUEST_METHOD']
+    assert_nil req['FRAGMENT']
+    assert_equal '', req['QUERY_STRING']
+    assert_equal '', http
+    assert parser.keepalive?
+  end
+
+  def test_tab_lws
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost:\tfoo.bar\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert_equal "foo.bar", req['HTTP_HOST']
+  end
+
+  def test_connection_close_no_ka
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nConnection: close\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert_equal "GET", req['REQUEST_METHOD']
+    assert ! parser.keepalive?
+  end
+
+  def test_connection_keep_alive_ka
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "HEAD / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert parser.keepalive?
+  end
+
+  def test_connection_keep_alive_no_body
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "POST / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert parser.keepalive?
+  end
+
+  def test_connection_keep_alive_no_body_empty
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "POST / HTTP/1.1\r\n" \
+                  "Content-Length: 0\r\n" \
+                  "Connection: keep-alive\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert parser.keepalive?
+  end
+
+  def test_connection_keep_alive_ka_bad_version
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"
+    assert_equal req.object_id, parser.parse.object_id
+    assert parser.keepalive?
+  end
+
+  def test_parse_server_host_default_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: foo\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'foo', req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  def test_parse_server_host_alt_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: foo:999\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'foo', req['SERVER_NAME']
+    assert_equal '999', req['SERVER_PORT']
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  def test_parse_server_host_empty_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'foo', req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  def test_parse_server_host_xfp_https
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n" \
+                  "X-Forwarded-Proto: https\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'foo', req['SERVER_NAME']
+    assert_equal '443', req['SERVER_PORT']
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  def test_parse_xfp_https_chained
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\n" \
+                  "X-Forwarded-Proto: https,http\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '443', req['SERVER_PORT'], req.inspect
+    assert_equal 'https', req['rack.url_scheme'], req.inspect
+    assert_equal '', parser.buf
+  end
+
+  def test_parse_xfp_https_chained_backwards
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\n" \
+                  "X-Forwarded-Proto: http,https\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '80', req['SERVER_PORT'], req.inspect
+    assert_equal 'http', req['rack.url_scheme'], req.inspect
+    assert_equal '', parser.buf
+  end
+
+  def test_parse_xfp_gopher_is_ignored
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\n" \
+                  "X-Forwarded-Proto: gopher\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '80', req['SERVER_PORT'], req.inspect
+    assert_equal 'http', req['rack.url_scheme'], req.inspect
+    assert_equal '', parser.buf
+  end
+
+  def test_parse_x_forwarded_ssl_on
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\n" \
+                  "X-Forwarded-Ssl: on\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '443', req['SERVER_PORT'], req.inspect
+    assert_equal 'https', req['rack.url_scheme'], req.inspect
+    assert_equal '', parser.buf
+  end
+
+  def test_parse_x_forwarded_ssl_off
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.0\r\nX-Forwarded-Ssl: off\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '80', req['SERVER_PORT'], req.inspect
+    assert_equal 'http', req['rack.url_scheme'], req.inspect
+    assert_equal '', parser.buf
+  end
+
+  def test_parse_strange_headers
+    parser = HttpParser.new
+    req = parser.env
+    should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
+    parser.buf << should_be_good
+    assert_equal req, parser.parse
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  # legacy test case from Mongrel that we never supported before...
+  # I still consider Pound irrelevant, unfortunately stupid clients that
+  # send extremely big headers do exist and they've managed to find Unicorn...
+  def test_nasty_pound_header
+    parser = HttpParser.new
+    nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
+    req = parser.env
+    parser.buf << nasty_pound_header.dup
+
+    assert nasty_pound_header =~ /(-----BEGIN .*--END CERTIFICATE-----)/m
+    expect = $1.dup
+    expect.gsub!(/\r\n\t/, ' ')
+    assert_equal req, parser.parse
+    assert_equal '', parser.buf
+    assert_equal expect, req['HTTP_X_SSL_BULLSHIT']
+  end
+
+  def test_multiline_header_0d0a
+    parser = HttpParser.new
+    parser.buf << "GET / HTTP/1.0\r\n" \
+                  "X-Multiline-Header: foo bar\r\n\tcha cha\r\n\tzha zha\r\n\r\n"
+    req = parser.env
+    assert_equal req, parser.parse
+    assert_equal 'foo bar cha cha zha zha', req['HTTP_X_MULTILINE_HEADER']
+  end
+
+  def test_multiline_header_0a
+    parser = HttpParser.new
+    parser.buf << "GET / HTTP/1.0\n" \
+                  "X-Multiline-Header: foo bar\n\tcha cha\n\tzha zha\n\n"
+    req = parser.env
+    assert_equal req, parser.parse
+    assert_equal 'foo bar cha cha zha zha', req['HTTP_X_MULTILINE_HEADER']
+  end
+
+  def test_continuation_eats_leading_spaces
+    parser = HttpParser.new
+    header = "GET / HTTP/1.1\r\n" \
+             "X-ASDF: \r\n" \
+             "\t\r\n" \
+             " \r\n" \
+             " ASDF\r\n\r\n"
+    parser.buf << header
+    req = parser.env
+    assert_equal req, parser.parse
+    assert_equal '', parser.buf
+    assert_equal 'ASDF', req['HTTP_X_ASDF']
+  end
+
+  def test_continuation_eats_scattered_leading_spaces
+    parser = HttpParser.new
+    header = "GET / HTTP/1.1\r\n" \
+             "X-ASDF: hi\r\n" \
+             " y\r\n" \
+             "\t\r\n" \
+             " x\r\n" \
+             " ASDF\r\n\r\n"
+    req = parser.env
+    parser.buf << header
+    assert_equal req, parser.parse
+    assert_equal '', parser.buf
+    assert_equal 'hi y x ASDF', req['HTTP_X_ASDF']
+  end
+
+  def test_continuation_eats_trailing_spaces
+    parser = HttpParser.new
+    header = "GET / HTTP/1.1\r\n" \
+             "X-ASDF: \r\n" \
+             "\t\r\n" \
+             " b \r\n" \
+             " ASDF\r\n\r\n"
+    parser.buf << header
+    req = parser.env
+    assert_equal req, parser.parse
+    assert_equal '', parser.buf
+    assert_equal 'b ASDF', req['HTTP_X_ASDF']
+  end
+
+  def test_continuation_with_absolute_uri_and_ignored_host_header
+    parser = HttpParser.new
+    header = "GET http://example.com/ HTTP/1.1\r\n" \
+             "Host: \r\n" \
+             " YHBT.net\r\n" \
+             "\r\n"
+    parser.buf << header
+    req = parser.env
+    assert_equal req, parser.parse
+    assert_equal 'example.com', req['HTTP_HOST']
+  end
+
+  # this may seem to be testing more of an implementation detail, but
+  # it also helps ensure we're safe in the presence of multiple parsers
+  # in case we ever go multithreaded/evented...
+  def test_resumable_continuations
+    nr = 1000
+    header = "GET / HTTP/1.1\r\n" \
+             "X-ASDF: \r\n" \
+             " hello\r\n"
+    tmp = []
+    nr.times { |i|
+      parser = HttpParser.new
+      req = parser.env
+      parser.buf << "#{header} #{i}\r\n"
+      assert parser.parse.nil?
+      asdf = req['HTTP_X_ASDF']
+      assert_equal "hello #{i}", asdf
+      tmp << [ parser, asdf ]
+    }
+    tmp.each_with_index { |(parser, asdf), i|
+      parser.buf << " .\r\n\r\n"
+      assert parser.parse
+      assert_equal "hello #{i} .", asdf
+    }
+  end
+
+  def test_invalid_continuation
+    parser = HttpParser.new
+    header = "GET / HTTP/1.1\r\n" \
+             " y\r\n" \
+             "Host: hello\r\n" \
+             "\r\n"
+    parser.buf << header
+    assert_raises(HttpParserError) { parser.parse }
+  end
+
+  def test_parse_ie6_urls
+    %w(/some/random/path"
+       /some/random/path>
+       /some/random/path<
+       /we/love/you/ie6?q=<"">
+       /url?<="&>="
+       /mal"formed"?
+    ).each do |path|
+      parser = HttpParser.new
+      req = parser.env
+      sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n)
+      assert_equal req, parser.headers(req, sorta_safe)
+      assert_equal path, req['REQUEST_URI']
+      assert_equal '', sorta_safe
+      assert parser.keepalive?
+    end
+  end
+
+  def test_parse_error
+    parser = HttpParser.new
+    req = parser.env
+    bad_http = "GET / SsUTF/1.1"
+
+    assert_raises(HttpParserError) { parser.headers(req, bad_http) }
+
+    # make sure we can recover
+    parser.clear
+    req.clear
+    assert_equal req, parser.headers(req, "GET / HTTP/1.0\r\n\r\n")
+    assert ! parser.keepalive?
+  end
+
+  def test_piecemeal
+    parser = HttpParser.new
+    req = parser.env
+    http = "GET"
+    assert_nil parser.headers(req, http)
+    assert_nil parser.headers(req, http)
+    assert_nil parser.headers(req, http << " / HTTP/1.0")
+    assert_equal '/', req['REQUEST_PATH']
+    assert_equal '/', req['REQUEST_URI']
+    assert_equal 'GET', req['REQUEST_METHOD']
+    assert_nil parser.headers(req, http << "\r\n")
+    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+    assert_nil parser.headers(req, http << "\r")
+    assert_equal req, parser.headers(req, http << "\n")
+    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+    assert_nil req['FRAGMENT']
+    assert_equal '', req['QUERY_STRING']
+    assert_equal "", http
+    assert ! parser.keepalive?
+  end
+
+  # not common, but underscores do appear in practice
+  def test_absolute_uri_underscores
+    parser = HttpParser.new
+    req = parser.env
+    http = "GET http://under_score.example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+    parser.buf << http
+    assert_equal req, parser.parse
+    assert_equal 'http', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'under_score.example.com', req['HTTP_HOST']
+    assert_equal 'under_score.example.com', req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert ! parser.keepalive?
+  end
+
+  # some dumb clients add users because they're stupid
+  def test_absolute_uri_w_user
+    parser = HttpParser.new
+    req = parser.env
+    http = "GET http://user%20space@example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+    parser.buf << http
+    assert_equal req, parser.parse
+    assert_equal 'http', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'example.com', req['HTTP_HOST']
+    assert_equal 'example.com', req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert ! parser.keepalive?
+  end
+
+  # since Mongrel supported anything URI.parse supported, we're stuck
+  # supporting everything URI.parse supports
+  def test_absolute_uri_uri_parse
+    "#{URI::REGEXP::PATTERN::UNRESERVED};:&=+$,".split(//).each do |char|
+      parser = HttpParser.new
+      req = parser.env
+      http = "GET http://#{char}@example.com/ HTTP/1.0\r\n\r\n"
+      assert_equal req, parser.headers(req, http)
+      assert_equal 'http', req['rack.url_scheme']
+      assert_equal '/', req['REQUEST_URI']
+      assert_equal '/', req['REQUEST_PATH']
+      assert_equal '', req['QUERY_STRING']
+
+      assert_equal 'example.com', req['HTTP_HOST']
+      assert_equal 'example.com', req['SERVER_NAME']
+      assert_equal '80', req['SERVER_PORT']
+      assert_equal "", http
+      assert ! parser.keepalive?
+    end
+  end
+
+  def test_absolute_uri
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET http://example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'http', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'example.com', req['HTTP_HOST']
+    assert_equal 'example.com', req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert ! parser.keepalive?
+  end
+
+  # X-Forwarded-Proto is not in rfc2616, absolute URIs are, however...
+  def test_absolute_uri_https
+    parser = HttpParser.new
+    req = parser.env
+    http = "GET https://example.com/foo?q=bar HTTP/1.1\r\n" \
+           "X-Forwarded-Proto: http\r\n\r\n"
+    parser.buf << http
+    assert_equal req, parser.parse
+    assert_equal 'https', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'example.com', req['HTTP_HOST']
+    assert_equal 'example.com', req['SERVER_NAME']
+    assert_equal '443', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert parser.keepalive?
+  end
+
+  # Host: header should be ignored for absolute URIs
+  def test_absolute_uri_with_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET http://example.com:8080/foo?q=bar HTTP/1.2\r\n" \
+                  "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'http', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'example.com:8080', req['HTTP_HOST']
+    assert_equal 'example.com', req['SERVER_NAME']
+    assert_equal '8080', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_absolute_uri_with_empty_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET https://example.com:/foo?q=bar HTTP/1.1\r\n" \
+                  "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal 'https', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    assert_equal 'example.com:', req['HTTP_HOST']
+    assert_equal 'example.com', req['SERVER_NAME']
+    assert_equal '443', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_absolute_ipv6_uri
+    parser = HttpParser.new
+    req = parser.env
+    url = "http://[::1]/foo?q=bar"
+    http = "GET #{url} HTTP/1.1\r\n" \
+           "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.headers(req, http)
+    assert_equal 'http', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    uri = URI.parse(url)
+    assert_equal "[::1]", uri.host,
+                 "URI.parse changed upstream for #{url}? host=#{uri.host}"
+    assert_equal "[::1]", req['HTTP_HOST']
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "", http
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_absolute_ipv6_uri_alpha
+    parser = HttpParser.new
+    req = parser.env
+    url = "http://[::a]/"
+    http = "GET #{url} HTTP/1.1\r\n" \
+           "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.headers(req, http)
+    assert_equal 'http', req['rack.url_scheme']
+
+    uri = URI.parse(url)
+    assert_equal "[::a]", uri.host,
+                 "URI.parse changed upstream for #{url}? host=#{uri.host}"
+    assert_equal "[::a]", req['HTTP_HOST']
+    assert_equal "[::a]", req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+  end
+
+  def test_absolute_ipv6_uri_alpha_2
+    parser = HttpParser.new
+    req = parser.env
+    url = "http://[::B]/"
+    http = "GET #{url} HTTP/1.1\r\n" \
+           "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.headers(req, http)
+    assert_equal 'http', req['rack.url_scheme']
+
+    uri = URI.parse(url)
+    assert_equal "[::B]", uri.host,
+                 "URI.parse changed upstream for #{url}? host=#{uri.host}"
+    assert_equal "[::B]", req['HTTP_HOST']
+    assert_equal "[::B]", req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+  end
+
+  def test_absolute_ipv6_uri_with_empty_port
+    parser = HttpParser.new
+    req = parser.env
+    url = "https://[::1]:/foo?q=bar"
+    http = "GET #{url} HTTP/1.1\r\n" \
+           "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.headers(req, http)
+    assert_equal 'https', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    uri = URI.parse(url)
+    assert_equal "[::1]", uri.host,
+                 "URI.parse changed upstream for #{url}? host=#{uri.host}"
+    assert_equal "[::1]:", req['HTTP_HOST']
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '443', req['SERVER_PORT']
+    assert_equal "", http
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_absolute_ipv6_uri_with_port
+    parser = HttpParser.new
+    req = parser.env
+    url = "https://[::1]:666/foo?q=bar"
+    http = "GET #{url} HTTP/1.1\r\n" \
+           "Host: bad.example.com\r\n\r\n"
+    assert_equal req, parser.headers(req, http)
+    assert_equal 'https', req['rack.url_scheme']
+    assert_equal '/foo?q=bar', req['REQUEST_URI']
+    assert_equal '/foo', req['REQUEST_PATH']
+    assert_equal 'q=bar', req['QUERY_STRING']
+
+    uri = URI.parse(url)
+    assert_equal "[::1]", uri.host,
+                 "URI.parse changed upstream for #{url}? host=#{uri.host}"
+    assert_equal "[::1]:666", req['HTTP_HOST']
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '666', req['SERVER_PORT']
+    assert_equal "", http
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_ipv6_host_header
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\n" \
+                  "Host: [::1]\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal "[::1]", req['HTTP_HOST']
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "", parser.buf
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_ipv6_host_header_with_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\n" \
+                  "Host: [::1]:666\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '666', req['SERVER_PORT']
+    assert_equal "[::1]:666", req['HTTP_HOST']
+    assert_equal "", parser.buf
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_ipv6_host_header_with_empty_port
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: [::1]:\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal "[::1]", req['SERVER_NAME']
+    assert_equal '80', req['SERVER_PORT']
+    assert_equal "[::1]:", req['HTTP_HOST']
+    assert_equal "", parser.buf
+    assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  # XXX Highly unlikely..., just make sure we don't segfault or assert on it
+  def test_broken_ipv6_host_header
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "GET / HTTP/1.1\r\nHost: [::1:\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal "[", req['SERVER_NAME']
+    assert_equal ':1:', req['SERVER_PORT']
+    assert_equal "[::1:", req['HTTP_HOST']
+    assert_equal "", parser.buf
+  end
+
+  def test_put_body_oneshot
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "PUT / HTTP/1.0\r\nContent-Length: 5\r\n\r\nabcde"
+    assert_equal req, parser.parse
+    assert_equal '/', req['REQUEST_PATH']
+    assert_equal '/', req['REQUEST_URI']
+    assert_equal 'PUT', req['REQUEST_METHOD']
+    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+    assert_equal "abcde", parser.buf
+    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_put_body_later
+    parser = HttpParser.new
+    req = parser.env
+    parser.buf << "PUT /l HTTP/1.0\r\nContent-Length: 5\r\n\r\n"
+    assert_equal req, parser.parse
+    assert_equal '/l', req['REQUEST_PATH']
+    assert_equal '/l', req['REQUEST_URI']
+    assert_equal 'PUT', req['REQUEST_METHOD']
+    assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+    assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+    assert_equal "", parser.buf
+    assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+  end
+
+  def test_unknown_methods
+    %w(GETT HEADR XGET XHEAD).each { |m|
+      parser = HttpParser.new
+      req = parser.env
+      s = "#{m} /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
+      ok = parser.headers(req, s)
+      assert ok
+      assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
+      assert_equal 'posts-17408', req['FRAGMENT']
+      assert_equal 'page=1', req['QUERY_STRING']
+      assert_equal "", s
+      assert_equal m, req['REQUEST_METHOD']
+      assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+    }
+  end
+
+  def test_fragment_in_uri
+    parser = HttpParser.new
+    req = parser.env
+    get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
+    parser.buf << get
+    ok = parser.parse
+    assert ok
+    assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
+    assert_equal 'posts-17408', req['FRAGMENT']
+    assert_equal 'page=1', req['QUERY_STRING']
+    assert_equal '', parser.buf
+    assert parser.keepalive?
+  end
+
+  # lame random garbage maker
+  def rand_data(min, max, readable=true)
+    count = min + ((rand(max)+1) *10).to_i
+    res = count.to_s + "/"
+
+    if readable
+      res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
+    else
+      res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
+    end
+
+    return res
+  end
+
+
+  def test_horrible_queries
+    parser = HttpParser.new
+
+    # then that large header names are caught
+    10.times do |c|
+      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
+      assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+        parser.buf << get
+        parser.parse
+        parser.clear
+      end
+    end
+
+    # then that large mangled field values are caught
+    10.times do |c|
+      get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+      assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+        parser.buf << get
+        parser.parse
+        parser.clear
+      end
+    end
+
+    # then large headers are rejected too
+    get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
+    get << "X-Test: test\r\n" * (80 * 1024)
+    parser.buf << get
+    assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+      parser.parse
+    end
+    parser.clear
+
+    # finally just that random garbage gets blocked all the time
+    10.times do |c|
+      get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+      assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+        parser.buf << get
+        parser.parse
+        parser.clear
+      end
+    end
+
+  end
+
+  def test_leading_tab
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost:\texample.com\r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal 'example.com', parser.env['HTTP_HOST']
+  end
+
+  def test_trailing_whitespace
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: example.com \r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal 'example.com', parser.env['HTTP_HOST']
+  end
+
+  def test_trailing_tab
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: example.com\t\r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal 'example.com', parser.env['HTTP_HOST']
+  end
+
+  def test_trailing_multiple_linear_whitespace
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: example.com\t \t \t\r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal 'example.com', parser.env['HTTP_HOST']
+  end
+
+  def test_embedded_linear_whitespace_ok
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nX-Space: hello\t world\t \r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal "hello\t world", parser.env["HTTP_X_SPACE"]
+  end
+
+  def test_null_byte_header
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: \0\r\n\r\n"
+    assert_raises(HttpParserError) { parser.add_parse(get) }
+  end
+
+  def test_null_byte_in_middle
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: hello\0world\r\n\r\n"
+    assert_raises(HttpParserError) { parser.add_parse(get) }
+  end
+
+  def test_null_byte_at_end
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: hello\0\r\n\r\n"
+    assert_raises(HttpParserError) { parser.add_parse(get) }
+  end
+
+  def test_empty_header
+    parser = HttpParser.new
+    get = "GET / HTTP/1.1\r\nHost: \r\n\r\n"
+    assert parser.add_parse(get)
+    assert_equal '', parser.env['HTTP_HOST']
+  end
+
+  def test_memsize
+    require 'objspace'
+    if ObjectSpace.respond_to?(:memsize_of)
+      n = ObjectSpace.memsize_of(Unicorn::HttpParser.new)
+      assert_kind_of Integer, n
+      # need to update this when 128-bit machines come out
+      # n.b. actual struct size on 64-bit is 56 bytes + 40 bytes for RVALUE
+      # Ruby <= 2.2 objspace did not count the 40-byte RVALUE, 2.3 does.
+      assert_operator n, :<=, 96
+      assert_operator n, :>, 0
+    end
+  rescue LoadError
+    # not all Ruby implementations have objspace
+  end
+
+  def test_dedupe
+    parser = HttpParser.new
+    # n.b. String#freeze optimization doesn't work under modern test-unit
+    exp = -'HTTP_HOST'
+    get = "GET / HTTP/1.1\r\nHost: example.com\r\nHavpbea-fhpxf: true\r\n\r\n"
+    assert parser.add_parse(get)
+    key = parser.env.keys.detect { |k| k == exp }
+    assert_same exp, key
+
+    if RUBY_VERSION.to_r >= 2.6 # 2.6.0-rc1+
+      exp = -'HTTP_HAVPBEA_FHPXF'
+      key = parser.env.keys.detect { |k| k == exp }
+      assert_same exp, key
+    end
+  end if RUBY_VERSION.to_r >= 2.5 && RUBY_ENGINE == 'ruby'
+end