unicorn-heroku-wait 4.8.0.1.g0ed2.dirty

Files changed (168)
  1. checksums.yaml +7 -0
  2. data/.CHANGELOG.old +25 -0
  3. data/.document +29 -0
  4. data/.gitignore +25 -0
  5. data/.mailmap +26 -0
  6. data/.manifest +166 -0
  7. data/.wrongdoc.yml +10 -0
  8. data/Application_Timeouts +77 -0
  9. data/CONTRIBUTORS +35 -0
  10. data/COPYING +674 -0
  11. data/ChangeLog +4861 -0
  12. data/DESIGN +97 -0
  13. data/Documentation/.gitignore +5 -0
  14. data/Documentation/GNUmakefile +30 -0
  15. data/Documentation/unicorn.1.txt +178 -0
  16. data/Documentation/unicorn_rails.1.txt +175 -0
  17. data/FAQ +53 -0
  18. data/GIT-VERSION-FILE +1 -0
  19. data/GIT-VERSION-GEN +39 -0
  20. data/GNUmakefile +267 -0
  21. data/HACKING +134 -0
  22. data/ISSUES +36 -0
  23. data/KNOWN_ISSUES +79 -0
  24. data/LATEST +28 -0
  25. data/LICENSE +67 -0
  26. data/Links +56 -0
  27. data/NEWS +2067 -0
  28. data/PHILOSOPHY +145 -0
  29. data/README +150 -0
  30. data/Rakefile +60 -0
  31. data/SIGNALS +123 -0
  32. data/Sandbox +103 -0
  33. data/TODO +5 -0
  34. data/TUNING +98 -0
  35. data/bin/unicorn +126 -0
  36. data/bin/unicorn_rails +209 -0
  37. data/examples/big_app_gc.rb +2 -0
  38. data/examples/echo.ru +27 -0
  39. data/examples/git.ru +13 -0
  40. data/examples/init.sh +74 -0
  41. data/examples/logger_mp_safe.rb +25 -0
  42. data/examples/logrotate.conf +29 -0
  43. data/examples/nginx.conf +156 -0
  44. data/examples/unicorn.conf.minimal.rb +13 -0
  45. data/examples/unicorn.conf.rb +102 -0
  46. data/ext/unicorn_http/CFLAGS +13 -0
  47. data/ext/unicorn_http/c_util.h +124 -0
  48. data/ext/unicorn_http/common_field_optimization.h +111 -0
  49. data/ext/unicorn_http/ext_help.h +82 -0
  50. data/ext/unicorn_http/extconf.rb +10 -0
  51. data/ext/unicorn_http/global_variables.h +97 -0
  52. data/ext/unicorn_http/httpdate.c +78 -0
  53. data/ext/unicorn_http/unicorn_http.c +4031 -0
  54. data/ext/unicorn_http/unicorn_http.rl +1036 -0
  55. data/ext/unicorn_http/unicorn_http_common.rl +76 -0
  56. data/lib/unicorn/app/exec_cgi.rb +154 -0
  57. data/lib/unicorn/app/inetd.rb +109 -0
  58. data/lib/unicorn/app/old_rails/static.rb +59 -0
  59. data/lib/unicorn/app/old_rails.rb +35 -0
  60. data/lib/unicorn/cgi_wrapper.rb +147 -0
  61. data/lib/unicorn/configurator.rb +679 -0
  62. data/lib/unicorn/const.rb +44 -0
  63. data/lib/unicorn/http_request.rb +122 -0
  64. data/lib/unicorn/http_response.rb +75 -0
  65. data/lib/unicorn/http_server.rb +803 -0
  66. data/lib/unicorn/launcher.rb +62 -0
  67. data/lib/unicorn/oob_gc.rb +71 -0
  68. data/lib/unicorn/preread_input.rb +33 -0
  69. data/lib/unicorn/socket_helper.rb +231 -0
  70. data/lib/unicorn/ssl_client.rb +11 -0
  71. data/lib/unicorn/ssl_configurator.rb +104 -0
  72. data/lib/unicorn/ssl_server.rb +42 -0
  73. data/lib/unicorn/stream_input.rb +146 -0
  74. data/lib/unicorn/tee_input.rb +126 -0
  75. data/lib/unicorn/tmpio.rb +29 -0
  76. data/lib/unicorn/util.rb +89 -0
  77. data/lib/unicorn/version.rb +1 -0
  78. data/lib/unicorn/worker.rb +152 -0
  79. data/lib/unicorn.rb +118 -0
  80. data/local.mk.sample +59 -0
  81. data/man/man1/unicorn.1 +211 -0
  82. data/man/man1/unicorn_rails.1 +210 -0
  83. data/script/isolate_for_tests +32 -0
  84. data/setup.rb +1586 -0
  85. data/t/.gitignore +5 -0
  86. data/t/GNUmakefile +82 -0
  87. data/t/README +42 -0
  88. data/t/bin/content-md5-put +36 -0
  89. data/t/bin/sha1sum.rb +17 -0
  90. data/t/bin/unused_listen +40 -0
  91. data/t/broken-app.ru +12 -0
  92. data/t/detach.ru +11 -0
  93. data/t/env.ru +3 -0
  94. data/t/fails-rack-lint.ru +5 -0
  95. data/t/heartbeat-timeout.ru +12 -0
  96. data/t/hijack.ru +42 -0
  97. data/t/listener_names.ru +4 -0
  98. data/t/my-tap-lib.sh +201 -0
  99. data/t/oob_gc.ru +20 -0
  100. data/t/oob_gc_path.ru +20 -0
  101. data/t/pid.ru +3 -0
  102. data/t/preread_input.ru +17 -0
  103. data/t/rack-input-tests.ru +21 -0
  104. data/t/sslgen.sh +71 -0
  105. data/t/t0000-http-basic.sh +50 -0
  106. data/t/t0001-reload-bad-config.sh +53 -0
  107. data/t/t0002-config-conflict.sh +49 -0
  108. data/t/t0002-parser-error.sh +94 -0
  109. data/t/t0003-working_directory.sh +51 -0
  110. data/t/t0004-heartbeat-timeout.sh +69 -0
  111. data/t/t0004-working_directory_broken.sh +24 -0
  112. data/t/t0005-working_directory_app.rb.sh +40 -0
  113. data/t/t0006-reopen-logs.sh +83 -0
  114. data/t/t0006.ru +13 -0
  115. data/t/t0007-working_directory_no_embed_cli.sh +44 -0
  116. data/t/t0008-back_out_of_upgrade.sh +110 -0
  117. data/t/t0009-broken-app.sh +56 -0
  118. data/t/t0009-winch_ttin.sh +59 -0
  119. data/t/t0010-reap-logging.sh +55 -0
  120. data/t/t0011-active-unix-socket.sh +79 -0
  121. data/t/t0012-reload-empty-config.sh +85 -0
  122. data/t/t0013-rewindable-input-false.sh +24 -0
  123. data/t/t0013.ru +12 -0
  124. data/t/t0014-rewindable-input-true.sh +24 -0
  125. data/t/t0014.ru +12 -0
  126. data/t/t0015-configurator-internals.sh +25 -0
  127. data/t/t0016-trust-x-forwarded-false.sh +30 -0
  128. data/t/t0017-trust-x-forwarded-true.sh +30 -0
  129. data/t/t0018-write-on-close.sh +23 -0
  130. data/t/t0019-max_header_len.sh +49 -0
  131. data/t/t0020-at_exit-handler.sh +49 -0
  132. data/t/t0021-process_detach.sh +29 -0
  133. data/t/t0022-listener_names-preload_app.sh +32 -0
  134. data/t/t0100-rack-input-tests.sh +124 -0
  135. data/t/t0116-client_body_buffer_size.sh +80 -0
  136. data/t/t0116.ru +16 -0
  137. data/t/t0200-rack-hijack.sh +27 -0
  138. data/t/t0300-no-default-middleware.sh +20 -0
  139. data/t/t0600-https-server-basic.sh +48 -0
  140. data/t/t9000-preread-input.sh +48 -0
  141. data/t/t9001-oob_gc.sh +47 -0
  142. data/t/t9002-oob_gc-path.sh +75 -0
  143. data/t/test-lib.sh +128 -0
  144. data/t/write-on-close.ru +11 -0
  145. data/test/aggregate.rb +15 -0
  146. data/test/benchmark/README +50 -0
  147. data/test/benchmark/dd.ru +18 -0
  148. data/test/benchmark/stack.ru +8 -0
  149. data/test/exec/README +5 -0
  150. data/test/exec/test_exec.rb +1047 -0
  151. data/test/test_helper.rb +297 -0
  152. data/test/unit/test_configurator.rb +175 -0
  153. data/test/unit/test_droplet.rb +28 -0
  154. data/test/unit/test_http_parser.rb +854 -0
  155. data/test/unit/test_http_parser_ng.rb +731 -0
  156. data/test/unit/test_http_parser_xftrust.rb +38 -0
  157. data/test/unit/test_request.rb +182 -0
  158. data/test/unit/test_response.rb +99 -0
  159. data/test/unit/test_server.rb +268 -0
  160. data/test/unit/test_signals.rb +188 -0
  161. data/test/unit/test_sni_hostnames.rb +47 -0
  162. data/test/unit/test_socket_helper.rb +197 -0
  163. data/test/unit/test_stream_input.rb +203 -0
  164. data/test/unit/test_tee_input.rb +294 -0
  165. data/test/unit/test_upload.rb +306 -0
  166. data/test/unit/test_util.rb +105 -0
  167. data/unicorn.gemspec +44 -0
  168. metadata +328 -0
data/test/unit/test_http_parser.rb
@@ -0,0 +1,854 @@
+ # -*- encoding: binary -*-
+
+ # Copyright (c) 2005 Zed A. Shaw
+ # You can redistribute it and/or modify it under the same terms as Ruby 1.8 or
+ # the GPLv2+ (GPLv3+ preferred)
+ #
+ # Additional work donated by contributors. See http://mongrel.rubyforge.org/attributions.html
+ # for more information.
+
+ require 'test/test_helper'
+
+ include Unicorn
+
+ class HttpParserTest < Test::Unit::TestCase
+
+ def test_parse_simple
+ parser = HttpParser.new
+ req = parser.env
+ http = parser.buf
+ http << "GET / HTTP/1.1\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '', http
+
+ assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
+ assert_equal '/', req['REQUEST_PATH']
+ assert_equal 'HTTP/1.1', req['HTTP_VERSION']
+ assert_equal '/', req['REQUEST_URI']
+ assert_equal 'GET', req['REQUEST_METHOD']
+ assert_nil req['FRAGMENT']
+ assert_equal '', req['QUERY_STRING']
+
+ assert parser.keepalive?
+ parser.clear
+ req.clear
+
+ http << "G"
+ assert_nil parser.parse
+ assert_equal "G", http
+ assert req.empty?
+
+ # try parsing again to ensure we were reset correctly
+ http << "ET /hello-world HTTP/1.1\r\n\r\n"
+ assert parser.parse
+
+ assert_equal 'HTTP/1.1', req['SERVER_PROTOCOL']
+ assert_equal '/hello-world', req['REQUEST_PATH']
+ assert_equal 'HTTP/1.1', req['HTTP_VERSION']
+ assert_equal '/hello-world', req['REQUEST_URI']
+ assert_equal 'GET', req['REQUEST_METHOD']
+ assert_nil req['FRAGMENT']
+ assert_equal '', req['QUERY_STRING']
+ assert_equal '', http
+ assert parser.keepalive?
+ end
+
+ def test_tab_lws
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost:\tfoo.bar\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert_equal "foo.bar", req['HTTP_HOST']
+ end
+
+ def test_connection_close_no_ka
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nConnection: close\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert_equal "GET", req['REQUEST_METHOD']
+ assert ! parser.keepalive?
+ end
+
+ def test_connection_keep_alive_ka
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "HEAD / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert parser.keepalive?
+ end
+
+ def test_connection_keep_alive_no_body
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "POST / HTTP/1.1\r\nConnection: keep-alive\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert parser.keepalive?
+ end
+
+ def test_connection_keep_alive_no_body_empty
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "POST / HTTP/1.1\r\n" \
+ "Content-Length: 0\r\n" \
+ "Connection: keep-alive\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert parser.keepalive?
+ end
+
+ def test_connection_keep_alive_ka_bad_version
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\nConnection: keep-alive\r\n\r\n"
+ assert_equal req.object_id, parser.parse.object_id
+ assert parser.keepalive?
+ end
+
+ def test_parse_server_host_default_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: foo\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'foo', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ def test_parse_server_host_alt_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: foo:999\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'foo', req['SERVER_NAME']
+ assert_equal '999', req['SERVER_PORT']
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ def test_parse_server_host_empty_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'foo', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ def test_parse_server_host_xfp_https
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: foo:\r\n" \
+ "X-Forwarded-Proto: https\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'foo', req['SERVER_NAME']
+ assert_equal '443', req['SERVER_PORT']
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ def test_parse_xfp_https_chained
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\n" \
+ "X-Forwarded-Proto: https,http\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '443', req['SERVER_PORT'], req.inspect
+ assert_equal 'https', req['rack.url_scheme'], req.inspect
+ assert_equal '', parser.buf
+ end
+
+ def test_parse_xfp_https_chained_backwards
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\n" \
+ "X-Forwarded-Proto: http,https\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '80', req['SERVER_PORT'], req.inspect
+ assert_equal 'http', req['rack.url_scheme'], req.inspect
+ assert_equal '', parser.buf
+ end
+
+ def test_parse_xfp_gopher_is_ignored
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\n" \
+ "X-Forwarded-Proto: gopher\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '80', req['SERVER_PORT'], req.inspect
+ assert_equal 'http', req['rack.url_scheme'], req.inspect
+ assert_equal '', parser.buf
+ end
+
+ def test_parse_x_forwarded_ssl_on
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\n" \
+ "X-Forwarded-Ssl: on\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '443', req['SERVER_PORT'], req.inspect
+ assert_equal 'https', req['rack.url_scheme'], req.inspect
+ assert_equal '', parser.buf
+ end
+
+ def test_parse_x_forwarded_ssl_off
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.0\r\nX-Forwarded-Ssl: off\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '80', req['SERVER_PORT'], req.inspect
+ assert_equal 'http', req['rack.url_scheme'], req.inspect
+ assert_equal '', parser.buf
+ end
+
+ def test_parse_strange_headers
+ parser = HttpParser.new
+ req = parser.env
+ should_be_good = "GET / HTTP/1.1\r\naaaaaaaaaaaaa:++++++++++\r\n\r\n"
+ parser.buf << should_be_good
+ assert_equal req, parser.parse
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ # legacy test case from Mongrel that we never supported before...
+ # I still consider Pound irrelevant, unfortunately stupid clients that
+ # send extremely big headers do exist and they've managed to find Unicorn...
+ def test_nasty_pound_header
+ parser = HttpParser.new
+ nasty_pound_header = "GET / HTTP/1.1\r\nX-SSL-Bullshit: -----BEGIN CERTIFICATE-----\r\n\tMIIFbTCCBFWgAwIBAgICH4cwDQYJKoZIhvcNAQEFBQAwcDELMAkGA1UEBhMCVUsx\r\n\tETAPBgNVBAoTCGVTY2llbmNlMRIwEAYDVQQLEwlBdXRob3JpdHkxCzAJBgNVBAMT\r\n\tAkNBMS0wKwYJKoZIhvcNAQkBFh5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMu\r\n\tdWswHhcNMDYwNzI3MTQxMzI4WhcNMDcwNzI3MTQxMzI4WjBbMQswCQYDVQQGEwJV\r\n\tSzERMA8GA1UEChMIZVNjaWVuY2UxEzARBgNVBAsTCk1hbmNoZXN0ZXIxCzAJBgNV\r\n\tBAcTmrsogriqMWLAk1DMRcwFQYDVQQDEw5taWNoYWVsIHBhcmQYJKoZIhvcNAQEB\r\n\tBQADggEPADCCAQoCggEBANPEQBgl1IaKdSS1TbhF3hEXSl72G9J+WC/1R64fAcEF\r\n\tW51rEyFYiIeZGx/BVzwXbeBoNUK41OK65sxGuflMo5gLflbwJtHBRIEKAfVVp3YR\r\n\tgW7cMA/s/XKgL1GEC7rQw8lIZT8RApukCGqOVHSi/F1SiFlPDxuDfmdiNzL31+sL\r\n\t0iwHDdNkGjy5pyBSB8Y79dsSJtCW/iaLB0/n8Sj7HgvvZJ7x0fr+RQjYOUUfrePP\r\n\tu2MSpFyf+9BbC/aXgaZuiCvSR+8Snv3xApQY+fULK/xY8h8Ua51iXoQ5jrgu2SqR\r\n\twgA7BUi3G8LFzMBl8FRCDYGUDy7M6QaHXx1ZWIPWNKsCAwEAAaOCAiQwggIgMAwG\r\n\tA1UdEwEB/wQCMAAwEQYJYIZIAYb4QgEBBAQDAgWgMA4GA1UdDwEB/wQEAwID6DAs\r\n\tBglghkgBhvhCAQ0EHxYdVUsgZS1TY2llbmNlIFVzZXIgQ2VydGlmaWNhdGUwHQYD\r\n\tVR0OBBYEFDTt/sf9PeMaZDHkUIldrDYMNTBZMIGaBgNVHSMEgZIwgY+AFAI4qxGj\r\n\tloCLDdMVKwiljjDastqooXSkcjBwMQswCQYDVQQGEwJVSzERMA8GA1UEChMIZVNj\r\n\taWVuY2UxEjAQBgNVBAsTCUF1dGhvcml0eTELMAkGA1UEAxMCQ0ExLTArBgkqhkiG\r\n\t9w0BCQEWHmNhLW9wZXJhdG9yQGdyaWQtc3VwcG9ydC5hYy51a4IBADApBgNVHRIE\r\n\tIjAggR5jYS1vcGVyYXRvckBncmlkLXN1cHBvcnQuYWMudWswGQYDVR0gBBIwEDAO\r\n\tBgwrBgEEAdkvAQEBAQYwPQYJYIZIAYb4QgEEBDAWLmh0dHA6Ly9jYS5ncmlkLXN1\r\n\tcHBvcnQuYWMudmT4sopwqlBWsvcHViL2NybC9jYWNybC5jcmwwPQYJYIZIAYb4QgEDBDAWLmh0\r\n\tdHA6Ly9jYS5ncmlkLXN1cHBvcnQuYWMudWsvcHViL2NybC9jYWNybC5jcmwwPwYD\r\n\tVR0fBDgwNjA0oDKgMIYuaHR0cDovL2NhLmdyaWQt5hYy51ay9wdWIv\r\n\tY3JsL2NhY3JsLmNybDANBgkqhkiG9w0BAQUFAAOCAQEAS/U4iiooBENGW/Hwmmd3\r\n\tXCy6Zrt08YjKCzGNjorT98g8uGsqYjSxv/hmi0qlnlHs+k/3Iobc3LjS5AMYr5L8\r\n\tUO7OSkgFFlLHQyC9JzPfmLCAugvzEbyv4Olnsr8hbxF1MbKZoQxUZtMVu29wjfXk\r\n\thTeApBv7eaKCWpSp7MCbvgzm74izKhu3vlDk9w6qVrxePfGgpKPqfHiOoGhFnbTK\r\n\twTC6o2xq5y0qZ03JonF7OJspEd3I5zKY3E+ov7/ZhW6DqT8UFvsAdjvQbXyhV8Eu\r\n\tYhixw1aKEPzNjNowuIseVogKOLXxWI5vAi5HgXdS0/ES5gDGsABo4fqovUKlgop3\r\n\tRA==\r\n\t-----END CERTIFICATE-----\r\n\r\n"
+ req = parser.env
+ parser.buf << nasty_pound_header.dup
+
+ assert nasty_pound_header =~ /(-----BEGIN .*--END CERTIFICATE-----)/m
+ expect = $1.dup
+ expect.gsub!(/\r\n\t/, ' ')
+ assert_equal req, parser.parse
+ assert_equal '', parser.buf
+ assert_equal expect, req['HTTP_X_SSL_BULLSHIT']
+ end
+
+ def test_continuation_eats_leading_spaces
+ parser = HttpParser.new
+ header = "GET / HTTP/1.1\r\n" \
+ "X-ASDF: \r\n" \
+ "\t\r\n" \
+ " \r\n" \
+ " ASDF\r\n\r\n"
+ parser.buf << header
+ req = parser.env
+ assert_equal req, parser.parse
+ assert_equal '', parser.buf
+ assert_equal 'ASDF', req['HTTP_X_ASDF']
+ end
+
+ def test_continuation_eats_scattered_leading_spaces
+ parser = HttpParser.new
+ header = "GET / HTTP/1.1\r\n" \
+ "X-ASDF: hi\r\n" \
+ " y\r\n" \
+ "\t\r\n" \
+ " x\r\n" \
+ " ASDF\r\n\r\n"
+ req = parser.env
+ parser.buf << header
+ assert_equal req, parser.parse
+ assert_equal '', parser.buf
+ assert_equal 'hi y x ASDF', req['HTTP_X_ASDF']
+ end
+
+ def test_continuation_eats_trailing_spaces
+ parser = HttpParser.new
+ header = "GET / HTTP/1.1\r\n" \
+ "X-ASDF: \r\n" \
+ "\t\r\n" \
+ " b \r\n" \
+ " ASDF\r\n\r\n"
+ parser.buf << header
+ req = parser.env
+ assert_equal req, parser.parse
+ assert_equal '', parser.buf
+ assert_equal 'b ASDF', req['HTTP_X_ASDF']
+ end
+
+ def test_continuation_with_absolute_uri_and_ignored_host_header
+ parser = HttpParser.new
+ header = "GET http://example.com/ HTTP/1.1\r\n" \
+ "Host: \r\n" \
+ " YHBT.net\r\n" \
+ "\r\n"
+ parser.buf << header
+ req = parser.env
+ assert_equal req, parser.parse
+ assert_equal 'example.com', req['HTTP_HOST']
+ end
+
+ # this may seem to be testing more of an implementation detail, but
+ # it also helps ensure we're safe in the presence of multiple parsers
+ # in case we ever go multithreaded/evented...
+ def test_resumable_continuations
+ nr = 1000
+ header = "GET / HTTP/1.1\r\n" \
+ "X-ASDF: \r\n" \
+ " hello\r\n"
+ tmp = []
+ nr.times { |i|
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "#{header} #{i}\r\n"
+ assert parser.parse.nil?
+ asdf = req['HTTP_X_ASDF']
+ assert_equal "hello #{i}", asdf
+ tmp << [ parser, asdf ]
+ }
+ tmp.each_with_index { |(parser, asdf), i|
+ parser.buf << " .\r\n\r\n"
+ assert parser.parse
+ assert_equal "hello #{i} .", asdf
+ }
+ end
+
+ def test_invalid_continuation
+ parser = HttpParser.new
+ header = "GET / HTTP/1.1\r\n" \
+ " y\r\n" \
+ "Host: hello\r\n" \
+ "\r\n"
+ parser.buf << header
+ assert_raises(HttpParserError) { parser.parse }
+ end
+
+ def test_parse_ie6_urls
+ %w(/some/random/path"
+ /some/random/path>
+ /some/random/path<
+ /we/love/you/ie6?q=<"">
+ /url?<="&>="
+ /mal"formed"?
+ ).each do |path|
+ parser = HttpParser.new
+ req = parser.env
+ sorta_safe = %(GET #{path} HTTP/1.1\r\n\r\n)
+ assert_equal req, parser.headers(req, sorta_safe)
+ assert_equal path, req['REQUEST_URI']
+ assert_equal '', sorta_safe
+ assert parser.keepalive?
+ end
+ end
+
+ def test_parse_error
+ parser = HttpParser.new
+ req = parser.env
+ bad_http = "GET / SsUTF/1.1"
+
+ assert_raises(HttpParserError) { parser.headers(req, bad_http) }
+
+ # make sure we can recover
+ parser.clear
+ req.clear
+ assert_equal req, parser.headers(req, "GET / HTTP/1.0\r\n\r\n")
+ assert ! parser.keepalive?
+ end
+
+ def test_piecemeal
+ parser = HttpParser.new
+ req = parser.env
+ http = "GET"
+ assert_nil parser.headers(req, http)
+ assert_nil parser.headers(req, http)
+ assert_nil parser.headers(req, http << " / HTTP/1.0")
+ assert_equal '/', req['REQUEST_PATH']
+ assert_equal '/', req['REQUEST_URI']
+ assert_equal 'GET', req['REQUEST_METHOD']
+ assert_nil parser.headers(req, http << "\r\n")
+ assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+ assert_nil parser.headers(req, http << "\r")
+ assert_equal req, parser.headers(req, http << "\n")
+ assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+ assert_nil req['FRAGMENT']
+ assert_equal '', req['QUERY_STRING']
+ assert_equal "", http
+ assert ! parser.keepalive?
+ end
+
+ # not common, but underscores do appear in practice
+ def test_absolute_uri_underscores
+ parser = HttpParser.new
+ req = parser.env
+ http = "GET http://under_score.example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+ parser.buf << http
+ assert_equal req, parser.parse
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'under_score.example.com', req['HTTP_HOST']
+ assert_equal 'under_score.example.com', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert ! parser.keepalive?
+ end
+
+ # some dumb clients add users because they're stupid
+ def test_absolute_uri_w_user
+ parser = HttpParser.new
+ req = parser.env
+ http = "GET http://user%20space@example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+ parser.buf << http
+ assert_equal req, parser.parse
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'example.com', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert ! parser.keepalive?
+ end
+
+ # since Mongrel supported anything URI.parse supported, we're stuck
+ # supporting everything URI.parse supports
+ def test_absolute_uri_uri_parse
+ "#{URI::REGEXP::PATTERN::UNRESERVED};:&=+$,".split(//).each do |char|
+ parser = HttpParser.new
+ req = parser.env
+ http = "GET http://#{char}@example.com/ HTTP/1.0\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/', req['REQUEST_URI']
+ assert_equal '/', req['REQUEST_PATH']
+ assert_equal '', req['QUERY_STRING']
+
+ assert_equal 'example.com', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", http
+ assert ! parser.keepalive?
+ end
+ end
+
+ def test_absolute_uri
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET http://example.com/foo?q=bar HTTP/1.0\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'example.com', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert ! parser.keepalive?
+ end
+
+ # X-Forwarded-Proto is not in rfc2616, absolute URIs are, however...
+ def test_absolute_uri_https
+ parser = HttpParser.new
+ req = parser.env
+ http = "GET https://example.com/foo?q=bar HTTP/1.1\r\n" \
+ "X-Forwarded-Proto: http\r\n\r\n"
+ parser.buf << http
+ assert_equal req, parser.parse
+ assert_equal 'https', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'example.com', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '443', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert parser.keepalive?
+ end
+
+ # Host: header should be ignored for absolute URIs
+ def test_absolute_uri_with_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET http://example.com:8080/foo?q=bar HTTP/1.2\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'example.com:8080', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '8080', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_absolute_uri_with_empty_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET https://example.com:/foo?q=bar HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal 'https', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ assert_equal 'example.com:', req['HTTP_HOST']
+ assert_equal 'example.com', req['SERVER_NAME']
+ assert_equal '443', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_absolute_ipv6_uri
+ parser = HttpParser.new
+ req = parser.env
+ url = "http://[::1]/foo?q=bar"
+ http = "GET #{url} HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'http', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ uri = URI.parse(url)
+ assert_equal "[::1]", uri.host,
+ "URI.parse changed upstream for #{url}? host=#{uri.host}"
+ assert_equal "[::1]", req['HTTP_HOST']
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", http
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_absolute_ipv6_uri_alpha
+ parser = HttpParser.new
+ req = parser.env
+ url = "http://[::a]/"
+ http = "GET #{url} HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'http', req['rack.url_scheme']
+
+ uri = URI.parse(url)
+ assert_equal "[::a]", uri.host,
+ "URI.parse changed upstream for #{url}? host=#{uri.host}"
+ assert_equal "[::a]", req['HTTP_HOST']
+ assert_equal "[::a]", req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ end
+
+ def test_absolute_ipv6_uri_alpha_2
+ parser = HttpParser.new
+ req = parser.env
+ url = "http://[::B]/"
+ http = "GET #{url} HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'http', req['rack.url_scheme']
+
+ uri = URI.parse(url)
+ assert_equal "[::B]", uri.host,
+ "URI.parse changed upstream for #{url}? host=#{uri.host}"
+ assert_equal "[::B]", req['HTTP_HOST']
+ assert_equal "[::B]", req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ end
+
+ def test_absolute_ipv6_uri_with_empty_port
+ parser = HttpParser.new
+ req = parser.env
+ url = "https://[::1]:/foo?q=bar"
+ http = "GET #{url} HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'https', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ uri = URI.parse(url)
+ assert_equal "[::1]", uri.host,
+ "URI.parse changed upstream for #{url}? host=#{uri.host}"
+ assert_equal "[::1]:", req['HTTP_HOST']
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '443', req['SERVER_PORT']
+ assert_equal "", http
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_absolute_ipv6_uri_with_port
+ parser = HttpParser.new
+ req = parser.env
+ url = "https://[::1]:666/foo?q=bar"
+ http = "GET #{url} HTTP/1.1\r\n" \
+ "Host: bad.example.com\r\n\r\n"
+ assert_equal req, parser.headers(req, http)
+ assert_equal 'https', req['rack.url_scheme']
+ assert_equal '/foo?q=bar', req['REQUEST_URI']
+ assert_equal '/foo', req['REQUEST_PATH']
+ assert_equal 'q=bar', req['QUERY_STRING']
+
+ uri = URI.parse(url)
+ assert_equal "[::1]", uri.host,
+ "URI.parse changed upstream for #{url}? host=#{uri.host}"
+ assert_equal "[::1]:666", req['HTTP_HOST']
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '666', req['SERVER_PORT']
+ assert_equal "", http
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_ipv6_host_header
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\n" \
+ "Host: [::1]\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal "[::1]", req['HTTP_HOST']
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "", parser.buf
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_ipv6_host_header_with_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\n" \
+ "Host: [::1]:666\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '666', req['SERVER_PORT']
+ assert_equal "[::1]:666", req['HTTP_HOST']
+ assert_equal "", parser.buf
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_ipv6_host_header_with_empty_port
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: [::1]:\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal "[::1]", req['SERVER_NAME']
+ assert_equal '80', req['SERVER_PORT']
+ assert_equal "[::1]:", req['HTTP_HOST']
+ assert_equal "", parser.buf
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ # XXX Highly unlikely..., just make sure we don't segfault or assert on it
+ def test_broken_ipv6_host_header
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "GET / HTTP/1.1\r\nHost: [::1:\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal "[", req['SERVER_NAME']
+ assert_equal ':1:', req['SERVER_PORT']
+ assert_equal "[::1:", req['HTTP_HOST']
+ assert_equal "", parser.buf
+ end
+
+ def test_put_body_oneshot
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "PUT / HTTP/1.0\r\nContent-Length: 5\r\n\r\nabcde"
+ assert_equal req, parser.parse
+ assert_equal '/', req['REQUEST_PATH']
+ assert_equal '/', req['REQUEST_URI']
+ assert_equal 'PUT', req['REQUEST_METHOD']
+ assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+ assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+ assert_equal "abcde", parser.buf
+ assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_put_body_later
+ parser = HttpParser.new
+ req = parser.env
+ parser.buf << "PUT /l HTTP/1.0\r\nContent-Length: 5\r\n\r\n"
+ assert_equal req, parser.parse
+ assert_equal '/l', req['REQUEST_PATH']
+ assert_equal '/l', req['REQUEST_URI']
+ assert_equal 'PUT', req['REQUEST_METHOD']
+ assert_equal 'HTTP/1.0', req['HTTP_VERSION']
+ assert_equal 'HTTP/1.0', req['SERVER_PROTOCOL']
+ assert_equal "", parser.buf
+ assert ! parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ end
+
+ def test_unknown_methods
+ %w(GETT HEADR XGET XHEAD).each { |m|
+ parser = HttpParser.new
+ req = parser.env
+ s = "#{m} /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
+ ok = parser.headers(req, s)
+ assert ok
+ assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
+ assert_equal 'posts-17408', req['FRAGMENT']
+ assert_equal 'page=1', req['QUERY_STRING']
+ assert_equal "", s
+ assert_equal m, req['REQUEST_METHOD']
+ assert parser.keepalive? # TODO: read HTTP/1.2 when it's final
+ }
+ end
+
+ def test_fragment_in_uri
+ parser = HttpParser.new
+ req = parser.env
+ get = "GET /forums/1/topics/2375?page=1#posts-17408 HTTP/1.1\r\n\r\n"
+ parser.buf << get
+ ok = parser.parse
+ assert ok
+ assert_equal '/forums/1/topics/2375?page=1', req['REQUEST_URI']
+ assert_equal 'posts-17408', req['FRAGMENT']
+ assert_equal 'page=1', req['QUERY_STRING']
+ assert_equal '', parser.buf
+ assert parser.keepalive?
+ end
+
+ # lame random garbage maker
+ def rand_data(min, max, readable=true)
+ count = min + ((rand(max)+1) *10).to_i
+ res = count.to_s + "/"
+
+ if readable
+ res << Digest::SHA1.hexdigest(rand(count * 100).to_s) * (count / 40)
+ else
+ res << Digest::SHA1.digest(rand(count * 100).to_s) * (count / 20)
+ end
+
+ return res
+ end
+
+
+ def test_horrible_queries
+ parser = HttpParser.new
+
+ # then that large header names are caught
+ 10.times do |c|
+ get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-#{rand_data(1024, 1024+(c*1024))}: Test\r\n\r\n"
+ assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+ parser.buf << get
+ parser.parse
+ parser.clear
+ end
+ end
+
+ # then that large mangled field values are caught
+ 10.times do |c|
+ get = "GET /#{rand_data(10,120)} HTTP/1.1\r\nX-Test: #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+ assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+ parser.buf << get
+ parser.parse
+ parser.clear
+ end
+ end
+
+ # then large headers are rejected too
+ get = "GET /#{rand_data(10,120)} HTTP/1.1\r\n"
+ get << "X-Test: test\r\n" * (80 * 1024)
+ parser.buf << get
+ assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+ parser.parse
+ end
+ parser.clear
+
+ # finally just that random garbage gets blocked all the time
+ 10.times do |c|
+ get = "GET #{rand_data(1024, 1024+(c*1024), false)} #{rand_data(1024, 1024+(c*1024), false)}\r\n\r\n"
+ assert_raises(Unicorn::HttpParserError,Unicorn::RequestURITooLongError) do
+ parser.buf << get
+ parser.parse
+ parser.clear
+ end
+ end
+
+ end
+
+ def test_leading_tab
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost:\texample.com\r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal 'example.com', parser.env['HTTP_HOST']
+ end
+
+ def test_trailing_whitespace
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: example.com \r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal 'example.com', parser.env['HTTP_HOST']
+ end
+
+ def test_trailing_tab
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: example.com\t\r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal 'example.com', parser.env['HTTP_HOST']
+ end
+
+ def test_trailing_multiple_linear_whitespace
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: example.com\t \t \t\r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal 'example.com', parser.env['HTTP_HOST']
+ end
+
+ def test_embedded_linear_whitespace_ok
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nX-Space: hello\t world\t \r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal "hello\t world", parser.env["HTTP_X_SPACE"]
+ end
+
+ def test_null_byte_header
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: \0\r\n\r\n"
+ assert_raises(HttpParserError) { parser.add_parse(get) }
+ end
+
+ def test_null_byte_in_middle
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: hello\0world\r\n\r\n"
+ assert_raises(HttpParserError) { parser.add_parse(get) }
+ end
+
+ def test_null_byte_at_end
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: hello\0\r\n\r\n"
+ assert_raises(HttpParserError) { parser.add_parse(get) }
+ end
+
+ def test_empty_header
+ parser = HttpParser.new
+ get = "GET / HTTP/1.1\r\nHost: \r\n\r\n"
+ assert parser.add_parse(get)
+ assert_equal '', parser.env['HTTP_HOST']
+ end
+
+ # so we don't care about the portability of this test
+ # if it doesn't leak on Linux, it won't leak anywhere else
+ # unless your C compiler or platform is otherwise broken
+ LINUX_PROC_PID_STATUS = "/proc/self/status"
+ def test_memory_leak
+ match_rss = /^VmRSS:\s+(\d+)/
+ if File.read(LINUX_PROC_PID_STATUS) =~ match_rss
+ before = $1.to_i
+ 1000000.times { Unicorn::HttpParser.new }
+ File.read(LINUX_PROC_PID_STATUS) =~ match_rss
+ after = $1.to_i
+ diff = after - before
+ assert(diff < 10000, "memory grew more than 10M: #{diff}")
+ end
+ end if RUBY_PLATFORM =~ /linux/ &&
+ File.readable?(LINUX_PROC_PID_STATUS) &&
+ !defined?(RUBY_ENGINE)
+
+ end
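
For context, the tests above drive Unicorn's HTTP parser through its public Ruby API. A minimal sketch of that flow follows, assuming the unicorn gem (which bundles the unicorn_http C extension) is installed; the request line and example.com host are placeholders, not values from this gem:

  require 'unicorn'

  parser = Unicorn::HttpParser.new
  # feed raw request bytes into the parser's internal buffer
  parser.buf << "GET /hello?q=1 HTTP/1.1\r\nHost: example.com\r\n\r\n"

  # parse returns the Rack-style env hash (the same object as parser.env)
  # once the headers are complete, or nil if more bytes are needed
  env = parser.parse
  env['REQUEST_PATH']   # => "/hello"
  env['QUERY_STRING']   # => "q=1"
  env['HTTP_HOST']      # => "example.com"

  parser.keepalive?     # => true for HTTP/1.1 without "Connection: close"
  parser.clear          # reset the parser so it can be reused for the next request

Malformed input raises Unicorn::HttpParserError, as test_parse_error and the null-byte tests above exercise.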