tipi 0.40 → 0.41

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
@@ -0,0 +1,18 @@
+ #ifndef HTTP1_PARSER_H
+ #define HTTP1_PARSER_H
+
+ #include "ruby.h"
+
+ // debugging
+ #define OBJ_ID(obj) (NUM2LONG(rb_funcall(obj, rb_intern("object_id"), 0)))
+ #define INSPECT(str, obj) { printf(str); VALUE s = rb_funcall(obj, rb_intern("inspect"), 0); printf(": %s\n", StringValueCStr(s)); }
+ #define TRACE_CALLER() { VALUE c = rb_funcall(rb_mKernel, rb_intern("caller"), 0); INSPECT("caller: ", c); }
+ #define TRACE_C_STACK() { \
+ void *entries[10]; \
+ size_t size = backtrace(entries, 10); \
+ char **strings = backtrace_symbols(entries, size); \
+ for (unsigned long i = 0; i < size; i++) printf("%s\n", strings[i]); \
+ free(strings); \
+ }
+
+ #endif /* HTTP1_PARSER_H */
@@ -0,0 +1,5 @@
+ void Init_HTTP1_Parser();
+
+ void Init_tipi_ext() {
+ Init_HTTP1_Parser();
+ }
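The two new C files above register a native extension; the Ruby side loads it with `require 'tipi_ext'` and uses it through `Tipi::HTTP1Parser` (see the new adapter file further down in this diff). A minimal sketch of that usage, assuming the parser API shown there (`parse_headers` returns a headers hash for the next request, or nil when the connection is done); `socket` here is a placeholder for an already-accepted connection:

    require 'tipi_ext'

    parser = Tipi::HTTP1Parser.new(socket)
    headers = parser.parse_headers
    p headers if headers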
data/lib/tipi.rb CHANGED
@@ -2,6 +2,7 @@

  require 'polyphony'
  require_relative './tipi/http1_adapter'
+ # require_relative './tipi/http1_adapter_new'
  require_relative './tipi/http2_adapter'
  require_relative './tipi/configuration'
  require_relative './tipi/response_extensions'
@@ -52,7 +53,7 @@ module Tipi
  def protocol_adapter(socket, opts)
  use_http2 = socket.respond_to?(:alpn_protocol) &&
  socket.alpn_protocol == H2_PROTOCOL
- klass = use_http2 ? HTTP2Adapter : HTTP1Adapter
+ klass = use_http2 ? HTTP2Adapter : HTTP1Adapter#New
  klass.new(socket, opts)
  end

@@ -24,9 +24,11 @@ module DigitalFabric
  class GracefulShutdown < RuntimeError
  end

+ @@id = 0
+
  def run
  @fiber = Fiber.current
- @keep_alive_timer = spin_loop(interval: 5) { keep_alive }
+ @keep_alive_timer = spin_loop("#{@fiber.tag}-keep_alive", interval: 5) { keep_alive }
  while true
  connect_and_process_incoming_requests
  return if @shutdown
@@ -166,7 +168,7 @@ module DigitalFabric
  def recv_http_request(msg)
  req = prepare_http_request(msg)
  id = msg[Protocol::Attribute::ID]
- @requests[id] = spin do
+ @requests[id] = spin("#{Fiber.current.tag}.#{id}") do
  http_request(req)
  rescue IOError, Errno::ECONNREFUSED, Errno::EPIPE
  # ignore
@@ -204,7 +206,7 @@ module DigitalFabric
  def recv_ws_request(msg)
  req = Qeweney::Request.new(msg[Protocol::Attribute::WS::HEADERS], RequestAdapter.new(self, msg))
  id = msg[Protocol::Attribute::ID]
- @requests[id] = @long_running_requests[id] = spin do
+ @requests[id] = @long_running_requests[id] = spin("#{Fiber.current.tag}.#{id}-ws") do
  ws_request(req)
  rescue IOError, Errno::ECONNREFUSED, Errno::EPIPE
  # ignore
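The changes in this file (and in the proxy hunks below) pass an explicit tag as the first argument to `spin` / `spin_loop`, so request fibers can be identified by name when inspecting the running fibers. A minimal sketch of the pattern, assuming Polyphony's tagged fibers as used in the hunks above (`do_work` is a placeholder):

    worker = spin("#{Fiber.current.tag}-worker") { do_work }
    worker.tag  # => e.g. "main-worker"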
@@ -32,13 +32,13 @@ module DigitalFabric
  @fiber = Fiber.current
  @service.mount(route, self)
  @mounted = true
- keep_alive_timer = spin_loop(interval: 5) { keep_alive }
+ # keep_alive_timer = spin_loop("#{@fiber.tag}-keep_alive", interval: 5) { keep_alive }
  process_incoming_messages(false)
  rescue GracefulShutdown
  puts "Proxy got graceful shutdown, left: #{@requests.size} requests" if @requests.size > 0
  process_incoming_messages(true)
  ensure
- keep_alive_timer&.stop
+ # keep_alive_timer&.stop
  unmount
  end

@@ -98,6 +98,8 @@ module DigitalFabric
  return
  when Protocol::UNMOUNT
  return unmount
+ when Protocol::STATS_REQUEST
+ return handle_stats_request(message[Protocol::Attribute::ID])
  end

  handler = @requests[message[Protocol::Attribute::ID]]
@@ -146,7 +148,7 @@ module DigitalFabric
  while (message = receive)
  unless t1
  t1 = Time.now
- @service.record_latency_measurement(t1 - t0)
+ @service.record_latency_measurement(t1 - t0, req)
  end
  kind = message[Protocol::Attribute::KIND]
  attributes = message[Protocol::Attribute::HttpRequest::HEADERS..-1]
@@ -187,6 +189,11 @@ module DigitalFabric
  send_df_message(Protocol.transfer_count(key, rx, tx))
  end

+ def handle_stats_request(id)
+ stats = @service.get_stats
+ send_df_message(Protocol.stats_response(id, stats))
+ end
+
  HTTP_RESPONSE_UPGRADE_HEADERS = { ':status' => Qeweney::Status::SWITCHING_PROTOCOLS }

  def http_custom_upgrade(id, req, headers)
@@ -197,7 +204,7 @@ module DigitalFabric
  req.send_headers(upgrade_headers, true)

  conn = req.adapter.conn
- reader = spin do
+ reader = spin("#{Fiber.current.tag}.#{id}") do
  conn.recv_loop do |data|
  send_df_message(Protocol.conn_data(id, data))
  end
@@ -294,7 +301,7 @@ module DigitalFabric
  end

  def run_websocket_connection(id, websocket)
- reader = spin do
+ reader = spin("#{Fiber.current}.#{id}-ws") do
  websocket.recv_loop do |data|
  send_df_message(Protocol.ws_data(id, data))
  end
@@ -15,8 +15,8 @@ module DigitalFabric
  route[:executive] = true
  @service.mount(route, self)
  @current_request_count = 0
- @updater = spin_loop(interval: 10) { update_service_stats }
- update_service_stats
+ # @updater = spin_loop(:executive_updater, interval: 10) { update_service_stats }
+ # update_service_stats
  end

  def current_request_count
@@ -22,6 +22,9 @@ module DigitalFabric

  TRANSFER_COUNT = 'transfer_count'

+ STATS_REQUEST = 'stats_request'
+ STATS_RESPONSE = 'stats_response'
+
  SEND_TIMEOUT = 15
  RECV_TIMEOUT = SEND_TIMEOUT + 5

@@ -69,6 +72,10 @@ module DigitalFabric
  RX = 2
  TX = 3
  end
+
+ module Stats
+ STATS = 2
+ end
  end

  class << self
@@ -136,12 +143,20 @@ module DigitalFabric
  end

  def ws_close(id)
- [WS_CLOSE, id ]
+ [ WS_CLOSE, id ]
  end

  def transfer_count(key, rx, tx)
  [ TRANSFER_COUNT, key, rx, tx ]
  end
+
+ def stats_request(id)
+ [ STATS_REQUEST, id ]
+ end
+
+ def stats_response(id, stats)
+ [ STATS_RESPONSE, id, stats ]
+ end
  end
  end
  end
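The new stats messages use the same flat array format as the other DF protocol messages, with the request id in the second slot. A rough sketch of the resulting tuples, assuming the constants and helpers added above (the stats hash shown is an abbreviated example):

    DigitalFabric::Protocol.stats_request(17)
    # => ['stats_request', 17]

    DigitalFabric::Protocol.stats_response(17, { service: { connection_count: 3 } })
    # => ['stats_response', 17, { service: { connection_count: 3 } }]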
@@ -13,26 +13,22 @@ module DigitalFabric
  @token = token
  @agents = {}
  @routes = {}
- @waiting_lists = {} # hash mapping routes to arrays of requests waiting for an agent to mount
  @counters = {
  connections: 0,
  http_requests: 0,
  errors: 0
  }
  @connection_count = 0
+ @current_request_count = 0
  @http_latency_accumulator = 0
  @http_latency_counter = 0
+ @http_latency_max = 0
  @last_counters = @counters.merge(stamp: Time.now.to_f - 1)
  @fiber = Fiber.current
- @timer = Polyphony::Timer.new(resolution: 1)
-
- stats_updater = spin { @timer.every(10) { update_stats } }
- @stats = {}
-
- @current_request_count = 0
+ @timer = Polyphony::Timer.new('service_timer', resolution: 5)
  end

- def update_stats
+ def calculate_stats
  now = Time.now.to_f
  elapsed = now - @last_counters[:stamp]
  connections = @counters[:connections] - @last_counters[:connections]
@@ -40,23 +36,59 @@ module DigitalFabric
  errors = @counters[:errors] - @last_counters[:errors]
  @last_counters = @counters.merge(stamp: now)

- average_latency = @http_latency_counter > 0 ?
- @http_latency_accumulator / @http_latency_counter :
- 0
+ average_latency = @http_latency_counter == 0 ? 0 :
+ @http_latency_accumulator / @http_latency_counter
  @http_latency_accumulator = 0
  @http_latency_counter = 0
-
- @stats = {
- connection_rate: connections / elapsed,
- http_request_rate: http_requests / elapsed,
- error_rate: errors / elapsed,
- average_latency: average_latency,
- agent_count: @agents.size,
- connection_count: @connection_count,
- concurrent_requests: @current_request_count
+ max_latency = @http_latency_max
+ @http_latency_max = 0
+
+ cpu, rss = pid_cpu_and_rss(Process.pid)
+
+ backend_stats = Thread.backend.stats
+ op_rate = backend_stats[:op_count] / elapsed
+ switch_rate = backend_stats[:switch_count] / elapsed
+ poll_rate = backend_stats[:poll_count] / elapsed
+
+ {
+ service: {
+ agent_count: @agents.size,
+ connection_count: @connection_count,
+ connection_rate: connections / elapsed,
+ error_rate: errors / elapsed,
+ http_request_rate: http_requests / elapsed,
+ latency_avg: average_latency,
+ latency_max: max_latency,
+ pending_requests: @current_request_count,
+ },
+ backend: {
+ op_rate: op_rate,
+ pending_ops: backend_stats[:pending_ops],
+ poll_rate: poll_rate,
+ runqueue_size: backend_stats[:runqueue_size],
+ runqueue_high_watermark: backend_stats[:runqueue_max_length],
+ switch_rate: switch_rate,
+
+ },
+ process: {
+ cpu_usage: cpu,
+ rss: rss.to_f / 1024,
+ }
  }
  end

+ def pid_cpu_and_rss(pid)
+ s = `ps -p #{pid} -o %cpu,rss`
+ cpu, rss = s.lines[1].chomp.strip.split(' ')
+ [cpu.to_f, rss.to_i]
+ rescue Exception
+ [nil, nil]
+ end
+
+ def get_stats
+ calculate_stats
+ end
+
  def incr_connection_count
  @connection_count += 1
  end
@@ -77,23 +109,25 @@ module DigitalFabric
  count
  end

- def record_latency_measurement(latency)
+ def record_latency_measurement(latency, req)
  @http_latency_accumulator += latency
  @http_latency_counter += 1
+ @http_latency_max = latency if latency > @http_latency_max
+ return if latency < 1.0
+
+ puts format('slow request (%.1f): %p', latency, req.headers)
  end

- def http_request(req)
+ def http_request(req, allow_df_upgrade = false)
  @current_request_count += 1
  @counters[:http_requests] += 1
  @counters[:connections] += 1 if req.headers[':first']

- return upgrade_request(req) if req.upgrade_protocol
+ return upgrade_request(req, allow_df_upgrade) if req.upgrade_protocol

  inject_request_headers(req)
  agent = find_agent(req)
  unless agent
- return req.respond('pong') if req.query[:q] == 'ping'
-
  @counters[:errors] += 1
  return req.respond(nil, ':status' => Qeweney::Status::SERVICE_UNAVAILABLE)
  end
@@ -120,10 +154,14 @@ module DigitalFabric
  req.headers['x-forwarded-proto'] ||= conn.is_a?(OpenSSL::SSL::SSLSocket) ? 'https' : 'http'
  end

- def upgrade_request(req)
+ def upgrade_request(req, allow_df_upgrade)
  case (protocol = req.upgrade_protocol)
  when 'df'
- df_upgrade(req)
+ if allow_df_upgrade
+ df_upgrade(req)
+ else
+ req.respond(nil, ':status' => Qeweney::Status::SERVICE_UNAVAILABLE)
+ end
  else
  agent = find_agent(req)
  unless agent
@@ -136,12 +174,16 @@ module DigitalFabric
  end

  def df_upgrade(req)
+ # we don't want to count connected agents
+ @current_request_count -= 1
  if req.headers['df-token'] != @token
  return req.respond(nil, ':status' => Qeweney::Status::FORBIDDEN)
  end

  req.adapter.conn << Protocol.df_upgrade_response
  AgentProxy.new(self, req)
+ ensure
+ @current_request_count += 1
  end

  def mount(route, agent)
@@ -151,11 +193,6 @@ module DigitalFabric
  @executive = agent if route[:executive]
  @agents[agent] = route
  @routing_changed = true
-
- if (waiting = @waiting_lists[route])
- waiting.each { |f| f.schedule(agent) }
- @waiting_lists.delete(route)
- end
  end

  def unmount(agent)
@@ -165,8 +202,6 @@ module DigitalFabric
  @executive = nil if route[:executive]
  @agents.delete(agent)
  @routing_changed = true
-
- @waiting_lists[route] ||= []
  end

  INVALID_HOST = 'INVALID_HOST'
@@ -182,12 +217,6 @@ module DigitalFabric
  end
  return @routes[route] if route

- # # search for a known route for an agent that recently unmounted
- # route, wait_list = @waiting_lists.find do |route, _|
- # (host == route[:host]) || (path =~ route[:path_regexp])
- # end
- # return wait_for_agent(wait_list) if route
-
  nil
  end
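Taken together, `calculate_stats` / `get_stats` above replace the old periodically updated `@stats` hash with an on-demand snapshot grouped into `:service`, `:backend` and `:process` sections. A minimal sketch of reading it, assuming a `service` instance; the exact values depend on the running process:

    stats = service.get_stats
    stats[:service][:latency_avg]   # average HTTP latency since the last snapshot
    stats[:backend][:op_rate]       # backend operations per second (from Thread.backend.stats)
    stats[:process][:rss]           # resident set size in MB (ps reports KB, divided by 1024)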
 
@@ -19,7 +19,7 @@ module Tipi

  def each(&block)
  @conn.recv_loop do |data|
- return if handle_incoming_data(data, &block)
+ return if handle_incoming_data(data, &block)
  end
  rescue SystemCallError, IOError
  # ignore
@@ -234,7 +234,7 @@ module Tipi
  "0\r\n\r\n",
  ->(len) { "#{len.to_s(16)}\r\n" },
  "\r\n",
- 16384
+ chunk_size
  )
  end
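The second hunk above makes the spliced chunk size configurable instead of hard-coding 16384; `respond_from_io` takes `chunk_size` as its last parameter (defaulting to 2**14, as the new adapter below shows). A minimal usage sketch, assuming an adapter instance and an open request; the file name and headers are illustrative:

    File.open('large_file.bin') do |io|
      adapter.respond_from_io(req, io, { 'content-type' => 'application/octet-stream' }, 2**16)
    end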
 
@@ -0,0 +1,293 @@
+ # frozen_string_literal: true
+
+ require 'tipi_ext'
+ require_relative './http2_adapter'
+ require 'qeweney/request'
+
+ module Tipi
+ # HTTP1 protocol implementation
+ class HTTP1AdapterNew
+ attr_reader :conn
+
+ # Initializes a protocol adapter instance
+ def initialize(conn, opts)
+ @conn = conn
+ @opts = opts
+ @first = true
+ @parser = Tipi::HTTP1Parser.new(@conn)
+ end
+
+ def each(&block)
+ while true
+ headers = @parser.parse_headers
+ break unless headers
+
+ # handle_request should return false if connection is persistent
+ # break if handle_request(headers, &block)
+ handle_request(headers, &block)
+ end
+ rescue Tipi::HTTP1Parser::Error
+ # ignore
+ rescue SystemCallError, IOError
+ # ignore
+ ensure
+ finalize_client_loop
+ end
+
+ def handle_request(headers, &block)
+ scheme = (proto = headers['x-forwarded-proto']) ?
+ proto.downcase : scheme_from_connection
+ headers[':scheme'] = scheme
+ @protocol = headers[':protocol']
+ if @first
+ headers[':first'] = true
+ @first = nil
+ end
+
+ request = Qeweney::Request.new(headers, self)
+ return true if upgrade_connection(request.headers, &block)
+
+ block.call(request)
+ return !request.keep_alive?
+ end
+
+ def finalize_client_loop
+ @parser = nil
+ @splicing_pipe = nil
+ @conn.shutdown if @conn.respond_to?(:shutdown) rescue nil
+ @conn.close
+ end
+
+ # Reads a body chunk for the current request. Transfers control to the parse
+ # loop, and resumes once the parse_loop has fired the on_body callback
+ def get_body_chunk(request)
+ raise NotImplementedError
+ end
+
+ # Waits for the current request to complete. Transfers control to the parse
+ # loop, and resumes once the parse_loop has fired the on_message_complete
+ # callback
+ def consume_request(request)
+ raise NotImplementedError
+ end
+
+ def protocol
+ @protocol
+ end
+
+ # Upgrades the connection to a different protocol, if the 'Upgrade' header is
+ # given. By default the only supported upgrade protocol is HTTP2. Additional
+ # protocols, notably WebSocket, can be specified by passing a hash to the
+ # :upgrade option when starting a server:
+ #
+ # def ws_handler(conn)
+ # conn << 'hi'
+ # msg = conn.recv
+ # conn << "You said #{msg}"
+ # conn << 'bye'
+ # conn.close
+ # end
+ #
+ # opts = {
+ # upgrade: {
+ # websocket: Tipi::Websocket.handler(&method(:ws_handler))
+ # }
+ # }
+ # Tipi.serve('0.0.0.0', 1234, opts) { |req| ... }
+ #
+ # @param headers [Hash] request headers
+ # @return [boolean] truthy if the connection has been upgraded
+ def upgrade_connection(headers, &block)
+ upgrade_protocol = headers['upgrade']
+ return nil unless upgrade_protocol
+
+ upgrade_protocol = upgrade_protocol.downcase.to_sym
+ upgrade_handler = @opts[:upgrade] && @opts[:upgrade][upgrade_protocol]
+ return upgrade_with_handler(upgrade_handler, headers) if upgrade_handler
+ return upgrade_to_http2(headers, &block) if upgrade_protocol == :h2c
+
+ nil
+ end
+
+ def upgrade_with_handler(handler, headers)
+ @parser = @requests_head = @requests_tail = nil
+ handler.(self, headers)
+ true
+ end
+
+ def upgrade_to_http2(headers, &block)
+ @parser = @requests_head = @requests_tail = nil
+ HTTP2Adapter.upgrade_each(@conn, @opts, http2_upgraded_headers(headers), &block)
+ true
+ end
+
+ # Returns headers for HTTP2 upgrade
+ # @param headers [Hash] request headers
+ # @return [Hash] headers for HTTP2 upgrade
+ def http2_upgraded_headers(headers)
+ headers.merge(
+ ':scheme' => 'http',
+ ':authority' => headers['host']
+ )
+ end
+
+ def websocket_connection(request)
+ Tipi::Websocket.new(@conn, request.headers)
+ end
+
+ def scheme_from_connection
+ @conn.is_a?(OpenSSL::SSL::SSLSocket) ? 'https' : 'http'
+ end
+
+ # response API
+
+ CRLF = "\r\n"
+ CRLF_ZERO_CRLF_CRLF = "\r\n0\r\n\r\n"
+
+ # Sends response including headers and body. Waits for the request to complete
+ # if not yet completed. The body is sent using chunked transfer encoding.
+ # @param request [Qeweney::Request] HTTP request
+ # @param body [String] response body
+ # @param headers
+ def respond(request, body, headers)
+ consume_request(request) if @parsing
+ formatted_headers = format_headers(headers, body, false)
+ request.tx_incr(formatted_headers.bytesize + (body ? body.bytesize : 0))
+ if body
+ @conn.write(formatted_headers, body)
+ else
+ @conn.write(formatted_headers)
+ end
+ end
+
+ def respond_from_io(request, io, headers, chunk_size = 2**14)
+ consume_request(request) if @parsing
+
+ formatted_headers = format_headers(headers, true, true)
+ request.tx_incr(formatted_headers.bytesize)
+
+ # assume chunked encoding
+ Thread.current.backend.splice_chunks(
+ io,
+ @conn,
+ formatted_headers,
+ "0\r\n\r\n",
+ ->(len) { "#{len.to_s(16)}\r\n" },
+ "\r\n",
+ chunk_size
+ )
+ end
+
+ # Sends response headers. If empty_response is truthy, the response status
+ # code will default to 204, otherwise to 200.
+ # @param request [Qeweney::Request] HTTP request
+ # @param headers [Hash] response headers
+ # @param empty_response [boolean] whether a response body will be sent
+ # @param chunked [boolean] whether to use chunked transfer encoding
+ # @return [void]
+ def send_headers(request, headers, empty_response: false, chunked: true)
+ formatted_headers = format_headers(headers, !empty_response, @parser.http_minor == 1 && chunked)
+ request.tx_incr(formatted_headers.bytesize)
+ @conn.write(formatted_headers)
+ end
+
+ # Sends a response body chunk. If no headers were sent, default headers are
+ # sent using #send_headers. if the done option is true(thy), an empty chunk
+ # will be sent to signal response completion to the client.
+ # @param request [Qeweney::Request] HTTP request
+ # @param chunk [String] response body chunk
+ # @param done [boolean] whether the response is completed
+ # @return [void]
+ def send_chunk(request, chunk, done: false)
+ data = +''
+ data << "#{chunk.bytesize.to_s(16)}\r\n#{chunk}\r\n" if chunk
+ data << "0\r\n\r\n" if done
+ return if data.empty?
+
+ request.tx_incr(data.bytesize)
+ @conn.write(data)
+ end
+
+ def send_chunk_from_io(request, io, r, w, chunk_size)
+ len = w.splice(io, chunk_size)
+ if len > 0
+ Thread.current.backend.chain(
+ [:write, @conn, "#{len.to_s(16)}\r\n"],
+ [:splice, r, @conn, len],
+ [:write, @conn, "\r\n"]
+ )
+ else
+ @conn.write("0\r\n\r\n")
+ end
+ len
+ end
+
+ # Finishes the response to the current request. If no headers were sent,
+ # default headers are sent using #send_headers.
+ # @return [void]
+ def finish(request)
+ request.tx_incr(5)
+ @conn << "0\r\n\r\n"
+ end
+
+ def close
+ @conn.shutdown if @conn.respond_to?(:shutdown) rescue nil
+ @conn.close
+ end
+
+ private
+
+ INTERNAL_HEADER_REGEXP = /^:/.freeze
+
+ # Formats response headers into an array. If empty_response is true(thy),
+ # the response status code will default to 204, otherwise to 200.
+ # @param headers [Hash] response headers
+ # @param body [boolean] whether a response body will be sent
+ # @param chunked [boolean] whether to use chunked transfer encoding
+ # @return [String] formatted response headers
+ def format_headers(headers, body, chunked)
+ status = headers[':status']
+ status ||= (body ? Qeweney::Status::OK : Qeweney::Status::NO_CONTENT)
+ lines = format_status_line(body, status, chunked)
+ headers.each do |k, v|
+ next if k =~ INTERNAL_HEADER_REGEXP
+
+ collect_header_lines(lines, k, v)
+ end
+ lines << CRLF
+ lines
+ end
+
+ def format_status_line(body, status, chunked)
+ if !body
+ empty_status_line(status)
+ else
+ with_body_status_line(status, body, chunked)
+ end
+ end
+
+ def empty_status_line(status)
+ if status == 204
+ +"HTTP/1.1 #{status}\r\n"
+ else
+ +"HTTP/1.1 #{status}\r\nContent-Length: 0\r\n"
+ end
+ end
+
+ def with_body_status_line(status, body, chunked)
+ if chunked
+ +"HTTP/1.1 #{status}\r\nTransfer-Encoding: chunked\r\n"
+ else
+ +"HTTP/1.1 #{status}\r\nContent-Length: #{body.is_a?(String) ? body.bytesize : body.to_i}\r\n"
+ end
+ end
+
+ def collect_header_lines(lines, key, value)
+ if value.is_a?(Array)
+ value.inject(lines) { |_, item| lines << "#{key}: #{item}\r\n" }
+ else
+ lines << "#{key}: #{value}\r\n"
+ end
+ end
+ end
+ end
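This new adapter mirrors the public interface of the existing HTTP1Adapter (it is wired up, still commented out, in lib/tipi.rb above) but delegates header parsing to the native Tipi::HTTP1Parser. A minimal sketch of driving it directly, assuming a connected socket `conn` and the Qeweney request API used elsewhere in this diff:

    adapter = Tipi::HTTP1AdapterNew.new(conn, {})
    adapter.each do |req|
      req.respond('hello world', ':status' => Qeweney::Status::OK)
    end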