uhttp-server 2.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
uhttp/server.py
ADDED
|
@@ -0,0 +1,1334 @@
|
|
|
1
|
+
"""uHttp - Micro HTTP Server
|
|
2
|
+
python or micropython
|
|
3
|
+
(c) 2022-2024 Pavel Revak <pavelrevak@gmail.com>
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
import os as _os
|
|
7
|
+
import errno
|
|
8
|
+
import socket as _socket
|
|
9
|
+
import select as _select
|
|
10
|
+
import json as _json
|
|
11
|
+
import time as _time
|
|
12
|
+
|
|
13
|
+
# Binary size units.
KB = 2 ** 10
MB = 2 ** 20
GB = 2 ** 30

# Server tuning defaults (overridable per connection via kwargs).
LISTEN_SOCKETS = 2
MAX_WAITING_CLIENTS = 5
MAX_HEADERS_LENGTH = 4 * KB
MAX_CONTENT_LENGTH = 512 * KB
FILE_CHUNK_SIZE = 4 * KB  # bytes - chunk size for streaming file responses
KEEP_ALIVE_TIMEOUT = 15  # seconds
KEEP_ALIVE_MAX_REQUESTS = 100  # max requests per connection

# End of header section: empty line (tolerates a missing '\r').
HEADERS_DELIMITERS = (b'\n\r\n', b'\n\n')
BOUNDARY = 'frame'
# Common header names/values; request header keys are lowercased on parse.
CONTENT_LENGTH = 'content-length'
CONTENT_TYPE = 'content-type'
CONTENT_TYPE_XFORMDATA = 'application/x-www-form-urlencoded'
CONTENT_TYPE_HTML_UTF8 = 'text/html; charset=UTF-8'
CONTENT_TYPE_JSON = 'application/json'
CONTENT_TYPE_OCTET_STREAM = 'application/octet-stream'
CONTENT_TYPE_MULTIPART_REPLACE = (
    'multipart/x-mixed-replace; boundary=' + BOUNDARY)
CACHE_CONTROL = 'cache-control'
CACHE_CONTROL_NO_CACHE = 'no-cache'
LOCATION = 'Location'
CONNECTION = 'connection'
CONNECTION_CLOSE = 'close'
CONNECTION_KEEP_ALIVE = 'keep-alive'
COOKIE = 'cookie'
SET_COOKIE = 'set-cookie'
HOST = 'host'
EXPECT = 'expect'
EXPECT_100_CONTINUE = '100-continue'
# File extension -> Content-Type for static file responses.
CONTENT_TYPE_MAP = {
    'html': CONTENT_TYPE_HTML_UTF8,
    'htm': CONTENT_TYPE_HTML_UTF8,
    'jpg': 'image/jpeg',
    'jpeg': 'image/jpeg',
    'png': 'image/png',
    'gif': 'image/gif',
    'svg': 'image/svg+xml',
    'webp': 'image/webp',
    'ico': 'image/x-icon',
    'bmp': 'image/bmp',
}
# Accepted request methods and protocol versions.
METHODS = (
    'CONNECT', 'DELETE', 'GET', 'HEAD', 'OPTIONS', 'PATCH', 'POST',
    'PUT', 'TRACE')
PROTOCOLS = ('HTTP/1.0', 'HTTP/1.1')

# Event mode constants
EVENT_REQUEST = 0  # Complete request (headers + body)
EVENT_HEADERS = 1  # Headers received, waiting for accept_body()
EVENT_DATA = 2  # Data available in buffer, call read_buffer()
EVENT_COMPLETE = 3  # Body fully received
EVENT_ERROR = 4  # Error occurred (timeout, disconnect)
# HTTP status code -> reason phrase used in response status lines.
STATUS_CODES = {
    100: "Continue",
    200: "OK",
    201: "Created",
    202: "Accepted",
    204: "No Content",
    205: "Reset Content",
    206: "Partial Content",
    300: "Multiple Choices",
    301: "Moved Permanently",
    302: "Found",
    303: "See Other",
    304: "Not Modified",
    307: "Temporary Redirect",
    308: "Permanent Redirect",
    400: "Bad Request",
    401: "Unauthorized",
    403: "Forbidden",
    404: "Not Found",
    405: "Method Not Allowed",
    406: "Not Acceptable",
    408: "Request Timeout",
    410: "Gone",
    411: "Length Required",
    413: "Payload Too Large",
    414: "URI Too Long",
    415: "Unsupported Media Type",
    416: "Range Not Satisfiable",
    429: "Too Many Requests",
    431: "Request Header Fields Too Large",
    500: "Internal Server Error",
    501: "Not Implemented",
    503: "Service Unavailable",
    505: "HTTP Version Not Supported",
    507: "Insufficient Storage",
}
|
|
105
|
+
|
|
106
|
+
|
|
107
|
+
class ClientError(Exception):
    """Base error for client connection handling."""


class HttpError(ClientError):
    """HTTP protocol error (malformed request, misuse of the response API)."""


class HttpDisconnected(HttpError):
    """The client disconnected or the socket failed mid-request."""
|
|
117
|
+
|
|
118
|
+
|
|
119
|
+
class HttpErrorWithResponse(HttpError):
    """HTTP error that carries a status code for an error response."""

    def __init__(self, status=500, message=None):
        """Build the message as '<code> <reason>[: <detail>]'."""
        parts = [str(status)]
        if status in STATUS_CODES:
            parts.append(" " + STATUS_CODES[status])
        if message:
            parts.append(": " + message)
        super().__init__("".join(parts))
        self._status = status

    @property
    def status(self):
        """Result status code"""
        return self._status
|
|
135
|
+
|
|
136
|
+
|
|
137
|
+
def decode_percent_encoding(data):
    """Decode percent-encoded *data* (bytes), mapping '+' to space.

    A '%' is only decoded when followed by exactly two hexadecimal
    digits (RFC 3986); anything else is passed through literally.
    The previous implementation relied on int(..., 16), which also
    accepts signs and whitespace (e.g. b'%+1' decoded to b'\\x01').
    """
    if b'%' not in data:
        # Fast path: nothing to decode except the '+' shorthand.
        return data.replace(b'+', b' ')
    hexdigits = b'0123456789abcdefABCDEF'
    res = bytearray()
    i = 0
    n = len(data)
    while i < n:
        b = data[i]
        if b == 37 and i + 2 < n:  # '%'
            pair = bytes(data[i + 1:i + 3])
            # Strict two-HEXDIG check; int() alone would be too lenient.
            if pair[0] in hexdigits and pair[1] in hexdigits:
                res.append(int(pair, 16))
                i += 3
                continue
        res.append(32 if b == 43 else b)  # '+' -> ' '
        i += 1
    return bytes(res)
|
|
156
|
+
|
|
157
|
+
|
|
158
|
+
def split_iter(data, sep):
    """Lazily yield the pieces of *data* separated by *sep*.

    Equivalent to iterating data.split(sep) but without allocating
    the full list of parts up front.
    """
    offset = 0
    step = len(sep)
    while True:
        found = data.find(sep, offset)
        if found < 0:
            yield data[offset:]
            return
        yield data[offset:found]
        offset = found + step
|
|
168
|
+
|
|
169
|
+
|
|
170
|
+
def parse_header_parameters(value):
    """Parse parameters/directives from a header value into a dict.

    'text/html; charset=UTF-8' -> {'text/html': None, 'charset': 'UTF-8'}
    Values are stripped of surrounding whitespace and double quotes;
    bare directives map to None.
    """
    result = {}
    for chunk in value.split(';'):
        key, eq, val = chunk.partition('=')
        if eq:
            result[key.strip()] = val.strip().strip('"')
        elif chunk:
            result[chunk.strip()] = None
    return result
|
|
180
|
+
|
|
181
|
+
|
|
182
|
+
def parse_query(raw_query, query=None):
    """Parse raw_query from URL, append it to existing query, returns dict

    Repeated keys accumulate into a list; a key without '=' maps to None.
    Raises HttpErrorWithResponse(400) on undecodable percent-escapes.
    """
    result = {} if query is None else query
    for item in split_iter(raw_query, b'&'):
        if not item:
            continue
        try:
            if b'=' in item:
                raw_key, raw_val = item.split(b'=', 1)
                name = decode_percent_encoding(raw_key).decode('utf-8')
                value = decode_percent_encoding(raw_val).decode('utf-8')
            else:
                name = decode_percent_encoding(item).decode('utf-8')
                value = None
        except (UnicodeError, ValueError) as err:
            raise HttpErrorWithResponse(
                400, "Invalid query string encoding") from err
        # First occurrence stores the value; later ones grow a list.
        if name not in result:
            result[name] = value
        elif isinstance(result[name], list):
            result[name].append(value)
        else:
            result[name] = [result[name], value]
    return result
|
|
206
|
+
|
|
207
|
+
|
|
208
|
+
def parse_url(url):
    """Split a raw URL (bytes) into a decoded path and a query dict.

    The query part (after '?') is parsed first, matching the original
    error ordering; the path is percent-decoded to a str.  Returns
    (path, query) where query is None when the URL has no '?'.
    """
    raw_path, mark, raw_query = url.partition(b'?')
    query = parse_query(raw_query) if mark else None
    try:
        path = decode_percent_encoding(raw_path).decode('utf-8')
    except (UnicodeError, ValueError) as err:
        raise HttpErrorWithResponse(
            400, "Invalid URL path encoding") from err
    return path, query
|
|
222
|
+
|
|
223
|
+
|
|
224
|
+
def parse_header_line(line):
    """Decode one raw header line into a (lowercased key, value) pair.

    Raises HttpErrorWithResponse(400) for non-ASCII bytes or a line
    without a ':' separator.
    """
    try:
        text = line.decode('ascii')
    except ValueError as err:
        readable = line.decode('utf-8', errors='replace')
        raise HttpErrorWithResponse(
            400, f"Invalid non-ASCII characters in header: {readable}") from err
    if ':' not in text:
        raise HttpErrorWithResponse(400, f"Wrong header format {text}")
    key, _, val = text.partition(':')
    return key.strip().lower(), val.strip()
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
def encode_response_data(headers, data):
    """Serialize *data* to bytes and fill content-type/length headers.

    dict/list/tuple/int/float -> JSON, str -> UTF-8 HTML, bytes-like
    passes through.  An existing content-type header is kept; the
    content-length header is always (re)set.  Raises
    HttpErrorWithResponse(415) for unsupported types.
    """
    if isinstance(data, (dict, list, tuple, int, float)):
        body = _json.dumps(data).encode('ascii')
        headers.setdefault(CONTENT_TYPE, CONTENT_TYPE_JSON)
    elif isinstance(data, str):
        body = data.encode('utf-8')
        headers.setdefault(CONTENT_TYPE, CONTENT_TYPE_HTML_UTF8)
    elif isinstance(data, (bytes, bytearray, memoryview)):
        body = data
        headers.setdefault(CONTENT_TYPE, CONTENT_TYPE_OCTET_STREAM)
    else:
        raise HttpErrorWithResponse(415, f"Unsupported data type: {type(data).__name__}")
    headers[CONTENT_LENGTH] = len(body)
    return body
|
|
255
|
+
|
|
256
|
+
|
|
257
|
+
class HttpConnection():
|
|
258
|
+
"""Simple HTTP client connection"""
|
|
259
|
+
|
|
260
|
+
# pylint: disable=too-many-instance-attributes
|
|
261
|
+
|
|
262
|
+
    def __init__(self, server, sock, addr, **kwargs):
        """sock - client socket, addr - tuple (ip, port)

        kwargs may override: max_headers_length, max_content_length,
        file_chunk_size, keep_alive_timeout, keep_alive_max_requests.
        """
        self._server = server
        self._addr = addr
        self._socket = sock
        # Received bytes not yet consumed (headers first, then body).
        self._buffer = bytearray()
        # Outgoing bytes queued for non-blocking sending.
        self._send_buffer = bytearray()
        self._rx_bytes_counter = 0
        # Parsed request state; None until the request line/headers arrive.
        self._method = None
        self._url = None
        self._protocol = None
        self._headers = None
        self._data = None
        self._path = None
        self._query = None
        self._content_length = None
        self._cookies = None
        self._is_multipart = False
        self._response_started = False
        self._response_keep_alive = False
        # Open file being streamed as the response body, if any.
        self._file_handle = None
        self._last_activity = _time.time()
        self._requests_count = 0
        # Event mode attributes
        self.context = None
        self._event = None
        self._bytes_received = 0
        self._error = None
        self._streaming_body = False
        self._streaming_events = False
        self._body_complete = False
        # Spool file for a streamed request body, if any.
        self._body_file_handle = None
        self._to_file = None
        self._expect_continue = False
        # Config from kwargs
        self._max_headers_length = kwargs.get(
            'max_headers_length', MAX_HEADERS_LENGTH)
        self._max_content_length = kwargs.get(
            'max_content_length', MAX_CONTENT_LENGTH)
        self._file_chunk_size = kwargs.get(
            'file_chunk_size', FILE_CHUNK_SIZE)
        self._keep_alive_timeout = kwargs.get(
            'keep_alive_timeout', KEEP_ALIVE_TIMEOUT)
        self._keep_alive_max_requests = kwargs.get(
            'keep_alive_max_requests', KEEP_ALIVE_MAX_REQUESTS)
|
|
307
|
+
|
|
308
|
+
    def __del__(self):
        # Best-effort cleanup when the connection object is garbage
        # collected without an explicit close().
        self.close()
|
|
310
|
+
|
|
311
|
+
def __repr__(self):
|
|
312
|
+
result = f"HttpConnection: [{self.remote_address}] {self.method}"
|
|
313
|
+
result += f" http://{self.full_url}"
|
|
314
|
+
return result
|
|
315
|
+
|
|
316
|
+
    @property
    def addr(self):
        """Raw client address tuple (ip, port) as given by accept()."""
        return self._addr

    @property
    def remote_address(self):
        """Client address "ip:port"; prefers the X-Forwarded-For header."""
        forwarded = self.headers_get_attribute('x-forwarded-for')
        if forwarded:
            # First entry in the chain is the originating client.
            return forwarded.split(',')[0]
        addr = self._addr[0]
        if addr.startswith('::ffff:'):
            addr = addr[7:]  # Remove IPv4-mapped prefix
        return f"{addr}:{self._addr[1]}"

    @property
    def remote_addresses(self):
        """Full X-Forwarded-For value, or the socket address "ip:port"."""
        forwarded = self.headers_get_attribute('x-forwarded-for')
        if forwarded:
            return forwarded
        return f"{self._addr[0]}:{self._addr[1]}"

    @property
    def is_secure(self):
        """Return True if connection is using SSL/TLS"""
        return self._server.is_secure

    @property
    def method(self):
        """HTTP method (None until the request line is parsed)."""
        return self._method

    @property
    def url(self):
        """Raw request URL (path including query string)."""
        return self._url

    @property
    def host(self):
        """Value of the Host request header ('' when absent)."""
        return self.headers_get_attribute(HOST, '')

    @property
    def full_url(self):
        """Host and URL joined, e.g. "example.com/path?query"."""
        return f"{self.host}{self.url}"

    @property
    def protocol(self):
        """Protocol version string, e.g. 'HTTP/1.1'."""
        return self._protocol

    @property
    def headers(self):
        """Request headers dict (lowercased keys), None before parsing."""
        return self._headers

    @property
    def data(self):
        """Decoded request body (dict, parsed JSON, or raw bytes)."""
        return self._data

    @property
    def path(self):
        """Percent-decoded URL path."""
        return self._path

    @property
    def query(self):
        """Parsed query dict (None when the URL had no query string)."""
        return self._query
|
|
389
|
+
|
|
390
|
+
@property
|
|
391
|
+
def cookies(self):
|
|
392
|
+
"""Cookies dict"""
|
|
393
|
+
if self._cookies is None:
|
|
394
|
+
self._cookies = {}
|
|
395
|
+
raw_cookies = self.headers_get_attribute(COOKIE)
|
|
396
|
+
if raw_cookies:
|
|
397
|
+
for cookie_param in split_iter(raw_cookies, ';'):
|
|
398
|
+
if '=' in cookie_param:
|
|
399
|
+
key, val = cookie_param.split('=')
|
|
400
|
+
key = key.strip()
|
|
401
|
+
if key:
|
|
402
|
+
self._cookies[key] = val.strip()
|
|
403
|
+
return self._cookies
|
|
404
|
+
|
|
405
|
+
    @property
    def socket(self):
        """Underlying client socket object (None after close)."""
        return self._socket

    @property
    def rx_bytes_counter(self):
        """Total number of bytes received on this connection."""
        return self._rx_bytes_counter

    @property
    def is_loaded(self):
        """True when request is fully loaded and ready for response"""
        if self._response_started:
            return False
        # Loaded = request line parsed, and either no body announced
        # or the body has been decoded into self._data.
        return self._method and (not self.content_length or self._data)

    @property
    def is_timed_out(self):
        """True when connection has been idle too long"""
        return (_time.time() - self._last_activity) > self._keep_alive_timeout

    @property
    def is_max_requests_reached(self):
        """True when connection reached max requests limit"""
        return self._requests_count >= self._keep_alive_max_requests

    @property
    def has_data_to_send(self):
        """True when there is data waiting to be sent or file being streamed"""
        return len(self._send_buffer) > 0 or self._file_handle is not None

    @property
    def send_buffer_size(self):
        """Size of pending send buffer in bytes"""
        return len(self._send_buffer)

    @property
    def event(self):
        """Current event type (EVENT_REQUEST, EVENT_HEADERS, etc.)"""
        return self._event

    @property
    def bytes_received(self):
        """Number of body bytes received so far"""
        return self._bytes_received

    @property
    def error(self):
        """Error message if event is EVENT_ERROR"""
        return self._error
|
|
456
|
+
|
|
457
|
+
    @property
    def content_type(self):
        """Request Content-Type header value ('' when absent)."""
        return self.headers_get_attribute(CONTENT_TYPE, '')

    @property
    def content_length(self):
        """Parsed Content-Length header, cached after first access.

        Returns None before headers are parsed, False when the header
        is absent, otherwise the length as int.  Raises
        HttpErrorWithResponse(400) for a non-numeric value.
        """
        if self._headers is None:
            return None
        if self._content_length is None:
            content_length = self.headers_get_attribute(CONTENT_LENGTH)
            if content_length is None:
                # False marks "header absent" so the lookup is not retried.
                self._content_length = False
            elif content_length.isdigit():
                self._content_length = int(content_length)
            else:
                raise HttpErrorWithResponse(
                    400, f"Wrong content length {content_length}")
        return self._content_length
|
|
477
|
+
|
|
478
|
+
def headers_get_attribute(self, key, default=None):
|
|
479
|
+
"""Return headers value"""
|
|
480
|
+
if self._headers:
|
|
481
|
+
return self._headers.get(key, default)
|
|
482
|
+
return default
|
|
483
|
+
|
|
484
|
+
    def _recv_to_buffer(self, size):
        """Read available bytes from the socket into self._buffer.

        *size* is the target total buffer length; only the missing part
        is requested.  Returns silently when no data is available yet;
        raises HttpDisconnected on a dead connection and
        HttpErrorWithResponse(413) when the read exhausts memory.
        """
        try:
            buffer = self._socket.recv(size - len(self._buffer))
        except OSError as err:
            if err.errno in (errno.EAGAIN, errno.ENOENT):
                # EAGAIN: no data available (non-blocking)
                # ENOENT: SSL handshake in progress (CPython)
                return
            raise HttpDisconnected(f"{err}: {self.addr}") from err
        except MemoryError as err:
            raise HttpErrorWithResponse(413) from err
        if buffer is None:
            # MicroPython SSL: handshake in progress
            return
        if not buffer:
            # recv() returning b'' means the peer closed the connection.
            raise HttpDisconnected(f"Lost connection from client {self.addr}")
        self._rx_bytes_counter += len(buffer)
        self._buffer.extend(buffer)
        self.update_activity()
|
|
503
|
+
|
|
504
|
+
    def _parse_http_request(self, line):
        """Parse the request line (e.g. b'GET /path HTTP/1.1').

        Fills method/url/protocol/path/query.  Raises HttpError for a
        malformed line and HttpErrorWithResponse for bad encoding (400),
        unknown method (501) or unsupported protocol version (505).
        """
        if line.count(b' ') != 2:
            readable = line.decode('utf-8', errors='replace')
            raise HttpError(f"Malformed request line: {readable}")
        method, url, protocol = line.strip().split(b' ')
        try:
            self._method = method.decode('ascii')
            self._url = url.decode('ascii')
            self._protocol = protocol.decode('ascii')
        except ValueError as err:
            readable = line.decode('utf-8', errors='replace')
            raise HttpErrorWithResponse(
                400, f"Invalid characters in request line: {readable}") from err
        if self._method not in METHODS:
            raise HttpErrorWithResponse(501)
        if self._protocol not in PROTOCOLS:
            raise HttpErrorWithResponse(505)
        # parse_url works on the raw bytes URL (percent-decoding inside).
        self._path, self._query = parse_url(url)
|
|
522
|
+
|
|
523
|
+
    def _process_data(self):
        """Decode the request body once it is fully buffered.

        Leaves self._data as a query dict (form data), parsed JSON, or
        the raw buffer, then empties the receive buffer.  No-op while
        the body is still incomplete; 400 on trailing garbage.
        """
        if len(self._buffer) < self.content_length:
            return

        if len(self._buffer) > self.content_length:
            raise HttpErrorWithResponse(400, "Unexpected data after body")

        content_type_parts = parse_header_parameters(self.content_type)
        if CONTENT_TYPE_XFORMDATA in content_type_parts:
            self._data = parse_query(self._buffer)
        elif CONTENT_TYPE_JSON in content_type_parts:
            try:
                self._data = _json.loads(self._buffer)
            except ValueError as err:
                raise HttpErrorWithResponse(
                    400, f"JSON decode error: {err}") from err
        else:
            # Unknown content type: hand over the raw bytes.
            self._data = self._buffer
        self._buffer = bytearray()
|
|
542
|
+
|
|
543
|
+
    def _process_headers(self, header_lines):
        """Consume raw header lines: request line first, then headers.

        Enforces the HTTP/1.1 Host requirement, arms the
        Expect: 100-continue handshake, and when a body is announced
        validates its size and decodes any part already buffered.
        """
        self._headers = {}
        while header_lines:
            line = header_lines.pop(0)
            if not line:
                # Empty line terminates the header section.
                break
            if self._method is None:
                self._parse_http_request(line)
            else:
                key, val = parse_header_line(line)
                self._headers[key] = val

        # RFC 2616: HTTP/1.1 requires Host header
        if self._protocol == 'HTTP/1.1' and 'host' not in self._headers:
            raise HttpErrorWithResponse(
                400, "Host header is required for HTTP/1.1")

        # Handle Expect: 100-continue
        expect = self.headers_get_attribute(EXPECT, '').lower()
        if expect == EXPECT_100_CONTINUE and self.content_length:
            self._expect_continue = True
            if not self._server.event_mode:
                # Non-event mode: send 100 Continue immediately
                self._send_100_continue()

        if self.content_length:
            if self.content_length > self._max_content_length:
                raise HttpErrorWithResponse(413)
            self._process_data()
|
|
572
|
+
|
|
573
|
+
    def _read_headers(self):
        """Receive until a blank line terminates the header section.

        Once a delimiter is found, the header block is split into lines
        and parsed; remaining bytes stay buffered as the start of the
        body.  Raises HttpErrorWithResponse(431) when the buffer fills
        without a complete header section.
        """
        self._recv_to_buffer(self._max_headers_length)
        for delimiter in HEADERS_DELIMITERS:
            if delimiter in self._buffer:
                end_index = self._buffer.index(delimiter) + len(delimiter)
                header_lines = self._buffer[:end_index].splitlines()
                self._buffer = self._buffer[end_index:]
                self._process_headers(header_lines)
                return
        if len(self._buffer) >= self._max_headers_length:
            raise HttpErrorWithResponse(
                431,
                f"Headers too large: {len(self._buffer)} bytes (max {self._max_headers_length})")
|
|
586
|
+
|
|
587
|
+
def _send(self, data):
|
|
588
|
+
"""Add data to send buffer for async sending"""
|
|
589
|
+
if self._socket is None:
|
|
590
|
+
return
|
|
591
|
+
if isinstance(data, str):
|
|
592
|
+
data = data.encode('ascii')
|
|
593
|
+
self._send_buffer.extend(data)
|
|
594
|
+
self.try_send()
|
|
595
|
+
|
|
596
|
+
def _send_100_continue(self):
|
|
597
|
+
"""Send 100 Continue response if client expects it"""
|
|
598
|
+
if not self._expect_continue:
|
|
599
|
+
return
|
|
600
|
+
self._expect_continue = False
|
|
601
|
+
self._send('HTTP/1.1 100 Continue\r\n\r\n')
|
|
602
|
+
|
|
603
|
+
def _close_file_handle(self):
|
|
604
|
+
"""Close file handle safely"""
|
|
605
|
+
if self._file_handle:
|
|
606
|
+
try:
|
|
607
|
+
self._file_handle.close()
|
|
608
|
+
except OSError:
|
|
609
|
+
pass
|
|
610
|
+
self._file_handle = None
|
|
611
|
+
|
|
612
|
+
    def _refill_from_file(self):
        """Read next chunk from file into send buffer.

        Returns False if error occurred and connection was closed."""
        if not self._file_handle:
            return True
        if len(self._send_buffer) >= self._file_chunk_size:
            # Enough data queued already; do not read further ahead.
            return True
        try:
            chunk = self._file_handle.read(self._file_chunk_size)
            if chunk:
                self._send_buffer.extend(chunk)
            else:
                # EOF reached: file streaming is finished.
                self._close_file_handle()
        except OSError:
            self._close_file_handle()
            self.close()
            return False
        return True
|
|
630
|
+
|
|
631
|
+
    def _flush_send_buffer(self):
        """Try to send data from buffer.

        Returns True if buffer is empty."""
        if not self._send_buffer:
            return True
        try:
            sent = self._socket.send(self._send_buffer)
            # MicroPython SSL may return None when buffer full
            if sent is None:
                return False
            if sent > 0:
                # Drop the bytes the kernel accepted; keep the rest.
                self._send_buffer = self._send_buffer[sent:]
            return len(self._send_buffer) == 0
        except OSError as err:
            if err.errno == errno.EAGAIN:
                # Socket not writable right now; retry on next event.
                return False
            self.close()
            return False
|
|
649
|
+
|
|
650
|
+
def try_send(self):
|
|
651
|
+
"""Try to send data, finalize when complete"""
|
|
652
|
+
if self._socket is None:
|
|
653
|
+
return
|
|
654
|
+
|
|
655
|
+
if not self._refill_from_file():
|
|
656
|
+
return
|
|
657
|
+
|
|
658
|
+
if self._flush_send_buffer() and self._file_handle is None:
|
|
659
|
+
self._finalize_sent_response()
|
|
660
|
+
|
|
661
|
+
    def update_activity(self):
        """Update last activity timestamp (drives the keep-alive timeout)."""
        self._last_activity = _time.time()
|
|
664
|
+
|
|
665
|
+
def _should_keep_alive(self, response_headers=None):
|
|
666
|
+
"""Determine if connection should be kept alive
|
|
667
|
+
|
|
668
|
+
Args:
|
|
669
|
+
response_headers: Optional dict of response headers
|
|
670
|
+
to check for explicit Connection header
|
|
671
|
+
|
|
672
|
+
Returns:
|
|
673
|
+
bool: True if connection should be kept alive
|
|
674
|
+
"""
|
|
675
|
+
if response_headers and CONNECTION in response_headers:
|
|
676
|
+
return response_headers[CONNECTION].lower() == CONNECTION_KEEP_ALIVE
|
|
677
|
+
|
|
678
|
+
req_connection = self.headers_get_attribute(CONNECTION, '').lower()
|
|
679
|
+
|
|
680
|
+
if self._protocol == 'HTTP/1.1':
|
|
681
|
+
keep_alive = req_connection != CONNECTION_CLOSE
|
|
682
|
+
else:
|
|
683
|
+
keep_alive = req_connection == CONNECTION_KEEP_ALIVE
|
|
684
|
+
|
|
685
|
+
if keep_alive and self.is_max_requests_reached:
|
|
686
|
+
keep_alive = False
|
|
687
|
+
|
|
688
|
+
return keep_alive
|
|
689
|
+
|
|
690
|
+
def _finalize_sent_response(self):
|
|
691
|
+
"""Finalize connection after response fully sent (no buffered data)"""
|
|
692
|
+
if not self._response_started:
|
|
693
|
+
return
|
|
694
|
+
|
|
695
|
+
if self._is_multipart:
|
|
696
|
+
return
|
|
697
|
+
|
|
698
|
+
if self._response_keep_alive:
|
|
699
|
+
self.reset()
|
|
700
|
+
else:
|
|
701
|
+
self.close()
|
|
702
|
+
|
|
703
|
+
    def reset(self):
        """Reset connection for next request (keep-alive)"""
        # Release file handles left over from the previous request.
        self._close_file_handle()
        self._close_body_file()
        # Clear parsed request state.
        self._method = None
        self._url = None
        self._protocol = None
        self._headers = None
        self._data = None
        self._path = None
        self._query = None
        self._content_length = None
        self._cookies = None
        self._is_multipart = False
        self._response_started = False
        self._response_keep_alive = False
        # Reset event mode attributes
        self.context = None
        self._event = None
        self._bytes_received = 0
        self._error = None
        self._streaming_body = False
        self._streaming_events = False
        self._body_complete = False
        self._to_file = None
        self._expect_continue = False
        # Restart the idle timer for the keep-alive timeout.
        self.update_activity()
|
|
730
|
+
|
|
731
|
+
def close(self):
|
|
732
|
+
"""Close connection"""
|
|
733
|
+
self._close_file_handle()
|
|
734
|
+
self._close_body_file(delete=True)
|
|
735
|
+
self._server.remove_connection(self)
|
|
736
|
+
if self._socket:
|
|
737
|
+
try:
|
|
738
|
+
self._socket.close()
|
|
739
|
+
except OSError:
|
|
740
|
+
pass
|
|
741
|
+
self._socket = None
|
|
742
|
+
self._send_buffer = bytearray()
|
|
743
|
+
|
|
744
|
+
def headers_get(self, key, default=None):
|
|
745
|
+
"""Return value from headers by key, or default if key not found"""
|
|
746
|
+
return self._headers.get(key.lower(), default)
|
|
747
|
+
|
|
748
|
+
    def process_request(self):
        """Process HTTP request when read event on client socket.

        Returns True when a complete request is ready for a response,
        False while still receiving, None when no further processing is
        possible (closed socket or a handled error).  Protocol errors
        are answered directly with Connection: close; transport errors
        close the socket.
        """
        if self._socket is None:
            return None
        if self._is_multipart:
            # Connection is busy streaming a multipart response.
            return False
        try:
            if self._method is None:
                self._read_headers()
            elif self.content_length:
                self._recv_to_buffer(self.content_length)
                self._process_data()
            if self.is_loaded:
                self._requests_count += 1
            return self.is_loaded
        except HttpErrorWithResponse as err:
            self.respond(
                data=str(err), status=err.status,
                headers={CONNECTION: CONNECTION_CLOSE})
        except ClientError:
            self.close()
        return None
|
|
770
|
+
|
|
771
|
+
    def process_request_event(self):
        """Process HTTP request in event mode.

        Returns True if event is ready, False if waiting, None on error.
        On protocol errors the event is set to EVENT_ERROR with the
        message in self.error rather than responding directly.
        """
        if self._socket is None:
            return None
        if self._is_multipart:
            return False

        try:
            return self._process_event()
        except HttpErrorWithResponse as err:
            self._error = str(err)
            self._event = EVENT_ERROR
            return True
        except ClientError as err:
            # Client disconnect on keep-alive while waiting for next request
            # is normal - just close silently
            if self._requests_count > 0 and self._method is None:
                self.close()
                return None
            self._error = str(err)
            self._event = EVENT_ERROR
            return True
|
|
796
|
+
|
|
797
|
+
    def _process_event(self):
        """Internal event processing logic.

        Dispatches by connection phase; returns True when an event is
        ready, False while waiting for more data or an accept_body()
        call from the application.
        """
        # Phase 1: Reading headers
        if self._method is None:
            self._read_headers()
            if self._method is None:
                return False  # Headers not complete yet
            return self._handle_headers_complete()

        # Phase 2: Streaming body
        if self._streaming_body:
            return self._handle_streaming_body()

        # Phase 3: Waiting for accept_body() call
        return False
|
|
812
|
+
|
|
813
|
+
    def _handle_headers_complete(self):
        """Handle completed headers and decide which event to raise.

        EVENT_REQUEST when the whole request (headers + body, if any)
        is available; EVENT_HEADERS when a body is announced but not
        yet complete.  Always returns True (an event is ready).
        """
        if not self.content_length:
            # No body - complete request
            self._event = EVENT_REQUEST
            self._requests_count += 1
            return True

        # Check if small body already arrived with headers
        # _data may be set by _process_headers() or buffer may have the data
        if self._data is not None or len(self._buffer) >= self.content_length:
            if self._data is None:
                self._process_data()
            self._event = EVENT_REQUEST
            self._requests_count += 1
            return True

        # Body expected but not complete - notify headers ready
        self._event = EVENT_HEADERS
        return True
|
|
833
|
+
|
|
834
|
+
    def _handle_streaming_body(self):
        """Handle streaming body data.

        Receives into the buffer, optionally spools to a file, and
        raises EVENT_COMPLETE / EVENT_DATA / EVENT_ERROR as the body
        progresses.  Returns True when an event is ready.
        """
        self._recv_to_buffer(self._max_content_length)

        if not self._buffer:
            return False  # No new data

        # Write to file if in file mode
        if self._body_file_handle:
            self._write_buffer_to_file()
            if self._event == EVENT_ERROR:
                return True

        # Check if body is complete
        total = self._bytes_received + len(self._buffer)
        if self.content_length and total >= self.content_length:
            self._close_body_file()  # Close file before EVENT_COMPLETE
            self._body_complete = True
            self._event = EVENT_COMPLETE
            self._requests_count += 1
            return True

        # If not streaming events, keep buffering until complete
        if not self._streaming_events:
            return False

        self._event = EVENT_DATA
        return True
|
|
862
|
+
|
|
863
|
+
def _write_buffer_to_file(self):
|
|
864
|
+
"""Write buffer to body file handle"""
|
|
865
|
+
try:
|
|
866
|
+
self._body_file_handle.write(self._buffer)
|
|
867
|
+
self._bytes_received += len(self._buffer)
|
|
868
|
+
self._buffer = bytearray()
|
|
869
|
+
except OSError as err:
|
|
870
|
+
self._close_body_file(delete=True)
|
|
871
|
+
self._error = f"Failed to write file: {err}"
|
|
872
|
+
self._event = EVENT_ERROR
|
|
873
|
+
|
|
874
|
+
def _build_response_header(self, status=200, headers=None, cookies=None):
    """Build HTTP response header string

    Connection header is added automatically based on keep-alive decision if not explicitly set.
    To force connection close, set headers['connection'] = 'close'.

    Args:
        status: HTTP status code for the response line.
        headers: Optional dict of header name -> value pairs.
        cookies: Optional dict of cookie name -> value; a None value
            emits an expired cookie (Max-Age=0) so the client drops it.

    Returns:
        str: Complete header block, terminated by the blank CRLF line.
    """
    # Use .get() so an unregistered status code yields a generic reason
    # phrase instead of raising KeyError while building the response.
    reason = STATUS_CODES.get(status, 'Unknown')
    parts = [f'{PROTOCOLS[-1]} {status} {reason}']

    if headers:
        for key, val in headers.items():
            parts.append(f'{key}: {val}')

    if cookies:
        for key, val in cookies.items():
            if val is None:
                # Empty value plus Max-Age=0 deletes the cookie.
                val = '; Max-Age=0'
            parts.append(f'{SET_COOKIE}: {key}={val}')

    # Trailing element makes the join emit the final CRLF CRLF.
    parts.append('\r\n')
    return '\r\n'.join(parts)
|
|
894
|
+
|
|
895
|
+
def _prepare_response(self, headers=None, is_multipart=False):
    """Mark the response as started and normalize its headers.

    For regular responses a Connection header is filled in from the
    keep-alive decision unless the caller set one explicitly.

    Returns:
        dict: The (possibly freshly created) headers dict.

    Raises:
        HttpError: If a response was already started for this request.
    """
    if self._response_started:
        raise HttpError("Response already sent for this request")
    self._response_started = True
    self._is_multipart = is_multipart

    headers = {} if headers is None else headers

    if is_multipart:
        # Multipart streams settle keep-alive when the stream ends.
        return headers

    keep_alive = self._should_keep_alive(headers)
    self._response_keep_alive = keep_alive
    headers.setdefault(
        CONNECTION,
        CONNECTION_KEEP_ALIVE if keep_alive else CONNECTION_CLOSE)
    return headers
|
|
913
|
+
|
|
914
|
+
def _accept_body_common(self):
    """Shared preamble for the accept_body* family of methods.

    Flags the connection as receiving a streamed body and sends the
    interim "100 Continue" response expected by some clients.

    Returns:
        int: Number of body bytes already waiting in the buffer.

    Raises:
        HttpError: If the connection is not in the EVENT_HEADERS state.
    """
    if self._event != EVENT_HEADERS:
        raise HttpError("accept_body() can only be called after EVENT_HEADERS")
    self._streaming_body = True
    self._send_100_continue()
    return len(self._buffer)
|
|
928
|
+
|
|
929
|
+
def accept_body(self):
    """Buffer the whole request body, then emit EVENT_COMPLETE.

    Call after receiving EVENT_HEADERS.  All incoming body data is
    accumulated internally; once complete, EVENT_COMPLETE fires and
    the data can be fetched with read_buffer().

    Returns:
        int: Number of body bytes already waiting in the buffer.
    """
    return self._accept_body_common()
|
|
940
|
+
|
|
941
|
+
def accept_body_streaming(self):
    """Receive the request body chunk by chunk via EVENT_DATA events.

    Call after receiving EVENT_HEADERS.  Each received chunk triggers
    EVENT_DATA (fetch it with read_buffer()); EVENT_COMPLETE follows
    the final chunk.

    Returns:
        int: Number of body bytes already waiting in the buffer.
    """
    pending = self._accept_body_common()
    self._streaming_events = True
    return pending
|
|
954
|
+
|
|
955
|
+
def accept_body_to_file(self, path):
    """Stream the request body straight into a file on disk.

    Call after receiving EVENT_HEADERS.  Chunks are written to *path*
    as they arrive (no EVENT_DATA events are emitted); EVENT_COMPLETE
    fires once the body is fully received.  If the file cannot be
    opened, EVENT_ERROR is set instead.

    Args:
        path: Destination file path for the body.

    Returns:
        int: Number of body bytes already waiting in the buffer
        (0 when opening the file failed).
    """
    pending = self._accept_body_common()
    self._to_file = path
    try:
        self._body_file_handle = open(path, 'wb')
    except OSError as err:
        self._error = f"Failed to open file: {err}"
        self._event = EVENT_ERROR
        return 0
    return pending
|
|
979
|
+
|
|
980
|
+
def read_buffer(self):
    """Drain the receive buffer and return its contents.

    Returns:
        bytes or None: Buffered body data, or None when nothing is
        currently pending.
    """
    data = bytes(self._buffer)
    if not data:
        return None
    self._buffer = bytearray()
    self._bytes_received += len(data)
    return data
|
|
992
|
+
|
|
993
|
+
def _close_body_file(self, delete=False):
|
|
994
|
+
"""Close body file handle safely"""
|
|
995
|
+
if hasattr(self, '_body_file_handle') and self._body_file_handle:
|
|
996
|
+
try:
|
|
997
|
+
self._body_file_handle.close()
|
|
998
|
+
except OSError:
|
|
999
|
+
pass
|
|
1000
|
+
self._body_file_handle = None
|
|
1001
|
+
if delete and hasattr(self, '_to_file') and self._to_file:
|
|
1002
|
+
try:
|
|
1003
|
+
_os.remove(self._to_file)
|
|
1004
|
+
except OSError:
|
|
1005
|
+
pass
|
|
1006
|
+
|
|
1007
|
+
def respond(self, data=None, status=200, headers=None, cookies=None):
    """Create general respond with data, status and headers as dict

    To force connection close, set headers['connection'] = 'close'.
    By default, HTTP/1.1 uses keep-alive, HTTP/1.0 closes connection.

    Args:
        data: Optional response body; encoded via encode_response_data()
            which also updates the headers dict.  Falsy data (None,
            b'', '') results in a header-only response.
        status: HTTP status code for the response line.
        headers: Optional dict of response headers (mutated in place).
        cookies: Optional dict of cookies to set (None value deletes).
    """
    # Socket already gone (client disconnected earlier) - nothing to do.
    if self._socket is None:
        return
    headers = self._prepare_response(headers)
    if data:
        data = encode_response_data(headers, data)

    header = self._build_response_header(status, headers=headers, cookies=cookies)
    try:
        if data:
            # Concatenate header and body into a single send so they can
            # share one TCP segment (TCP_NODELAY is set on accept).
            header_bytes = header.encode('ascii') if isinstance(header, str) else header
            self._send(header_bytes + data)
        else:
            self._send(header)
        if not self.has_data_to_send:
            # Everything was flushed immediately - finish the response
            # now; otherwise the send queue finalizes once drained.
            self._finalize_sent_response()
    except OSError:
        self.close()
|
|
1030
|
+
|
|
1031
|
+
def respond_file(self, file_name, headers=None):
    """Respond with file content, streaming asynchronously to minimize memory usage

    To force connection close, set headers['connection'] = 'close'.

    Args:
        file_name: Path of the file to serve.
        headers: Optional dict of response headers.
    """
    try:
        # Tuple indexing (index 6 = st_size) works on both CPython
        # and MicroPython stat results.
        file_size = _os.stat(file_name)[6]  # st_size
    except (OSError, ImportError, AttributeError):
        self.respond(data=f'File not found: {file_name}', status=404)
        return

    headers = self._prepare_response(headers)

    if CONTENT_TYPE not in headers:
        # Guess the content type from the file extension.
        ext = file_name.lower().split('.')[-1] if '.' in file_name else ''
        headers[CONTENT_TYPE] = CONTENT_TYPE_MAP.get(ext, CONTENT_TYPE_OCTET_STREAM)
    headers[CONTENT_LENGTH] = file_size

    header = self._build_response_header(200, headers=headers)

    try:
        # Keep the handle open; presumably the send machinery streams
        # it in FILE_CHUNK_SIZE pieces as the socket becomes writable
        # -- TODO confirm against try_send() implementation.
        self._file_handle = open(file_name, 'rb')
        self._send(header)
    except OSError:
        self._close_file_handle()
        self.close()
|
|
1057
|
+
|
|
1058
|
+
def response_multipart(self, headers=None):
    """Start a multipart (x-mixed-replace) response stream.

    Returns:
        bool: True when the initial header was handed to the socket,
        False when the connection is gone or the send failed.
    """
    if self._socket is None:
        return False
    headers = self._prepare_response(headers, is_multipart=True)
    headers.setdefault(CONTENT_TYPE, CONTENT_TYPE_MULTIPART_REPLACE)
    try:
        self._send(self._build_response_header(200, headers=headers))
    except OSError:
        self.close()
        return False
    return True
|
|
1074
|
+
|
|
1075
|
+
def response_multipart_frame(self, data, headers=None, boundary=None):
    """Send one frame of an already-started multipart stream.

    Falsy *data* terminates the stream instead of sending a frame.

    Returns:
        bool: True when the frame was queued, False otherwise.
    """
    if self._socket is None:
        return False
    if not data:
        # An empty frame means end-of-stream.
        self.response_multipart_end()
        return False
    boundary = boundary or BOUNDARY
    headers = {} if headers is None else headers
    data = encode_response_data(headers, data)
    lines = [f'--{boundary}']
    lines.extend(f'{key}: {val}' for key, val in headers.items())
    lines.append('\r\n')
    try:
        self._send('\r\n'.join(lines))
        self._send(data)
        self._send('\r\n')
    except OSError:
        self.close()
        return False
    return True
|
|
1100
|
+
|
|
1101
|
+
def response_multipart_end(self, boundary=None):
    """Terminate a multipart stream with the closing boundary marker."""
    boundary = boundary or BOUNDARY
    self._is_multipart = False

    # The stream was started without a Connection header, so fall back
    # to the protocol's default keep-alive behavior now.
    self._response_keep_alive = self._should_keep_alive()

    try:
        self._send(f'--{boundary}--\r\n')
        if not self.has_data_to_send:
            self._finalize_sent_response()
    except OSError:
        self.close()
|
|
1118
|
+
|
|
1119
|
+
def respond_redirect(self, url, status=302, cookies=None):
    """Send an HTTP redirect pointing the client at *url*."""
    redirect_headers = {LOCATION: url}
    self.respond(status=status, headers=redirect_headers, cookies=cookies)
|
|
1122
|
+
|
|
1123
|
+
|
|
1124
|
+
class HttpServer():
    """HTTP server"""

    def __init__(
            self, address='0.0.0.0', port=80, ssl_context=None,
            event_mode=False, **kwargs):
        """IP address and port of listening interface for HTTP

        For IPv6 dual-stack (accepts both IPv4 and IPv6), use address='::'

        Args:
            event_mode: If True, enables streaming event mode where wait()
                returns clients at different stages (headers, data, complete).
                If False (default), wait() only returns fully loaded requests.
        """
        # Remaining kwargs are forwarded to every HttpConnection.
        self._kwargs = kwargs
        self._ssl_context = ssl_context
        self._event_mode = event_mode
        if ':' in address:
            # A ':' in the address means IPv6.
            self._socket = _socket.socket(_socket.AF_INET6, _socket.SOCK_STREAM)
            try:
                # V6ONLY=0 enables dual-stack; not supported on all
                # platforms (e.g. some MicroPython ports), so ignore.
                self._socket.setsockopt(
                    _socket.IPPROTO_IPV6, _socket.IPV6_V6ONLY, 0)
            except (AttributeError, OSError):
                pass
        else:
            self._socket = _socket.socket()
        self._socket.setsockopt(_socket.SOL_SOCKET, _socket.SO_REUSEADDR, 1)
        self._socket.bind((address, port))
        self._socket.listen(kwargs.get('listen', LISTEN_SOCKETS))
        self._max_clients = kwargs.get(
            'max_waiting_clients', MAX_WAITING_CLIENTS)
        # Connections currently open (serving or awaiting data).
        self._waiting_connections = []

    @property
    def socket(self):
        """Server socket"""
        return self._socket

    @property
    def is_secure(self):
        """Return True if server uses SSL/TLS"""
        return bool(self._ssl_context)

    @property
    def event_mode(self):
        """Return True if event mode is enabled"""
        return self._event_mode

    @property
    def read_sockets(self):
        """All sockets waiting for communication, used for select"""
        read_sockets = [
            con.socket for con in self._waiting_connections
            if con.socket is not None]
        if self._socket is not None:
            read_sockets.append(self._socket)
        return read_sockets

    @property
    def write_sockets(self):
        """All sockets with data to send, used for select"""
        return [
            con.socket for con in self._waiting_connections
            if con.socket is not None and con.has_data_to_send]

    def close(self):
        """Close HTTP server"""
        try:
            self._socket.close()
        except OSError:
            pass
        self._socket = None

    def remove_connection(self, connection):
        # Called by connections when they close themselves.
        if connection in self._waiting_connections:
            self._waiting_connections.remove(connection)

    def _cleanup_idle_connections(self):
        """Remove timed out idle connections"""
        for connection in list(self._waiting_connections):
            # Multipart streams are long-lived by design - never time out.
            if connection._is_multipart:
                continue
            if not connection.is_loaded and connection.is_timed_out:
                # 408 response with Connection: close drops the client.
                connection.respond(
                    'Request Timeout', status=408,
                    headers={CONNECTION: CONNECTION_CLOSE})

    def _accept(self):
        """Accept one pending client, wrap in SSL if configured,
        and evict the oldest connections above the client limit."""
        try:
            cl_socket, addr = self._socket.accept()
        except OSError:
            return

        try:
            # Disable Nagle so small responses are not delayed.
            cl_socket.setsockopt(_socket.IPPROTO_TCP, _socket.TCP_NODELAY, 1)
        except (OSError, AttributeError):
            pass

        try:
            # Non-blocking: all I/O is driven through select().
            cl_socket.setblocking(False)
        except (OSError, AttributeError):
            pass

        if self._ssl_context:
            try:
                # Handshake is deferred; it proceeds as data flows.
                cl_socket = self._ssl_context.wrap_socket(
                    cl_socket, server_side=True, do_handshake_on_connect=False)
            except OSError:
                try:
                    cl_socket.close()
                except OSError:
                    pass
                return

        connection = HttpConnection(self, cl_socket, addr, **self._kwargs)
        # Evict oldest connections when over the waiting-client limit.
        while len(self._waiting_connections) > self._max_clients:
            connection_to_remove = self._waiting_connections.pop(0)
            if connection_to_remove._response_started:
                # Already responding (e.g., multipart stream) - just close
                connection_to_remove.close()
            else:
                connection_to_remove.respond(
                    'Request Timeout, too many requests', status=408,
                    headers={CONNECTION: CONNECTION_CLOSE})
        self._waiting_connections.append(connection)

    def event_read(self, sockets):
        """Process sockets with read_event,
        returns None or instance of HttpConnection with established connection"""
        result = None

        if self._socket in sockets:
            # New client on the listening socket.
            self._accept()
        else:
            for connection in list(self._waiting_connections):
                if connection.socket in sockets:
                    if self._event_mode:
                        if connection.process_request_event():
                            result = connection
                            break
                    elif connection.process_request():
                        result = connection
                        break

        self._cleanup_idle_connections()

        return result

    def _get_pending_connection(self):
        """Get connection with pending data in buffer (event mode only)"""
        if not self._event_mode:
            return None
        for connection in self._waiting_connections:
            if connection._streaming_body and connection._buffer:
                return connection
        return None

    def event_write(self, sockets):
        """Process sockets with write_event, send buffered data"""
        for connection in list(self._waiting_connections):
            if connection.socket in sockets:
                connection.try_send()

    def process_events(self, read_sockets, write_sockets):
        """Process select results, returns loaded connection or None

        This allows using external select with multiple servers/sockets:

        Example:
            server1 = HttpServer(port=80)
            server2 = HttpServer(port=443, ssl_context=ctx)

            read_all = server1.read_sockets + server2.read_sockets
            write_all = server1.write_sockets + server2.write_sockets
            r, w, _ = select.select(read_all, write_all, [], timeout)

            client = server1.process_events(r, w) or server2.process_events(r, w)
        """
        # Check pending connections first (event mode)
        pending = self._get_pending_connection()
        if pending:
            if pending._handle_streaming_body():
                return pending

        if write_sockets:
            self.event_write(write_sockets)
        if read_sockets:
            return self.event_read(read_sockets)
        return None

    def wait(self, timeout=1):
        """Wait for new clients with specified timeout,
        returns None or instance of HttpConnection with established connection"""
        # Check pending connections first (event mode)
        pending = self._get_pending_connection()
        if pending:
            if pending._handle_streaming_body():
                return pending

        # Flush any queued outgoing data before blocking in select.
        self.event_write(self.write_sockets)
        try:
            read_sockets, write_sockets, _ = _select.select(
                self.read_sockets, self.write_sockets, [], timeout)
        except (OSError, ValueError) as err:
            # EBADF: socket closed concurrently
            # ValueError: socket fileno() is -1 (closed)
            if isinstance(err, ValueError) or err.errno == errno.EBADF:
                return None
            raise
        return self.process_events(read_sockets, write_sockets)
|
|
@@ -0,0 +1,15 @@
|
|
|
1
|
+
Metadata-Version: 2.4
|
|
2
|
+
Name: uhttp-server
|
|
3
|
+
Version: 2.2.0
|
|
4
|
+
Summary: Micro HTTP server for Python and MicroPython
|
|
5
|
+
Author-email: Pavel Revak <pavelrevak@gmail.com>
|
|
6
|
+
License: MIT
|
|
7
|
+
Project-URL: Homepage, https://github.com/pavelrevak/uhttp
|
|
8
|
+
Project-URL: Repository, https://github.com/pavelrevak/uhttp
|
|
9
|
+
Classifier: Programming Language :: Python :: 3
|
|
10
|
+
Classifier: Programming Language :: Python :: Implementation :: MicroPython
|
|
11
|
+
Classifier: License :: OSI Approved :: MIT License
|
|
12
|
+
Classifier: Operating System :: OS Independent
|
|
13
|
+
Requires-Python: >=3.7
|
|
14
|
+
License-File: LICENSE
|
|
15
|
+
Dynamic: license-file
|
|
@@ -0,0 +1,6 @@
|
|
|
1
|
+
uhttp/server.py,sha256=JQMBFxFb7A7ThYKnTsri6aTf1Omt-mnT-dQ5EuydVxQ,44288
|
|
2
|
+
uhttp_server-2.2.0.dist-info/licenses/LICENSE,sha256=6-oZcPg0ovRlW59Mg19iGTIrt5yqaY7jk-gq_i8faJo,1068
|
|
3
|
+
uhttp_server-2.2.0.dist-info/METADATA,sha256=dJJFJ_swvwZoqRE6_JZjkVKve8twUT8-gimGaKBLPGQ,581
|
|
4
|
+
uhttp_server-2.2.0.dist-info/WHEEL,sha256=YCfwYGOYMi5Jhw2fU4yNgwErybb2IX5PEwBKV4ZbdBo,91
|
|
5
|
+
uhttp_server-2.2.0.dist-info/top_level.txt,sha256=JLzzUTgjsqdOzD0mcujR5k2iPkXHxCQ5DvnDWccW8qU,6
|
|
6
|
+
uhttp_server-2.2.0.dist-info/RECORD,,
|
|
@@ -0,0 +1,21 @@
|
|
|
1
|
+
MIT License
|
|
2
|
+
|
|
3
|
+
Copyright (c) 2026 Pavel Revak
|
|
4
|
+
|
|
5
|
+
Permission is hereby granted, free of charge, to any person obtaining a copy
|
|
6
|
+
of this software and associated documentation files (the "Software"), to deal
|
|
7
|
+
in the Software without restriction, including without limitation the rights
|
|
8
|
+
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
|
|
9
|
+
copies of the Software, and to permit persons to whom the Software is
|
|
10
|
+
furnished to do so, subject to the following conditions:
|
|
11
|
+
|
|
12
|
+
The above copyright notice and this permission notice shall be included in all
|
|
13
|
+
copies or substantial portions of the Software.
|
|
14
|
+
|
|
15
|
+
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
|
16
|
+
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
|
|
17
|
+
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
|
|
18
|
+
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
|
|
19
|
+
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
|
|
20
|
+
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
|
|
21
|
+
SOFTWARE.
|
|
@@ -0,0 +1 @@
|
|
|
1
|
+
uhttp
|