plain 0.66.0__py3-none-any.whl → 0.101.2__py3-none-any.whl
This diff shows the content changes between publicly released package versions as they appear in their respective public registries, and is provided for informational purposes only.
- plain/CHANGELOG.md +684 -0
- plain/README.md +1 -1
- plain/assets/compile.py +25 -12
- plain/assets/finders.py +24 -17
- plain/assets/fingerprints.py +10 -7
- plain/assets/urls.py +1 -1
- plain/assets/views.py +47 -33
- plain/chores/README.md +25 -23
- plain/chores/__init__.py +2 -1
- plain/chores/core.py +27 -0
- plain/chores/registry.py +23 -53
- plain/cli/README.md +185 -16
- plain/cli/__init__.py +2 -1
- plain/cli/agent.py +236 -0
- plain/cli/build.py +7 -8
- plain/cli/changelog.py +11 -5
- plain/cli/chores.py +32 -34
- plain/cli/core.py +112 -28
- plain/cli/docs.py +52 -11
- plain/cli/formatting.py +40 -17
- plain/cli/install.py +10 -54
- plain/cli/{agent/llmdocs.py → llmdocs.py} +21 -9
- plain/cli/output.py +6 -2
- plain/cli/preflight.py +175 -102
- plain/cli/print.py +4 -4
- plain/cli/registry.py +95 -26
- plain/cli/request.py +206 -0
- plain/cli/runtime.py +45 -0
- plain/cli/scaffold.py +2 -7
- plain/cli/server.py +153 -0
- plain/cli/settings.py +53 -49
- plain/cli/shell.py +15 -12
- plain/cli/startup.py +9 -8
- plain/cli/upgrade.py +17 -104
- plain/cli/urls.py +12 -7
- plain/cli/utils.py +3 -3
- plain/csrf/README.md +65 -40
- plain/csrf/middleware.py +53 -43
- plain/debug.py +5 -2
- plain/exceptions.py +22 -114
- plain/forms/README.md +453 -24
- plain/forms/__init__.py +55 -4
- plain/forms/boundfield.py +15 -8
- plain/forms/exceptions.py +1 -1
- plain/forms/fields.py +346 -143
- plain/forms/forms.py +75 -45
- plain/http/README.md +356 -9
- plain/http/__init__.py +41 -26
- plain/http/cookie.py +15 -7
- plain/http/exceptions.py +65 -0
- plain/http/middleware.py +32 -0
- plain/http/multipartparser.py +99 -88
- plain/http/request.py +362 -250
- plain/http/response.py +99 -197
- plain/internal/__init__.py +8 -1
- plain/internal/files/base.py +35 -19
- plain/internal/files/locks.py +19 -11
- plain/internal/files/move.py +8 -3
- plain/internal/files/temp.py +25 -6
- plain/internal/files/uploadedfile.py +47 -28
- plain/internal/files/uploadhandler.py +64 -58
- plain/internal/files/utils.py +24 -10
- plain/internal/handlers/base.py +34 -23
- plain/internal/handlers/exception.py +68 -65
- plain/internal/handlers/wsgi.py +65 -54
- plain/internal/middleware/headers.py +37 -11
- plain/internal/middleware/hosts.py +11 -13
- plain/internal/middleware/https.py +17 -7
- plain/internal/middleware/slash.py +14 -9
- plain/internal/reloader.py +77 -0
- plain/json.py +2 -1
- plain/logs/README.md +161 -62
- plain/logs/__init__.py +1 -1
- plain/logs/{loggers.py → app.py} +71 -67
- plain/logs/configure.py +63 -14
- plain/logs/debug.py +17 -6
- plain/logs/filters.py +15 -0
- plain/logs/formatters.py +7 -4
- plain/packages/README.md +105 -23
- plain/packages/config.py +15 -7
- plain/packages/registry.py +40 -15
- plain/paginator.py +31 -21
- plain/preflight/README.md +208 -23
- plain/preflight/__init__.py +5 -24
- plain/preflight/checks.py +12 -0
- plain/preflight/files.py +19 -13
- plain/preflight/registry.py +80 -58
- plain/preflight/results.py +37 -0
- plain/preflight/security.py +65 -71
- plain/preflight/settings.py +54 -0
- plain/preflight/urls.py +10 -48
- plain/runtime/README.md +115 -47
- plain/runtime/__init__.py +10 -6
- plain/runtime/global_settings.py +43 -33
- plain/runtime/secret.py +20 -0
- plain/runtime/user_settings.py +110 -38
- plain/runtime/utils.py +1 -1
- plain/server/LICENSE +35 -0
- plain/server/README.md +155 -0
- plain/server/__init__.py +9 -0
- plain/server/app.py +52 -0
- plain/server/arbiter.py +555 -0
- plain/server/config.py +118 -0
- plain/server/errors.py +31 -0
- plain/server/glogging.py +292 -0
- plain/server/http/__init__.py +12 -0
- plain/server/http/body.py +283 -0
- plain/server/http/errors.py +155 -0
- plain/server/http/message.py +400 -0
- plain/server/http/parser.py +70 -0
- plain/server/http/unreader.py +88 -0
- plain/server/http/wsgi.py +421 -0
- plain/server/pidfile.py +92 -0
- plain/server/sock.py +240 -0
- plain/server/util.py +317 -0
- plain/server/workers/__init__.py +6 -0
- plain/server/workers/base.py +304 -0
- plain/server/workers/sync.py +212 -0
- plain/server/workers/thread.py +399 -0
- plain/server/workers/workertmp.py +50 -0
- plain/signals/README.md +170 -1
- plain/signals/__init__.py +0 -1
- plain/signals/dispatch/dispatcher.py +49 -27
- plain/signing.py +131 -35
- plain/skills/README.md +36 -0
- plain/skills/plain-docs/SKILL.md +25 -0
- plain/skills/plain-install/SKILL.md +26 -0
- plain/skills/plain-request/SKILL.md +39 -0
- plain/skills/plain-shell/SKILL.md +24 -0
- plain/skills/plain-upgrade/SKILL.md +35 -0
- plain/templates/README.md +211 -20
- plain/templates/jinja/__init__.py +14 -27
- plain/templates/jinja/environments.py +5 -4
- plain/templates/jinja/extensions.py +12 -5
- plain/templates/jinja/filters.py +7 -2
- plain/templates/jinja/globals.py +2 -2
- plain/test/README.md +184 -22
- plain/test/client.py +340 -222
- plain/test/encoding.py +9 -6
- plain/test/exceptions.py +7 -2
- plain/urls/README.md +157 -73
- plain/urls/converters.py +18 -15
- plain/urls/exceptions.py +2 -2
- plain/urls/patterns.py +56 -40
- plain/urls/resolvers.py +38 -28
- plain/urls/utils.py +5 -1
- plain/utils/README.md +250 -3
- plain/utils/cache.py +17 -11
- plain/utils/crypto.py +21 -5
- plain/utils/datastructures.py +89 -56
- plain/utils/dateparse.py +9 -6
- plain/utils/deconstruct.py +15 -7
- plain/utils/decorators.py +5 -1
- plain/utils/dotenv.py +373 -0
- plain/utils/duration.py +8 -4
- plain/utils/encoding.py +14 -7
- plain/utils/functional.py +66 -49
- plain/utils/hashable.py +5 -1
- plain/utils/html.py +36 -22
- plain/utils/http.py +16 -9
- plain/utils/inspect.py +14 -6
- plain/utils/ipv6.py +7 -3
- plain/utils/itercompat.py +6 -1
- plain/utils/module_loading.py +7 -3
- plain/utils/regex_helper.py +37 -23
- plain/utils/safestring.py +14 -6
- plain/utils/text.py +41 -23
- plain/utils/timezone.py +33 -22
- plain/utils/tree.py +35 -19
- plain/validators.py +94 -52
- plain/views/README.md +156 -79
- plain/views/__init__.py +0 -1
- plain/views/base.py +25 -18
- plain/views/errors.py +13 -5
- plain/views/exceptions.py +4 -1
- plain/views/forms.py +6 -6
- plain/views/objects.py +52 -49
- plain/views/redirect.py +18 -15
- plain/views/templates.py +5 -3
- plain/wsgi.py +3 -1
- {plain-0.66.0.dist-info → plain-0.101.2.dist-info}/METADATA +4 -2
- plain-0.101.2.dist-info/RECORD +201 -0
- {plain-0.66.0.dist-info → plain-0.101.2.dist-info}/WHEEL +1 -1
- plain-0.101.2.dist-info/entry_points.txt +2 -0
- plain/AGENTS.md +0 -18
- plain/cli/agent/__init__.py +0 -20
- plain/cli/agent/docs.py +0 -80
- plain/cli/agent/md.py +0 -87
- plain/cli/agent/prompt.py +0 -45
- plain/cli/agent/request.py +0 -181
- plain/csrf/views.py +0 -31
- plain/logs/utils.py +0 -46
- plain/preflight/messages.py +0 -81
- plain/templates/AGENTS.md +0 -3
- plain-0.66.0.dist-info/RECORD +0 -168
- plain-0.66.0.dist-info/entry_points.txt +0 -4
- {plain-0.66.0.dist-info → plain-0.101.2.dist-info}/licenses/LICENSE +0 -0
plain/http/multipartparser.py
CHANGED

@@ -5,24 +5,35 @@ Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to
 file upload handlers for processing.
 """
 
+from __future__ import annotations
+
 import base64
 import binascii
 import collections
 import html
+from collections.abc import Iterator
+from typing import TYPE_CHECKING, Any
 
-from plain.
-    RequestDataTooBig,
-    SuspiciousMultipartForm,
-    TooManyFieldsSent,
-    TooManyFilesSent,
-)
+from plain.internal import internalcode
 from plain.internal.files.uploadhandler import SkipFile, StopFutureHandlers, StopUpload
 from plain.runtime import settings
 from plain.utils.datastructures import MultiValueDict
 from plain.utils.encoding import force_str
 from plain.utils.http import parse_header_parameters
+from plain.utils.module_loading import import_string
 from plain.utils.regex_helper import _lazy_re_compile
 
+from .exceptions import (
+    RequestDataTooBigError400,
+    SuspiciousMultipartFormError400,
+    TooManyFieldsSentError400,
+    TooManyFilesSentError400,
+)
+
+if TYPE_CHECKING:
+    from plain.http.request import Request
+    from plain.internal.files.uploadhandler import FileUploadHandler
+
 __all__ = ("MultiPartParser", "MultiPartParserError", "InputStreamExhausted")
 
 
@@ -38,10 +49,10 @@ class InputStreamExhausted(Exception):
     pass
 
 
-
-
-
-
+_RAW = "raw"
+_FILE = "file"
+_FIELD = "field"
+_FIELD_TYPES = frozenset([_FIELD, _RAW])
 
 
 class MultiPartParser:
@@ -54,22 +65,15 @@ class MultiPartParser:
 
     boundary_re = _lazy_re_compile(r"[ -~]{0,200}[!-~]")
 
-    def __init__(self,
+    def __init__(self, request: Request):
         """
         Initialize the MultiPartParser object.
 
-        :
-            The
-        :input_data:
-            The raw post data, as a file-like object.
-        :upload_handlers:
-            A list of UploadHandler instances that perform operations on the
-            uploaded data.
-        :encoding:
-            The encoding with which to treat the incoming data.
+        :request:
+            The HTTP request object (used for headers and as the input stream).
         """
         # Content-Type should contain multipart and the boundary information.
-        content_type =
+        content_type = request.content_type or ""
         if not content_type.startswith("multipart/"):
             raise MultiPartParserError(f"Invalid Content-Type: {content_type}")
 
@@ -80,39 +84,36 @@ class MultiPartParser:
             f"Invalid non-ASCII Content-Type in multipart: {force_str(content_type)}"
         )
 
-        #
-
-        boundary =
+        # Get the boundary from parsed content type parameters.
+        content_params = request.content_params or {}
+        boundary = content_params.get("boundary")
         if not boundary or not self.boundary_re.fullmatch(boundary):
             raise MultiPartParserError(
                 f"Invalid boundary in multipart: {force_str(boundary)}"
             )
 
-
-        # to receive.
-        try:
-            content_length = int(meta.get("CONTENT_LENGTH", 0))
-        except (ValueError, TypeError):
-            content_length = 0
-
+        content_length = request.content_length
         if content_length < 0:
             # This means we shouldn't continue...raise an error.
             raise MultiPartParserError(f"Invalid content length: {content_length!r}")
 
         self._boundary = boundary.encode("ascii")
-        self.
+        self._request = request
+
+        # Create upload handlers for this parsing session
+        self._upload_handlers: list[FileUploadHandler] = [
+            import_string(handler)(request) for handler in settings.FILE_UPLOAD_HANDLERS
+        ]
 
         # For compatibility with low-level network APIs (with 32-bit integers),
         # the chunk size should be < 2^31, but still divisible by 4.
-        possible_sizes = [x.chunk_size for x in
+        possible_sizes = [x.chunk_size for x in self._upload_handlers if x.chunk_size]
         self._chunk_size = min([2**31 - 4] + possible_sizes)
 
-        self.
-        self._encoding = encoding or settings.DEFAULT_CHARSET
+        self._encoding = request.encoding or settings.DEFAULT_CHARSET
         self._content_length = content_length
-        self._upload_handlers = upload_handlers
 
-    def parse(self):
+    def parse(self) -> tuple[Any, MultiValueDict]:
         # Call the actual parse routine and close all open files in case of
         # errors. This is needed because if exceptions are thrown the
         # MultiPartParser will not be garbage collected immediately and
@@ -128,7 +129,7 @@ class MultiPartParser:
                         fileobj.close()
             raise
 
-    def _parse(self):
+    def _parse(self) -> tuple[Any, MultiValueDict]:
         """
         Parse the POST data and break it into a FILES MultiValueDict and a POST
         MultiValueDict.
@@ -149,9 +150,7 @@ class MultiPartParser:
         # This allows overriding everything if need be.
         for handler in handlers:
             result = handler.handle_raw_input(
-                self.
-                self._meta,
-                self._content_length,
+                self._request,
                 self._boundary,
                 encoding,
             )
@@ -164,7 +163,7 @@ class MultiPartParser:
         self._files = MultiValueDict()
 
         # Instantiate the parser and stream:
-        stream = LazyStream(ChunkIter(self.
+        stream = LazyStream(ChunkIter(self._request, self._chunk_size))
 
         # Whether or not to signal a file-completion at the beginning of the loop.
         old_field_name = None
@@ -192,7 +191,7 @@ class MultiPartParser:
                     uploaded_file = True
 
                 if (
-                    item_type in
+                    item_type in _FIELD_TYPES
                     and settings.DATA_UPLOAD_MAX_NUMBER_FIELDS is not None
                 ):
                     # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FIELDS.
@@ -200,7 +199,7 @@ class MultiPartParser:
                     # 2 accounts for empty raw fields before and after the
                     # last boundary.
                     if settings.DATA_UPLOAD_MAX_NUMBER_FIELDS + 2 < num_post_keys:
-                        raise
+                        raise TooManyFieldsSentError400(
                             "The number of GET/POST parameters exceeded "
                             "settings.DATA_UPLOAD_MAX_NUMBER_FIELDS."
                         )
@@ -216,7 +215,7 @@ class MultiPartParser:
                     transfer_encoding = transfer_encoding[0].strip()
                 field_name = force_str(field_name, encoding, errors="replace")
 
-                if item_type ==
+                if item_type == _FIELD:
                     # Avoid reading more than DATA_UPLOAD_MAX_MEMORY_SIZE.
                     if settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None:
                         read_size = (
@@ -242,7 +241,7 @@ class MultiPartParser:
                         settings.DATA_UPLOAD_MAX_MEMORY_SIZE is not None
                         and num_bytes_read > settings.DATA_UPLOAD_MAX_MEMORY_SIZE
                     ):
-                        raise
+                        raise RequestDataTooBigError400(
                             "Request body exceeded "
                             "settings.DATA_UPLOAD_MAX_MEMORY_SIZE."
                         )
@@ -250,14 +249,14 @@ class MultiPartParser:
                     self._post.appendlist(
                         field_name, force_str(data, encoding, errors="replace")
                     )
-                elif item_type ==
+                elif item_type == _FILE:
                     # Avoid storing more than DATA_UPLOAD_MAX_NUMBER_FILES.
                     num_files += 1
                     if (
                         settings.DATA_UPLOAD_MAX_NUMBER_FILES is not None
                         and num_files > settings.DATA_UPLOAD_MAX_NUMBER_FILES
                     ):
-                        raise
+                        raise TooManyFilesSentError400(
                             "The number of files exceeded "
                             "settings.DATA_UPLOAD_MAX_NUMBER_FILES."
                         )
@@ -276,7 +275,12 @@ class MultiPartParser:
                     charset = content_type_extra.get("charset")
 
                     try:
-
+                        content_length_value = meta_data.get("content-length")
+                        content_length = (
+                            int(content_length_value[0])
+                            if content_length_value
+                            else None
+                        )
                     except (IndexError, TypeError, ValueError):
                         content_length = None
 
@@ -333,7 +337,7 @@ class MultiPartParser:
                     except SkipFile:
                         self._close_files()
                         # Just use up the rest of this file...
-
+                        _exhaust(field_stream)
                     else:
                         # Handle file upload completions on next iteration.
                         old_field_name = field_name
@@ -344,17 +348,17 @@ class MultiPartParser:
                     # after the other boundaries). This branch is usually not
                    # reached at all, because a missing content-disposition
                    # header will skip the whole boundary.
-
+                    _exhaust(field_stream)
         except StopUpload as e:
             self._close_files()
             if not e.connection_reset:
-
+                _exhaust(self._request)
         else:
             if not uploaded_file:
                 for handler in handlers:
                     handler.upload_interrupted()
             # Make sure that the request data is all fed
-
+            _exhaust(self._request)
 
         # Signal that the upload has completed.
         # any() shortcircuits if a handler's upload_complete() returns a value.
@@ -362,7 +366,7 @@ class MultiPartParser:
         self._post._mutable = False
         return self._post, self._files
 
-    def handle_file_complete(self, old_field_name, counters):
+    def handle_file_complete(self, old_field_name: str, counters: list[int]) -> None:
         """
         Handle all the signaling that takes place when a file is complete.
         """
@@ -376,7 +380,7 @@ class MultiPartParser:
                 )
                 break
 
-    def sanitize_file_name(self, file_name):
+    def sanitize_file_name(self, file_name: str) -> str | None:
         """
         Sanitize the filename of an upload.
 
@@ -399,16 +403,17 @@ class MultiPartParser:
             return None
         return file_name
 
-    def _close_files(self):
+    def _close_files(self) -> None:
         # Free up all file handles.
         # FIXME: this currently assumes that upload handlers store the file as 'file'
         # We should document that...
         # (Maybe add handler.free_file to complement new_file)
         for handler in self._upload_handlers:
             if hasattr(handler, "file"):
-                handler.file.close()
+                handler.file.close()  # type: ignore[union-attr]
 
 
+@internalcode
 class LazyStream:
     """
     The LazyStream wrapper allows one to get and "unget" bytes from a stream.
@@ -418,7 +423,7 @@ class LazyStream:
     variable in case you need to "unget" some bytes.
     """
 
-    def __init__(self, producer, length=None):
+    def __init__(self, producer: Iterator[bytes], length: int | None = None):
         """
         Every LazyStream must have a producer when instantiated.
 
@@ -433,11 +438,11 @@ class LazyStream:
         self._remaining = length
         self._unget_history = []
 
-    def tell(self):
+    def tell(self) -> int:
         return self.position
 
-    def read(self, size=None):
-        def parts():
+    def read(self, size: int | None = None) -> bytes:
+        def parts() -> Iterator[bytes]:
             remaining = self._remaining if size is None else size
             # do the whole thing in one shot if no limit was provided.
             if remaining is None:
@@ -462,7 +467,7 @@ class LazyStream:
 
         return b"".join(parts())
 
-    def __next__(self):
+    def __next__(self) -> bytes:
         """
         Used when the exact number of bytes to read is unimportant.
 
@@ -478,19 +483,19 @@ class LazyStream:
         self.position += len(output)
         return output
 
-    def close(self):
+    def close(self) -> None:
         """
         Used to invalidate/disable this lazy stream.
 
         Replace the producer with an empty list. Any leftover bytes that have
         already been read will still be reported upon read() and/or next().
         """
-        self._producer = []
+        self._producer = iter([])
 
-    def __iter__(self):
+    def __iter__(self) -> LazyStream:
         return self
 
-    def unget(self, bytes):
+    def unget(self, bytes: bytes) -> None:
         """
         Place bytes back onto the front of the lazy stream.
 
@@ -503,7 +508,7 @@ class LazyStream:
         self.position -= len(bytes)
         self._leftover = bytes + self._leftover
 
-    def _update_unget_history(self, num_bytes):
+    def _update_unget_history(self, num_bytes: int) -> None:
         """
         Update the unget history as a sanity check to see if we've pushed
         back the same number of bytes in one chunk. If we keep ungetting the
@@ -521,24 +526,25 @@ class LazyStream:
         )
 
         if number_equal > 40:
-            raise
+            raise SuspiciousMultipartFormError400(
                 "The multipart parser got stuck, which shouldn't happen with"
                 " normal uploaded files. Check for malicious upload activity;"
                 " if there is none, report this to the Plain developers."
             )
 
 
+@internalcode
 class ChunkIter:
     """
     An iterable that will yield chunks of data. Given a file-like object as the
     constructor, yield chunks of read operations from that object.
     """
 
-    def __init__(self, flo, chunk_size=64 * 1024):
+    def __init__(self, flo: Any, chunk_size: int = 64 * 1024):
         self.flo = flo
         self.chunk_size = chunk_size
 
-    def __next__(self):
+    def __next__(self) -> bytes:
         try:
             data = self.flo.read(self.chunk_size)
         except InputStreamExhausted:
@@ -548,29 +554,31 @@ class ChunkIter:
         else:
             raise StopIteration()
 
-    def __iter__(self):
+    def __iter__(self) -> ChunkIter:
         return self
 
 
+@internalcode
 class InterBoundaryIter:
     """
     A Producer that will iterate over boundaries.
     """
 
-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
         self._stream = stream
         self._boundary = boundary
 
-    def __iter__(self):
+    def __iter__(self) -> InterBoundaryIter:
         return self
 
-    def __next__(self):
+    def __next__(self) -> LazyStream:
         try:
             return LazyStream(BoundaryIter(self._stream, self._boundary))
         except InputStreamExhausted:
             raise StopIteration()
 
 
+@internalcode
 class BoundaryIter:
     """
     A Producer that is sensitive to boundaries.
@@ -583,7 +591,7 @@ class BoundaryIter:
     StopIteration exception.
     """
 
-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
         self._stream = stream
         self._boundary = boundary
         self._done = False
@@ -598,10 +606,10 @@ class BoundaryIter:
             raise InputStreamExhausted()
         self._stream.unget(unused_char)
 
-    def __iter__(self):
+    def __iter__(self) -> BoundaryIter:
         return self
 
-    def __next__(self):
+    def __next__(self) -> bytes:
         if self._done:
             raise StopIteration()
 
@@ -642,7 +650,7 @@ class BoundaryIter:
             stream.unget(chunk[-rollback:])
             return chunk[:-rollback]
 
-    def _find_boundary(self, data):
+    def _find_boundary(self, data: bytes) -> tuple[int, int] | None:
         """
         Find a multipart boundary in data.
 
@@ -667,7 +675,7 @@ class BoundaryIter:
             return end, next
 
 
-def
+def _exhaust(stream_or_iterable: Any) -> None:
     """Exhaust an iterator or stream."""
     try:
         iterator = iter(stream_or_iterable)
@@ -676,7 +684,9 @@ def exhaust(stream_or_iterable):
     collections.deque(iterator, maxlen=0)  # consume iterator quickly.
 
 
-def
+def _parse_boundary_stream(
+    stream: LazyStream, max_header_size: int
+) -> tuple[str, dict[str, Any], LazyStream]:
     """
     Parse one and exactly one stream that encapsulates a boundary.
     """
@@ -694,7 +704,7 @@ def parse_boundary_stream(stream, max_header_size):
         # we find no header, so we just mark this fact and pass on
        # the stream verbatim
        stream.unget(chunk)
-        return (
+        return (_RAW, {}, stream)
 
    header = chunk[:header_end]
 
@@ -702,7 +712,7 @@ def parse_boundary_stream(stream, max_header_size):
    # well as throwing away the CRLFCRLF bytes from above.
    stream.unget(chunk[header_end + 4 :])
 
-    TYPE =
+    TYPE = _RAW
    outdict = {}
 
    # Eliminate blank lines
@@ -717,25 +727,26 @@ def parse_boundary_stream(stream, max_header_size):
             continue
 
        if name == "content-disposition":
-            TYPE =
+            TYPE = _FIELD
            if params.get("filename"):
-                TYPE =
+                TYPE = _FILE
 
        outdict[name] = value, params
 
-    if TYPE ==
+    if TYPE == _RAW:
        stream.unget(chunk)
 
    return (TYPE, outdict, stream)
 
 
+@internalcode
 class Parser:
-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
        self._stream = stream
        self._separator = b"--" + boundary
 
-    def __iter__(self):
+    def __iter__(self) -> Iterator[tuple[str, dict[str, Any], LazyStream]]:
        boundarystream = InterBoundaryIter(self._stream, self._separator)
        for sub_stream in boundarystream:
            # Iterate over each part
-            yield
+            yield _parse_boundary_stream(sub_stream, 1024)