plain-0.69.0-py3-none-any.whl → plain-0.71.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plain/AGENTS.md +1 -1
- plain/CHANGELOG.md +28 -0
- plain/assets/compile.py +20 -7
- plain/assets/finders.py +15 -11
- plain/assets/fingerprints.py +6 -5
- plain/assets/urls.py +1 -1
- plain/assets/views.py +23 -17
- plain/chores/registry.py +14 -9
- plain/cli/agent/__init__.py +1 -1
- plain/cli/agent/docs.py +7 -6
- plain/cli/agent/llmdocs.py +18 -8
- plain/cli/agent/md.py +19 -14
- plain/cli/agent/prompt.py +1 -1
- plain/cli/agent/request.py +37 -17
- plain/cli/build.py +2 -2
- plain/cli/changelog.py +8 -4
- plain/cli/chores.py +4 -4
- plain/cli/core.py +8 -5
- plain/cli/docs.py +2 -2
- plain/cli/formatting.py +10 -7
- plain/cli/output.py +6 -2
- plain/cli/preflight.py +3 -3
- plain/cli/print.py +1 -1
- plain/cli/registry.py +10 -6
- plain/cli/scaffold.py +1 -1
- plain/cli/settings.py +1 -1
- plain/cli/shell.py +10 -7
- plain/cli/startup.py +3 -3
- plain/cli/urls.py +10 -4
- plain/cli/utils.py +2 -2
- plain/csrf/middleware.py +15 -5
- plain/csrf/views.py +11 -8
- plain/debug.py +5 -2
- plain/exceptions.py +19 -8
- plain/forms/__init__.py +1 -1
- plain/forms/boundfield.py +14 -7
- plain/forms/exceptions.py +1 -1
- plain/forms/fields.py +139 -97
- plain/forms/forms.py +55 -39
- plain/http/README.md +1 -1
- plain/http/__init__.py +4 -4
- plain/http/cookie.py +15 -7
- plain/http/multipartparser.py +50 -30
- plain/http/request.py +156 -108
- plain/http/response.py +99 -80
- plain/internal/__init__.py +8 -1
- plain/internal/files/base.py +34 -18
- plain/internal/files/locks.py +19 -11
- plain/internal/files/move.py +8 -3
- plain/internal/files/temp.py +23 -5
- plain/internal/files/uploadedfile.py +42 -26
- plain/internal/files/uploadhandler.py +50 -29
- plain/internal/files/utils.py +13 -6
- plain/internal/handlers/base.py +21 -7
- plain/internal/handlers/exception.py +19 -5
- plain/internal/handlers/wsgi.py +33 -21
- plain/internal/middleware/headers.py +11 -2
- plain/internal/middleware/hosts.py +12 -4
- plain/internal/middleware/https.py +13 -3
- plain/internal/middleware/slash.py +15 -5
- plain/json.py +2 -1
- plain/logs/configure.py +3 -1
- plain/logs/debug.py +16 -5
- plain/logs/formatters.py +6 -3
- plain/logs/loggers.py +56 -52
- plain/logs/utils.py +19 -9
- plain/packages/config.py +14 -6
- plain/packages/registry.py +27 -12
- plain/paginator.py +31 -21
- plain/preflight/checks.py +3 -1
- plain/preflight/files.py +3 -1
- plain/preflight/registry.py +25 -10
- plain/preflight/results.py +10 -4
- plain/preflight/security.py +7 -5
- plain/preflight/urls.py +4 -1
- plain/runtime/__init__.py +7 -6
- plain/runtime/global_settings.py +6 -9
- plain/runtime/user_settings.py +26 -17
- plain/runtime/utils.py +1 -1
- plain/signals/dispatch/dispatcher.py +39 -17
- plain/signing.py +49 -30
- plain/templates/jinja/__init__.py +13 -5
- plain/templates/jinja/environments.py +4 -3
- plain/templates/jinja/extensions.py +9 -3
- plain/templates/jinja/filters.py +7 -2
- plain/templates/jinja/globals.py +1 -1
- plain/test/client.py +249 -177
- plain/test/encoding.py +9 -6
- plain/test/exceptions.py +10 -2
- plain/urls/converters.py +13 -10
- plain/urls/patterns.py +32 -20
- plain/urls/resolvers.py +32 -22
- plain/urls/utils.py +5 -1
- plain/utils/cache.py +14 -8
- plain/utils/crypto.py +21 -5
- plain/utils/datastructures.py +84 -54
- plain/utils/dateparse.py +10 -7
- plain/utils/deconstruct.py +12 -4
- plain/utils/decorators.py +5 -1
- plain/utils/duration.py +8 -4
- plain/utils/encoding.py +14 -7
- plain/utils/functional.py +62 -47
- plain/utils/hashable.py +5 -1
- plain/utils/html.py +21 -14
- plain/utils/http.py +16 -9
- plain/utils/inspect.py +14 -6
- plain/utils/ipv6.py +7 -3
- plain/utils/itercompat.py +6 -1
- plain/utils/module_loading.py +7 -3
- plain/utils/regex_helper.py +23 -13
- plain/utils/safestring.py +14 -6
- plain/utils/text.py +34 -18
- plain/utils/timezone.py +30 -19
- plain/utils/tree.py +31 -18
- plain/validators.py +71 -44
- plain/views/base.py +16 -8
- plain/views/errors.py +11 -4
- plain/views/exceptions.py +4 -1
- plain/views/objects.py +15 -15
- plain/views/redirect.py +14 -10
- plain/views/templates.py +1 -1
- plain/wsgi.py +3 -1
- {plain-0.69.0.dist-info → plain-0.71.0.dist-info}/METADATA +1 -1
- plain-0.71.0.dist-info/RECORD +169 -0
- plain-0.69.0.dist-info/RECORD +0 -169
- {plain-0.69.0.dist-info → plain-0.71.0.dist-info}/WHEEL +0 -0
- {plain-0.69.0.dist-info → plain-0.71.0.dist-info}/entry_points.txt +0 -0
- {plain-0.69.0.dist-info → plain-0.71.0.dist-info}/licenses/LICENSE +0 -0
plain/forms/forms.py
CHANGED
```diff
@@ -2,21 +2,34 @@
 Form classes
 """

+from __future__ import annotations
+
 import copy
 from functools import cached_property
+from typing import TYPE_CHECKING, Any

 from plain.exceptions import NON_FIELD_ERRORS

 from .exceptions import ValidationError
 from .fields import Field, FileField

+if TYPE_CHECKING:
+    from plain.http import Request
+
+    from .boundfield import BoundField
+
 __all__ = ("BaseForm", "Form")


 class DeclarativeFieldsMetaclass(type):
     """Collect Fields declared on the base classes."""

-    def __new__(
+    def __new__(
+        mcs: type[DeclarativeFieldsMetaclass],
+        name: str,
+        bases: tuple[type, ...],
+        attrs: dict[str, Any],
+    ) -> type:
         # Collect fields from current class and remove them from attrs.
         attrs["declared_fields"] = {
             key: attrs.pop(key)
@@ -24,7 +37,7 @@ class DeclarativeFieldsMetaclass(type):
             if isinstance(value, Field)
         }

-        new_class = super().__new__(mcs, name, bases, attrs)
+        new_class = super().__new__(mcs, name, bases, attrs)  # type: ignore[misc]

         # Walk through the MRO.
         declared_fields = {}
@@ -52,15 +65,15 @@ class BaseForm:
     class.
     """

-    prefix = None
+    prefix: str | None = None

     def __init__(
         self,
         *,
-        request,
-        auto_id="id_%s",
-        prefix=None,
-        initial=None,
+        request: Request,
+        auto_id: str | bool = "id_%s",
+        prefix: str | None = None,
+        initial: dict[str, Any] | None = None,
     ):
         self.data = request.data
         self.files = request.files
@@ -75,17 +88,19 @@ class BaseForm:
         if prefix is not None:
             self.prefix = prefix
         self.initial = initial or {}
-        self._errors
+        self._errors: dict[str, list[str]] | None = (
+            None  # Stores the errors after clean() has been called.
+        )

         # The base_fields class attribute is the *class-wide* definition of
         # fields. Because a particular *instance* of the class might want to
         # alter self.fields, we create self.fields here by copying base_fields.
         # Instances should always modify self.fields; they should not modify
         # self.base_fields.
-        self.fields = copy.deepcopy(self.base_fields)
-        self._bound_fields_cache = {}
+        self.fields: dict[str, Field] = copy.deepcopy(self.base_fields)
+        self._bound_fields_cache: dict[str, BoundField] = {}

-    def __repr__(self):
+    def __repr__(self) -> str:
         if self._errors is None:
             is_valid = "Unknown"
         else:
@@ -97,17 +112,17 @@ class BaseForm:
             fields=";".join(self.fields),
         )

-    def _bound_items(self):
+    def _bound_items(self) -> Any:
         """Yield (name, bf) pairs, where bf is a BoundField object."""
         for name in self.fields:
             yield name, self[name]

-    def __iter__(self):
+    def __iter__(self) -> Any:
         """Yield the form's fields as BoundField objects."""
         for name in self.fields:
             yield self[name]

-    def __getitem__(self, name):
+    def __getitem__(self, name: str) -> BoundField:
         """Return a BoundField with the given name."""
         try:
             field = self.fields[name]
@@ -124,17 +139,17 @@ class BaseForm:
         return self._bound_fields_cache[name]

     @property
-    def errors(self):
+    def errors(self) -> dict[str, list[str]] | None:
         """Return an error dict for the data provided for the form."""
         if self._errors is None:
             self.full_clean()
         return self._errors

-    def is_valid(self):
+    def is_valid(self) -> bool:
         """Return True if the form has no errors, or False otherwise."""
         return self.is_bound and not self.errors

-    def add_prefix(self, field_name):
+    def add_prefix(self, field_name: str) -> str:
         """
         Return the field name with a prefix appended, if this Form has a
         prefix set.
@@ -144,7 +159,7 @@ class BaseForm:
         return f"{self.prefix}-{field_name}" if self.prefix else field_name

     @property
-    def non_field_errors(self):
+    def non_field_errors(self) -> list[str]:
         """
         Return a list of errors that aren't associated with a particular
         field -- i.e., from Form.clean(). Return an empty list if there
@@ -155,7 +170,7 @@ class BaseForm:
             [],
         )

-    def add_error(self, field, error):
+    def add_error(self, field: str | None, error: ValidationError) -> None:
         """
         Update the content of `self._errors`.

@@ -179,6 +194,7 @@ class BaseForm:
                 f"`ValidationError`, not `{type(error).__name__}`."
             )

+        error_dict: dict[str, Any]
         if hasattr(error, "error_dict"):
             if field is not None:
                 raise TypeError(
@@ -186,45 +202,45 @@ class BaseForm:
                     "argument contains errors for multiple fields."
                 )
             else:
-
+                error_dict = error.error_dict
         else:
-
+            error_dict = {field or NON_FIELD_ERRORS: error.error_list}

         class ValidationErrors(list):
-            def __iter__(self):
+            def __iter__(self) -> Any:
                 for err in super().__iter__():
                     # TODO make sure this works...
                     yield next(iter(err))

-        for
-            if
-                if
+        for field_key, error_list in error_dict.items():
+            if field_key not in self.errors:
+                if field_key != NON_FIELD_ERRORS and field_key not in self.fields:
                     raise ValueError(
-                        f"'{self.__class__.__name__}' has no field named '{
+                        f"'{self.__class__.__name__}' has no field named '{field_key}'."
                     )
-                self._errors[
+                self._errors[field_key] = ValidationErrors()

-            self._errors[
+            self._errors[field_key].extend(error_list)

             # The field had an error, so removed it from the final data
             # (we use getattr here so errors can be added to uncleaned forms)
-            if
-                del self.cleaned_data[
+            if field_key in getattr(self, "cleaned_data", {}):
+                del self.cleaned_data[field_key]

-    def full_clean(self):
+    def full_clean(self) -> None:
         """
         Clean all of self.data and populate self._errors and self.cleaned_data.
         """
         self._errors = {}
         if not self.is_bound: # Stop further processing.
-            return
+            return None
         self.cleaned_data = {}

         self._clean_fields()
         self._clean_form()
         self._post_clean()

-    def _field_data_value(self, field, html_name):
+    def _field_data_value(self, field: Field, html_name: str) -> Any:
         if hasattr(self, f"parse_{html_name}"):
             # Allow custom parsing from form data/files at the form level
             return getattr(self, f"parse_{html_name}")()
@@ -234,7 +250,7 @@ class BaseForm:
         else:
             return field.value_from_form_data(self.data, self.files, html_name)

-    def _clean_fields(self):
+    def _clean_fields(self) -> None:
         for name, bf in self._bound_items():
             field = bf.field

@@ -252,7 +268,7 @@ class BaseForm:
             except ValidationError as e:
                 self.add_error(name, e)

-    def _clean_form(self):
+    def _clean_form(self) -> None:
         try:
             cleaned_data = self.clean()
         except ValidationError as e:
@@ -261,14 +277,14 @@ class BaseForm:
         if cleaned_data is not None:
             self.cleaned_data = cleaned_data

-    def _post_clean(self):
+    def _post_clean(self) -> None:
         """
         An internal hook for performing additional cleaning after form cleaning
         is complete. Used for model validation in model forms.
         """
         pass

-    def clean(self):
+    def clean(self) -> dict[str, Any]:
         """
         Hook for doing any extra form-wide cleaning after Field.clean() has been
         called on every field. Any ValidationError raised by this method will
@@ -278,10 +294,10 @@ class BaseForm:
         return self.cleaned_data

     @cached_property
-    def changed_data(self):
+    def changed_data(self) -> list[str]:
         return [name for name, bf in self._bound_items() if bf._has_changed()]

-    def get_initial_for_field(self, field, field_name):
+    def get_initial_for_field(self, field: Field, field_name: str) -> Any:
         """
         Return initial data for field on form. Use initial data from the form
         or the field, in that order. Evaluate callable values.
```
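The visible effect of these hunks on calling code is unchanged behavior with explicit types: `BaseForm.__init__` stays keyword-only, and `errors`, `is_valid()`, and `clean()` now declare their return types. A minimal sketch of how a form built on this class gets used inside a view; the `plain.forms` import layout and the `EmailField` field class are assumptions based on the package file list, not shown in this diff:

```python
from plain.forms import fields, forms  # assumed re-export layout


class ContactForm(forms.Form):
    email = fields.EmailField()  # assumed field class from plain/forms/fields.py


# Inside a view, where `request` is the current plain.http Request:
form = ContactForm(request=request, prefix="contact")
if form.is_valid():                # -> bool
    email = form.cleaned_data["email"]
else:
    problems = form.errors         # -> dict[str, list[str]] | None
```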
plain/http/README.md
CHANGED
````diff
@@ -6,7 +6,7 @@

 ## Overview

-Typically you will interact with [
+Typically you will interact with [Request](request.py#Request) and [Response](response.py#ResponseBase) objects in your views and middleware.

 ```python
 from plain.views import View
````
plain/http/__init__.py
CHANGED
```diff
@@ -1,9 +1,9 @@
 from plain.http.cookie import parse_cookie
 from plain.http.request import (
-    HttpHeaders,
-    HttpRequest,
     QueryDict,
     RawPostDataException,
+    Request,
+    RequestHeaders,
     UnreadablePostError,
 )
 from plain.http.response import (
@@ -26,8 +26,8 @@ from plain.http.response import (

 __all__ = [
     "parse_cookie",
-    "
-    "
+    "Request",
+    "RequestHeaders",
     "QueryDict",
     "RawPostDataException",
     "UnreadablePostError",
```
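The practical upshot of these two hunks is a rename of the public request types: `HttpRequest` and `HttpHeaders` are no longer exported from `plain.http`, with `Request` and `RequestHeaders` taking their place. A rough migration sketch for downstream code; the `request.headers` attribute and the header lookup are illustrative assumptions, not taken from this diff:

```python
# Before (0.69.0):
#   from plain.http import HttpHeaders, HttpRequest
#
# After (0.71.0):
from plain.http import Request, RequestHeaders


def client_ip(request: Request) -> str | None:
    # Assumes request.headers exposes the renamed RequestHeaders mapping.
    headers: RequestHeaders = request.headers
    return headers.get("X-Forwarded-For")
```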
plain/http/cookie.py
CHANGED
```diff
@@ -1,3 +1,5 @@
+from __future__ import annotations
+
 from http import cookies

 from plain.runtime import settings
@@ -5,7 +7,7 @@ from plain.signing import BadSignature, TimestampSigner
 from plain.utils.encoding import force_bytes


-def parse_cookie(cookie):
+def parse_cookie(cookie: str) -> dict[str, str]:
     """
     Return a dictionary parsed from a `Cookie:` header string.
     """
@@ -24,7 +26,7 @@ def parse_cookie(cookie):
     return cookiedict


-def _cookie_key(key):
+def _cookie_key(key: str) -> bytes:
     """
     Generate a key for cookie signing that matches the pattern used by
     set_signed_cookie and get_signed_cookie.
@@ -32,19 +34,19 @@ def _cookie_key(key):
     return b"plain.http.cookies" + force_bytes(key)


-def get_signed_cookie_signer(key, salt=""):
+def get_signed_cookie_signer(key: str, salt: str = "") -> TimestampSigner:
     """
     Create a TimestampSigner for signed cookies with the same configuration
     used by both set_signed_cookie and get_signed_cookie.
     """
     return TimestampSigner(
-        key=_cookie_key(settings.SECRET_KEY),
-        fallback_keys=
+        key=_cookie_key(settings.SECRET_KEY).decode(),
+        fallback_keys=[_cookie_key(k).decode() for k in settings.SECRET_KEY_FALLBACKS],
         salt=key + salt,
     )


-def sign_cookie_value(key, value, salt=""):
+def sign_cookie_value(key: str, value: str, salt: str = "") -> str:
     """
     Sign a cookie value using the standard Plain cookie signing approach.
     """
@@ -52,7 +54,13 @@ def sign_cookie_value(key, value, salt=""):
     return signer.sign(value)


-def unsign_cookie_value(
+def unsign_cookie_value(
+    key: str,
+    signed_value: str,
+    salt: str = "",
+    max_age: int | None = None,
+    default: str | None = None,
+) -> str | None:
     """
     Unsign a cookie value using the standard Plain cookie signing approach.
     Returns the default value if the signature is invalid or the cookie has expired.
```
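Read together, the new signatures document the signing round trip: `sign_cookie_value()` returns a signed string, and `unsign_cookie_value()` returns the original value or `default` when the signature is invalid or expired. A short sketch, assuming a configured Plain app since the signer derives its key from `settings.SECRET_KEY` and `SECRET_KEY_FALLBACKS`:

```python
from plain.http.cookie import sign_cookie_value, unsign_cookie_value

# Sign a value under a cookie name; the salt parameter is optional.
signed = sign_cookie_value("session_hint", "abc123")

# Unsign it; returns "abc123", or None (the default) if the signature
# is invalid or older than max_age seconds.
value = unsign_cookie_value("session_hint", signed, max_age=3600)
assert value == "abc123"
```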
plain/http/multipartparser.py
CHANGED
```diff
@@ -5,10 +5,14 @@ Exposes one class, ``MultiPartParser``, which feeds chunks of uploaded data to
 file upload handlers for processing.
 """

+from __future__ import annotations
+
 import base64
 import binascii
 import collections
 import html
+from collections.abc import Iterator
+from typing import TYPE_CHECKING, Any

 from plain.exceptions import (
     RequestDataTooBig,
@@ -23,6 +27,9 @@ from plain.utils.encoding import force_str
 from plain.utils.http import parse_header_parameters
 from plain.utils.regex_helper import _lazy_re_compile

+if TYPE_CHECKING:
+    from plain.internal.files.uploadhandler import FileUploadHandler
+
 __all__ = ("MultiPartParser", "MultiPartParserError", "InputStreamExhausted")


@@ -54,7 +61,13 @@ class MultiPartParser:

     boundary_re = _lazy_re_compile(r"[ -~]{0,200}[!-~]")

-    def __init__(
+    def __init__(
+        self,
+        meta: dict[str, Any],
+        input_data: Any,
+        upload_handlers: list[FileUploadHandler],
+        encoding: str | None = None,
+    ):
         """
         Initialize the MultiPartParser object.

@@ -112,7 +125,7 @@ class MultiPartParser:
         self._content_length = content_length
         self._upload_handlers = upload_handlers

-    def parse(self):
+    def parse(self) -> tuple[Any, MultiValueDict]:
         # Call the actual parse routine and close all open files in case of
         # errors. This is needed because if exceptions are thrown the
         # MultiPartParser will not be garbage collected immediately and
@@ -128,7 +141,7 @@ class MultiPartParser:
                         fileobj.close()
             raise

-    def _parse(self):
+    def _parse(self) -> tuple[Any, MultiValueDict]:
         """
         Parse the POST data and break it into a FILES MultiValueDict and a POST
         MultiValueDict.
@@ -276,7 +289,12 @@ class MultiPartParser:
                     charset = content_type_extra.get("charset")

                     try:
-
+                        content_length_value = meta_data.get("content-length")
+                        content_length = (
+                            int(content_length_value[0])
+                            if content_length_value
+                            else None
+                        )
                     except (IndexError, TypeError, ValueError):
                         content_length = None

@@ -362,7 +380,7 @@ class MultiPartParser:
         self._post._mutable = False
         return self._post, self._files

-    def handle_file_complete(self, old_field_name, counters):
+    def handle_file_complete(self, old_field_name: str, counters: list[int]) -> None:
         """
         Handle all the signaling that takes place when a file is complete.
         """
@@ -376,7 +394,7 @@ class MultiPartParser:
                 )
                 break

-    def sanitize_file_name(self, file_name):
+    def sanitize_file_name(self, file_name: str) -> str | None:
         """
         Sanitize the filename of an upload.

@@ -399,7 +417,7 @@ class MultiPartParser:
             return None
         return file_name

-    def _close_files(self):
+    def _close_files(self) -> None:
         # Free up all file handles.
         # FIXME: this currently assumes that upload handlers store the file as 'file'
         # We should document that...
@@ -418,7 +436,7 @@ class LazyStream:
     variable in case you need to "unget" some bytes.
     """

-    def __init__(self, producer, length=None):
+    def __init__(self, producer: Iterator[bytes], length: int | None = None):
         """
         Every LazyStream must have a producer when instantiated.

@@ -433,11 +451,11 @@ class LazyStream:
         self._remaining = length
         self._unget_history = []

-    def tell(self):
+    def tell(self) -> int:
         return self.position

-    def read(self, size=None):
-        def parts():
+    def read(self, size: int | None = None) -> bytes:
+        def parts() -> Iterator[bytes]:
             remaining = self._remaining if size is None else size
             # do the whole thing in one shot if no limit was provided.
             if remaining is None:
@@ -462,7 +480,7 @@ class LazyStream:

         return b"".join(parts())

-    def __next__(self):
+    def __next__(self) -> bytes:
         """
         Used when the exact number of bytes to read is unimportant.

@@ -478,7 +496,7 @@ class LazyStream:
         self.position += len(output)
         return output

-    def close(self):
+    def close(self) -> None:
         """
         Used to invalidate/disable this lazy stream.

@@ -487,10 +505,10 @@ class LazyStream:
         """
         self._producer = []

-    def __iter__(self):
+    def __iter__(self) -> LazyStream:
         return self

-    def unget(self, bytes):
+    def unget(self, bytes: bytes) -> None:
         """
         Place bytes back onto the front of the lazy stream.

@@ -503,7 +521,7 @@ class LazyStream:
         self.position -= len(bytes)
         self._leftover = bytes + self._leftover

-    def _update_unget_history(self, num_bytes):
+    def _update_unget_history(self, num_bytes: int) -> None:
         """
         Update the unget history as a sanity check to see if we've pushed
         back the same number of bytes in one chunk. If we keep ungetting the
@@ -534,11 +552,11 @@ class ChunkIter:
     constructor, yield chunks of read operations from that object.
     """

-    def __init__(self, flo, chunk_size=64 * 1024):
+    def __init__(self, flo: Any, chunk_size: int = 64 * 1024):
         self.flo = flo
         self.chunk_size = chunk_size

-    def __next__(self):
+    def __next__(self) -> bytes:
         try:
             data = self.flo.read(self.chunk_size)
         except InputStreamExhausted:
@@ -548,7 +566,7 @@ class ChunkIter:
         else:
             raise StopIteration()

-    def __iter__(self):
+    def __iter__(self) -> ChunkIter:
         return self


@@ -557,14 +575,14 @@ class InterBoundaryIter:
     A Producer that will iterate over boundaries.
     """

-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
         self._stream = stream
         self._boundary = boundary

-    def __iter__(self):
+    def __iter__(self) -> InterBoundaryIter:
         return self

-    def __next__(self):
+    def __next__(self) -> LazyStream:
         try:
             return LazyStream(BoundaryIter(self._stream, self._boundary))
         except InputStreamExhausted:
@@ -583,7 +601,7 @@ class BoundaryIter:
     StopIteration exception.
     """

-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
         self._stream = stream
         self._boundary = boundary
         self._done = False
@@ -598,10 +616,10 @@ class BoundaryIter:
             raise InputStreamExhausted()
         self._stream.unget(unused_char)

-    def __iter__(self):
+    def __iter__(self) -> BoundaryIter:
         return self

-    def __next__(self):
+    def __next__(self) -> bytes:
         if self._done:
             raise StopIteration()

@@ -642,7 +660,7 @@ class BoundaryIter:
                 stream.unget(chunk[-rollback:])
                 return chunk[:-rollback]

-    def _find_boundary(self, data):
+    def _find_boundary(self, data: bytes) -> tuple[int, int] | None:
         """
         Find a multipart boundary in data.

@@ -667,7 +685,7 @@ class BoundaryIter:
             return end, next


-def exhaust(stream_or_iterable):
+def exhaust(stream_or_iterable: Any) -> None:
     """Exhaust an iterator or stream."""
     try:
         iterator = iter(stream_or_iterable)
@@ -676,7 +694,9 @@ def exhaust(stream_or_iterable):
     collections.deque(iterator, maxlen=0) # consume iterator quickly.


-def parse_boundary_stream(
+def parse_boundary_stream(
+    stream: LazyStream, max_header_size: int
+) -> tuple[str, dict[str, Any], LazyStream]:
     """
     Parse one and exactly one stream that encapsulates a boundary.
     """
@@ -730,11 +750,11 @@ def parse_boundary_stream(stream, max_header_size):


 class Parser:
-    def __init__(self, stream, boundary):
+    def __init__(self, stream: LazyStream, boundary: bytes):
         self._stream = stream
         self._separator = b"--" + boundary

-    def __iter__(self):
+    def __iter__(self) -> Iterator[tuple[str, dict[str, Any], LazyStream]]:
         boundarystream = InterBoundaryIter(self._stream, self._separator)
         for sub_stream in boundarystream:
             # Iterate over each part
```