plain-0.1.2-py3-none-any.whl → plain-0.2.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- plain/assets/README.md +69 -31
- plain/assets/__init__.py +0 -6
- plain/assets/compile.py +121 -0
- plain/assets/finders.py +26 -218
- plain/assets/fingerprints.py +38 -0
- plain/assets/urls.py +31 -0
- plain/assets/views.py +263 -0
- plain/cli/cli.py +67 -5
- plain/packages/config.py +5 -5
- plain/packages/registry.py +1 -7
- plain/preflight/urls.py +0 -10
- plain/runtime/README.md +0 -1
- plain/runtime/global_settings.py +5 -16
- plain/runtime/user_settings.py +0 -49
- plain/templates/jinja/globals.py +1 -1
- plain/test/__init__.py +0 -8
- plain/test/client.py +36 -16
- plain/views/base.py +5 -3
- plain/views/errors.py +7 -0
- {plain-0.1.2.dist-info → plain-0.2.1.dist-info}/LICENSE +0 -24
- {plain-0.1.2.dist-info → plain-0.2.1.dist-info}/METADATA +1 -1
- {plain-0.1.2.dist-info → plain-0.2.1.dist-info}/RECORD +24 -34
- plain/assets/preflight.py +0 -14
- plain/assets/storage.py +0 -916
- plain/assets/utils.py +0 -52
- plain/assets/whitenoise/__init__.py +0 -5
- plain/assets/whitenoise/base.py +0 -259
- plain/assets/whitenoise/compress.py +0 -189
- plain/assets/whitenoise/media_types.py +0 -137
- plain/assets/whitenoise/middleware.py +0 -197
- plain/assets/whitenoise/responders.py +0 -286
- plain/assets/whitenoise/storage.py +0 -178
- plain/assets/whitenoise/string_utils.py +0 -13
- plain/internal/legacy/management/commands/__init__.py +0 -0
- plain/internal/legacy/management/commands/collectstatic.py +0 -297
- plain/test/utils.py +0 -255
- {plain-0.1.2.dist-info → plain-0.2.1.dist-info}/WHEEL +0 -0
- {plain-0.1.2.dist-info → plain-0.2.1.dist-info}/entry_points.txt +0 -0
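Taken together, the file list shows the vendored WhiteNoise/collectstatic machinery being removed and replaced by a smaller in-package asset pipeline (compile.py, fingerprints.py, urls.py, views.py). As a general illustration of what content fingerprinting usually means (a hash of the file contents embedded in the served filename), here is a standalone sketch; the function name and hash choice are illustrative and are not taken from plain/assets/fingerprints.py:

import hashlib
from pathlib import Path

def fingerprinted_name(path: Path) -> str:
    # Hash the file contents and splice a short digest into the filename,
    # e.g. "app.css" -> "app.3f2c9a1b4d5e6f70.css" (illustrative scheme only).
    digest = hashlib.md5(path.read_bytes()).hexdigest()[:16]
    return f"{path.stem}.{digest}{path.suffix}"

# Hypothetical usage during an asset compile step:
# compiled = fingerprinted_name(Path("assets/app.css"))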
plain/assets/utils.py
DELETED
@@ -1,52 +0,0 @@
-import fnmatch
-import os
-
-from plain.exceptions import ImproperlyConfigured
-from plain.runtime import settings
-
-
-def matches_patterns(path, patterns):
-    """
-    Return True or False depending on whether the ``path`` should be
-    ignored (if it matches any pattern in ``ignore_patterns``).
-    """
-    return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns)
-
-
-def get_files(storage, ignore_patterns=None, location=""):
-    """
-    Recursively walk the storage directories yielding the paths
-    of all files that should be copied.
-    """
-    if ignore_patterns is None:
-        ignore_patterns = []
-    directories, files = storage.listdir(location)
-    for fn in files:
-        # Match only the basename.
-        if matches_patterns(fn, ignore_patterns):
-            continue
-        if location:
-            fn = os.path.join(location, fn)
-            # Match the full file path.
-            if matches_patterns(fn, ignore_patterns):
-                continue
-        yield fn
-    for dir in directories:
-        if matches_patterns(dir, ignore_patterns):
-            continue
-        if location:
-            dir = os.path.join(location, dir)
-        yield from get_files(storage, ignore_patterns, dir)
-
-
-def check_settings(base_url=None):
-    """
-    Check if the assets settings have sane values.
-    """
-    if base_url is None:
-        base_url = settings.ASSETS_URL
-    if not base_url:
-        raise ImproperlyConfigured(
-            "You're using the assets app "
-            "without having set the required ASSETS_URL setting."
-        )
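For context, the removed matches_patterns helper was a thin wrapper over fnmatch.fnmatchcase. A minimal standalone sketch of the same ignore-pattern filtering, without the removed storage API (the pattern list and paths below are hypothetical):

import fnmatch

def matches_patterns(path, patterns):
    # True if the path matches any of the fnmatch-style patterns.
    return any(fnmatch.fnmatchcase(path, pattern) for pattern in patterns)

# Hypothetical usage: skip hidden files and Sass sources when collecting assets.
ignore = ["CVS", ".*", "*~", "*.scss"]
paths = ["app.css", "app.scss", ".DS_Store", "img/logo.png"]
print([p for p in paths if not matches_patterns(p, ignore)])
# ['app.css', 'img/logo.png']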
plain/assets/whitenoise/base.py
DELETED
@@ -1,259 +0,0 @@
-from __future__ import annotations
-
-import os
-import re
-import warnings
-from collections.abc import Callable
-from posixpath import normpath
-from wsgiref.headers import Headers
-from wsgiref.util import FileWrapper
-
-from .media_types import MediaTypes
-from .responders import IsDirectoryError, MissingFileError, Redirect, StaticFile
-from .string_utils import decode_path_info, ensure_leading_trailing_slash
-
-
-class WhiteNoise:
-    # Ten years is what nginx sets a max age if you use 'expires max;'
-    # so we'll follow its lead
-    FOREVER = 10 * 365 * 24 * 60 * 60
-
-    def __init__(
-        self,
-        application,
-        root=None,
-        prefix=None,
-        *,
-        # Re-check the filesystem on every request so that any changes are
-        # automatically picked up. NOTE: For use in development only, not supported
-        # in production
-        autorefresh: bool = False,
-        max_age: int | None = 60,  # seconds
-        # Set 'Access-Control-Allow-Origin: *' header on all files.
-        # As these are all public static files this is safe (See
-        # https://www.w3.org/TR/cors/#security) and ensures that things (e.g
-        # webfonts in Firefox) still work as expected when your static files are
-        # served from a CDN, rather than your primary domain.
-        allow_all_origins: bool = True,
-        charset: str = "utf-8",
-        mimetypes: dict[str, str] | None = None,
-        add_headers_function: Callable[[Headers, str, str], None] | None = None,
-        index_file: str | bool | None = None,
-        immutable_file_test: Callable | str | None = None,
-    ):
-        self.autorefresh = autorefresh
-        self.max_age = max_age
-        self.allow_all_origins = allow_all_origins
-        self.charset = charset
-        self.add_headers_function = add_headers_function
-        if index_file is True:
-            self.index_file: str | None = "index.html"
-        elif isinstance(index_file, str):
-            self.index_file = index_file
-        else:
-            self.index_file = None
-
-        if immutable_file_test is not None:
-            if not callable(immutable_file_test):
-                regex = re.compile(immutable_file_test)
-                self.immutable_file_test = lambda path, url: bool(regex.search(url))
-            else:
-                self.immutable_file_test = immutable_file_test
-
-        self.media_types = MediaTypes(extra_types=mimetypes)
-        self.application = application
-        self.files = {}
-        self.directories = []
-        if root is not None:
-            self.add_files(root, prefix)
-
-    def __call__(self, environ, start_response):
-        path = decode_path_info(environ.get("PATH_INFO", ""))
-        if self.autorefresh:
-            asset_file = self.find_file(path)
-        else:
-            asset_file = self.files.get(path)
-        if asset_file is None:
-            return self.application(environ, start_response)
-        else:
-            return self.serve(asset_file, environ, start_response)
-
-    @staticmethod
-    def serve(asset_file, environ, start_response):
-        response = asset_file.get_response(environ["REQUEST_METHOD"], environ)
-        status_line = f"{response.status} {response.status.phrase}"
-        start_response(status_line, list(response.headers))
-        if response.file is not None:
-            file_wrapper = environ.get("wsgi.file_wrapper", FileWrapper)
-            return file_wrapper(response.file)
-        else:
-            return []
-
-    def add_files(self, root, prefix=None):
-        root = os.path.abspath(root)
-        root = root.rstrip(os.path.sep) + os.path.sep
-        prefix = ensure_leading_trailing_slash(prefix)
-        if self.autorefresh:
-            # Later calls to `add_files` overwrite earlier ones, hence we need
-            # to store the list of directories in reverse order so later ones
-            # match first when they're checked in "autorefresh" mode
-            self.directories.insert(0, (root, prefix))
-        else:
-            if os.path.isdir(root):
-                self.update_files_dictionary(root, prefix)
-            else:
-                warnings.warn(f"No directory at: {root}", stacklevel=3)
-
-    def update_files_dictionary(self, root, prefix):
-        # Build a mapping from paths to the results of `os.stat` calls
-        # so we only have to touch the filesystem once
-        stat_cache = dict(scantree(root))
-        for path in stat_cache.keys():
-            relative_path = path[len(root) :]
-            relative_url = relative_path.replace("\\", "/")
-            url = prefix + relative_url
-            self.add_file_to_dictionary(url, path, stat_cache=stat_cache)
-
-    def add_file_to_dictionary(self, url, path, stat_cache=None):
-        if self.is_compressed_variant(path, stat_cache=stat_cache):
-            return
-        if self.index_file is not None and url.endswith("/" + self.index_file):
-            index_url = url[: -len(self.index_file)]
-            index_no_slash = index_url.rstrip("/")
-            self.files[url] = self.redirect(url, index_url)
-            self.files[index_no_slash] = self.redirect(index_no_slash, index_url)
-            url = index_url
-        asset_file = self.get_asset_file(path, url, stat_cache=stat_cache)
-        self.files[url] = asset_file
-
-    def find_file(self, url):
-        # Optimization: bail early if the URL can never match a file
-        if self.index_file is None and url.endswith("/"):
-            return
-        if not self.url_is_canonical(url):
-            return
-        for path in self.candidate_paths_for_url(url):
-            try:
-                return self.find_file_at_path(path, url)
-            except MissingFileError:
-                pass
-
-    def candidate_paths_for_url(self, url):
-        for root, prefix in self.directories:
-            if url.startswith(prefix):
-                path = os.path.join(root, url[len(prefix) :])
-                if os.path.commonprefix((root, path)) == root:
-                    yield path
-
-    def find_file_at_path(self, path, url):
-        if self.is_compressed_variant(path):
-            raise MissingFileError(path)
-
-        if self.index_file is not None:
-            if url.endswith("/"):
-                path = os.path.join(path, self.index_file)
-                return self.get_asset_file(path, url)
-            elif url.endswith("/" + self.index_file):
-                if os.path.isfile(path):
-                    return self.redirect(url, url[: -len(self.index_file)])
-            else:
-                try:
-                    return self.get_asset_file(path, url)
-                except IsDirectoryError:
-                    if os.path.isfile(os.path.join(path, self.index_file)):
-                        return self.redirect(url, url + "/")
-            raise MissingFileError(path)
-
-        return self.get_asset_file(path, url)
-
-    @staticmethod
-    def url_is_canonical(url):
-        """
-        Check that the URL path is in canonical format i.e. has normalised
-        slashes and no path traversal elements
-        """
-        if "\\" in url:
-            return False
-        normalised = normpath(url)
-        if url.endswith("/") and url != "/":
-            normalised += "/"
-        return normalised == url
-
-    @staticmethod
-    def is_compressed_variant(path, stat_cache=None):
-        if path[-3:] in (".gz", ".br"):
-            uncompressed_path = path[:-3]
-            if stat_cache is None:
-                return os.path.isfile(uncompressed_path)
-            else:
-                return uncompressed_path in stat_cache
-        return False
-
-    def get_asset_file(self, path, url, stat_cache=None):
-        # Optimization: bail early if file does not exist
-        if stat_cache is None and not os.path.exists(path):
-            raise MissingFileError(path)
-        headers = Headers([])
-        self.add_mime_headers(headers, path, url)
-        self.add_cache_headers(headers, path, url)
-        if self.allow_all_origins:
-            headers["Access-Control-Allow-Origin"] = "*"
-        if self.add_headers_function is not None:
-            self.add_headers_function(headers, path, url)
-        return StaticFile(
-            path,
-            headers.items(),
-            stat_cache=stat_cache,
-            encodings={"gzip": path + ".gz", "br": path + ".br"},
-        )
-
-    def add_mime_headers(self, headers, path, url):
-        media_type = self.media_types.get_type(path)
-        if media_type.startswith("text/"):
-            params = {"charset": str(self.charset)}
-        else:
-            params = {}
-        headers.add_header("Content-Type", str(media_type), **params)
-
-    def add_cache_headers(self, headers, path, url):
-        if self.immutable_file_test(path, url):
-            headers["Cache-Control"] = f"max-age={self.FOREVER}, public, immutable"
-        elif self.max_age is not None:
-            headers["Cache-Control"] = f"max-age={self.max_age}, public"
-
-    def immutable_file_test(self, path, url):
-        """
-        This should be implemented by sub-classes (see e.g. WhiteNoiseMiddleware)
-        or by setting the `immutable_file_test` config option
-        """
-        return False
-
-    def redirect(self, from_url, to_url):
-        """
-        Return a relative 302 redirect
-
-        We use relative redirects as we don't know the absolute URL the app is
-        being hosted under
-        """
-        if to_url == from_url + "/":
-            relative_url = from_url.split("/")[-1] + "/"
-        elif from_url == to_url + self.index_file:
-            relative_url = "./"
-        else:
-            raise ValueError(f"Cannot handle redirect: {from_url} > {to_url}")
-        if self.max_age is not None:
-            headers = {"Cache-Control": f"max-age={self.max_age}, public"}
-        else:
-            headers = {}
-        return Redirect(relative_url, headers=headers)
-
-
-def scantree(root):
-    """
-    Recurse the given directory yielding (pathname, os.stat(pathname)) pairs
-    """
-    for entry in os.scandir(root):
-        if entry.is_dir():
-            yield from scantree(entry.path)
-        else:
-            yield entry.path, entry.stat()
plain/assets/whitenoise/compress.py
DELETED
@@ -1,189 +0,0 @@
-from __future__ import annotations
-
-import argparse
-import gzip
-import os
-import re
-from io import BytesIO
-
-try:
-    import brotli
-
-    brotli_installed = True
-except ImportError:  # pragma: no cover
-    brotli_installed = False
-
-
-class Compressor:
-    # Extensions that it's not worth trying to compress
-    SKIP_COMPRESS_EXTENSIONS = (
-        # Images
-        "jpg",
-        "jpeg",
-        "png",
-        "gif",
-        "webp",
-        # Compressed files
-        "zip",
-        "gz",
-        "tgz",
-        "bz2",
-        "tbz",
-        "xz",
-        "br",
-        # Flash
-        "swf",
-        "flv",
-        # Fonts
-        "woff",
-        "woff2",
-        # Video
-        "3gp",
-        "3gpp",
-        "asf",
-        "avi",
-        "m4v",
-        "mov",
-        "mp4",
-        "mpeg",
-        "mpg",
-        "webm",
-        "wmv",
-    )
-
-    def __init__(
-        self, extensions=None, use_gzip=True, use_brotli=True, log=print, quiet=False
-    ):
-        if extensions is None:
-            extensions = self.SKIP_COMPRESS_EXTENSIONS
-        self.extension_re = self.get_extension_re(extensions)
-        self.use_gzip = use_gzip
-        self.use_brotli = use_brotli and brotli_installed
-        if not quiet:
-            self.log = log
-
-    @staticmethod
-    def get_extension_re(extensions):
-        if not extensions:
-            return re.compile("^$")
-        else:
-            return re.compile(
-                r"\.({})$".format("|".join(map(re.escape, extensions))), re.IGNORECASE
-            )
-
-    def should_compress(self, filename):
-        return not self.extension_re.search(filename)
-
-    def log(self, message):
-        pass
-
-    def compress(self, path):
-        with open(path, "rb") as f:
-            stat_result = os.fstat(f.fileno())
-            data = f.read()
-        size = len(data)
-        if self.use_brotli:
-            compressed = self.compress_brotli(data)
-            if self.is_compressed_effectively("Brotli", path, size, compressed):
-                yield self.write_data(path, compressed, ".br", stat_result)
-            else:
-                # If Brotli compression wasn't effective gzip won't be either
-                return
-        if self.use_gzip:
-            compressed = self.compress_gzip(data)
-            if self.is_compressed_effectively("Gzip", path, size, compressed):
-                yield self.write_data(path, compressed, ".gz", stat_result)
-
-    @staticmethod
-    def compress_gzip(data):
-        output = BytesIO()
-        # Explicitly set mtime to 0 so gzip content is fully determined
-        # by file content (0 = "no timestamp" according to gzip spec)
-        with gzip.GzipFile(
-            filename="", mode="wb", fileobj=output, compresslevel=9, mtime=0
-        ) as gz_file:
-            gz_file.write(data)
-        return output.getvalue()
-
-    @staticmethod
-    def compress_brotli(data):
-        return brotli.compress(data)
-
-    def is_compressed_effectively(self, encoding_name, path, orig_size, data):
-        compressed_size = len(data)
-        if orig_size == 0:
-            is_effective = False
-        else:
-            ratio = compressed_size / orig_size
-            is_effective = ratio <= 0.95
-        if is_effective:
-            self.log(
-                "{} compressed {} ({}K -> {}K)".format(
-                    encoding_name, path, orig_size // 1024, compressed_size // 1024
-                )
-            )
-        else:
-            self.log(f"Skipping {path} ({encoding_name} compression not effective)")
-        return is_effective
-
-    def write_data(self, path, data, suffix, stat_result):
-        filename = path + suffix
-        with open(filename, "wb") as f:
-            f.write(data)
-        os.utime(filename, (stat_result.st_atime, stat_result.st_mtime))
-        return filename
-
-
-def main(argv=None):
-    parser = argparse.ArgumentParser(
-        description="Search for all files inside <root> *not* matching "
-        "<extensions> and produce compressed versions with "
-        "'.gz' and '.br' suffixes (as long as this results in a "
-        "smaller file)"
-    )
-    parser.add_argument(
-        "-q", "--quiet", help="Don't produce log output", action="store_true"
-    )
-    parser.add_argument(
-        "--no-gzip",
-        help="Don't produce gzip '.gz' files",
-        action="store_false",
-        dest="use_gzip",
-    )
-    parser.add_argument(
-        "--no-brotli",
-        help="Don't produce brotli '.br' files",
-        action="store_false",
-        dest="use_brotli",
-    )
-    parser.add_argument("root", help="Path root from which to search for files")
-    default_exclude = ", ".join(Compressor.SKIP_COMPRESS_EXTENSIONS)
-    parser.add_argument(
-        "extensions",
-        nargs="*",
-        help=(
-            "File extensions to exclude from compression "
-            f"(default: {default_exclude})"
-        ),
-        default=Compressor.SKIP_COMPRESS_EXTENSIONS,
-    )
-    args = parser.parse_args(argv)
-
-    compressor = Compressor(
-        extensions=args.extensions,
-        use_gzip=args.use_gzip,
-        use_brotli=args.use_brotli,
-        quiet=args.quiet,
-    )
-    for dirpath, _dirs, files in os.walk(args.root):
-        for filename in files:
-            if compressor.should_compress(filename):
-                path = os.path.join(dirpath, filename)
-                for _compressed in compressor.compress(path):
-                    pass
-
-    return 0
-
-
-if __name__ == "__main__":  # pragma: no cover
-    raise SystemExit(main())
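Two details in the removed compressor are worth noting: gzip output embeds a timestamp by default, so mtime=0 is passed to keep the compressed bytes a pure function of the input, and a compressed variant is only kept when it is at most 95% of the original size. A small standalone check of both behaviours, using only the standard library (the sample data is made up):

import gzip
from io import BytesIO

def gzip_deterministic(data: bytes) -> bytes:
    # mtime=0 removes the embedded timestamp, so identical input -> identical output.
    buf = BytesIO()
    with gzip.GzipFile(filename="", mode="wb", fileobj=buf, compresslevel=9, mtime=0) as gz:
        gz.write(data)
    return buf.getvalue()

data = b"<html>" + b"x" * 4096 + b"</html>"
a, b = gzip_deterministic(data), gzip_deterministic(data)
assert a == b  # reproducible output, safe to cache or fingerprint

# Same effectiveness rule as the removed code: keep the variant only if <= 95% of the original.
print(len(a) / len(data) <= 0.95)  # True for this highly repetitive input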
plain/assets/whitenoise/media_types.py
DELETED
@@ -1,137 +0,0 @@
-from __future__ import annotations
-
-import os
-
-
-class MediaTypes:
-    __slots__ = ("types_map",)
-
-    def __init__(self, *, extra_types: dict[str, str] | None = None) -> None:
-        self.types_map = default_types()
-        if extra_types is not None:
-            self.types_map.update(extra_types)
-
-    def get_type(self, path: str) -> str:
-        name = os.path.basename(path).lower()
-        media_type = self.types_map.get(name)
-        if media_type is not None:
-            return media_type
-        extension = os.path.splitext(name)[1]
-        return self.types_map.get(extension, "application/octet-stream")
-
-
-def default_types() -> dict[str, str]:
-    """
-    We use our own set of default media types rather than the system-supplied
-    ones. This ensures consistent media type behaviour across varied
-    environments. The defaults are based on those shipped with nginx, with
-    some custom additions.
-
-    (Auto-generated by scripts/generate_default_media_types.py)
-    """
-    return {
-        ".3gp": "video/3gpp",
-        ".3gpp": "video/3gpp",
-        ".7z": "application/x-7z-compressed",
-        ".ai": "application/postscript",
-        ".asf": "video/x-ms-asf",
-        ".asx": "video/x-ms-asf",
-        ".atom": "application/atom+xml",
-        ".avi": "video/x-msvideo",
-        ".avif": "image/avif",
-        ".bmp": "image/x-ms-bmp",
-        ".cco": "application/x-cocoa",
-        ".crt": "application/x-x509-ca-cert",
-        ".css": "text/css",
-        ".der": "application/x-x509-ca-cert",
-        ".doc": "application/msword",
-        ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
-        ".ear": "application/java-archive",
-        ".eot": "application/vnd.ms-fontobject",
-        ".eps": "application/postscript",
-        ".flv": "video/x-flv",
-        ".gif": "image/gif",
-        ".hqx": "application/mac-binhex40",
-        ".htc": "text/x-component",
-        ".htm": "text/html",
-        ".html": "text/html",
-        ".ico": "image/x-icon",
-        ".jad": "text/vnd.sun.j2me.app-descriptor",
-        ".jar": "application/java-archive",
-        ".jardiff": "application/x-java-archive-diff",
-        ".jng": "image/x-jng",
-        ".jnlp": "application/x-java-jnlp-file",
-        ".jpeg": "image/jpeg",
-        ".jpg": "image/jpeg",
-        ".js": "text/javascript",
-        ".json": "application/json",
-        ".kar": "audio/midi",
-        ".kml": "application/vnd.google-earth.kml+xml",
-        ".kmz": "application/vnd.google-earth.kmz",
-        ".m3u8": "application/vnd.apple.mpegurl",
-        ".m4a": "audio/x-m4a",
-        ".m4v": "video/x-m4v",
-        ".md": "text/markdown",
-        ".mid": "audio/midi",
-        ".midi": "audio/midi",
-        ".mjs": "text/javascript",
-        ".mml": "text/mathml",
-        ".mng": "video/x-mng",
-        ".mov": "video/quicktime",
-        ".mp3": "audio/mpeg",
-        ".mp4": "video/mp4",
-        ".mpeg": "video/mpeg",
-        ".mpg": "video/mpeg",
-        ".odg": "application/vnd.oasis.opendocument.graphics",
-        ".odp": "application/vnd.oasis.opendocument.presentation",
-        ".ods": "application/vnd.oasis.opendocument.spreadsheet",
-        ".odt": "application/vnd.oasis.opendocument.text",
-        ".ogg": "audio/ogg",
-        ".pdb": "application/x-pilot",
-        ".pdf": "application/pdf",
-        ".pem": "application/x-x509-ca-cert",
-        ".pl": "application/x-perl",
-        ".pm": "application/x-perl",
-        ".png": "image/png",
-        ".ppt": "application/vnd.ms-powerpoint",
-        ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
-        ".prc": "application/x-pilot",
-        ".ps": "application/postscript",
-        ".ra": "audio/x-realaudio",
-        ".rar": "application/x-rar-compressed",
-        ".rpm": "application/x-redhat-package-manager",
-        ".rss": "application/rss+xml",
-        ".rtf": "application/rtf",
-        ".run": "application/x-makeself",
-        ".sea": "application/x-sea",
-        ".shtml": "text/html",
-        ".sit": "application/x-stuffit",
-        ".svg": "image/svg+xml",
-        ".svgz": "image/svg+xml",
-        ".swf": "application/x-shockwave-flash",
-        ".tcl": "application/x-tcl",
-        ".tif": "image/tiff",
-        ".tiff": "image/tiff",
-        ".tk": "application/x-tcl",
-        ".ts": "video/mp2t",
-        ".txt": "text/plain",
-        ".war": "application/java-archive",
-        ".wasm": "application/wasm",
-        ".wbmp": "image/vnd.wap.wbmp",
-        ".webm": "video/webm",
-        ".webp": "image/webp",
-        ".wml": "text/vnd.wap.wml",
-        ".wmlc": "application/vnd.wap.wmlc",
-        ".wmv": "video/x-ms-wmv",
-        ".woff": "application/font-woff",
-        ".woff2": "font/woff2",
-        ".xhtml": "application/xhtml+xml",
-        ".xls": "application/vnd.ms-excel",
-        ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
-        ".xml": "text/xml",
-        ".xpi": "application/x-xpinstall",
-        ".xspf": "application/xspf+xml",
-        ".zip": "application/zip",
-        "apple-app-site-association": "application/pkc7-mime",
-        "crossdomain.xml": "text/x-cross-domain-policy",
-    }
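The removed MediaTypes helper resolved a content type by exact filename first, then by extension, and finally fell back to application/octet-stream. A minimal sketch of that two-step lookup, with a tiny table for illustration rather than the full nginx-derived map:

import os

TYPES = {
    "crossdomain.xml": "text/x-cross-domain-policy",  # exact-filename entry
    ".css": "text/css",
    ".js": "text/javascript",
    ".xml": "text/xml",
}

def get_type(path: str) -> str:
    name = os.path.basename(path).lower()
    # 1) exact filename match, 2) extension match, 3) generic fallback
    if name in TYPES:
        return TYPES[name]
    ext = os.path.splitext(name)[1]
    return TYPES.get(ext, "application/octet-stream")

print(get_type("assets/app.JS"))    # text/javascript
print(get_type("crossdomain.xml"))  # text/x-cross-domain-policy
print(get_type("assets/data.bin"))  # application/octet-stream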