plain-0.1.1-py3-none-any.whl → plain-0.2.0-py3-none-any.whl
This diff shows the content of publicly available package versions as released to a supported registry. It is provided for informational purposes only and reflects the changes between the two versions as they appear in the public registry.
- plain/assets/README.md +18 -37
- plain/assets/__init__.py +0 -6
- plain/assets/compile.py +111 -0
- plain/assets/finders.py +26 -218
- plain/assets/fingerprints.py +38 -0
- plain/assets/urls.py +31 -0
- plain/assets/views.py +263 -0
- plain/cli/cli.py +68 -5
- plain/csrf/README.md +12 -0
- plain/packages/config.py +5 -5
- plain/packages/registry.py +1 -7
- plain/preflight/urls.py +0 -10
- plain/runtime/README.md +0 -1
- plain/runtime/__init__.py +1 -1
- plain/runtime/global_settings.py +7 -14
- plain/runtime/user_settings.py +0 -49
- plain/templates/jinja/globals.py +1 -1
- plain/test/__init__.py +0 -8
- plain/test/client.py +36 -16
- plain/views/base.py +5 -3
- plain/views/errors.py +7 -0
- {plain-0.1.1.dist-info → plain-0.2.0.dist-info}/LICENSE +0 -24
- {plain-0.1.1.dist-info → plain-0.2.0.dist-info}/METADATA +1 -1
- {plain-0.1.1.dist-info → plain-0.2.0.dist-info}/RECORD +26 -36
- plain/assets/preflight.py +0 -14
- plain/assets/storage.py +0 -916
- plain/assets/utils.py +0 -52
- plain/assets/whitenoise/__init__.py +0 -5
- plain/assets/whitenoise/base.py +0 -259
- plain/assets/whitenoise/compress.py +0 -189
- plain/assets/whitenoise/media_types.py +0 -137
- plain/assets/whitenoise/middleware.py +0 -197
- plain/assets/whitenoise/responders.py +0 -286
- plain/assets/whitenoise/storage.py +0 -178
- plain/assets/whitenoise/string_utils.py +0 -13
- plain/internal/legacy/management/commands/__init__.py +0 -0
- plain/internal/legacy/management/commands/collectstatic.py +0 -297
- plain/test/utils.py +0 -255
- {plain-0.1.1.dist-info → plain-0.2.0.dist-info}/WHEEL +0 -0
- {plain-0.1.1.dist-info → plain-0.2.0.dist-info}/entry_points.txt +0 -0
plain/assets/storage.py
DELETED
@@ -1,916 +0,0 @@
-import json
-import os
-import pathlib
-import posixpath
-import re
-from datetime import datetime, timezone
-from hashlib import md5
-from urllib.parse import unquote, urldefrag, urljoin, urlsplit, urlunsplit
-
-from plain.assets.utils import check_settings, matches_patterns
-from plain.exceptions import ImproperlyConfigured, SuspiciousFileOperation
-from plain.internal.files import File, locks
-from plain.internal.files.base import ContentFile
-from plain.internal.files.move import file_move_safe
-from plain.internal.files.utils import validate_file_name
-from plain.runtime import settings
-from plain.utils._os import safe_join
-from plain.utils.crypto import get_random_string
-from plain.utils.deconstruct import deconstructible
-from plain.utils.encoding import filepath_to_uri
-from plain.utils.functional import LazyObject, cached_property
-from plain.utils.module_loading import import_string
-from plain.utils.text import get_valid_filename
-
-
-class Storage:
-    """
-    A base storage class, providing some default behaviors that all other
-    storage systems can inherit or override, as necessary.
-    """
-
-    # The following methods represent a public interface to private methods.
-    # These shouldn't be overridden by subclasses unless absolutely necessary.
-
-    def open(self, name, mode="rb"):
-        """Retrieve the specified file from storage."""
-        return self._open(name, mode)
-
-    def save(self, name, content, max_length=None):
-        """
-        Save new content to the file specified by name. The content should be
-        a proper File object or any Python file-like object, ready to be read
-        from the beginning.
-        """
-        # Get the proper name for the file, as it will actually be saved.
-        if name is None:
-            name = content.name
-
-        if not hasattr(content, "chunks"):
-            content = File(content, name)
-
-        name = self.get_available_name(name, max_length=max_length)
-        name = self._save(name, content)
-        # Ensure that the name returned from the storage system is still valid.
-        validate_file_name(name, allow_relative_path=True)
-        return name
-
-    # These methods are part of the public API, with default implementations.
-
-    def get_valid_name(self, name):
-        """
-        Return a filename, based on the provided filename, that's suitable for
-        use in the target storage system.
-        """
-        return get_valid_filename(name)
-
-    def get_alternative_name(self, file_root, file_ext):
-        """
-        Return an alternative filename, by adding an underscore and a random 7
-        character alphanumeric string (before the file extension, if one
-        exists) to the filename.
-        """
-        return f"{file_root}_{get_random_string(7)}{file_ext}"
-
-    def get_available_name(self, name, max_length=None):
-        """
-        Return a filename that's free on the target storage system and
-        available for new content to be written to.
-        """
-        name = str(name).replace("\\", "/")
-        dir_name, file_name = os.path.split(name)
-        if ".." in pathlib.PurePath(dir_name).parts:
-            raise SuspiciousFileOperation(
-                "Detected path traversal attempt in '%s'" % dir_name
-            )
-        validate_file_name(file_name)
-        file_root, file_ext = os.path.splitext(file_name)
-        # If the filename already exists, generate an alternative filename
-        # until it doesn't exist.
-        # Truncate original name if required, so the new filename does not
-        # exceed the max_length.
-        while self.exists(name) or (max_length and len(name) > max_length):
-            # file_ext includes the dot.
-            name = os.path.join(
-                dir_name, self.get_alternative_name(file_root, file_ext)
-            )
-            if max_length is None:
-                continue
-            # Truncate file_root if max_length exceeded.
-            truncation = len(name) - max_length
-            if truncation > 0:
-                file_root = file_root[:-truncation]
-                # Entire file_root was truncated in attempt to find an
-                # available filename.
-                if not file_root:
-                    raise SuspiciousFileOperation(
-                        'Storage can not find an available filename for "%s". '
-                        "Please make sure that the corresponding file field "
-                        'allows sufficient "max_length".' % name
-                    )
-                name = os.path.join(
-                    dir_name, self.get_alternative_name(file_root, file_ext)
-                )
-        return name
-
-    def path(self, name):
-        """
-        Return a local filesystem path where the file can be retrieved using
-        Python's built-in open() function. Storage systems that can't be
-        accessed using open() should *not* implement this method.
-        """
-        raise NotImplementedError("This backend doesn't support absolute paths.")
-
-    # The following methods form the public API for storage systems, but with
-    # no default implementations. Subclasses must implement *all* of these.
-
-    def delete(self, name):
-        """
-        Delete the specified file from the storage system.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide a delete() method"
-        )
-
-    def exists(self, name):
-        """
-        Return True if a file referenced by the given name already exists in the
-        storage system, or False if the name is available for a new file.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide an exists() method"
-        )
-
-    def listdir(self, path):
-        """
-        List the contents of the specified path. Return a 2-tuple of lists:
-        the first item being directories, the second item being files.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide a listdir() method"
-        )
-
-    def size(self, name):
-        """
-        Return the total size, in bytes, of the file specified by name.
-        """
-        raise NotImplementedError("subclasses of Storage must provide a size() method")
-
-    def url(self, name):
-        """
-        Return an absolute URL where the file's contents can be accessed
-        directly by a web browser.
-        """
-        raise NotImplementedError("subclasses of Storage must provide a url() method")
-
-    def get_accessed_time(self, name):
-        """
-        Return the last accessed time (as a datetime) of the file specified by
-        name. The datetime will be timezone-aware if USE_TZ=True.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide a get_accessed_time() method"
-        )
-
-    def get_created_time(self, name):
-        """
-        Return the creation time (as a datetime) of the file specified by name.
-        The datetime will be timezone-aware if USE_TZ=True.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide a get_created_time() method"
-        )
-
-    def get_modified_time(self, name):
-        """
-        Return the last modified time (as a datetime) of the file specified by
-        name. The datetime will be timezone-aware if USE_TZ=True.
-        """
-        raise NotImplementedError(
-            "subclasses of Storage must provide a get_modified_time() method"
-        )
-
-
-class StorageSettingsMixin:
-    def _clear_cached_properties(self, setting, **kwargs):
-        """Reset setting based property values."""
-        if setting == "FILE_UPLOAD_PERMISSIONS":
-            self.__dict__.pop("file_permissions_mode", None)
-        elif setting == "FILE_UPLOAD_DIRECTORY_PERMISSIONS":
-            self.__dict__.pop("directory_permissions_mode", None)
-
-    def _value_or_setting(self, value, setting):
-        return setting if value is None else value
-
-
-@deconstructible(path="plain.assets.storage.FileSystemStorage")
-class FileSystemStorage(Storage, StorageSettingsMixin):
-    """
-    Standard filesystem storage
-    """
-
-    # The combination of O_CREAT and O_EXCL makes os.open() raise OSError if
-    # the file already exists before it's opened.
-    OS_OPEN_FLAGS = os.O_WRONLY | os.O_CREAT | os.O_EXCL | getattr(os, "O_BINARY", 0)
-
-    def __init__(
-        self,
-        location=None,
-        base_url=None,
-        file_permissions_mode=None,
-        directory_permissions_mode=None,
-    ):
-        self.base_location = location
-        self.base_url = base_url
-        if self.base_url and not self.base_url.endswith("/"):
-            self.base_url += "/"
-        self._file_permissions_mode = file_permissions_mode
-        self._directory_permissions_mode = directory_permissions_mode
-
-    @cached_property
-    def location(self):
-        return os.path.abspath(self.base_location)
-
-    @cached_property
-    def file_permissions_mode(self):
-        return self._value_or_setting(
-            self._file_permissions_mode, settings.FILE_UPLOAD_PERMISSIONS
-        )
-
-    @cached_property
-    def directory_permissions_mode(self):
-        return self._value_or_setting(
-            self._directory_permissions_mode, settings.FILE_UPLOAD_DIRECTORY_PERMISSIONS
-        )
-
-    def _open(self, name, mode="rb"):
-        return File(open(self.path(name), mode))
-
-    def _save(self, name, content):
-        full_path = self.path(name)
-
-        # Create any intermediate directories that do not exist.
-        directory = os.path.dirname(full_path)
-        try:
-            if self.directory_permissions_mode is not None:
-                # Set the umask because os.makedirs() doesn't apply the "mode"
-                # argument to intermediate-level directories.
-                old_umask = os.umask(0o777 & ~self.directory_permissions_mode)
-                try:
-                    os.makedirs(
-                        directory, self.directory_permissions_mode, exist_ok=True
-                    )
-                finally:
-                    os.umask(old_umask)
-            else:
-                os.makedirs(directory, exist_ok=True)
-        except FileExistsError:
-            raise FileExistsError("%s exists and is not a directory." % directory)
-
-        # There's a potential race condition between get_available_name and
-        # saving the file; it's possible that two threads might return the
-        # same name, at which point all sorts of fun happens. So we need to
-        # try to create the file, but if it already exists we have to go back
-        # to get_available_name() and try again.
-
-        while True:
-            try:
-                # This file has a file path that we can move.
-                if hasattr(content, "temporary_file_path"):
-                    file_move_safe(content.temporary_file_path(), full_path)
-
-                # This is a normal uploadedfile that we can stream.
-                else:
-                    # The current umask value is masked out by os.open!
-                    fd = os.open(full_path, self.OS_OPEN_FLAGS, 0o666)
-                    _file = None
-                    try:
-                        locks.lock(fd, locks.LOCK_EX)
-                        for chunk in content.chunks():
-                            if _file is None:
-                                mode = "wb" if isinstance(chunk, bytes) else "wt"
-                                _file = os.fdopen(fd, mode)
-                            _file.write(chunk)
-                    finally:
-                        locks.unlock(fd)
-                        if _file is not None:
-                            _file.close()
-                        else:
-                            os.close(fd)
-            except FileExistsError:
-                # A new name is needed if the file exists.
-                name = self.get_available_name(name)
-                full_path = self.path(name)
-            else:
-                # OK, the file save worked. Break out of the loop.
-                break
-
-        if self.file_permissions_mode is not None:
-            os.chmod(full_path, self.file_permissions_mode)
-
-        # Ensure the saved path is always relative to the storage root.
-        name = os.path.relpath(full_path, self.location)
-        # Ensure the moved file has the same gid as the storage root.
-        self._ensure_location_group_id(full_path)
-        # Store filenames with forward slashes, even on Windows.
-        return str(name).replace("\\", "/")
-
-    def _ensure_location_group_id(self, full_path):
-        if os.name == "posix":
-            file_gid = os.stat(full_path).st_gid
-            location_gid = os.stat(self.location).st_gid
-            if file_gid != location_gid:
-                try:
-                    os.chown(full_path, uid=-1, gid=location_gid)
-                except PermissionError:
-                    pass
-
-    def delete(self, name):
-        if not name:
-            raise ValueError("The name must be given to delete().")
-        name = self.path(name)
-        # If the file or directory exists, delete it from the filesystem.
-        try:
-            if os.path.isdir(name):
-                os.rmdir(name)
-            else:
-                os.remove(name)
-        except FileNotFoundError:
-            # FileNotFoundError is raised if the file or directory was removed
-            # concurrently.
-            pass
-
-    def exists(self, name):
-        return os.path.lexists(self.path(name))
-
-    def listdir(self, path):
-        path = self.path(path)
-        directories, files = [], []
-        with os.scandir(path) as entries:
-            for entry in entries:
-                if entry.is_dir():
-                    directories.append(entry.name)
-                else:
-                    files.append(entry.name)
-        return directories, files
-
-    def path(self, name):
-        return safe_join(self.location, name)
-
-    def size(self, name):
-        return os.path.getsize(self.path(name))
-
-    def url(self, name):
-        if self.base_url is None:
-            raise ValueError("This file is not accessible via a URL.")
-        url = filepath_to_uri(name)
-        if url is not None:
-            url = url.lstrip("/")
-        return urljoin(self.base_url, url)
-
-    def _datetime_from_timestamp(self, ts):
-        """
-        If timezone support is enabled, make an aware datetime object in UTC;
-        otherwise make a naive one in the local timezone.
-        """
-        tz = timezone.utc if settings.USE_TZ else None
-        return datetime.fromtimestamp(ts, tz=tz)
-
-    def get_accessed_time(self, name):
-        return self._datetime_from_timestamp(os.path.getatime(self.path(name)))
-
-    def get_created_time(self, name):
-        return self._datetime_from_timestamp(os.path.getctime(self.path(name)))
-
-    def get_modified_time(self, name):
-        return self._datetime_from_timestamp(os.path.getmtime(self.path(name)))
-
-
-class StaticFilesStorage(FileSystemStorage):
-    """
-    Standard file system storage for static files.
-
-    The defaults for ``location`` and ``base_url`` are
-    ``ASSETS_ROOT`` and ``ASSETS_URL``.
-    """
-
-    def __init__(self, location=None, base_url=None, *args, **kwargs):
-        if location is None:
-            location = settings.ASSETS_ROOT
-        if base_url is None:
-            base_url = settings.ASSETS_URL
-        check_settings(base_url)
-        super().__init__(location, base_url, *args, **kwargs)
-        # FileSystemStorage fallbacks to MEDIA_ROOT when location
-        # is empty, so we restore the empty value.
-        if not location:
-            self.base_location = None
-            self.location = None
-
-    def path(self, name):
-        if not self.location:
-            raise ImproperlyConfigured(
-                "You're using the assets app "
-                "without having set the ASSETS_ROOT "
-                "setting to a filesystem path."
-            )
-        return super().path(name)
-
-
-class HashedFilesMixin:
-    default_template = """url("%(url)s")"""
-    max_post_process_passes = 5
-    support_js_module_import_aggregation = False
-    _js_module_import_aggregation_patterns = (
-        "*.js",
-        (
-            (
-                (
-                    r"""(?P<matched>import(?s:(?P<import>[\s\{].*?))"""
-                    r"""\s*from\s*['"](?P<url>[\.\/].*?)["']\s*;)"""
-                ),
-                """import%(import)s from "%(url)s";""",
-            ),
-            (
-                (
-                    r"""(?P<matched>export(?s:(?P<exports>[\s\{].*?))"""
-                    r"""\s*from\s*["'](?P<url>[\.\/].*?)["']\s*;)"""
-                ),
-                """export%(exports)s from "%(url)s";""",
-            ),
-            (
-                r"""(?P<matched>import\s*['"](?P<url>[\.\/].*?)["']\s*;)""",
-                """import"%(url)s";""",
-            ),
-            (
-                r"""(?P<matched>import\(["'](?P<url>.*?)["']\))""",
-                """import("%(url)s")""",
-            ),
-        ),
-    )
-    patterns = (
-        (
-            "*.css",
-            (
-                r"""(?P<matched>url\(['"]{0,1}\s*(?P<url>.*?)["']{0,1}\))""",
-                (
-                    r"""(?P<matched>@import\s*["']\s*(?P<url>.*?)["'])""",
-                    """@import url("%(url)s")""",
-                ),
-                (
-                    (
-                        r"(?m)(?P<matched>)^(/\*#[ \t]"
-                        r"(?-i:sourceMappingURL)=(?P<url>.*)[ \t]*\*/)$"
-                    ),
-                    "/*# sourceMappingURL=%(url)s */",
-                ),
-            ),
-        ),
-        (
-            "*.js",
-            (
-                (
-                    r"(?m)(?P<matched>)^(//# (?-i:sourceMappingURL)=(?P<url>.*))$",
-                    "//# sourceMappingURL=%(url)s",
-                ),
-            ),
-        ),
-    )
-    keep_intermediate_files = True
-
-    def __init__(self, *args, **kwargs):
-        if self.support_js_module_import_aggregation:
-            self.patterns += (self._js_module_import_aggregation_patterns,)
-        super().__init__(*args, **kwargs)
-        self._patterns = {}
-        self.hashed_files = {}
-        for extension, patterns in self.patterns:
-            for pattern in patterns:
-                if isinstance(pattern, tuple | list):
-                    pattern, template = pattern
-                else:
-                    template = self.default_template
-                compiled = re.compile(pattern, re.IGNORECASE)
-                self._patterns.setdefault(extension, []).append((compiled, template))
-
-    def file_hash(self, name, content=None):
-        """
-        Return a hash of the file with the given name and optional content.
-        """
-        if content is None:
-            return None
-        hasher = md5(usedforsecurity=False)
-        for chunk in content.chunks():
-            hasher.update(chunk)
-        return hasher.hexdigest()[:12]
-
-    def hashed_name(self, name, content=None, filename=None):
-        # `filename` is the name of file to hash if `content` isn't given.
-        # `name` is the base name to construct the new hashed filename from.
-        parsed_name = urlsplit(unquote(name))
-        clean_name = parsed_name.path.strip()
-        filename = (filename and urlsplit(unquote(filename)).path.strip()) or clean_name
-        opened = content is None
-        if opened:
-            if not self.exists(filename):
-                raise ValueError(
-                    f"The file '{filename}' could not be found with {self!r}."
-                )
-            try:
-                content = self.open(filename)
-            except OSError:
-                # Handle directory paths and fragments
-                return name
-        try:
-            file_hash = self.file_hash(clean_name, content)
-        finally:
-            if opened:
-                content.close()
-        path, filename = os.path.split(clean_name)
-        root, ext = os.path.splitext(filename)
-        file_hash = (".%s" % file_hash) if file_hash else ""
-        hashed_name = os.path.join(path, f"{root}{file_hash}{ext}")
-        unparsed_name = list(parsed_name)
-        unparsed_name[2] = hashed_name
-        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
-        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
-        if "?#" in name and not unparsed_name[3]:
-            unparsed_name[2] += "?"
-        return urlunsplit(unparsed_name)
-
-    def _url(self, hashed_name_func, name, force=False, hashed_files=None):
-        """
-        Return the non-hashed URL in DEBUG mode.
-        """
-        if settings.DEBUG and not force:
-            hashed_name, fragment = name, ""
-        else:
-            clean_name, fragment = urldefrag(name)
-            if urlsplit(clean_name).path.endswith("/"):  # don't hash paths
-                hashed_name = name
-            else:
-                args = (clean_name,)
-                if hashed_files is not None:
-                    args += (hashed_files,)
-                hashed_name = hashed_name_func(*args)
-
-        final_url = super().url(hashed_name)
-
-        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
-        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
-        query_fragment = "?#" in name  # [sic!]
-        if fragment or query_fragment:
-            urlparts = list(urlsplit(final_url))
-            if fragment and not urlparts[4]:
-                urlparts[4] = fragment
-            if query_fragment and not urlparts[3]:
-                urlparts[2] += "?"
-            final_url = urlunsplit(urlparts)
-
-        return unquote(final_url)
-
-    def url(self, name, force=False):
-        """
-        Return the non-hashed URL in DEBUG mode.
-        """
-        return self._url(self.stored_name, name, force)
-
-    def url_converter(self, name, hashed_files, template=None):
-        """
-        Return the custom URL converter for the given file name.
-        """
-        if template is None:
-            template = self.default_template
-
-        def converter(matchobj):
-            """
-            Convert the matched URL to a normalized and hashed URL.
-
-            This requires figuring out which files the matched URL resolves
-            to and calling the url() method of the storage.
-            """
-            matches = matchobj.groupdict()
-            matched = matches["matched"]
-            url = matches["url"]
-
-            # Ignore absolute/protocol-relative and data-uri URLs.
-            if re.match(r"^[a-z]+:", url):
-                return matched
-
-            # Ignore absolute URLs that don't point to a static file (dynamic
-            # CSS / JS?). Note that ASSETS_URL cannot be empty.
-            if url.startswith("/") and not url.startswith(settings.ASSETS_URL):
-                return matched
-
-            # Strip off the fragment so a path-like fragment won't interfere.
-            url_path, fragment = urldefrag(url)
-
-            # Ignore URLs without a path
-            if not url_path:
-                return matched
-
-            if url_path.startswith("/"):
-                # Otherwise the condition above would have returned prematurely.
-                assert url_path.startswith(settings.ASSETS_URL)
-                target_name = url_path.removeprefix(settings.ASSETS_URL)
-            else:
-                # We're using the posixpath module to mix paths and URLs conveniently.
-                source_name = name if os.sep == "/" else name.replace(os.sep, "/")
-                target_name = posixpath.join(posixpath.dirname(source_name), url_path)
-
-            # Determine the hashed name of the target file with the storage backend.
-            hashed_url = self._url(
-                self._stored_name,
-                unquote(target_name),
-                force=True,
-                hashed_files=hashed_files,
-            )
-
-            transformed_url = "/".join(
-                url_path.split("/")[:-1] + hashed_url.split("/")[-1:]
-            )
-
-            # Restore the fragment that was stripped off earlier.
-            if fragment:
-                transformed_url += ("?#" if "?#" in url else "#") + fragment
-
-            # Return the hashed version to the file
-            matches["url"] = unquote(transformed_url)
-            return template % matches
-
-        return converter
-
-    def post_process(self, paths, dry_run=False, **options):
-        """
-        Post process the given dictionary of files (called from collectstatic).
-
-        Processing is actually two separate operations:
-
-        1. renaming files to include a hash of their content for cache-busting,
-           and copying those files to the target storage.
-        2. adjusting files which contain references to other files so they
-           refer to the cache-busting filenames.
-
-        If either of these are performed on a file, then that file is considered
-        post-processed.
-        """
-        # don't even dare to process the files if we're in dry run mode
-        if dry_run:
-            return
-
-        # where to store the new paths
-        hashed_files = {}
-
-        # build a list of adjustable files
-        adjustable_paths = [
-            path for path in paths if matches_patterns(path, self._patterns)
-        ]
-
-        # Adjustable files to yield at end, keyed by the original path.
-        processed_adjustable_paths = {}
-
-        # Do a single pass first. Post-process all files once, yielding not
-        # adjustable files and exceptions, and collecting adjustable files.
-        for name, hashed_name, processed, _ in self._post_process(
-            paths, adjustable_paths, hashed_files
-        ):
-            if name not in adjustable_paths or isinstance(processed, Exception):
-                yield name, hashed_name, processed
-            else:
-                processed_adjustable_paths[name] = (name, hashed_name, processed)
-
-        paths = {path: paths[path] for path in adjustable_paths}
-        substitutions = False
-
-        for i in range(self.max_post_process_passes):
-            substitutions = False
-            for name, hashed_name, processed, subst in self._post_process(
-                paths, adjustable_paths, hashed_files
-            ):
-                # Overwrite since hashed_name may be newer.
-                processed_adjustable_paths[name] = (name, hashed_name, processed)
-                substitutions = substitutions or subst
-
-            if not substitutions:
-                break
-
-        if substitutions:
-            yield "All", None, RuntimeError("Max post-process passes exceeded.")
-
-        # Store the processed paths
-        self.hashed_files.update(hashed_files)
-
-        # Yield adjustable files with final, hashed name.
-        yield from processed_adjustable_paths.values()
-
-    def _post_process(self, paths, adjustable_paths, hashed_files):
-        # Sort the files by directory level
-        def path_level(name):
-            return len(name.split(os.sep))
-
-        for name in sorted(paths, key=path_level, reverse=True):
-            substitutions = True
-            # use the original, local file, not the copied-but-unprocessed
-            # file, which might be somewhere far away, like S3
-            storage, path = paths[name]
-            with storage.open(path) as original_file:
-                cleaned_name = self.clean_name(name)
-                hash_key = self.hash_key(cleaned_name)
-
-                # generate the hash with the original content, even for
-                # adjustable files.
-                if hash_key not in hashed_files:
-                    hashed_name = self.hashed_name(name, original_file)
-                else:
-                    hashed_name = hashed_files[hash_key]
-
-                # then get the original's file content..
-                if hasattr(original_file, "seek"):
-                    original_file.seek(0)
-
-                hashed_file_exists = self.exists(hashed_name)
-                processed = False
-
-                # ..to apply each replacement pattern to the content
-                if name in adjustable_paths:
-                    old_hashed_name = hashed_name
-                    try:
-                        content = original_file.read().decode("utf-8")
-                    except UnicodeDecodeError as exc:
-                        yield name, None, exc, False
-                    for extension, patterns in self._patterns.items():
-                        if matches_patterns(path, (extension,)):
-                            for pattern, template in patterns:
-                                converter = self.url_converter(
-                                    name, hashed_files, template
-                                )
-                                try:
-                                    content = pattern.sub(converter, content)
-                                except ValueError as exc:
-                                    yield name, None, exc, False
-                    if hashed_file_exists:
-                        self.delete(hashed_name)
-                    # then save the processed result
-                    content_file = ContentFile(content.encode())
-                    if self.keep_intermediate_files:
-                        # Save intermediate file for reference
-                        self._save(hashed_name, content_file)
-                    hashed_name = self.hashed_name(name, content_file)
-
-                    if self.exists(hashed_name):
-                        self.delete(hashed_name)
-
-                    saved_name = self._save(hashed_name, content_file)
-                    hashed_name = self.clean_name(saved_name)
-                    # If the file hash stayed the same, this file didn't change
-                    if old_hashed_name == hashed_name:
-                        substitutions = False
-                    processed = True
-
-                if not processed:
-                    # or handle the case in which neither processing nor
-                    # a change to the original file happened
-                    if not hashed_file_exists:
-                        processed = True
-                        saved_name = self._save(hashed_name, original_file)
-                        hashed_name = self.clean_name(saved_name)
-
-                # and then set the cache accordingly
-                hashed_files[hash_key] = hashed_name
-
-                yield name, hashed_name, processed, substitutions
-
-    def clean_name(self, name):
-        return name.replace("\\", "/")
-
-    def hash_key(self, name):
-        return name
-
-    def _stored_name(self, name, hashed_files):
-        # Normalize the path to avoid multiple names for the same file like
-        # ../foo/bar.css and ../foo/../foo/bar.css which normalize to the same
-        # path.
-        name = posixpath.normpath(name)
-        cleaned_name = self.clean_name(name)
-        hash_key = self.hash_key(cleaned_name)
-        cache_name = hashed_files.get(hash_key)
-        if cache_name is None:
-            cache_name = self.clean_name(self.hashed_name(name))
-        return cache_name
-
-    def stored_name(self, name):
-        cleaned_name = self.clean_name(name)
-        hash_key = self.hash_key(cleaned_name)
-        cache_name = self.hashed_files.get(hash_key)
-        if cache_name:
-            return cache_name
-        # No cached name found, recalculate it from the files.
-        intermediate_name = name
-        for i in range(self.max_post_process_passes + 1):
-            cache_name = self.clean_name(
-                self.hashed_name(name, content=None, filename=intermediate_name)
-            )
-            if intermediate_name == cache_name:
-                # Store the hashed name if there was a miss.
-                self.hashed_files[hash_key] = cache_name
-                return cache_name
-            else:
-                # Move on to the next intermediate file.
-                intermediate_name = cache_name
-        # If the cache name can't be determined after the max number of passes,
-        # the intermediate files on disk may be corrupt; avoid an infinite loop.
-        raise ValueError(f"The name '{name}' could not be hashed with {self!r}.")
-
-
-class ManifestFilesMixin(HashedFilesMixin):
-    manifest_version = "1.1"  # the manifest format standard
-    manifest_name = "assets.json"
-    manifest_strict = True
-    keep_intermediate_files = False
-
-    def __init__(self, *args, manifest_storage=None, **kwargs):
-        super().__init__(*args, **kwargs)
-        if manifest_storage is None:
-            manifest_storage = self
-        self.manifest_storage = manifest_storage
-        self.hashed_files, self.manifest_hash = self.load_manifest()
-
-    def read_manifest(self):
-        try:
-            with self.manifest_storage.open(self.manifest_name) as manifest:
-                return manifest.read().decode()
-        except FileNotFoundError:
-            return None
-
-    def load_manifest(self):
-        content = self.read_manifest()
-        if content is None:
-            return {}, ""
-        try:
-            stored = json.loads(content)
-        except json.JSONDecodeError:
-            pass
-        else:
-            version = stored.get("version")
-            if version in ("1.0", "1.1"):
-                return stored.get("paths", {}), stored.get("hash", "")
-        raise ValueError(
-            "Couldn't load manifest '{}' (version {})".format(
-                self.manifest_name, self.manifest_version
-            )
-        )
-
-    def post_process(self, *args, **kwargs):
-        self.hashed_files = {}
-        yield from super().post_process(*args, **kwargs)
-        if not kwargs.get("dry_run"):
-            self.save_manifest()
-
-    def save_manifest(self):
-        self.manifest_hash = self.file_hash(
-            None, ContentFile(json.dumps(sorted(self.hashed_files.items())).encode())
-        )
-        payload = {
-            "paths": self.hashed_files,
-            "version": self.manifest_version,
-            "hash": self.manifest_hash,
-        }
-        if self.manifest_storage.exists(self.manifest_name):
-            self.manifest_storage.delete(self.manifest_name)
-        contents = json.dumps(payload).encode()
-        self.manifest_storage._save(self.manifest_name, ContentFile(contents))
-
-    def stored_name(self, name):
-        parsed_name = urlsplit(unquote(name))
-        clean_name = parsed_name.path.strip()
-        hash_key = self.hash_key(clean_name)
-        cache_name = self.hashed_files.get(hash_key)
-        if cache_name is None:
-            if self.manifest_strict:
-                raise ValueError("Missing assets manifest entry for '%s'" % clean_name)
-            cache_name = self.clean_name(self.hashed_name(name))
-        unparsed_name = list(parsed_name)
-        unparsed_name[2] = cache_name
-        # Special casing for a @font-face hack, like url(myfont.eot?#iefix")
-        # http://www.fontspring.com/blog/the-new-bulletproof-font-face-syntax
-        if "?#" in name and not unparsed_name[3]:
-            unparsed_name[2] += "?"
-        return urlunsplit(unparsed_name)
-
-
-class ManifestStaticFilesStorage(ManifestFilesMixin, StaticFilesStorage):
-    """
-    A static file system storage backend which also saves
-    hashed copies of the files it saves.
-    """
-
-    pass
-
-
-class ConfiguredStorage(LazyObject):
-    def _setup(self):
-        backend_class = import_string(settings.ASSETS_BACKEND)
-        self._wrapped = backend_class()
-
-
-assets_storage = ConfiguredStorage()