igs-slm 0.1.5b3__py3-none-any.whl → 0.2.0b1__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {igs_slm-0.1.5b3.dist-info → igs_slm-0.2.0b1.dist-info}/METADATA +2 -2
- {igs_slm-0.1.5b3.dist-info → igs_slm-0.2.0b1.dist-info}/RECORD +47 -34
- slm/__init__.py +1 -1
- slm/admin.py +40 -3
- slm/api/edit/views.py +37 -2
- slm/api/public/serializers.py +1 -1
- slm/defines/CoordinateMode.py +9 -0
- slm/defines/SiteLogFormat.py +19 -6
- slm/defines/__init__.py +24 -22
- slm/file_views/apps.py +7 -0
- slm/file_views/config.py +253 -0
- slm/file_views/settings.py +124 -0
- slm/file_views/static/slm/file_views/banner_header.png +0 -0
- slm/file_views/static/slm/file_views/css/listing.css +82 -0
- slm/file_views/templates/slm/file_views/listing.html +70 -0
- slm/file_views/urls.py +47 -0
- slm/file_views/views.py +472 -0
- slm/forms.py +22 -4
- slm/jinja2/slm/sitelog/ascii_9char.log +1 -1
- slm/jinja2/slm/sitelog/legacy.log +1 -1
- slm/management/commands/check_upgrade.py +25 -19
- slm/management/commands/generate_sinex.py +9 -7
- slm/map/settings.py +0 -0
- slm/migrations/0001_alter_archivedsitelog_size_and_more.py +44 -0
- slm/migrations/0032_archiveindex_valid_range_and_more.py +8 -1
- slm/migrations/simplify_daily_index_files.py +86 -0
- slm/models/index.py +73 -6
- slm/models/sitelog.py +6 -0
- slm/models/system.py +35 -2
- slm/parsing/__init__.py +10 -0
- slm/parsing/legacy/binding.py +3 -2
- slm/receivers/cache.py +25 -0
- slm/settings/root.py +22 -0
- slm/settings/routines.py +2 -0
- slm/settings/slm.py +58 -0
- slm/settings/urls.py +1 -1
- slm/settings/validation.py +5 -4
- slm/signals.py +3 -4
- slm/static/slm/js/enums.js +7 -6
- slm/static/slm/js/form.js +25 -14
- slm/static/slm/js/slm.js +4 -2
- slm/templatetags/slm.py +1 -1
- slm/utils.py +161 -36
- slm/validators.py +51 -0
- {igs_slm-0.1.5b3.dist-info → igs_slm-0.2.0b1.dist-info}/WHEEL +0 -0
- {igs_slm-0.1.5b3.dist-info → igs_slm-0.2.0b1.dist-info}/entry_points.txt +0 -0
- {igs_slm-0.1.5b3.dist-info → igs_slm-0.2.0b1.dist-info}/licenses/LICENSE +0 -0
slm/file_views/views.py
ADDED
@@ -0,0 +1,472 @@
|
|
1
|
+
import csv
|
2
|
+
import fnmatch
|
3
|
+
import mimetypes
|
4
|
+
import typing as t
|
5
|
+
from datetime import datetime, timezone
|
6
|
+
from io import StringIO
|
7
|
+
from pathlib import Path
|
8
|
+
|
9
|
+
from django.conf import settings
|
10
|
+
from django.core.cache import cache
|
11
|
+
from django.core.management import call_command
|
12
|
+
from django.db.models import DateTimeField, F, Func, Max, PositiveIntegerField, Q, Value
|
13
|
+
from django.db.models.functions import Length
|
14
|
+
from django.http import (
|
15
|
+
FileResponse,
|
16
|
+
Http404,
|
17
|
+
HttpResponse,
|
18
|
+
JsonResponse,
|
19
|
+
)
|
20
|
+
from django.utils.decorators import method_decorator
|
21
|
+
from django.views.decorators.cache import cache_page
|
22
|
+
from django.views.generic import TemplateView
|
23
|
+
|
24
|
+
from slm.defines import SiteLogFormat, SiteLogStatus
|
25
|
+
from slm.models import ArchivedSiteLog, Site
|
26
|
+
|
27
|
+
from .config import Listing
|
28
|
+
|
29
|
+
|
30
|
+
def guess_mimetype(pth: t.Union[Path, str]) -> str:
    """
    Guess the MIME type for the given file path or name.

    Falls back to ``text/plain`` when the type cannot be determined and
    normalizes the result by dropping any parameters (``;charset=...``),
    stripping whitespace and lower-casing.

    :param pth: A :class:`~pathlib.Path` object or a plain file name string.
    :return: The normalized MIME type string.
    """
    name = pth.name if isinstance(pth, Path) else pth
    guessed = mimetypes.guess_type(name)[0] or "text/plain"
    return guessed.split(";")[0].strip().lower()
|
40
|
+
|
41
|
+
|
42
|
+
def is_browser_renderable(pth: t.Union[Path, str]) -> bool:
    """
    Return True if the given MIME type is typically renderable directly in browsers.

    Any ``text/*`` type is considered renderable; other types must be listed
    in the optional ``BROWSER_RENDERABLE_MIMETYPES`` setting.

    :param pth: A :class:`~pathlib.Path` object or a plain file name string.
    """
    mime_type = guess_mimetype(pth)
    if mime_type.startswith("text"):
        return True
    return mime_type in getattr(settings, "BROWSER_RENDERABLE_MIMETYPES", {})
|
50
|
+
|
51
|
+
|
52
|
+
def file_cache_key(path: Path, property: t.Optional[str] = None) -> str:
    """
    Build the cache key used to store a cached property of a served file.

    When ``property`` is omitted the returned key keeps a literal
    ``{property}`` placeholder so callers can later fill it in with
    ``key.format(property=...)``.

    :param path: The file path the key refers to.
    :param property: Optional property name (e.g. "size", "modified").
    :return: The cache key string (templated if no property was given).
    """
    template = f"file_views:{{property}}:{path.as_posix()}"
    return template.format(property=property) if property else template
|
57
|
+
|
58
|
+
|
59
|
+
class FileSystemView(TemplateView):
    """
    A base file system view that renders file system artifacts.
    """

    # template used to render the HTML directory listing
    template_name = "slm/file_views/listing.html"

    # static listings that are always included (e.g. generated command output);
    # NOTE: class-level mutable default - get_context_data() rebinds a fresh
    # list onto the instance/class each request
    builtin_listings: t.Sequence[Listing] = []
|
67
|
+
|
68
|
+
def get_filter_patterns(self, filename=None, **_) -> t.List[str]:
    """
    Return a list of glob patterns to filter entries by from the request.

    Subclasses may override this to fetch the patterns differently or disable
    filtering.

    :param filename: An optional exact file name to include as a pattern.
    :param kwargs: The kwargs passed to the view.
    :return: A list of glob patterns or an empty list if no filtering should be
        done.
    """
    matches = self.request.GET.getlist("match", [])
    if filename:
        matches.append(filename)
    # bug fix: previously returned ``filename`` (a string or None) instead of
    # the collected pattern list, breaking the documented contract and every
    # caller that iterates the patterns
    return matches
|
83
|
+
|
84
|
+
def filter_listings(
    self, listings: "t.Iterable[Listing]", patterns: t.List[str]
) -> "t.Generator[Listing, None, None]":
    """
    Yield only the listings whose display name matches at least one of the
    given glob patterns; yield everything when no patterns are supplied.

    :param listings: The listings to filter.
    :param patterns: Glob patterns to match display names against.
    """
    if not patterns:
        yield from listings
        return
    for entry in listings:
        if any(fnmatch.fnmatch(entry.display, glob) for glob in patterns):
            yield entry
|
94
|
+
|
95
|
+
def translate_order_key(self, key: str) -> t.Optional[str]:
    """
    Hook giving subclasses a chance to swap out ordering attribute keys.
    Returning a falsey value removes the key from the ordering tuple.

    :param key: The key to translate
    :return: the key that should be used
    """
    # the base implementation performs no translation
    return key
|
104
|
+
|
105
|
+
def order_listing(
    self,
    listings: "t.Iterable[Listing]",
    order_column: str,
    order_key: t.Sequence[str] = ("is_dir", "display"),
    reverse: bool = False,
    **_,
) -> "t.Tuple[t.Iterable[Listing], int]":
    """
    Apply configured ordering to the listings.

    Bug fixes relative to the original implementation:

    * the sort key now compares the configured listing attributes instead of
      returning the running max display length (which left the sort order
      effectively undefined),
    * the unordered fallback tested the builtin ``reversed`` (always truthy)
      instead of the ``reverse`` flag,
    * the fallback "max length" was ``max()`` over the listings themselves
      (a Listing object), not the maximum display-name length.

    :param listings: An iterable of :class:`~slm.file_views.config.Listing`
        objects to order
    :param order_column: The column key to order on
    :param order_key: A default :class:`~slm.file_views.config.Listing`
        attribute tuple to order by.
    :param reverse: True if the ordering should be reversed, false otherwise
    :param kwargs: Other named arguments passed to the view
    :return: A 2-tuple where the first element is an iterable of ordered
        :class:`~slm.file_views.config.Listing` objects and the second element is the
        max length of all display strings
    """
    keys = tuple(key for key in self.order_keys(order_column, order_key) if key)
    materialized = list(listings)
    max_len = max((len(listing.display) for listing in materialized), default=0)
    if keys:
        # NOTE(review): assumes the keyed attributes are mutually comparable
        # across all listings (e.g. no None mixed with datetimes) - confirm
        def key_func(listing: "Listing") -> t.Any:
            return tuple(getattr(listing, key) for key in keys)

        return sorted(materialized, key=key_func, reverse=reverse), max_len
    if reverse:
        materialized.reverse()
    return materialized, max_len
|
140
|
+
|
141
|
+
def order_keys(
    self, order_column: str, order_key: t.Sequence[str] = ("is_dir", "display"), **_
) -> t.Generator[str, None, None]:
    """
    Yield the (translated) attribute names to order by for the given column.

    "N" (name) uses the default ``order_key`` tuple, "S" orders by size and
    "M" by modification time; unknown columns fall back to ``order_key``.
    """
    column_map = {
        "N": order_key,
        "S": ("size",),
        "M": ("modified",),
    }
    selected = column_map.get(order_column, order_key)
    yield from (self.translate_order_key(key) for key in selected)
|
148
|
+
|
149
|
+
def get_context_data(self, filename=None, **kwargs):
    """
    Build the listing context: filter listings, annotate cached size/modified
    values, parse the legacy ``?C=<col>;O=<dir>`` ordering parameters and
    order the combined builtin + glob listings.

    Bug fixes relative to the original implementation:

    * ``fnmatch`` is a module - the glob filter now calls
      ``fnmatch.fnmatch`` (calling the module raised TypeError),
    * an empty pattern list no longer filters out every glob entry
      (``any()`` over an empty list is False),
    * ``order_key`` is excluded from the kwargs forwarded to
      :meth:`order_listing`, which previously produced a duplicate keyword
      argument when the URL conf supplied one.
    """
    # we use the legacy query parameter naming even though it's non-standard
    # so as not to break any links out in the wild
    order_column = "N"
    reverse = False
    params = self.request.GET.get("C", None)
    # NOTE(review): ``filename`` is not forwarded to get_filter_patterns here,
    # matching the original behavior - confirm that is intended
    patterns = self.get_filter_patterns(**kwargs)
    path = Path(self.request.path)
    builtins = []
    for listing in self.filter_listings(
        (*self.builtin_listings, *kwargs.get("listings", [])), patterns=patterns
    ):
        if listing.size is None and not listing.is_dir:
            # pull size/modified recorded in the cache (see command_output_view)
            key = file_cache_key(path / listing.display)
            listing.size = cache.get(key.format(property="size"), None)
            listing.modified = cache.get(key.format(property="modified"), None)
        builtins.append(listing)
    self.builtin_listings = builtins

    if params:
        # reconstruct and split the legacy "C=N;O=A" style parameter string
        params = f"C={params}".split(";")
        if len(params) > 0:
            order_column = params[0].split("=")[-1]
        if len(params) > 1 and params[1].split("=")[-1] == "D":
            reverse = True

    listings, max_len = self.order_listing(
        (
            *self.builtin_listings,
            *Listing.from_glob(
                kwargs.get("glob", None),
                filter=lambda name: not patterns
                or any(fnmatch.fnmatch(name, pattern) for pattern in patterns),
            ),
        ),
        order_column=order_column,
        order_key=kwargs.get("order_key", ("is_dir", "display")),
        reverse=reverse,
        **{
            param: value
            for param, value in kwargs.items()
            if param not in ("listings", "order_key")
        },
    )
    return {
        **kwargs,
        **super().get_context_data(filename=filename, **kwargs),
        "order_column": order_column,
        "reverse": reverse,
        # template helper: direction a repeat click on this column yields
        f"{order_column}_ordering": "A" if reverse else "D",
        "patterns": patterns,
        "listings": listings,
        "max_len": max_len,
        "parent": path.parent if path and path.parent != Path("/") else None,
    }
|
201
|
+
|
202
|
+
def render_to_response(self, context, **kwargs):
    """
    Dispatch to a JSON, CSV or plain-text renderer based on the Accept
    header or query parameters; fall back to the HTML template otherwise.
    """
    accept = self.request.META.get("HTTP_ACCEPT", "")
    query = self.request.GET

    wants_json = "json" in accept or "json" in query
    wants_csv = "text/csv" in accept or "csv" in query
    wants_text = "text/plain" in accept or "txt" in query or "list" in query

    if wants_json:
        return self.render_json_response(context, **kwargs)
    if wants_csv:
        return self.render_csv_response(context, **kwargs)
    if wants_text:
        return self.render_txt_response(context, **kwargs)
    return super().render_to_response(context, **kwargs)
|
218
|
+
|
219
|
+
def render_json_response(self, context, **_):
    """
    Render the listings as a JSON array of ``{name, modified, size}`` objects.

    Bug fix: ``safe=False`` is required because the top-level JSON structure
    is a list, not a dict - without it :class:`JsonResponse` raises TypeError.
    """
    return JsonResponse(
        [
            {
                "name": listing.display,
                # NOTE(review): assumes listing.modified is always a datetime;
                # cache misses can leave it None - confirm upstream guarantees
                "modified": listing.modified.astimezone(timezone.utc).strftime(
                    "%Y-%m-%dT%H:%MZ"
                ),
                "size": listing.size,
            }
            for listing in context["listings"]
        ],
        safe=False,
    )
|
232
|
+
|
233
|
+
def render_csv_response(self, context, **_):
    """
    Render the listings as CSV with Name, Modified and Size columns.

    Bug fix: accepts (and ignores) extra response kwargs - render_to_response
    forwards ``**kwargs``, which previously raised TypeError for this renderer.
    """
    response = HttpResponse(content_type="text/csv")
    writer = csv.writer(response)
    writer.writerow(["Name", "Modified", "Size"])
    for listing in context["listings"]:
        writer.writerow(
            [
                listing.display,
                # NOTE(review): assumes listing.modified is always a datetime;
                # cache misses can leave it None - confirm upstream guarantees
                listing.modified.astimezone(timezone.utc).strftime(
                    "%Y-%m-%dT%H:%MZ"
                ),
                listing.size,
            ]
        )
    return response
|
248
|
+
|
249
|
+
def render_txt_response(self, context, **_):
    """
    Render the listings as fixed-width plain text lines
    (name, modified timestamp, size).

    Bug fixes: accepts extra response kwargs like the other renderers
    (render_to_response forwards ``**kwargs``), and shows a real size of 0
    instead of "-----" (the old ``or`` test treated zero-byte files as
    having no size).
    """
    stamp = "%Y-%m-%dT%H:%MZ"
    lines = []
    for listing in context["listings"]:
        pad = " " * (context["max_len"] - len(listing.display))
        modified = listing.modified.astimezone(timezone.utc).strftime(stamp)
        size = listing.size if listing.size is not None else "-----"
        lines.append(f"{listing.display}{pad} {modified} {size}")
    return HttpResponse("\n".join(lines), content_type="text/plain")
|
255
|
+
|
256
|
+
def get(self, request, *args, filename=None, **kwargs):
    """
    Serve a single file when ``filename`` is given, otherwise render the
    directory listing.

    :raises Http404: If the requested filename is not in the listing.
    """
    if filename:
        context = self.get_context_data(**kwargs)
        found = None
        for listing in context.get("listings", []):
            if listing.display == filename:
                found = listing
                break
        if not found:
            raise Http404()
        # bug fix: previously referenced the loop variable ``listing`` which
        # is unbound when the listing iterable is empty; use ``found``
        return FileResponse(
            found.on_disk,
            as_attachment=context.get("download", False)
            or not is_browser_renderable(found.display),
            filename=found.display,
        )
    return super().get(request, *args, filename=filename, **kwargs)
|
274
|
+
|
275
|
+
|
276
|
+
@method_decorator(cache_page(3600 * 12, key_prefix="file_views"), name="dispatch")
class ArchivedSiteLogView(FileSystemView):
    """
    This view renders a file listing from the site log archive index based on
    configured parameters. The default templates render the view as an FTP over
    HTTP interface similar to the page at https://files.igs.org/pub/station/log

    It allows access to site log text and a wild card/plain text listing
    interface if the ?list url query parameter is present.

    File list views are cached in the default cache for 12 hours, or until the
    cache is cleared by a publish event.
    """

    # queryset of sites whose archived logs may be listed and served
    sites = Site.objects.public()

    # restrict logs to these formats (empty -> all formats)
    log_formats: t.Sequence[SiteLogFormat] = []
    # restrict logs to sites in these status states
    log_status: t.Sequence[SiteLogStatus] = SiteLogStatus.active_states()
    # include only the highest ranking format at each timestamp
    best_format: bool = False
    # include only the most recent log for each site
    most_recent: bool = False
    # include only archived logs that are no longer current
    non_current: bool = False
    # normalize log names to this many characters of the site name
    name_len: t.Optional[int] = None
    # normalize names to lower (True) / upper (False) case; None leaves as-is
    lower_case: t.Optional[bool] = None

    # queryset field used to match a requested filename (set by get_queryset)
    lookup_field = "display"
|
301
|
+
|
302
|
+
def translate_order_key(self, key: str) -> str:
    """
    Map listing attribute keys onto queryset annotation/field names.

    "display" maps to the active lookup field and "is_dir" is dropped
    (mapped to None) unless static builtin listings are present; any other
    key defers to the base implementation.
    """
    return {
        "display": self.lookup_field,
        "is_dir": "is_dir" if self.builtin_listings else None,
    }.get(key, super().translate_order_key(key))
|
307
|
+
|
308
|
+
def get_context_data(self, filename=None, **kwargs):
    """
    Extend the base context with archived site log listings from the database,
    applying the request's glob patterns as database regex filters and doing
    the ordering in SQL when no static listings need to be merged in.

    Bug fixes relative to the original implementation:

    * ``aggregate(Max(...))`` over an empty queryset yields None - coerce to
      0 before ``max()`` (previously a TypeError on empty listings),
    * ``lstrip``/``rstrip`` strip *character sets*, which could eat leading or
      trailing characters of the translated pattern itself - use
      ``removeprefix``/``removesuffix`` to strip the exact regex wrapper.
    """
    context = super().get_context_data(**kwargs)
    listings = self.get_queryset(**kwargs)
    if patterns := context.get("patterns", []):
        pattern_filter = Q()
        pattern_key = f"{self.translate_order_key('display')}__iregex"
        for pattern in patterns:
            # fnmatch.translate() wraps the regex as (?s:...)\Z - strip the
            # wrapper so the database iregex matches the bare expression
            regex = (
                fnmatch.translate(pattern)
                .removesuffix(")\\Z")
                .removeprefix("(?s:")
            )
            pattern_filter |= Q(**{pattern_key: regex})
        listings = listings.filter(pattern_filter)

    context["download"] = kwargs.get(
        "download", getattr(settings, "SLM_FILE_VIEW_DOWNLOAD", False)
    )
    order_column = context.get("order_column", "N")
    order_key = context.get("order_key", ("is_dir", "display"))
    if parent_listings := context.get("listings", []):
        # static listings present - merge and order in Python
        context["listings"], context["max_len"] = self.order_listing(
            (*parent_listings, *listings.distinct()),
            order_column=order_column,
            order_key=order_key,
            reverse=context.get("reverse", False),
            **kwargs,
        )
    else:
        # if we have no external listings we can use the database to do all of the ordering
        listings = listings.order_by(
            *(key for key in self.order_keys(order_column, order_key) if key)
        )
        if context["reverse"]:
            listings = listings.reverse()
        context["listings"] = listings.distinct()
        context["max_len"] = max(
            listings.aggregate(Max("display_len"))["display_len__max"] or 0,
            context.get("max_len", 0),
        )

    return context
|
351
|
+
|
352
|
+
def get_queryset(
    self,
    # defaults bind the class-level attributes at definition time, so
    # subclasses overriding them must also override callers that rely on them
    log_formats=log_formats,
    log_status=log_status,
    best_format=best_format,
    most_recent=most_recent,
    non_current=non_current,
    name_len=name_len,
    lower_case=lower_case,
    **_,
):
    """
    Fetch the archived site logs of legacy format for the current indexes
    of our public sites. We annotate file names because the root log views
    should always show the requested canonical name of the log file, even
    if the file name in the archive is different.

    Side effect: sets ``self.lookup_field`` to "display" or "name" depending
    on whether file names are being normalized.

    :param log_formats: Restrict logs to these formats
    :param log_status: Restrict logs to sites in these status states.
    :param best_format: Include the highest ranking format at each timestamp
    :param most_recent: Only include the most recent log for each site
    :param non_current: Only include archived logs that are no longer current
    :param name_len: Normalize site log names to using this many characters of the site name
    :param lower_case: Normalize site log names to lower or upper case if True or False
    :return: A queryset holding :class:`~slm.models.ArchivedSiteLog` objects matching the
        parameters.
    """
    # allow the URL conf to narrow the site set; default to public sites
    fltr = Q(index__site__in=self.kwargs.get("sites", self.sites))

    if log_status:
        fltr &= Q(index__site__status__in=log_status)
    if log_formats:
        fltr &= Q(log_format__in=log_formats)

    qry = ArchivedSiteLog.objects.filter(fltr)

    if best_format:
        qry = qry.best_format()

    if most_recent:
        qry = qry.most_recent()

    if non_current:
        # we do it this way because in cases where the latest log has multiple indexes
        # on the same day the last same day index will appear in the results - if we
        # exclude the last indexes in the same query it breaks
        # the windowing exclusion of older same day logs for that latest index date
        qry = ArchivedSiteLog.objects.filter(
            pk__in=qry, index__valid_range__upper_inf=False
        )

    if name_len is not None or lower_case is not None:
        # normalized names: match requests against the annotated display name
        self.lookup_field = "display"
        qry = qry.annotate_filenames(
            name_length=name_len or None,
            lower_case=lower_case,
            field_name="display",
        )
    else:
        # no normalization: the archive file name is the display name
        self.lookup_field = "name"
        qry = qry.annotate(display=F("name"))

    # annotate the columns the listing templates/renderers expect:
    # modified (lower bound of the index validity range), is_dir (always
    # False for archived logs) and display_len (for fixed-width output)
    return qry.annotate(
        modified=Func(
            "index__valid_range", function="lower", output_field=DateTimeField()
        ),
        is_dir=Value(False),
        display_len=Length("display", output_field=PositiveIntegerField()),
    ).select_related("index", "index__site")
|
421
|
+
|
422
|
+
def get(self, request, *args, filename=None, **kwargs):
    """
    Serve the archived log matching ``filename`` (case-insensitive, most
    recent first) when given, otherwise fall back to the listing view.

    Bug fix: removed the redundant function-local
    ``from slm.models import ArchivedSiteLog`` that shadowed the
    module-level import of the same name.

    :raises Http404: If no archived log matches the requested filename.
    """
    if filename:
        try:
            archived = (
                self.get_queryset(**kwargs)
                .filter(**{f"{self.lookup_field}__iexact": filename})
                .order_by("-timestamp")
                .first()
            )
            if not archived:
                raise Http404()
            return FileResponse(
                archived.file,
                filename=filename,
                as_attachment=kwargs.get("download", False),
            )
        except ArchivedSiteLog.DoesNotExist:
            # defensive: .first() returns None rather than raising, but
            # related descriptors can still surface DoesNotExist
            raise Http404()
    return super().get(request, *args, filename=filename, **kwargs)
|
444
|
+
|
445
|
+
|
446
|
+
@cache_page(3600 * 12, key_prefix="file_views")
def command_output_view(
    request,
    command: str,
    download: bool = False,
    mimetype: t.Optional[str] = None,
    **kwargs,
):
    """
    Run the given management command and return its stdout as the response
    body. (The original docstring said "sinex file" - this view is generic;
    any command's output can be served.)

    The rendered size and modification time are cached so directory listing
    views can display them without re-running the command.

    :param request: The Django request object.
    :param command: The name of the management command to run.
    :param download: If True, serve the output as an attachment.
    :param mimetype: Content type override; guessed from the URL path if None.
    :param kwargs: Extra options forwarded to the command; the "args" key
        holds positional command arguments.
    """
    out = StringIO()
    call_command(command, *kwargs.pop("args", []), **kwargs, stdout=out)
    # getvalue() is position independent - the original seek(0) was redundant
    contents = out.getvalue()
    path = Path(request.path)
    response = HttpResponse(
        content=contents, content_type=mimetype or guess_mimetype(path)
    )
    # record size/modified for listing pages (12h TTL matches cache_page)
    key = file_cache_key(path)
    cache.set(key.format(property="size"), len(response.content), timeout=3600 * 12)
    cache.set(
        key.format(property="modified"), datetime.now(timezone.utc), timeout=3600 * 12
    )
    if download:
        response["Content-Disposition"] = f'attachment; filename="{path.name}"'
    return response
|
slm/forms.py
CHANGED
@@ -17,6 +17,7 @@ from ckeditor.widgets import CKEditorWidget
|
|
17
17
|
from crispy_forms.helper import FormHelper
|
18
18
|
from crispy_forms.layout import Div, Layout
|
19
19
|
from django import forms
|
20
|
+
from django.conf import settings
|
20
21
|
from django.contrib.gis.forms import PointField
|
21
22
|
from django.contrib.gis.geos import Point, Polygon
|
22
23
|
from django.core.exceptions import FieldDoesNotExist, ValidationError
|
@@ -45,6 +46,7 @@ from slm.api.edit.serializers import UserProfileSerializer, UserSerializer
|
|
45
46
|
from slm.defines import (
|
46
47
|
AlertLevel,
|
47
48
|
CardinalDirection,
|
49
|
+
CoordinateMode,
|
48
50
|
FrequencyStandardType,
|
49
51
|
ISOCountry,
|
50
52
|
SiteLogStatus,
|
@@ -255,7 +257,13 @@ class PointWidget(MultiWidget):
|
|
255
257
|
|
256
258
|
def value_from_datadict(self, data, files, name):
|
257
259
|
if name in data:
|
258
|
-
|
260
|
+
coords = []
|
261
|
+
for coord in data.getlist(name):
|
262
|
+
try:
|
263
|
+
coords.append(float(coord))
|
264
|
+
except (ValueError, TypeError):
|
265
|
+
coords.append(coord)
|
266
|
+
return coords
|
259
267
|
return None
|
260
268
|
|
261
269
|
|
@@ -284,9 +292,15 @@ class SLMPointField(PointField):
|
|
284
292
|
raise ValidationError(
|
285
293
|
self.error_messages["invalid_geom_type"], code="invalid_geom_type"
|
286
294
|
)
|
287
|
-
|
288
|
-
|
289
|
-
|
295
|
+
try:
|
296
|
+
return (
|
297
|
+
Point(*[None if val in ["", None] else float(val) for val in value])
|
298
|
+
or None
|
299
|
+
)
|
300
|
+
except (ValueError, TypeError) as err:
|
301
|
+
raise ValidationError(
|
302
|
+
self.error_messages["invalid_geom_type"], code="invalid_geom_type"
|
303
|
+
) from err
|
290
304
|
|
291
305
|
|
292
306
|
class AutoSelectMixin:
|
@@ -611,12 +625,16 @@ class SiteLocationForm(SectionForm):
|
|
611
625
|
xyz = SLMPointField(
|
612
626
|
help_text=SiteLocation._meta.get_field("xyz").help_text,
|
613
627
|
label=SiteLocation._meta.get_field("xyz").verbose_name,
|
628
|
+
disabled=getattr(settings, "SLM_COORDINATE_MODE", CoordinateMode.INDEPENDENT)
|
629
|
+
== CoordinateMode.LLH,
|
614
630
|
)
|
615
631
|
|
616
632
|
llh = SLMPointField(
|
617
633
|
help_text=SiteLocation._meta.get_field("llh").help_text,
|
618
634
|
label=SiteLocation._meta.get_field("llh").verbose_name,
|
619
635
|
attrs={"step": 0.0000001},
|
636
|
+
disabled=getattr(settings, "SLM_COORDINATE_MODE", CoordinateMode.INDEPENDENT)
|
637
|
+
== CoordinateMode.ECEF,
|
620
638
|
)
|
621
639
|
|
622
640
|
class Meta:
|
@@ -333,7 +333,7 @@
|
|
333
333
|
|
334
334
|
Primary Data Center : {% if moreinformation %}{{ moreinformation.primary }}{% endif %}
|
335
335
|
Secondary Data Center : {% if moreinformation %}{{ moreinformation.secondary }}{% endif %}
|
336
|
-
URL for More Information : {% if moreinformation %}{{ moreinformation.more_info }}{% endif %}
|
336
|
+
URL for More Information : {% if moreinformation %}{{ moreinformation.more_info|multi_line }}{% endif %}
|
337
337
|
Hardcopy on File
|
338
338
|
Site Map : {% if moreinformation %}{{ moreinformation.sitemap }}{% else %}(Y or URL){% endif %}
|
339
339
|
Site Diagram : {% if moreinformation %}{{ moreinformation.site_diagram }}{% else %}(Y or URL){% endif %}
|
@@ -333,7 +333,7 @@
|
|
333
333
|
|
334
334
|
Primary Data Center : {% if moreinformation %}{{ moreinformation.primary }}{% endif %}
|
335
335
|
Secondary Data Center : {% if moreinformation %}{{ moreinformation.secondary }}{% endif %}
|
336
|
-
URL for More Information : {% if moreinformation %}{{ moreinformation.more_info }}{% endif %}
|
336
|
+
URL for More Information : {% if moreinformation %}{{ moreinformation.more_info|multi_line }}{% endif %}
|
337
337
|
Hardcopy on File
|
338
338
|
Site Map : {% if moreinformation %}{{ moreinformation.sitemap }}{% else %}(Y or URL){% endif %}
|
339
339
|
Site Diagram : {% if moreinformation %}{{ moreinformation.site_diagram }}{% else %}(Y or URL){% endif %}
|
@@ -13,7 +13,7 @@ import typing as t
|
|
13
13
|
|
14
14
|
import typer
|
15
15
|
from django.core.management import CommandError
|
16
|
-
from django.db import connection
|
16
|
+
from django.db import ProgrammingError, connection
|
17
17
|
from django.db.migrations.loader import MigrationLoader
|
18
18
|
from django.utils.translation import gettext as _
|
19
19
|
from django_typer.management import TyperCommand, command
|
@@ -26,7 +26,7 @@ from slm.models import SLMVersion
|
|
26
26
|
|
27
27
|
def parse_version(version: str) -> Version:
|
28
28
|
try:
|
29
|
-
parse(version)
|
29
|
+
return parse(version)
|
30
30
|
except InvalidVersion as verr:
|
31
31
|
raise typer.BadParameter(
|
32
32
|
f"{version} is not a valid Python package version string."
|
@@ -68,11 +68,20 @@ class Command(TyperCommand):
|
|
68
68
|
# these specific versions. These versions are points at which the migration
|
69
69
|
# files were remade - meaning the database state may not be migrated correctly
|
70
70
|
# if it was not first updated to be state consistent with these specific versions.
|
71
|
-
VERSION_WAYPOINTS = list(sorted([parse("0.
|
71
|
+
VERSION_WAYPOINTS = [] # list(sorted([parse("0.2.0b0")]))
|
72
72
|
|
73
73
|
requires_migrations_checks = False
|
74
74
|
requires_system_checks = []
|
75
75
|
|
76
|
+
slm_version = parse_version(slm_version)
|
77
|
+
|
78
|
+
@property
|
79
|
+
def db_version(self) -> Version:
|
80
|
+
try:
|
81
|
+
return SLMVersion.load().version or parse("0.1.4b")
|
82
|
+
except ProgrammingError:
|
83
|
+
return parse("0.1.4b")
|
84
|
+
|
76
85
|
def closest_waypoint_gte(self, version: Version) -> t.Optional[Version]:
|
77
86
|
idx = bisect.bisect_left(self.VERSION_WAYPOINTS, version)
|
78
87
|
if idx < len(self.VERSION_WAYPOINTS):
|
@@ -89,24 +98,23 @@ class Command(TyperCommand):
|
|
89
98
|
help="Check that it is safe to run migrations from the installed version of igs-slm."
|
90
99
|
)
|
91
100
|
def is_safe(self):
|
92
|
-
|
93
|
-
if db_version > slm_version:
|
101
|
+
if self.db_version > self.slm_version:
|
94
102
|
# downgrades are possible with reversible migrations - we only
|
95
103
|
# balk if the downgrade would travel through a migration waypoint
|
96
|
-
nearest_waypoint = self.closest_waypoint_lte(db_version)
|
97
|
-
if nearest_waypoint and nearest_waypoint > slm_version:
|
104
|
+
nearest_waypoint = self.closest_waypoint_lte(self.db_version)
|
105
|
+
if nearest_waypoint and nearest_waypoint > self.slm_version:
|
98
106
|
raise CommandError(
|
99
|
-
f"Unable to downgrade from {db_version} to {slm_version}. "
|
107
|
+
f"Unable to downgrade from {self.db_version} to {self.slm_version}. "
|
100
108
|
f"Traverses version waypoint: {nearest_waypoint}. "
|
101
109
|
f"It is recommended that you restore from a database backup."
|
102
110
|
)
|
103
|
-
elif db_version < slm_version:
|
111
|
+
elif self.db_version < self.slm_version:
|
104
112
|
# Upgrades must pass through all waypoints between the database code version and
|
105
113
|
# the installed version of igs-slm
|
106
|
-
nearest_waypoint = self.closest_waypoint_gte(db_version)
|
107
|
-
if nearest_waypoint and nearest_waypoint < slm_version:
|
114
|
+
nearest_waypoint = self.closest_waypoint_gte(self.db_version)
|
115
|
+
if nearest_waypoint and nearest_waypoint < self.slm_version:
|
108
116
|
raise CommandError(
|
109
|
-
f"Unable to upgrade from {db_version} to {slm_version}. "
|
117
|
+
f"Unable to upgrade from {self.db_version} to {self.slm_version}. "
|
110
118
|
f"Traverses version waypoint: {nearest_waypoint}. "
|
111
119
|
f"You must first install and upgrade SLM at the waypoint: "
|
112
120
|
f"pip install igs-slm=={nearest_waypoint}."
|
@@ -118,22 +126,20 @@ class Command(TyperCommand):
|
|
118
126
|
def set_db_version(
|
119
127
|
self,
|
120
128
|
version: Annotated[
|
121
|
-
Version,
|
129
|
+
t.Optional[Version],
|
122
130
|
typer.Option(
|
123
131
|
help="The version string to set in the database if different than installed.",
|
124
132
|
parser=parse_version,
|
125
133
|
),
|
126
|
-
],
|
134
|
+
] = None,
|
127
135
|
):
|
128
|
-
|
129
|
-
|
130
|
-
version = version or parse_version(slm_version)
|
131
|
-
if version != SLMVersion.load().version:
|
136
|
+
version = version or self.slm_version
|
137
|
+
if version != self.db_version:
|
132
138
|
confirm = typer.confirm(
|
133
139
|
_(
|
134
140
|
"You are about to force the database to record the version of the igs-slm "
|
135
141
|
f"software it is structurally synchronized to "
|
136
|
-
f"({
|
142
|
+
f"({self.db_version} -> {version}). Are you sure you want to do this?"
|
137
143
|
)
|
138
144
|
)
|
139
145
|
if not confirm:
|