geovisio-2.7.0-py3-none-any.whl → geovisio-2.7.1-py3-none-any.whl
This diff shows the changes between publicly released versions of this package, as they appear in their respective public registries. It is provided for informational purposes only.
- geovisio/__init__.py +2 -2
- geovisio/admin_cli/cleanup.py +2 -2
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +139 -2
- geovisio/translations/el/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/el/LC_MESSAGES/messages.po +685 -0
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +126 -118
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/hu/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/hu/LC_MESSAGES/messages.po +773 -0
- geovisio/translations/messages.pot +125 -117
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +24 -16
- geovisio/utils/pictures.py +18 -20
- geovisio/utils/sequences.py +21 -23
- geovisio/utils/upload_set.py +40 -52
- geovisio/web/collections.py +19 -3
- geovisio/web/docs.py +35 -1
- geovisio/web/items.py +15 -6
- geovisio/web/map.py +86 -18
- geovisio/web/upload_set.py +9 -6
- geovisio/workers/runner_pictures.py +19 -33
- {geovisio-2.7.0.dist-info → geovisio-2.7.1.dist-info}/METADATA +2 -2
- {geovisio-2.7.0.dist-info → geovisio-2.7.1.dist-info}/RECORD +28 -24
- {geovisio-2.7.0.dist-info → geovisio-2.7.1.dist-info}/WHEEL +1 -1
- {geovisio-2.7.0.dist-info → geovisio-2.7.1.dist-info}/LICENSE +0 -0
geovisio/translations/nl/LC_MESSAGES/messages.po
CHANGED

@@ -8,26 +8,29 @@ msgstr ""
 "Project-Id-Version: PROJECT VERSION\n"
 "Report-Msgid-Bugs-To: EMAIL@ADDRESS\n"
 "POT-Creation-Date: 2024-07-10 14:10+0200\n"
-"PO-Revision-Date:
-"Last-Translator:
-"Language-Team:
+"PO-Revision-Date: 2024-11-14 23:10+0000\n"
+"Last-Translator: BrechtD <brecht.devriese92@gmail.com>\n"
+"Language-Team: Dutch <http://weblate.panoramax.xyz/projects/panoramax/api/nl/"
+">\n"
 "Language: nl\n"
 "MIME-Version: 1.0\n"
 "Content-Type: text/plain; charset=utf-8\n"
 "Content-Transfer-Encoding: 8bit\n"
+"Plural-Forms: nplurals=2; plural=n != 1;\n"
+"X-Generator: Weblate 5.4.3\n"
 "Generated-By: Babel 2.15.0\n"

 #: geovisio/templates/main.html:45
 msgid "Simple 360° geolocated pictures hosting"
-msgstr ""
+msgstr "Eenvoudige hosting van 360°-geolokaliseerde foto's"

 #: geovisio/templates/main.html:47
 msgid "Full page version"
-msgstr ""
+msgstr "Volledige pagina"

 #: geovisio/templates/main.html:51
 msgid "Viewer"
-msgstr ""
+msgstr "Viewee"

 #: geovisio/templates/main.html:52
 msgid "Embed pre-configured viewer"

@@ -36,42 +39,43 @@ msgstr ""
 #: geovisio/templates/main.html:53
 msgid "Easiest way to have a working GeoVisio viewer on your website"
 msgstr ""
+"Gemakkelijkste manier om een GeoVisio viewer op uw webpagina te plaatsen"

 #: geovisio/templates/main.html:59
 msgid "Use JS library"
-msgstr ""
+msgstr "Gebruik JS bibliotheek"

 #: geovisio/templates/main.html:60
 msgid "A completely configurable viewer for your website"
-msgstr ""
+msgstr "Een compleet configureerbare viewer voor uw website"

 #: geovisio/templates/main.html:82
 msgid "Links"
-msgstr ""
+msgstr "Links"

 #: geovisio/templates/main.html:84
 msgid "Pictures viewer"
-msgstr ""
+msgstr "Foto viewer"

 #: geovisio/templates/main.html:86
 msgid "API docs"
-msgstr ""
+msgstr "API documentatie"

 #: geovisio/templates/main.html:88
 msgid "JS library docs"
-msgstr ""
+msgstr "JS bibliotheek documentatie"

 #: geovisio/templates/main.html:90
 msgid "Repositories"
-msgstr ""
+msgstr "Archief"

 #: geovisio/templates/viewer.html:22
 msgid "You need to enable JavaScript to run this app."
-msgstr ""
+msgstr "U moet JavaScript toestaan om deze te kunnen gebruiken."

 #: geovisio/utils/auth.py:164
 msgid "Authentication is mandatory"
-msgstr ""
+msgstr "Authenticatie is verplicht"

 #: geovisio/utils/auth.py:292
 msgid "Only Bearer token are supported"

@@ -134,7 +138,7 @@ msgstr ""

 #: geovisio/utils/upload_set.py:143 geovisio/web/params.py:359
 msgid "Unsupported filter parameter"
-msgstr ""
+msgstr "Filter parameter is niet ondersteund"

 #: geovisio/web/auth.py:62
 msgid "Impossible to finish authentication flow"

@@ -592,3 +596,7 @@ msgstr ""
 #: geovisio/web/utils.py:51
 msgid "No default account defined, please contact your instance administrator"
 msgstr ""
+
+#: geovisio/utils/excluded_areas.py:82
+msgid "Impossible to find excluded area"
+msgstr "Onmogelijk om uitgesloten gebied te vinden"
geovisio/utils/pictures.py
CHANGED

@@ -15,6 +15,7 @@ from fs.path import dirname
 from psycopg.errors import UniqueViolation, InvalidParameterValue
 from geovisio import utils, errors
 from geopic_tag_reader import reader
+import re

 log = logging.getLogger(__name__)

@@ -504,9 +505,10 @@ class InvalidMetadataValue(Exception):


 class MetadataReadingError(Exception):
-    def __init__(self, details):
+    def __init__(self, details, missing_mandatory_tags=[]):
         super().__init__()
         self.details = details
+        self.missing_mandatory_tags = missing_mandatory_tags


 def insertNewPictureInDatabase(

@@ -588,7 +590,7 @@ def insertNewPictureInDatabase(
     # Flat pictures = variable fov
     if metadata["type"] == "flat":
         make, model = metadata.get("make"), metadata.get("model")
-        if make is not None and model is not None:
+        if make is not None and model is not None and metadata["focal_length"] != 0:
             db.execute("SET pg_trgm.similarity_threshold = 0.9")
             db.execute(
                 """

@@ -630,31 +632,21 @@ def insertNewPictureInDatabase(
 # Note: we don't want to store and expose exif binary fields as they are difficult to use and take a lot of storage in the database (~20% for maker notes only)
 # This list has been queried from real data (cf [this comment](https://gitlab.com/panoramax/server/api/-/merge_requests/241#note_1790580636)).
 # Update this list (and do a sql migration) if new binary fields are added
+# Note that tags ending in ".0xXXXX" are automatically striped by a regex
 BLACK_LISTED_BINARY_EXIF_FIELDS = set(
     [
         "Exif.Photo.MakerNote",
-        "Exif.Photo.0xea1c",
-        "Exif.Image.0xea1c",
         "Exif.Canon.CameraInfo",
         "Exif.Image.PrintImageMatching",
-        "Exif.Image.0xc6d3",
         "Exif.Panasonic.FaceDetInfo",
         "Exif.Panasonic.DataDump",
-        "Exif.Image.0xc6d2",
         "Exif.Canon.CustomFunctions",
         "Exif.Canon.AFInfo",
-        "Exif.Canon.0x4011",
-        "Exif.Canon.0x4019",
         "Exif.Canon.ColorData",
         "Exif.Canon.DustRemovalData",
         "Exif.Canon.VignettingCorr",
         "Exif.Canon.AFInfo3",
-        "Exif.Canon.0x001f",
-        "Exif.Canon.0x0018",
         "Exif.Canon.ContrastInfo",
-        "Exif.Canon.0x002e",
-        "Exif.Canon.0x0022",
-        "Exif.Photo.0x9aaa",
     ]
 )

@@ -677,14 +669,15 @@ def readPictureMetadata(picture: bytes, lang: Optional[str] = "en") -> dict:

     try:
         metadata = asdict(reader.readPictureMetadata(picture, lang))
-    except
-
+    except reader.PartialExifException as e:
+        tags = [t for t in e.missing_mandatory_tags if t not in ("lon", "lat")]
+        if "lon" in e.missing_mandatory_tags or "lat" in e.missing_mandatory_tags:
+            tags.append("location")  # lat/lon is too much detail for missing metadatas, we replace those by 'location'
+        raise MetadataReadingError(details=str(e), missing_mandatory_tags=tags)

     # Cleanup raw EXIF tags to avoid SQL issues
     cleanedExif = {}
-    for k, v in metadata["exif"].items():
-        if k in BLACK_LISTED_BINARY_EXIF_FIELDS:
-            continue
+    for k, v in cleanupExif(metadata["exif"]).items():
         try:
             if isinstance(v, bytes):
                 try:

@@ -704,15 +697,20 @@ def readPictureMetadata(picture: bytes, lang: Optional[str] = "en") -> dict:
     return metadata


+EXIF_KEY_HEX_RGX = r"\.0x[0-9a-fA-F]+$"
+
+
 def cleanupExif(exif: Optional[Dict[str, str]]) -> Optional[Dict[str, str]]:
-    """Removes binary fields from
+    """Removes binary or undocumented fields from EXIF tags
     >>> cleanupExif({'A': 'B', 'Exif.Canon.AFInfo': 'Blablabla'})
     {'A': 'B'}
     >>> cleanupExif({'A': 'B', 'Exif.Photo.MakerNote': 'Blablabla'})
     {'A': 'B'}
+    >>> cleanupExif({'A': 'B', 'Exif.Sony.0x1234': 'Blablabla'})
+    {'A': 'B'}
     """

     if exif is None:
         return None

-    return {k: v for k, v in exif.items() if k not in BLACK_LISTED_BINARY_EXIF_FIELDS}
+    return {k: v for k, v in exif.items() if not re.search(EXIF_KEY_HEX_RGX, k) and k not in BLACK_LISTED_BINARY_EXIF_FIELDS}
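In practice the 2.7.1 metadata handling boils down to two small behaviours: hex-numbered EXIF tags are now dropped by a regex instead of an ever-growing blacklist, and rejected pictures report which mandatory metadata was missing, with `lon`/`lat` collapsed into a single `location` entry. The standalone sketch below mirrors that logic for illustration only; it is not the packaged code, and the blacklist is shortened.

```python
import re
from typing import Dict, List

# Same pattern as the new EXIF_KEY_HEX_RGX: drop any tag key ending in ".0x<hex>"
EXIF_KEY_HEX_RGX = r"\.0x[0-9a-fA-F]+$"
# Shortened blacklist, just for the example
BLACK_LISTED_BINARY_EXIF_FIELDS = {"Exif.Photo.MakerNote", "Exif.Canon.AFInfo"}


def cleanup_exif(exif: Dict[str, str]) -> Dict[str, str]:
    """Keep only documented, non-binary EXIF tags."""
    return {
        k: v
        for k, v in exif.items()
        if not re.search(EXIF_KEY_HEX_RGX, k) and k not in BLACK_LISTED_BINARY_EXIF_FIELDS
    }


def collapse_missing_tags(missing: List[str]) -> List[str]:
    """Report 'location' instead of the low-level 'lon'/'lat' tag names."""
    tags = [t for t in missing if t not in ("lon", "lat")]
    if "lon" in missing or "lat" in missing:
        tags.append("location")
    return tags


print(cleanup_exif({"Exif.Image.Make": "ACME", "Exif.Sony.0x1234": "bin", "Exif.Photo.MakerNote": "bin"}))
# {'Exif.Image.Make': 'ACME'}
print(collapse_missing_tags(["lat", "lon", "datetime"]))
# ['datetime', 'location']
```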
geovisio/utils/sequences.py
CHANGED

@@ -156,7 +156,9 @@ def get_collections(request: CollectionsRequest) -> Collections:
                 {status},
                 s.computed_capture_date AS datetime,
                 s.user_agent,
-                ROUND(ST_Length(s.geom::geography)) / 1000 AS length_km
+                ROUND(ST_Length(s.geom::geography)) / 1000 AS length_km,
+                s.computed_h_pixel_density,
+                s.computed_gps_accuracy
             FROM sequences s
             LEFT JOIN accounts on s.account_id = accounts.id
             WHERE {filter}

@@ -516,7 +518,9 @@ SELECT
     ARRAY_AGG(DISTINCT TRIM(
         CONCAT(p.metadata->>'make', ' ', p.metadata->>'model')
     )) AS models,
-    ARRAY_AGG(DISTINCT p.metadata->>'type') AS types
+    ARRAY_AGG(DISTINCT p.metadata->>'type') AS types,
+    ARRAY_AGG(DISTINCT p.h_pixel_density) AS reshpd,
+    PERCENTILE_CONT(0.9) WITHIN GROUP(ORDER BY p.gps_accuracy_m) AS gpsacc
 FROM sequences_pictures sp
 JOIN pictures p ON sp.pic_id = p.id
 WHERE sp.seq_id = %(seq)s

@@ -529,7 +533,9 @@ geom = compute_sequence_geom(id),
 bbox = compute_sequence_bbox(id),
 computed_type = CASE WHEN array_length(types, 1) = 1 THEN types[1] ELSE NULL END,
 computed_model = CASE WHEN array_length(models, 1) = 1 THEN models[1] ELSE NULL END,
-computed_capture_date = day
+computed_capture_date = day,
+computed_h_pixel_density = CASE WHEN array_length(reshpd, 1) = 1 THEN reshpd[1] ELSE NULL END,
+computed_gps_accuracy = gpsacc
 FROM aggregated_pictures
 WHERE id = %(seq)s
 """,

@@ -595,19 +601,16 @@ def delete_collection(collectionId: UUID, account: Optional[Account]) -> int:

         logging.info(f"Asking for deletion of sequence {collectionId} and all its pictures")

-        # mark all the pictures as waiting for deletion for async removal as this can be quite long if the storage is slow
+        # mark all the pictures as waiting for deletion for async removal as this can be quite long if the storage is slow and there are lots of pictures
         # Note: To avoid a deadlock if some workers are currently also working on those picture to prepare them,
         # the SQL queries are split in 2:
-        # - First a query to
-        # - Then a query
+        # - First a query to remove jobs preparing those pictures
+        # - Then a query deleting those pictures from the database (and a trigger will add async deletion tasks to the queue)
         #
-        #
-        #
-
-
-        nb_updated = cursor.execute(
-            """
-            WITH pic2rm AS (
+        # Since the workers lock their job_queue row when working, at the end of this query, we know that there are no more workers working on those pictures,
+        # so we can delete them without fearing a deadlock.
+        cursor.execute(
+            """WITH pic2rm AS (
                 SELECT pic_id FROM sequences_pictures WHERE seq_id = %(seq)s
             ),
             picWithoutOtherSeq AS (

@@ -615,19 +618,15 @@ def delete_collection(collectionId: UUID, account: Optional[Account]) -> int:
                 EXCEPT
                 SELECT pic_id FROM sequences_pictures WHERE pic_id IN (SELECT pic_id FROM pic2rm) AND seq_id != %(seq)s
             )
-
-            SELECT pic_id, 'delete' FROM picWithoutOtherSeq
-            ON CONFLICT (picture_id) DO UPDATE SET task = 'delete'
-            """,
+            DELETE FROM job_queue WHERE picture_id IN (SELECT pic_id FROM picWithoutOtherSeq)""",
             {"seq": collectionId},
         ).rowcount
         # if there was a finalize task for this collection in the queue, we remove it, it's useless
         cursor.execute("""DELETE FROM job_queue WHERE sequence_id = %(seq)s""", {"seq": collectionId})

-        # after the task have been added to the queue,
-        cursor.execute(
-            """
-            WITH pic2rm AS (
+        # after the task have been added to the queue, delete the pictures, and db triggers will ensure the correct deletion jobs are added
+        nb_updated = cursor.execute(
+            """WITH pic2rm AS (
                 SELECT pic_id FROM sequences_pictures WHERE seq_id = %(seq)s
             ),
             picWithoutOtherSeq AS (

@@ -635,8 +634,7 @@ def delete_collection(collectionId: UUID, account: Optional[Account]) -> int:
                 EXCEPT
                 SELECT pic_id FROM sequences_pictures WHERE pic_id IN (SELECT pic_id FROM pic2rm) AND seq_id != %(seq)s
             )
-
-            """,
+            DELETE FROM pictures WHERE id IN (SELECT pic_id FROM picWithoutOtherSeq)""",
             {"seq": collectionId},
         ).rowcount

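The new `computed_gps_accuracy` column stores `PERCENTILE_CONT(0.9)` of the per-picture `gps_accuracy_m` values, i.e. a linearly interpolated 90th percentile. A rough Python equivalent of that aggregation, for readers who want to check the numbers (the sample values and helper name are made up; the real computation happens in SQL):

```python
from typing import List, Optional


def percentile_cont(values: List[float], fraction: float = 0.9) -> Optional[float]:
    """Continuous percentile with linear interpolation, like PostgreSQL's PERCENTILE_CONT."""
    vals = sorted(v for v in values if v is not None)
    if not vals:
        return None
    pos = fraction * (len(vals) - 1)  # fractional rank within the ordered set
    lower = int(pos)
    upper = min(lower + 1, len(vals) - 1)
    return vals[lower] + (pos - lower) * (vals[upper] - vals[lower])


# e.g. per-picture gps_accuracy_m values of one sequence
print(percentile_cont([1.0, 1.5, 2.0, 2.5, 12.0]))  # 8.2, stored as computed_gps_accuracy
```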
geovisio/utils/upload_set.py
CHANGED

@@ -126,6 +126,23 @@ class FileRejectionStatus(Enum):
     """other_error means there was an error that is not related to the picture itself"""


+class FileRejectionDetails(BaseModel):
+
+    missing_fields: List[str]
+    """Mandatory metadata missing from the file. Metadata can be `datetime` or `location`."""
+
+
+class FileRejection(BaseModel):
+    """Details about a file rejection"""
+
+    reason: str
+    severity: FileRejectionStatusSeverity
+    message: Optional[str]
+    details: Optional[FileRejectionDetails]
+
+    model_config = ConfigDict(use_enum_values=True, use_attribute_docstrings=True)
+
+
 class UploadSetFile(BaseModel):
     """File uploaded in an UploadSet"""

@@ -136,6 +153,7 @@ class UploadSetFile(BaseModel):
     upload_set_id: UUID = Field(..., exclude=True)
     rejection_status: Optional[FileRejectionStatus] = Field(None, exclude=True)
     rejection_message: Optional[str] = Field(None, exclude=True)
+    rejection_details: Optional[Dict[str, Any]] = Field(None, exclude=True)
     file_type: Optional[FileType] = None
     size: Optional[int] = None

@@ -148,7 +166,7 @@ class UploadSetFile(BaseModel):

     @computed_field
     @property
-    def rejected(self) -> Optional[
+    def rejected(self) -> Optional[FileRejection]:
         if self.rejection_status is None:
             return None
         msg = None

@@ -168,11 +186,7 @@ class UploadSetFile(BaseModel):
             severity = FileRejectionStatusSeverity.error
         else:
             msg = self.rejection_message
-        return
-            "reason": self.rejection_status,
-            "severity": severity,
-            "message": msg,
-        }
+        return FileRejection(reason=self.rejection_status, severity=severity, message=msg, details=self.rejection_details)

     @field_serializer("content_md5")
     def serialize_md5(self, md5: UUID, _info):

@@ -199,7 +213,7 @@ def get_simple_upload_set(id: UUID) -> Optional[UploadSet]:
     """Get the DB representation of an UploadSet, without associated collections and statuses"""
     u = db.fetchone(
         current_app,
-        SQL("SELECT * FROM upload_sets WHERE id = %(id)s
+        SQL("SELECT * FROM upload_sets WHERE id = %(id)s"),
         {"id": id},
         row_factory=class_row(UploadSet),
     )

@@ -310,7 +324,7 @@ SELECT u.*,
     ) AS associated_collections
 FROM upload_sets u
 LEFT JOIN upload_set_statuses us on us.upload_set_id = u.id
-WHERE u.id = %(id)s
+WHERE u.id = %(id)s"""
         ),
         {"id": id},
         row_factory=class_row(UploadSet),

@@ -384,7 +398,7 @@ def list_upload_sets(account_id: UUID, limit: int = 100, filter: Optional[str] =
         WHERE p.upload_set_id = u.id
     ) AS nb_items
 FROM upload_sets u
-WHERE account_id = %(account_id)s AND
+WHERE account_id = %(account_id)s AND {filter}
 ORDER BY created_at ASC
 LIMIT %(limit)s
 """

@@ -490,16 +504,8 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
                 "UPDATE files SET rejection_status = 'capture_duplicate' WHERE picture_id IN (select picture_id from tmp_duplicates)"
             )
         )
-
-
-                """INSERT INTO job_queue (picture_id, task)
-                SELECT picture_id, 'delete'
-                FROM tmp_duplicates
-                ON CONFLICT(picture_id) DO UPDATE SET task = 'delete'"""
-            )
-        )
-
-        # ask for deletion of the pictures
+        # delete all pictures (the DB triggers will also add background jobs to delete the associated files)
+        cursor.execute(SQL("DELETE FROM pictures WHERE id IN (select picture_id FROM tmp_duplicates)"))

         for s in report.sequences:
             existing_sequence = next(

@@ -557,21 +563,21 @@ def insertFileInDatabase(
     picture_id: Optional[UUID] = None,
     rejection_status: Optional[FileRejectionStatus] = None,
     rejection_message: Optional[str] = None,
+    rejection_details: Optional[Dict[str, Any]] = None,
 ) -> UploadSetFile:
     """Insert a file linked to an UploadSet into the database"""

     f = cursor.execute(
         SQL(
-            """
-            INSERT INTO files(
+            """INSERT INTO files(
                 upload_set_id, picture_id, file_type, file_name,
-                size, content_md5, rejection_status, rejection_message)
+                size, content_md5, rejection_status, rejection_message, rejection_details)
             VALUES (
                 %(upload_set_id)s, %(picture_id)s, %(type)s, %(file_name)s,
-                %(size)s, %(content_md5)s, %(rejection_status)s, %(rejection_message)s)
+                %(size)s, %(content_md5)s, %(rejection_status)s, %(rejection_message)s, %(rejection_details)s)
             ON CONFLICT (upload_set_id, file_name)
             DO UPDATE SET picture_id = %(picture_id)s, size = %(size)s, content_md5 = %(content_md5)s,
-                rejection_status = %(rejection_status)s, rejection_message = %(rejection_message)s
+                rejection_status = %(rejection_status)s, rejection_message = %(rejection_message)s, rejection_details = %(rejection_details)s
             RETURNING *
             """
         ),

@@ -584,6 +590,7 @@ def insertFileInDatabase(
             "content_md5": content_md5,
             "rejection_status": rejection_status,
             "rejection_message": rejection_message,
+            "rejection_details": Jsonb(rejection_details),
         },
     )
     return UploadSetFile(**f.fetchone())

@@ -602,6 +609,7 @@ def get_upload_set_files(upload_set_id: UUID) -> UploadSetFiles:
             content_md5,
             rejection_status,
             rejection_message,
+            rejection_details,
             picture_id,
             inserted_at
         FROM files

@@ -618,37 +626,17 @@ def delete(upload_set: UploadSet):
     """Delete an UploadSet"""
     logging.info(f"Asking for deletion of uploadset {upload_set.id}")
     with db.conn(current_app) as conn:
+        # clean job queue, to ensure no async runner are currently processing pictures/sequences/upload_sets
+        # Done outside the real deletion transaction to not trigger deadlock
+        conn.execute(SQL("DELETE FROM job_queue WHERE picture_id IN (SELECT id FROM pictures where upload_set_id = %s)"), [upload_set.id])
+        for c in upload_set.associated_collections:
+            conn.execute(SQL("DELETE FROM job_queue WHERE sequence_id = %s"), [c.id])
+
         with conn.transaction(), conn.cursor() as cursor:
             for c in upload_set.associated_collections:
                 # Mark all collections as deleted, but do not delete them
                 # Note: we do not use utils.sequences.delete_collection here, since we also want to remove the pictures not associated to any collection
-                cursor.execute(SQL("DELETE FROM job_queue WHERE sequence_id = %s"), [c.id])
                 cursor.execute(SQL("UPDATE sequences SET status = 'deleted' WHERE id = %s"), [c.id])

-            #
-            cursor.execute(
-                """
-                INSERT INTO job_queue(picture_id, task)
-                SELECT id, 'delete'
-                FROM pictures
-                WHERE upload_set_id = %(upload_set_id)s
-                ON CONFLICT (picture_id) DO UPDATE SET task = 'delete'""",
-                {"upload_set_id": upload_set.id},
-            )
-
-            # after the task have been added to the queue, we mark all picture for deletion
-            cursor.execute(
-                SQL(
-                    "UPDATE pictures SET status = 'waiting-for-delete' WHERE id IN (SELECT id FROM pictures WHERE upload_set_id = %(upload_set_id)s)"
-                ),
-                {"upload_set_id": upload_set.id},
-            )
-            # we insert the upload set deletion task in the queue after the rest, it will be done once all pictures are deleted
-            cursor.execute(
-                """INSERT INTO job_queue(upload_set_id, task) VALUES (%(upload_set_id)s, 'delete')
-                ON CONFLICT (upload_set_id) DO UPDATE SET task = 'delete'""",
-                {"upload_set_id": upload_set.id},
-            )
-
-            # and we mark it as deleted so it will disapear from the responses even if all the pictures are not yet deleted
-            cursor.execute("UPDATE upload_sets SET deleted = true WHERE id = %(upload_set_id)s", {"upload_set_id": upload_set.id})
+            # after the task have been added to the queue, we delete the upload set, and this will delete all pictures associated to it
+            cursor.execute(SQL("DELETE FROM upload_sets WHERE id = %(upload_set_id)s"), {"upload_set_id": upload_set.id})
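With the new `FileRejection`/`FileRejectionDetails` models, the `rejected` field of an uploaded file becomes a structured object that also says which mandatory metadata was missing. A minimal sketch of that payload shape, assuming Pydantic v2; the enum members and example values below are illustrative, not the packaged definitions:

```python
from enum import Enum
from typing import List, Optional

from pydantic import BaseModel, ConfigDict


class FileRejectionStatusSeverity(str, Enum):
    info = "info"
    warning = "warning"
    error = "error"


class FileRejectionDetails(BaseModel):
    missing_fields: List[str]


class FileRejection(BaseModel):
    reason: str
    severity: FileRejectionStatusSeverity
    message: Optional[str] = None
    details: Optional[FileRejectionDetails] = None

    model_config = ConfigDict(use_enum_values=True)


rejection = FileRejection(
    reason="invalid_metadata",  # hypothetical rejection status value
    severity=FileRejectionStatusSeverity.error,
    message="Impossible to parse picture metadata",
    details=FileRejectionDetails(missing_fields=["datetime", "location"]),
)
print(rejection.model_dump())
# {'reason': 'invalid_metadata', 'severity': 'error',
#  'message': 'Impossible to parse picture metadata',
#  'details': {'missing_fields': ['datetime', 'location']}}
```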
geovisio/web/collections.py
CHANGED

@@ -82,7 +82,10 @@ def dbSequenceToStacCollection(dbSeq, description="A sequence of geolocated pict
         {
             "type": "Collection",
             "stac_version": STAC_VERSION,
-            "stac_extensions": [
+            "stac_extensions": [
+                "https://stac-extensions.github.io/stats/v0.2.0/schema.json", # For stats: fields
+                "https://stac.linz.govt.nz/v0.0.15/quality/schema.json", # For quality: fields
+            ],
             "id": str(dbSeq["id"]),
             "title": str(dbSeq["name"]),
             "description": description,

@@ -96,6 +99,10 @@ def dbSequenceToStacCollection(dbSeq, description="A sequence of geolocated pict
             "geovisio:sorted-by": dbSeq.get("current_sort"),
             "geovisio:upload-software": userAgentToClient(dbSeq.get("user_agent")).value,
             "geovisio:length_km": dbSeq.get("length_km"),
+            "quality:horizontal_accuracy": (
+                float("{:.1f}".format(dbSeq["computed_gps_accuracy"])) if dbSeq.get("computed_gps_accuracy") else None
+            ),
+            "quality:horizontal_accuracy_type": "95% confidence interval" if "computed_gps_accuracy" in dbSeq else None,
             "providers": [
                 {"name": dbSeq["account_name"], "roles": ["producer"], "id": str(dbSeq["account_id"])},
             ],

@@ -110,7 +117,14 @@ def dbSequenceToStacCollection(dbSeq, description="A sequence of geolocated pict
                 ]
             },
         },
-        "summaries": cleanNoneInDict(
+        "summaries": cleanNoneInDict(
+            {
+                "pers:interior_orientation": dbSeq.get("metas"),
+                "panoramax:horizontal_pixel_density": (
+                    [dbSeq["computed_h_pixel_density"]] if "computed_h_pixel_density" in dbSeq else None
+                ),
+            }
+        ),
         "stats:items": removeNoneInDict({"count": nb_pic}),
         "links": cleanNoneInList(
             [

@@ -374,7 +388,9 @@ def getCollection(collectionId):
                 max_picture_ts AS maxts,
                 nb_pictures AS nbpic,
                 s.user_agent,
-                ROUND(ST_Length(s.geom::geography)) / 1000 as length_km
+                ROUND(ST_Length(s.geom::geography)) / 1000 as length_km,
+                s.computed_h_pixel_density,
+                s.computed_gps_accuracy
             FROM sequences s
             JOIN accounts ON s.account_id = accounts.id, (
                 SELECT
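On the API side these computed columns surface as STAC fields: `quality:horizontal_accuracy` is the sequence-level GPS accuracy rounded to one decimal via `float("{:.1f}".format(...))`, and `panoramax:horizontal_pixel_density` is exposed as a single-value list under `summaries`. An illustrative fragment of a collection response built from those fields (values are made up, unrelated keys trimmed):

```python
collection_extract = {
    "stac_extensions": [
        "https://stac-extensions.github.io/stats/v0.2.0/schema.json",
        "https://stac.linz.govt.nz/v0.0.15/quality/schema.json",
    ],
    "quality:horizontal_accuracy": 3.2,  # computed_gps_accuracy, in meters, rounded to 0.1
    "quality:horizontal_accuracy_type": "95% confidence interval",
    "summaries": {
        "panoramax:horizontal_pixel_density": [42],  # pixels per degree of field of view
    },
}
```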
geovisio/web/docs.py
CHANGED

@@ -157,6 +157,15 @@ Note that you may not rely only on these ID that could change through time.
         "properties": {
             "version": {"type": "integer", "example": 8},
             "name": {"type": "string", "example": "GeoVisio Vector Tiles"},
+            "metadata": {
+                "type": "object",
+                "properties": {
+                    "panoramax:fields": {
+                        "type": "object",
+                        "description": "Available properties per layer (layer: [field1, field2...])",
+                    }
+                },
+            },
             "sources": {
                 "type": "object",
                 "properties": {

@@ -340,12 +349,28 @@ Note that you may not rely only on these ID that could change through time.
                 "geovisio:sorted-by": {"$ref": "#/components/schemas/GeoVisioCollectionSortedBy"},
                 "geovisio:upload-software": {"$ref": "#/components/schemas/GeoVisioCollectionUploadSoftware"},
                 "geovisio:length_km": {"$ref": "#/components/schemas/GeoVisioLengthKm"},
+                "quality:horizontal_accuracy": {"type": "number", "title": "Estimated GPS position precision (in meters)"},
+                "quality:horizontal_accuracy_type": {
+                    "type": "string",
+                    "title": "Estimation process for GPS precision",
+                    "example": "95% confidence interval",
+                },
                 "providers": {
                     "type": "array",
                     "items": {
                         "$ref": "#/components/schemas/GeoVisioProvider",
                     },
                 },
+                "summaries": {
+                    "type": "object",
+                    "properties": {
+                        "panoramax:horizontal_pixel_density": {
+                            "type": "array",
+                            "title": "Number of pixels on horizon per field of view degree (as a list with a single value for STAC conformance)",
+                            "items": {"type": "integer", "minimum": 0},
+                        },
+                    },
+                },
             },
         },
     ]

@@ -440,6 +465,15 @@ If unset, sort order is unchanged.
                 "geovisio:thumbnail": {"type": "string", "format": "uri"},
                 "original_file:size": {"type": "integer", "minimum": 0, "title": "Size of the original file, in bytes"},
                 "original_file:name": {"type": "string", "title": "Original file name"},
+                "panoramax:horizontal_pixel_density": {
+                    "type": "integer",
+                    "minimum": 0,
+                    "title": "Number of pixels on horizon per field of view degree",
+                },
+                "quality:horizontal_accuracy": {
+                    "type": "number",
+                    "title": "Estimated GPS position precision (in meters)",
+                },
             },
         }
     },

@@ -485,7 +519,7 @@ If unset, sort order is unchanged.
             },
             "isBlurred": {
                 "type": "string",
-                "description": "Is picture blurred",
+                "description": "Is picture blurred. If set to 'true', the server will not apply the face blurring algorithm but will publish the image as it is",
                 "enum": ["true", "false", "null"],
                 "default": "false",
            },
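The `metadata.panoramax:fields` entry documented above lets the vector tiles endpoint advertise which properties each layer carries (the actual values come from the geovisio/web/map.py changes listed in the file summary). A hypothetical TileJSON fragment, with invented layer and property names, just to show the shape:

```python
tilejson_extract = {
    "version": 8,
    "name": "GeoVisio Vector Tiles",
    "metadata": {
        "panoramax:fields": {
            # layer name -> list of available properties (names here are invented)
            "pictures": ["id", "ts", "heading"],
            "sequences": ["id", "date"],
        }
    },
}
```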