geovisio 2.9.0__py3-none-any.whl → 2.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +6 -1
- geovisio/config_app.py +5 -5
- geovisio/translations/ar/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ar/LC_MESSAGES/messages.po +818 -0
- geovisio/translations/br/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/da/LC_MESSAGES/messages.po +4 -3
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +55 -2
- geovisio/translations/el/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +193 -139
- geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/eo/LC_MESSAGES/messages.po +53 -4
- geovisio/translations/es/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/fi/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +91 -3
- geovisio/translations/hu/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/it/LC_MESSAGES/messages.po +63 -3
- geovisio/translations/ja/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/ko/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/messages.pot +185 -129
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +292 -63
- geovisio/translations/oc/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/oc/LC_MESSAGES/messages.po +818 -0
- geovisio/translations/pl/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/sv/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/sv/LC_MESSAGES/messages.po +4 -3
- geovisio/translations/ti/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ti/LC_MESSAGES/messages.po +762 -0
- geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +1 -1
- geovisio/utils/annotations.py +14 -17
- geovisio/utils/auth.py +14 -13
- geovisio/utils/cql2.py +2 -2
- geovisio/utils/fields.py +14 -2
- geovisio/utils/items.py +44 -0
- geovisio/utils/model_query.py +2 -2
- geovisio/utils/pic_shape.py +1 -1
- geovisio/utils/pictures.py +111 -18
- geovisio/utils/semantics.py +32 -3
- geovisio/utils/sentry.py +1 -1
- geovisio/utils/sequences.py +51 -34
- geovisio/utils/upload_set.py +285 -198
- geovisio/utils/website.py +1 -1
- geovisio/web/annotations.py +209 -68
- geovisio/web/auth.py +1 -1
- geovisio/web/collections.py +26 -22
- geovisio/web/configuration.py +24 -4
- geovisio/web/docs.py +93 -11
- geovisio/web/items.py +197 -121
- geovisio/web/params.py +44 -31
- geovisio/web/pictures.py +34 -0
- geovisio/web/tokens.py +49 -1
- geovisio/web/upload_set.py +150 -32
- geovisio/web/users.py +4 -4
- geovisio/web/utils.py +2 -2
- geovisio/workers/runner_pictures.py +128 -23
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/METADATA +13 -13
- geovisio-2.10.0.dist-info/RECORD +105 -0
- geovisio-2.9.0.dist-info/RECORD +0 -98
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/WHEEL +0 -0
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/licenses/LICENSE +0 -0
geovisio/utils/annotations.py
CHANGED

@@ -62,31 +62,28 @@ Note that the API will always output geometry as geojson geometry (thus will tra
         return shape_as_geometry(self.shape)
 
 
-def creation_annotation(params: AnnotationCreationParameter) -> Annotation:
-    """Create an annotation in the database
+def creation_annotation(params: AnnotationCreationParameter, conn: psycopg.Connection) -> Annotation:
+    """Create an annotation in the database.
+    Note, this should be called from an autocommit connection"""
 
     model = model_query.get_db_params_and_values(
         AnnotationCreationRow(picture_id=params.picture_id, shape=params.shape_as_geometry()), jsonb_fields={"shape"}
     )
-    insert_query = SQL(
-        """WITH existing_annotations AS (
-        SELECT * FROM annotations WHERE picture_id = %(picture_id)s AND shape = %(shape)s
-        )
-        , new_ones AS (
-        INSERT INTO annotations (picture_id, shape)
-        SELECT %(picture_id)s, %(shape)s
-        WHERE NOT EXISTS (SELECT FROM existing_annotations)
-        RETURNING *
-        )
-        SELECT * FROM existing_annotations UNION ALL SELECT * FROM new_ones
-        ;"""
-    )
 
-    with
+    with conn.transaction(), conn.cursor(row_factory=class_row(Annotation)) as cursor:
         # we check that the shape is valid
         check_shape(conn, params)
 
-        annotation = cursor.execute(
+        annotation = cursor.execute(
+            "SELECT * FROM annotations WHERE picture_id = %(picture_id)s AND shape = %(shape)s", model.params_as_dict
+        ).fetchone()
+        if annotation is None:
+            annotation = cursor.execute(
+                """INSERT INTO annotations (picture_id, shape)
+                VALUES (%(picture_id)s, %(shape)s)
+                RETURNING *""",
+                model.params_as_dict,
+            ).fetchone()
 
         if annotation is None:
             raise Exception("Impossible to insert annotation in database")
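
The change above replaces the single CTE-based insert with a select-then-insert run inside one transaction on a caller-provided connection. A minimal, standalone sketch of that pattern (the `annotations` table and its `picture_id`/`shape` columns come from the diff; the shape serialization, handled by `model_query` in the real code, is left to the caller here):

```python
# Self-contained sketch of the select-then-insert pattern used by creation_annotation;
# shape is passed through as-is, whereas the real code serializes it via model_query.
import psycopg
from psycopg.rows import dict_row


def get_or_create_annotation(conn: psycopg.Connection, picture_id, shape) -> dict:
    params = {"picture_id": picture_id, "shape": shape}
    # both statements run inside one transaction, mirroring conn.transaction() in the diff
    with conn.transaction(), conn.cursor(row_factory=dict_row) as cur:
        row = cur.execute(
            "SELECT * FROM annotations WHERE picture_id = %(picture_id)s AND shape = %(shape)s",
            params,
        ).fetchone()
        if row is None:
            row = cur.execute(
                "INSERT INTO annotations (picture_id, shape) VALUES (%(picture_id)s, %(shape)s) RETURNING *",
                params,
            ).fetchone()
        return row
```
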
geovisio/utils/auth.py
CHANGED

@@ -31,7 +31,7 @@ class OAuthUserAccount(object):
 
 
 class OAuthProvider(ABC):
-    """Base class for oauth provider. Need
+    """Base class for oauth provider. Need to specify how to get user's info"""
 
     name: str
     client: Any

@@ -52,7 +52,7 @@ class OAuthProvider(ABC):
     """
     URL to a user settings page.
     This URL should point to a web page where user can edit its password or email address,
-    if that makes sense
+    if that makes sense regarding your GeoVisio instance.
 
     This is useful if your instance has its own specific identity provider. It may not be used if you rely on third-party auth provider.
     """

@@ -235,7 +235,7 @@ class Account(BaseModel):
 
 
 def account_allow_collaborative_editing(account_id: str | UUID):
-    """An account
+    """An account allows collaborative editing it if has been allowed at the account level else we check the instance configuration"""
     r = db.fetchone(
         current_app,
         """SELECT COALESCE(accounts.collaborative_metadata, configurations.collaborative_metadata, true) AS collaborative_metadata

@@ -249,15 +249,16 @@ WHERE accounts.id = %s""",
 
 
 def login_required():
-    """Check that the user is logged, and abort if it's not the case"""
+    """Check that the user is logged in, and abort if it's not the case"""
 
     def actual_decorator(f):
         @wraps(f)
         def decorator(*args, **kwargs):
-            account
-
-
-
+            if "account" not in kwargs:
+                account = get_current_account()
+                if not account:
+                    return flask.abort(flask.make_response(flask.jsonify(message=_("Authentication is mandatory")), 401))
+                kwargs["account"] = account
 
             return f(*args, **kwargs)
 

@@ -267,7 +268,7 @@ def login_required():
 
 
 def login_required_by_setting(mandatory_login_param):
-    """Check that the user is logged, and abort if it's not the case
+    """Check that the user is logged in, and abort if it's not the case
 
     Args:
         mandatory_login_param (str): name of the configuration parameter used to decide if the login is mandatory or not

@@ -303,7 +304,7 @@ def login_required_by_setting(mandatory_login_param):
 
 
 def login_required_with_redirect():
-    """Check that the user is logged, and redirect if it's not the case"""
+    """Check that the user is logged in, and redirect if it's not the case"""
 
     def actual_decorator(f):
         @wraps(f)

@@ -346,7 +347,7 @@ class UnknowAccountException(Exception):
     status_code = 401
 
     def __init__(self):
-        msg = "No account with this oauth id is
+        msg = "No account with this oauth id is known, you should login first"
         super().__init__(msg)
 
 

@@ -358,12 +359,12 @@ class LoginRequiredException(Exception):
         super().__init__(msg)
 
 
-def get_current_account():
+def get_current_account() -> Optional[Account]:
     """Get the authenticated account information.
 
     This account is either stored in the flask's session or retrieved with the Bearer token passed with an `Authorization` header.
 
-    The flask session is usually used by browser, whereas the bearer token is
+    The flask session is usually used by browser, whereas the bearer token is handy for non interactive uses, like curls or CLI usage.
 
     Returns:
         Account: the current logged account, None if nobody is logged
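
For context, the reworked `login_required` now injects the resolved account into the view's keyword arguments instead of only checking that someone is logged in. A generic, self-contained sketch of that decorator pattern (Flask's abort/jsonify plumbing is swapped for a plain exception and `get_current_account` is stubbed, so the snippet runs on its own):

```python
# Standalone sketch of a login_required-style decorator that injects the account;
# the real code answers HTTP 401 via Flask and reads the session or a Bearer token.
from functools import wraps
from typing import Optional


class Account:  # stand-in for geovisio's Account model
    pass


def get_current_account() -> Optional[Account]:
    # stubbed; the real implementation checks the Flask session or the Authorization header
    return None


def login_required(f):
    @wraps(f)
    def decorator(*args, **kwargs):
        if "account" not in kwargs:
            account = get_current_account()
            if not account:
                raise PermissionError("Authentication is mandatory")  # Flask returns a 401 here
            kwargs["account"] = account
        return f(*args, **kwargs)

    return decorator
```
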
geovisio/utils/cql2.py
CHANGED

@@ -66,12 +66,12 @@ def parse_search_filter(value: Optional[str]) -> Optional[sql.SQL]:
             UNION
             SELECT DISTINCT(picture_id)
             FROM annotations_semantics ans
-            JOIN annotations a
+            JOIN annotations a ON a.id = ans.annotation_id
             WHERE {semantic_filter}
             UNION
             SELECT sp.pic_id
             FROM sequences_pictures sp
-
+            JOIN sequences_semantics sm ON sp.seq_id = sm.sequence_id
             WHERE {semantic_filter}
             LIMIT %(limit)s
         ))"""
geovisio/utils/fields.py
CHANGED

@@ -2,6 +2,8 @@ from enum import Enum
 from dataclasses import dataclass, field
 from typing import Any, List, Generic, TypeVar, Protocol
 from psycopg import sql
+from geovisio import errors
+from gettext import gettext as _
 
 
 @dataclass

@@ -12,8 +14,8 @@ class FieldMapping:
     stac: str
 
     @property
-    def sql_filter(self) -> sql.Composable:
-        return sql.SQL("
+    def sql_filter(self, row_alias="s.") -> sql.Composable:
+        return sql.SQL(row_alias + "{}").format(self.sql_column)
 
 
 class SQLDirection(Enum):

@@ -97,3 +99,13 @@ class BBox:
     maxx: float
     miny: float
     maxy: float
+
+
+def parse_relative_heading(value: str) -> int:
+    try:
+        relHeading = int(value)
+        if relHeading < -180 or relHeading > 180:
+            raise ValueError()
+        return relHeading
+    except (ValueError, TypeError):
+        raise errors.InvalidAPIUsage(_("Relative heading is not valid, should be an integer in degrees from -180 to 180"), status_code=400)
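
The new heading validation is small enough to restate on its own; in this sketch the geovisio-specific `errors.InvalidAPIUsage` (HTTP 400) is replaced with a plain `ValueError` so the snippet runs without the package:

```python
# Standalone mirror of the new parse_relative_heading helper; the real function
# raises errors.InvalidAPIUsage with status_code=400 instead of ValueError.
def parse_relative_heading(value: str) -> int:
    try:
        rel_heading = int(value)
        if rel_heading < -180 or rel_heading > 180:
            raise ValueError()
        return rel_heading
    except (ValueError, TypeError):
        raise ValueError("Relative heading is not valid, should be an integer in degrees from -180 to 180")


print(parse_relative_heading("90"))    # 90
print(parse_relative_heading("-180"))  # -180
# parse_relative_heading("270") and parse_relative_heading("abc") both raise ValueError
```
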
geovisio/utils/items.py
ADDED

@@ -0,0 +1,44 @@
+from .fields import SQLDirection
+from psycopg.sql import SQL, Identifier
+from enum import Enum
+from dataclasses import dataclass, field
+from typing import Optional, List
+
+
+class SortableItemField(Enum):
+    ts = Identifier("ts")
+    updated = Identifier("updated_at")
+    distance_to = ""
+    id = Identifier("id")
+
+
+@dataclass
+class ItemSortByField:
+    field: SortableItemField
+    direction: SQLDirection
+
+    # Note that this obj_to_compare is only used for the `distance_to` field, but we cannot put it in the enum
+    obj_to_compare: Optional[SQL] = None
+
+    def to_sql(self, alias) -> SQL:
+        sql_order = None
+        if self.obj_to_compare:
+            if self.field == SortableItemField.distance_to:
+                sql_order = SQL('{alias}."geom" <-> {obj_to_compare} {direction}').format(
+                    alias=alias, obj_to_compare=self.obj_to_compare, direction=self.direction.value
+                )
+            else:
+                raise InvalidAPIUsage("For the moment only the distance comparison to another item is supported")
+        else:
+            sql_order = SQL("{alias}.{field} {direction}").format(alias=alias, field=self.field.value, direction=self.direction.value)
+        return sql_order
+
+
+@dataclass
+class SortBy:
+    fields: List[ItemSortByField] = field(default_factory=lambda: [])
+
+    def to_sql(self, alias=Identifier("p")) -> SQL:
+        if len(self.fields) == 0:
+            return SQL("")
+        return SQL("ORDER BY {fields}").format(fields=SQL(", ").join([f.to_sql(alias=alias) for f in self.fields]))
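
A sketch of how these new helpers might compose into an ORDER BY clause; it assumes `SQLDirection` (defined in the existing fields module) exposes `ASC`/`DESC` members whose values are SQL fragments, which the diff does not show:

```python
# Illustrative usage only: SQLDirection.ASC / SQLDirection.DESC are assumed member names.
from psycopg.sql import Identifier
from geovisio.utils.fields import SQLDirection
from geovisio.utils.items import ItemSortByField, SortableItemField, SortBy

sort = SortBy(
    fields=[
        ItemSortByField(field=SortableItemField.ts, direction=SQLDirection.ASC),
        ItemSortByField(field=SortableItemField.id, direction=SQLDirection.ASC),
    ]
)
order_clause = sort.to_sql(alias=Identifier("p"))
# composes roughly to: ORDER BY "p"."ts" ASC, "p"."id" ASC
```
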
geovisio/utils/model_query.py
CHANGED

@@ -28,7 +28,7 @@ class ParamsAndValues:
         return SQL(", ").join([Placeholder(f) for f in self.params_as_dict.keys()])
 
     def fields_for_set(self) -> Composed:
-        """Get the fields and the placeholders
+        """Get the fields and the placeholders formatted for an update query like:
         '"a" = %(a)s, "b" = %(b)s'
 
         Can be used directly with a query like:

@@ -39,7 +39,7 @@ class ParamsAndValues:
         return SQL(", ").join(self.fields_for_set_list())
 
     def fields_for_set_list(self) -> List[Composed]:
-        """Get the fields and the placeholders
+        """Get the fields and the placeholders formatted for an update query like:
         ['"a" = %(a)s', '"b" = %(b)s']
 
         Note that the returned list should be joined with SQL(", ").join()
geovisio/utils/pic_shape.py
CHANGED

@@ -15,7 +15,7 @@ class Polygon(BaseModel):
 
     @field_validator("coordinates")
     def check_closure(cls, coordinates: List) -> List:
-        """Validate that Polygon is closed (first and last
+        """Validate that Polygon is closed (first and last coordinates are the same)."""
         if any(ring[-1] != ring[0] for ring in coordinates):
             raise ValueError("All linear rings have the same start and end coordinates")
 
geovisio/utils/pictures.py
CHANGED

@@ -1,6 +1,6 @@
 import json
 import math
-from typing import Dict, Optional
+from typing import Any, Dict, List, Optional, Tuple
 from uuid import UUID
 from attr import dataclass
 from flask import current_app, redirect, send_file

@@ -15,12 +15,16 @@ import logging
 from dataclasses import asdict
 from fs.path import dirname
 from psycopg.errors import UniqueViolation, InvalidParameterValue
+from psycopg.types.json import Jsonb
+from psycopg import sql, Connection
 import sentry_sdk
 from geovisio import utils, errors
 from geopic_tag_reader import reader
 import re
 import multipart
 
+from geovisio.utils import db
+
 log = logging.getLogger(__name__)
 
 

@@ -51,7 +55,6 @@ def createBlurredHDPicture(fs, blurApi, pictureBytes, outputFilename, keep_unblu
     PIL.Image
         The blurred version of the image
     """
-
     if blurApi is None:
         return None
     # Call blur API, asking for multipart response if available

@@ -553,6 +556,25 @@ class MetadataReadingError(Exception):
         self.missing_mandatory_tags = missing_mandatory_tags
 
 
+def get_lighter_metadata(metadata):
+    """Create a lighter metadata field to remove duplicates fields"""
+    lighterMetadata = dict(
+        filter(
+            lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source", "gps_accuracy"],
+            metadata.items(),
+        )
+    )
+    if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
+        del lighterMetadata["tagreader_warnings"]
+    lighterMetadata["tz"] = metadata["ts"].tzname()
+    if metadata.get("ts_by_source", {}).get("gps") is not None:
+        lighterMetadata["ts_gps"] = metadata["ts_by_source"]["gps"].isoformat()
+    if metadata.get("ts_by_source", {}).get("camera") is not None:
+        lighterMetadata["ts_camera"] = metadata["ts_by_source"]["camera"].isoformat()
+
+    return lighterMetadata
+
+
 def insertNewPictureInDatabase(
     db, sequenceId, position, pictureBytes, associatedAccountID, additionalMetadata, uploadSetID=None, lang="en"
 ):

@@ -579,11 +601,10 @@ def insertNewPictureInDatabase(
     -------
     uuid : The uuid of the new picture entry in the database
     """
-    from psycopg.types.json import Jsonb
 
     # Create a fully-featured metadata object
-
-
+    with Image.open(io.BytesIO(pictureBytes)) as picturePillow:
+        metadata = readPictureMetadata(pictureBytes, lang) | utils.pictures.getPictureSizing(picturePillow) | additionalMetadata
 
     # Remove cols/rows information for flat pictures
     if metadata["type"] == "flat":

@@ -591,19 +612,7 @@ def insertNewPictureInDatabase(
         metadata.pop("rows")
 
     # Create a lighter metadata field to remove duplicates fields
-    lighterMetadata =
-        filter(
-            lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source", "gps_accuracy"],
-            metadata.items(),
-        )
-    )
-    if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
-        del lighterMetadata["tagreader_warnings"]
-    lighterMetadata["tz"] = metadata["ts"].tzname()
-    if metadata.get("ts_by_source", {}).get("gps") is not None:
-        lighterMetadata["ts_gps"] = metadata["ts_by_source"]["gps"].isoformat()
-    if metadata.get("ts_by_source", {}).get("camera") is not None:
-        lighterMetadata["ts_camera"] = metadata["ts_by_source"]["camera"].isoformat()
+    lighterMetadata = get_lighter_metadata(metadata)
 
     exif = cleanupExif(metadata["exif"])
 

@@ -639,6 +648,90 @@ def insertNewPictureInDatabase(
     return picId
 
 
+def _get_metadata_to_update(db_picture: Dict, new_reader_metadata: reader.GeoPicTags) -> Tuple[List[str], Dict[str, Any]]:
+    fields_to_update = []
+    params = {}
+
+    if new_reader_metadata.ts != db_picture["ts"]:
+        fields_to_update.append(sql.SQL("ts = %(ts)s"))
+        params["ts"] = new_reader_metadata.ts.isoformat()
+    if db_picture["heading_computed"] is False and new_reader_metadata.heading != db_picture["heading"]:
+        fields_to_update.append(sql.SQL("heading = %(heading)s"))
+        params["heading"] = new_reader_metadata.heading
+    if new_reader_metadata.gps_accuracy != db_picture["gps_accuracy_m"]:
+        fields_to_update.append(sql.SQL("gps_accuracy_m = %(gps_accuracy_m)s"))
+        params["gps_accuracy_m"] = new_reader_metadata.gps_accuracy
+
+    # Note: The db metadata can have more stuff (like originalFileName, size, ...), we so only check if the new value is different from the old one
+    # we cannot check directly for dict equality
+    new_lighterMetadata = get_lighter_metadata(asdict(new_reader_metadata))
+    metadata_updates = {}
+    for k, v in new_lighterMetadata.items():
+        if v != db_picture["metadata"].get(k):
+            metadata_updates[k] = v
+
+    # if the position has been updated (by more than ~10cm)
+    lon, lat = db_picture["lon"], db_picture["lat"]
+    new_lon, new_lat = new_reader_metadata.lon, new_reader_metadata.lat
+    if not math.isclose(lon, new_lon, abs_tol=0.0000001) or not math.isclose(lat, new_lat, abs_tol=0.0000001):
+        fields_to_update.append(sql.SQL("geom = ST_SetSRID(ST_MakePoint(%(lon)s, %(lat)s), 4326)"))
+        params["lon"] = new_reader_metadata.lon
+        params["lat"] = new_reader_metadata.lat
+
+    if metadata_updates:
+        fields_to_update.append(sql.SQL("metadata = metadata || %(new_metadata)s"))
+        params["new_metadata"] = Jsonb(metadata_updates)
+
+    return fields_to_update, params
+
+
+def ask_for_metadata_update(picture_id: UUID, read_file=False):
+    """Enqueue an async job to reread the picture's metadata"""
+    args = Jsonb({"read_file": True}) if read_file else None
+    with db.conn(current_app) as conn:
+        conn.execute(
+            "INSERT INTO job_queue(picture_id, task, args) VALUES (%s, 'read_metadata', %s)",
+            [picture_id, args],
+        )
+
+
+def update_picture_metadata(conn: Connection, picture_id: UUID, read_file=False) -> bool:
+    """Update picture metadata in database, using either the stored metadata or the original file
+
+    Only updates metadata that have changed.
+    Returns True if some metadata have been updated, False otherwise
+    """
+
+    with conn.cursor(row_factory=dict_row) as cursor:
+        db_picture = cursor.execute(
+            "SELECT ts, heading, metadata, ST_X(geom) as lon, ST_Y(geom) as lat, account_id, exif, gps_accuracy_m, heading_computed FROM pictures WHERE id = %s",
+            [picture_id],
+        ).fetchone()
+        if db_picture is None:
+            raise Exception(f"Picture {picture_id} not found")
+
+        if read_file:
+            pic_path = getHDPicturePath(picture_id)
+
+            with current_app.config["FILESYSTEMS"].permanent.openbin(pic_path) as picture_bytes:
+                new_metadata = reader.readPictureMetadata(picture_bytes.read())
+        else:
+            new_metadata = reader.getPictureMetadata(db_picture["exif"], db_picture["metadata"]["width"], db_picture["metadata"]["height"])
+
+        # we want to only updates values that have changed
+        fields_to_update, params = _get_metadata_to_update(db_picture, new_metadata)
+
+        if not fields_to_update:
+            logging.debug(f"No metadata update needed for picture {picture_id}")
+            return False
+
+        conn.execute(
+            sql.SQL("UPDATE pictures SET {f} WHERE id = %(pic_id)s").format(f=sql.SQL(", ").join(fields_to_update)),
+            params | {"pic_id": picture_id},
+        )
+        return True
+
+
 # Note: we don't want to store and expose exif binary fields as they are difficult to use and take a lot of storage in the database (~20% for maker notes only)
 # This list has been queried from real data (cf [this comment](https://gitlab.com/panoramax/server/api/-/merge_requests/241#note_1790580636)).
 # Update this list (and do a sql migration) if new binary fields are added
geovisio/utils/semantics.py
CHANGED

@@ -22,6 +22,7 @@ class EntityType(Enum):
     pic = "picture_id"
     seq = "sequence_id"
     annotation = "annotation_id"
+    upload_set = "upload_set_id"
 
 
 @dataclass

@@ -37,10 +38,12 @@ class Entity:
                 return Identifier("sequences_semantics")
             case EntityType.annotation:
                 return Identifier("annotations_semantics")
+            case EntityType.upload_set:
+                return Identifier("upload_sets_semantics")
             case _:
                 raise ValueError(f"Unknown entity type: {self.type}")
 
-    def get_history_table(self) -> Identifier:
+    def get_history_table(self) -> Optional[Identifier]:
         match self.type:
             case EntityType.pic:
                 return Identifier("pictures_semantics_history")

@@ -48,6 +51,8 @@ class Entity:
                 return Identifier("sequences_semantics_history")
             case EntityType.annotation:
                 return Identifier("pictures_semantics_history")
+            case EntityType.upload_set:
+                return None
             case _:
                 raise ValueError(f"Unknown entity type: {self.type}")
 

@@ -77,14 +82,34 @@ AND ({filter})"""
         params,
     )
     if tag_to_add:
+        fields = [Identifier(entity.type.value), Identifier("key"), Identifier("value")]
+        if entity.type == EntityType.upload_set:
+            # upload_set semantics have no history, the account is directly stored in the table
+            fields.append(Identifier("account_id"))
+
         with cursor.copy(
             SQL("COPY {table} ({fields}) FROM STDIN").format(
                 table=table_name,
-                fields=SQL(",").join(
+                fields=SQL(",").join(fields),
             )
         ) as copy:
             for tag in tag_to_add:
-
+                row = [entity.id, tag.key, tag.value]
+                if entity.type == EntityType.upload_set:
+                    row.append(account)
+                copy.write_row(row)
+    if tag_to_delete and entity.type == EntityType.annotation and not tag_to_add:
+        # if tags have been deleted, we check if some annotations are now empty and need to be deleted
+        cursor.execute(
+            """DELETE FROM annotations
+            WHERE id = %(annotation_id)s AND
+            (
+                SELECT count(*) AS nb_semantics
+                FROM annotations_semantics
+                WHERE annotation_id = %(annotation_id)s
+            ) = 0""",
+            {"annotation_id": entity.id},
+        )
     if tag_to_add or tag_to_delete:
         # we track the history changes of the semantic tags
         track_semantic_history(cursor, entity, actions, account, annotation)

@@ -100,6 +125,10 @@ class SemanticTagUpdateOnAnnotation(SemanticTagUpdate):
 
 
 def track_semantic_history(cursor: Cursor, entity: Entity, actions: List[SemanticTagUpdate], account: UUID, annotation):
+    history_table = entity.get_history_table()
+    if history_table is None:
+        # no history for upload_set semantics
+        return
     params = {
         "account_id": account,
     }
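
A condensed restatement of the conditional COPY the diff introduces: upload-set tags carry the acting account inline because they have no history table. Written here as a standalone helper so it can be read outside the surrounding function (the cursor, table identifier, and tag pairs are supplied by the caller, and the upload-set case is an explicit flag rather than an EntityType check):

```python
# Standalone sketch mirroring the conditional COPY above; identifiers come from the diff,
# the helper signature itself is illustrative.
from typing import Iterable, Optional
from uuid import UUID
from psycopg import Cursor
from psycopg.sql import SQL, Identifier


def copy_semantic_tags(
    cursor: Cursor,
    table: Identifier,
    entity_column: str,          # e.g. "picture_id" or "upload_set_id"
    entity_id: UUID,
    tags: Iterable[tuple[str, str]],
    account_id: Optional[UUID],
    store_account_inline: bool,  # True for upload sets, which have no history table
) -> None:
    fields = [Identifier(entity_column), Identifier("key"), Identifier("value")]
    if store_account_inline:
        fields.append(Identifier("account_id"))
    with cursor.copy(
        SQL("COPY {table} ({fields}) FROM STDIN").format(table=table, fields=SQL(",").join(fields))
    ) as copy:
        for key, value in tags:
            row = [entity_id, key, value]
            if store_account_inline:
                row.append(account_id)
            copy.write_row(row)
```
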
geovisio/utils/sentry.py
CHANGED

@@ -73,7 +73,7 @@ def _wrap_cursor_execute(f):
 
 
 class FileSystemIntegration(Integration):
-    """Add metrics to the 2 most
+    """Add metrics to the 2 most useful filesystem, the 'os file' filesystem and the s3 filesystem"""
 
     identifier = "filesystem"
 