geovisio 2.6.0__py3-none-any.whl → 2.7.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +36 -7
- geovisio/admin_cli/db.py +1 -4
- geovisio/config_app.py +40 -1
- geovisio/db_migrations.py +24 -3
- geovisio/templates/main.html +13 -13
- geovisio/templates/viewer.html +3 -3
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +667 -0
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +730 -0
- geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/es/LC_MESSAGES/messages.po +778 -0
- geovisio/translations/fi/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fi/LC_MESSAGES/messages.po +589 -0
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +814 -0
- geovisio/translations/ko/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ko/LC_MESSAGES/messages.po +685 -0
- geovisio/translations/messages.pot +686 -0
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +594 -0
- geovisio/utils/__init__.py +1 -1
- geovisio/utils/auth.py +50 -11
- geovisio/utils/db.py +65 -0
- geovisio/utils/excluded_areas.py +83 -0
- geovisio/utils/extent.py +30 -0
- geovisio/utils/fields.py +1 -1
- geovisio/utils/filesystems.py +0 -1
- geovisio/utils/link.py +14 -0
- geovisio/utils/params.py +20 -0
- geovisio/utils/pictures.py +92 -68
- geovisio/utils/reports.py +171 -0
- geovisio/utils/sequences.py +264 -126
- geovisio/utils/tokens.py +37 -42
- geovisio/utils/upload_set.py +654 -0
- geovisio/web/auth.py +37 -37
- geovisio/web/collections.py +286 -302
- geovisio/web/configuration.py +14 -0
- geovisio/web/docs.py +241 -14
- geovisio/web/excluded_areas.py +377 -0
- geovisio/web/items.py +156 -108
- geovisio/web/map.py +20 -20
- geovisio/web/params.py +69 -26
- geovisio/web/pictures.py +14 -31
- geovisio/web/reports.py +399 -0
- geovisio/web/rss.py +13 -7
- geovisio/web/stac.py +129 -134
- geovisio/web/tokens.py +98 -109
- geovisio/web/upload_set.py +768 -0
- geovisio/web/users.py +100 -73
- geovisio/web/utils.py +28 -9
- geovisio/workers/runner_pictures.py +252 -204
- {geovisio-2.6.0.dist-info → geovisio-2.7.0.dist-info}/METADATA +16 -13
- geovisio-2.7.0.dist-info/RECORD +66 -0
- geovisio-2.6.0.dist-info/RECORD +0 -41
- {geovisio-2.6.0.dist-info → geovisio-2.7.0.dist-info}/LICENSE +0 -0
- {geovisio-2.6.0.dist-info → geovisio-2.7.0.dist-info}/WHEEL +0 -0
geovisio/utils/auth.py
CHANGED

@@ -1,12 +1,17 @@
 import flask
 from flask import current_app, url_for, session, redirect, request
+from flask_babel import gettext as _
 from functools import wraps
 from authlib.integrations.flask_client import OAuth
 from dataclasses import dataclass
 from abc import ABC, abstractmethod
 from typing import Any
 from typing import Optional
+from enum import Enum
+from pydantic import BaseModel, ConfigDict, Field
 import sentry_sdk
+from psycopg.rows import dict_row
+from geovisio.utils import db


 ACCOUNT_KEY = "account" # Key in flask's session with the account's information

@@ -144,12 +149,45 @@ def make_auth(app):
     return oauth


-
-
+class AccountRole(Enum):
+    user = "user"
+    admin = "admin"
+
+
+class Account(BaseModel):
     id: str
     name: str
-    oauth_provider: str
-    oauth_id: str
+    oauth_provider: Optional[str] = None
+    oauth_id: Optional[str] = None
+
+    model_config = ConfigDict(extra="forbid")
+
+    def __init__(self, role: Optional[AccountRole] = None, **kwargs) -> None:
+        super().__init__(**kwargs)
+        self.role = role
+
+    # Note: this field is excluded since we do not want to persist it in the cookie. It will be fetched from the database if needed
+    # and accessed though the `role` property
+    role_: Optional[AccountRole] = Field(default=None, exclude=True)
+
+    def can_check_reports(self):
+        """Is account legitimate to read any report ?"""
+        return self.role == AccountRole.admin
+
+    def can_edit_excluded_areas(self):
+        """Is account legitimate to read and edit excluded areas ?"""
+        return self.role == AccountRole.admin
+
+    @property
+    def role(self) -> AccountRole:
+        if self.role_ is None:
+            role = db.fetchone(current_app, "SELECT role FROM accounts WHERE id = %s", (self.id,), row_factory=dict_row)
+            self.role_ = AccountRole(role["role"])
+        return self.role_
+
+    @role.setter
+    def role(self, r: AccountRole) -> None:
+        self.role_ = r


 def login_required():

@@ -160,7 +198,7 @@ def login_required():
         def decorator(*args, **kwargs):
             account = get_current_account()
             if not account:
-                return flask.abort(flask.make_response(flask.jsonify(message="Authentication is mandatory"), 401))
+                return flask.abort(flask.make_response(flask.jsonify(message=_("Authentication is mandatory")), 401))
             kwargs["account"] = account

             return f(*args, **kwargs)

@@ -236,7 +274,7 @@ class UnknowAccountException(Exception):
     status_code = 401

     def __init__(self):
-        msg =
+        msg = "No account with this oauth id is know, you should login first"
         super().__init__(msg)


@@ -244,7 +282,7 @@ class LoginRequiredException(Exception):
     status_code = 401

     def __init__(self):
-        msg =
+        msg = "You should login to request this API"
         super().__init__(msg)


@@ -259,9 +297,10 @@ def get_current_account():
         Account: the current logged account, None if nobody is logged
     """
     if ACCOUNT_KEY in session:
-
+        a = session[ACCOUNT_KEY]
+        session_account = Account(**a)

-        sentry_sdk.set_user(session_account.
+        sentry_sdk.set_user(session_account.model_dump(exclude_none=True))
         return session_account

     bearer_token = _get_bearer_token()

@@ -269,7 +308,7 @@ def get_current_account():
         from geovisio.utils import tokens

         a = tokens.get_account_from_jwt_token(bearer_token)
-        sentry_sdk.set_user(a.
+        sentry_sdk.set_user(a.model_dump(exclude_none=True))
         return a

     return None

@@ -288,5 +327,5 @@ def _get_bearer_token() -> Optional[str]:
     if not auth_header.startswith("Bearer "):
         from geovisio.utils.tokens import InvalidTokenException

-        raise InvalidTokenException("Only Bearer token are supported")
+        raise InvalidTokenException(_("Only Bearer token are supported"))
     return auth_header.split(" ")[1]

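For orientation, a minimal sketch of how the new role-aware Account can guard an admin-only endpoint. The blueprint, route and messages below are invented for illustration; only login_required, Account.role and can_check_reports come from the diff above.

import flask

from geovisio.utils.auth import login_required

# hypothetical blueprint, not part of geovisio
bp = flask.Blueprint("admin_example", __name__)


@bp.route("/api/example/reports_overview")
@login_required()
def reports_overview(account):
    # account.role is read lazily from the accounts table on first access and is
    # excluded from the serialized session data (see role_ with exclude=True above)
    if not account.can_check_reports():
        flask.abort(flask.make_response(flask.jsonify(message="Only admins can check reports"), 403))
    return flask.jsonify(status="ok")
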
geovisio/utils/db.py
ADDED

@@ -0,0 +1,65 @@
+from psycopg_pool import ConnectionPool
+from contextlib import contextmanager
+from typing import Optional
+
+
+def create_db_pool(app):
+    """
+    Create Database connection pool
+
+    Note: all returned connections are autocommit connection. If it's not the wanted behavior, wrap the query in an explicit transaction, or acquire a connection outside of the pool.
+    """
+    if hasattr(app, "pool"):
+        return
+    min_size = int(app.config["DB_MIN_CNX"])
+    max_size = int(app.config["DB_MAX_CNX"])
+    statement_timeout = app.config["DB_STATEMENT_TIMEOUT"]
+    args = {"autocommit": True}
+    if statement_timeout > 0:
+        args["options"] = f"-c statement_timeout={statement_timeout}"
+    app.pool = ConnectionPool(conninfo=app.config["DB_URL"], min_size=min_size, max_size=max_size, open=True, kwargs=args)
+    # add also a connection pool without timeout for queries that are known to be long
+    # This is useful for example for refreshing the pictures_grid materialized view
+    app.long_queries_pool = ConnectionPool(
+        conninfo=app.config["DB_URL"], min_size=0, max_size=max_size, open=True, kwargs={"autocommit": True}
+    )
+
+
+@contextmanager
+def conn(app, timeout: Optional[float] = None):
+    """Get a psycopg connection from the connection pool"""
+    with app.pool.connection(timeout=timeout) as conn:
+        yield conn
+
+
+@contextmanager
+def cursor(app, timeout: Optional[float] = None, **kwargs):
+    """Get a psycopg cursor from the connection pool"""
+    with app.pool.connection(timeout=timeout) as conn:
+        yield conn.cursor(**kwargs)
+
+
+@contextmanager
+def execute(app, sql, params=None, timeout: Optional[float] = None, **kwargs):
+    """Simple helpers to simplify simple calls to get a cursor and execute a query on it"""
+    with cursor(app, timeout=timeout, **kwargs) as c:
+        yield c.execute(sql, params=params)
+
+
+def fetchone(app, sql, params=None, timeout: Optional[float] = None, **kwargs):
+    """Simple helpers to simplify simple calls to fetchone"""
+    with execute(app, sql, params, timeout=timeout, **kwargs) as q:
+        return q.fetchone()
+
+
+def fetchall(app, sql, params=None, timeout: Optional[float] = None, **kwargs):
+    """Simple helpers to simplify simple calls to fetchall"""
+    with execute(app, sql, params, timeout=timeout, **kwargs) as q:
+        return q.fetchall()
+
+
+@contextmanager
+def long_queries_conn(app, connection_timeout: Optional[float] = None):
+    """Get a psycopg connection for queries that are known to be long from the connection pool"""
+    with app.long_queries_pool.connection(timeout=connection_timeout) as conn:
+        yield conn

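A short usage sketch of these helpers, assuming an app whose pool was created with create_db_pool and the DB_* settings above; the queries themselves are illustrative (pictures_grid is only known from the comment above).

from flask import current_app
from psycopg.rows import dict_row

from geovisio.utils import db


def count_pictures() -> int:
    # fetchone() borrows a pooled autocommit connection, runs the query and returns a single row
    row = db.fetchone(current_app, "SELECT count(*) AS nb FROM pictures", row_factory=dict_row)
    return row["nb"]


def refresh_pictures_grid() -> None:
    # long maintenance queries go through the pool that has no statement_timeout
    with db.long_queries_conn(current_app) as conn:
        conn.execute("REFRESH MATERIALIZED VIEW pictures_grid")
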
geovisio/utils/excluded_areas.py
ADDED

@@ -0,0 +1,83 @@
+from uuid import UUID
+from typing import Optional
+from pydantic import BaseModel, ConfigDict
+from geojson_pydantic import MultiPolygon, FeatureCollection, Feature
+from geovisio.utils import db
+from geovisio.errors import InvalidAPIUsage
+from flask import current_app
+from flask_babel import gettext as _
+from psycopg.sql import SQL, Literal
+from psycopg.rows import class_row
+
+
+class ExcludedArea(BaseModel):
+    """An excluded area is a geographical boundary where pictures should not be accepted."""
+
+    id: UUID
+    label: Optional[str] = None
+    is_public: bool = False
+    account_id: Optional[UUID] = None
+
+    model_config = ConfigDict()
+
+
+ExcludedAreaFeature = Feature[MultiPolygon, ExcludedArea]
+ExcludedAreaFeatureCollection = FeatureCollection[ExcludedAreaFeature]
+
+
+def get_excluded_area(id: UUID) -> Optional[ExcludedAreaFeature]:
+    """Get the excluded area corresponding to the ID"""
+    return db.fetchone(
+        current_app,
+        SQL(
+            """SELECT id, label, is_public, account_id, ST_AsGeoJSON(geom) AS geometry
+            FROM excluded_area
+            WHERE id = %(id)s"""
+        ),
+        {"id": id},
+        row_factory=class_row(ExcludedAreaFeature),
+    )
+
+
+def list_excluded_areas(is_public: Optional[bool] = None, account_id: Optional[UUID] = None) -> ExcludedAreaFeatureCollection:
+    where = [Literal(True)]
+    if is_public is not None:
+        where.append(SQL("is_public IS {}").format(Literal(is_public)))
+    if account_id:
+        where.append(SQL("account_id = {}").format(Literal(account_id)))
+
+    areas = db.fetchall(
+        current_app,
+        SQL(
+            """SELECT
+                'Feature' as type,
+                json_build_object(
+                    'id', id,
+                    'label', label,
+                    'is_public', is_public,
+                    'account_id', account_id
+                ) as properties,
+                ST_AsGeoJSON(geom)::json as geometry
+            FROM excluded_areas
+            WHERE {}"""
+        ).format(SQL(" AND ").join(where)),
+        row_factory=class_row(ExcludedAreaFeature),
+    )
+
+    return ExcludedAreaFeatureCollection(type="FeatureCollection", features=areas)
+
+
+def delete_excluded_area(areaId: UUID, accountId: Optional[UUID] = None):
+    where = [SQL("id = {}").format(Literal(areaId))]
+    if accountId is not None:
+        where.append(SQL("account_id = {}").format(accountId))
+
+    with db.execute(
+        current_app,
+        SQL("DELETE FROM excluded_areas WHERE {}").format(SQL(" AND ").join(where)),
+    ) as res:
+        area_deleted = res.rowcount
+
+    if not area_deleted:
+        raise InvalidAPIUsage(_("Impossible to find excluded area"), status_code=404)
+    return "", 204

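As a usage sketch, a hypothetical endpoint exposing only the public excluded areas; the blueprint and route are invented, the helper and the GeoJSON models come from the file above.

import flask

from geovisio.utils import excluded_areas

# hypothetical blueprint, not part of geovisio
bp = flask.Blueprint("excluded_areas_example", __name__)


@bp.route("/api/example/excluded_areas")
def public_excluded_areas():
    # list_excluded_areas returns a geojson_pydantic FeatureCollection of MultiPolygon features
    collection = excluded_areas.list_excluded_areas(is_public=True)
    return flask.Response(collection.model_dump_json(exclude_none=True), content_type="application/geo+json")
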
geovisio/utils/extent.py
ADDED

@@ -0,0 +1,30 @@
+from typing import List, Optional
+from datetime import datetime
+from pydantic import BaseModel
+
+
+class Temporal(BaseModel):
+    """Temporal extent"""
+
+    interval: List[List[datetime]]
+    """Interval"""
+
+
+class Spatial(BaseModel):
+    """Spatial extent"""
+
+    bbox: List[List[float]]
+    """Bounding box"""
+
+
+class Extent(BaseModel):
+    """Spatio-temporal extents"""
+
+    temporal: Optional[Temporal]
+    spatial: Optional[Spatial]
+
+
+class TemporalExtent(BaseModel):
+    """Temporal extents (without spatial extent)"""
+
+    temporal: Optional[Temporal]

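A purely illustrative construction of these models (coordinates and dates are made up); model_dump() produces the nested spatial/temporal structure, similar to a STAC collection extent.

from datetime import datetime, timezone

from geovisio.utils.extent import Extent, Spatial, Temporal

extent = Extent(
    spatial=Spatial(bbox=[[-5.14, 41.33, 9.56, 51.09]]),
    temporal=Temporal(interval=[[datetime(2023, 1, 1, tzinfo=timezone.utc), datetime(2024, 1, 1, tzinfo=timezone.utc)]]),
)
print(extent.model_dump())  # {'temporal': {'interval': [[...]]}, 'spatial': {'bbox': [[...]]}}
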
geovisio/utils/fields.py
CHANGED
geovisio/utils/filesystems.py
CHANGED
geovisio/utils/link.py
ADDED

@@ -0,0 +1,14 @@
+from pydantic import BaseModel
+from typing import Optional
+from flask import url_for
+
+
+class Link(BaseModel):
+    rel: str
+    type: str
+    title: Optional[str]
+    href: str
+
+
+def make_link(rel: str, route: str, title: Optional[str] = None, type: str = "application/json", **args):
+    return Link(rel=rel, type=type, title=title, href=url_for(route, **args, _external=True))

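A sketch of make_link in use; it has to run inside a request (or app) context because it relies on url_for with _external=True. The app and route below are invented for the example, only make_link and Link come from the file above.

import flask

from geovisio.utils.link import make_link

# hypothetical app and route, not part of geovisio
app = flask.Flask(__name__)


@app.route("/api/example/items/<item_id>")
def get_item(item_id):
    links = [make_link(rel="self", route="get_item", title="This item", item_id=item_id)]
    # Link is a pydantic model, so it serializes cleanly into a STAC-like "links" array
    return flask.jsonify(links=[l.model_dump() for l in links])
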
geovisio/utils/params.py
ADDED

@@ -0,0 +1,20 @@
+from pydantic import ValidationError
+
+
+def validation_error(e: ValidationError):
+    """Transform a pydantic error to user friendly error, meant to be used as `payload` of a geovisio.error"""
+
+    details = []
+    for d in e.errors():
+        detail = {
+            "fields": d["loc"],
+            "error": d["msg"],
+        }
+        if d["input"]:
+            detail["input"] = d["input"]
+            if "user_agent" in detail["input"]:
+                del detail["input"]["user_agent"]
+            if len(detail["input"]) == 0:
+                del detail["input"]
+        details.append(detail)
+    return {"details": details}

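A sketch of how validation_error is meant to be consumed, following its docstring; the SearchParams model is invented, and passing the result as InvalidAPIUsage's payload keyword is an assumption based on that docstring.

from typing import Optional

from pydantic import BaseModel, ValidationError

from geovisio import errors
from geovisio.utils.params import validation_error


class SearchParams(BaseModel):
    limit: int = 100
    bbox: Optional[str] = None


def parse_search_params(args: dict) -> SearchParams:
    try:
        return SearchParams(**args)
    except ValidationError as e:
        # validation_error(e) yields {"details": [{"fields": ..., "error": ..., "input": ...}]}
        # the payload keyword is an assumption based on the docstring above
        raise errors.InvalidAPIUsage("Impossible to parse parameters", payload=validation_error(e), status_code=400)
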
geovisio/utils/pictures.py
CHANGED

@@ -1,8 +1,9 @@
 import math
 from typing import Dict, Optional
+from uuid import UUID
 from flask import current_app, redirect, send_file
+from flask_babel import gettext as _
 import os
-import psycopg
 from psycopg.rows import dict_row
 import requests
 from PIL import Image

@@ -11,9 +12,9 @@ import fs.base
 import logging
 from dataclasses import asdict
 from fs.path import dirname
-from
-from psycopg.errors import UniqueViolation
+from psycopg.errors import UniqueViolation, InvalidParameterValue
 from geovisio import utils, errors
+from geopic_tag_reader import reader

 log = logging.getLogger(__name__)

@@ -259,7 +260,7 @@ def generatePictureDerivates(fs, picture, sizing, outputFolder, type="equirectan
     return True


-def removeAllFiles(picId:
+def removeAllFiles(picId: UUID):
    """
    Remove all picture's associated files (the picture and all its derivate)
    """

@@ -293,10 +294,15 @@ def _remove_empty_parent_dirs(fs: fs.base.FS, dir: str):
 def checkFormatParam(format):
     """Verify that user asks for a valid image format"""

-    valid = ["jpg"
+    valid = ["jpg"]
     if format not in valid:
         raise errors.InvalidAPIUsage(
-
+            _(
+                "Invalid '%(format)s' format for image, only the following formats are available: %(allowed_formats)s",
+                format=format,
+                allowed_formats=", ".join(valid),
+            ),
+            status_code=404,
         )


@@ -308,11 +314,9 @@ def sendInFormat(picture, picFormat, httpFormat):

     if picFormat == httpFormat:
         return send_file(picture, mimetype="image/" + httpFormat)
-
-
-
-        imgio.seek(0)
-        return send_file(imgio, mimetype="image/" + httpFormat)
+
+    # We do not want on the fly conversions
+    raise errors.InvalidAPIUsage("Picture is not available in this format", status_code=404)


 def getPublicDerivatePictureExternalUrl(pictureId: str, format: str, derivateFileName: str) -> Optional[str]:

@@ -390,9 +394,9 @@ def checkPictureStatus(fses, pictureId):
     account = utils.auth.get_current_account()
     accountId = account.id if account is not None else None
     # Check picture availability + status
-
-
-
+    picMetadata = utils.db.fetchone(
+        current_app,
+        """
         SELECT
             p.status,
             (p.metadata->>'cols')::int AS cols,

@@ -405,41 +409,42 @@ def checkPictureStatus(fses, pictureId):
         JOIN sequences s ON s.id = sp.seq_id
         WHERE p.id = %s
         """,
-
-
+        [pictureId],
+        row_factory=dict_row,
+    )

-
-
+    if picMetadata is None:
+        raise errors.InvalidAPIUsage(_("Picture can't be found, you may check its ID"), status_code=404)

-
-
+    if (picMetadata["status"] != "ready" or picMetadata["seq_status"] != "ready") and accountId != str(picMetadata["account_id"]):
+        raise errors.InvalidAPIUsage(_("Picture is not available (either hidden by admin or processing)"), status_code=403)

-
-
-
+    if current_app.config.get("PICTURE_PROCESS_DERIVATES_STRATEGY") == "PREPROCESS":
+        # if derivates are always generated, not need for other checks
+        return picMetadata

-
-
-
+    # Check original image availability
+    if not fses.permanent.exists(utils.pictures.getHDPicturePath(pictureId)):
+        raise errors.InvalidAPIUsage(_("HD Picture file is not available"), status_code=500)

-
-
-
-
-
+    # Check derivates availability
+    if areDerivatesAvailable(fses.derivates, pictureId, picMetadata["type"]):
+        return picMetadata
+    else:
+        picDerivates = utils.pictures.getPictureFolderPath(pictureId)

-
-
+        # Try to create derivates folder if it doesn't exist yet
+        fses.derivates.makedirs(picDerivates, recreate=True)

-
+        picture = Image.open(fses.permanent.openbin(utils.pictures.getHDPicturePath(pictureId)))

-
-
-
-
-
-
-
+        # Force generation of derivates
+        if utils.pictures.generatePictureDerivates(
+            fses.derivates, picture, utils.pictures.getPictureSizing(picture), picDerivates, picMetadata["type"]
+        ):
+            return picMetadata
+        else:
+            raise errors.InvalidAPIUsage(_("Picture derivates file are not available"), status_code=500)


 def sendThumbnail(pictureId, format):

@@ -456,7 +461,7 @@ def sendThumbnail(pictureId, format):
     try:
         picture = fses.derivates.openbin(utils.pictures.getPictureFolderPath(pictureId) + "/thumb.jpg")
     except:
-        raise errors.InvalidAPIUsage("Unable to read picture on filesystem", status_code=500)
+        raise errors.InvalidAPIUsage(_("Unable to read picture on filesystem"), status_code=500)

     return sendInFormat(picture, "jpeg", format)

@@ -492,13 +497,21 @@ class PicturePositionConflict(Exception):
         super().__init__()


+class InvalidMetadataValue(Exception):
+    def __init__(self, details):
+        super().__init__()
+        self.details = details
+
+
 class MetadataReadingError(Exception):
     def __init__(self, details):
         super().__init__()
         self.details = details


-def insertNewPictureInDatabase(
+def insertNewPictureInDatabase(
+    db, sequenceId, position, pictureBytes, associatedAccountID, additionalMetadata, uploadSetID=None, lang="en"
+):
     """Inserts a new 'pictures' entry in the database, from a picture file.
     Database is not committed in this function, to make entry definitively stored
     you have to call db.commit() after or use an autocommit connection.

@@ -526,7 +539,7 @@ def insertNewPictureInDatabase(db, sequenceId, position, pictureBytes, associate

     # Create a fully-featured metadata object
     picturePillow = Image.open(io.BytesIO(pictureBytes))
-    metadata = readPictureMetadata(pictureBytes) | utils.pictures.getPictureSizing(picturePillow) |
+    metadata = readPictureMetadata(pictureBytes, lang) | utils.pictures.getPictureSizing(picturePillow) | additionalMetadata

     # Remove cols/rows information for flat pictures
     if metadata["type"] == "flat":

@@ -534,31 +547,42 @@ def insertNewPictureInDatabase(db, sequenceId, position, pictureBytes, associate
         metadata.pop("rows")

     # Create a lighter metadata field to remove duplicates fields
-    lighterMetadata = dict(
+    lighterMetadata = dict(
+        filter(lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source"], metadata.items())
+    )
     if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
         del lighterMetadata["tagreader_warnings"]
     lighterMetadata["tz"] = metadata["ts"].tzname()
+    if metadata.get("ts_by_source", {}).get("gps") is not None:
+        lighterMetadata["ts_gps"] = metadata["ts_by_source"]["gps"].isoformat()
+    if metadata.get("ts_by_source", {}).get("camera") is not None:
+        lighterMetadata["ts_camera"] = metadata["ts_by_source"]["camera"].isoformat()

     exif = cleanupExif(metadata["exif"])

     with db.transaction():
         # Add picture metadata to database
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
-
+        try:
+            picId = db.execute(
+                """
+                INSERT INTO pictures (ts, heading, metadata, geom, account_id, exif, original_content_md5, upload_set_id)
+                VALUES (%s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s, %s, %s, %s)
+                RETURNING id
+                """,
+                (
+                    metadata["ts"].isoformat(),
+                    metadata["heading"],
+                    Jsonb(lighterMetadata),
+                    metadata["lon"],
+                    metadata["lat"],
+                    associatedAccountID,
+                    Jsonb(exif),
+                    metadata.get("originalContentMd5"),
+                    uploadSetID,
+                ),
+            ).fetchone()[0]
+        except InvalidParameterValue as e:
+            raise InvalidMetadataValue(e.diag.message_primary) from e

         # Process field of view for each pictures
         # Flat pictures = variable fov

@@ -594,11 +618,11 @@ def insertNewPictureInDatabase(db, sequenceId, position, pictureBytes, associate
             """,
             [picId],
         )
-
-
-
-
-
+        if sequenceId is not None:
+            try:
+                db.execute("INSERT INTO sequences_pictures(seq_id, rank, pic_id) VALUES(%s, %s, %s)", [sequenceId, position, picId])
+            except UniqueViolation as e:
+                raise PicturePositionConflict() from e

     return picId

@@ -635,7 +659,7 @@ BLACK_LISTED_BINARY_EXIF_FIELDS = set(
 )


-def readPictureMetadata(picture: bytes) -> dict:
+def readPictureMetadata(picture: bytes, lang: Optional[str] = "en") -> dict:
     """Extracts metadata from picture file

     Parameters

@@ -652,7 +676,7 @@ def readPictureMetadata(picture: bytes) -> dict:
     """

     try:
-        metadata = asdict(reader.readPictureMetadata(picture))
+        metadata = asdict(reader.readPictureMetadata(picture, lang))
     except Exception as e:
         raise MetadataReadingError(details=str(e))

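Finally, an illustrative sketch of calling the updated insertNewPictureInDatabase and mapping its new exceptions to API errors; the wrapper function, connection handling and HTTP status codes are assumptions, while the signature and exception classes come from the hunks above.

from flask import current_app

from geovisio import errors
from geovisio.utils import db, pictures


def store_picture(sequence_id, position, picture_bytes, account_id, lang="en"):
    # pooled autocommit connection; insertNewPictureInDatabase opens its own transaction on it
    with db.conn(current_app) as conn:
        try:
            return pictures.insertNewPictureInDatabase(conn, sequence_id, position, picture_bytes, account_id, {}, lang=lang)
        except pictures.InvalidMetadataValue as e:
            raise errors.InvalidAPIUsage(e.details, status_code=400)
        except pictures.MetadataReadingError as e:
            raise errors.InvalidAPIUsage(e.details, status_code=400)
        except pictures.PicturePositionConflict:
            raise errors.InvalidAPIUsage("Picture already exists at this position", status_code=409)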