geovisio 2.9.0__py3-none-any.whl → 2.10.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +6 -1
- geovisio/config_app.py +5 -5
- geovisio/translations/ar/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ar/LC_MESSAGES/messages.po +818 -0
- geovisio/translations/br/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/da/LC_MESSAGES/messages.po +4 -3
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +55 -2
- geovisio/translations/el/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +193 -139
- geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/eo/LC_MESSAGES/messages.po +53 -4
- geovisio/translations/es/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/fi/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +91 -3
- geovisio/translations/hu/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/it/LC_MESSAGES/messages.po +63 -3
- geovisio/translations/ja/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/ko/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/messages.pot +185 -129
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +292 -63
- geovisio/translations/oc/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/oc/LC_MESSAGES/messages.po +818 -0
- geovisio/translations/pl/LC_MESSAGES/messages.po +1 -1
- geovisio/translations/sv/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/sv/LC_MESSAGES/messages.po +4 -3
- geovisio/translations/ti/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ti/LC_MESSAGES/messages.po +762 -0
- geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +1 -1
- geovisio/utils/annotations.py +14 -17
- geovisio/utils/auth.py +14 -13
- geovisio/utils/cql2.py +2 -2
- geovisio/utils/fields.py +14 -2
- geovisio/utils/items.py +44 -0
- geovisio/utils/model_query.py +2 -2
- geovisio/utils/pic_shape.py +1 -1
- geovisio/utils/pictures.py +111 -18
- geovisio/utils/semantics.py +32 -3
- geovisio/utils/sentry.py +1 -1
- geovisio/utils/sequences.py +51 -34
- geovisio/utils/upload_set.py +285 -198
- geovisio/utils/website.py +1 -1
- geovisio/web/annotations.py +209 -68
- geovisio/web/auth.py +1 -1
- geovisio/web/collections.py +26 -22
- geovisio/web/configuration.py +24 -4
- geovisio/web/docs.py +93 -11
- geovisio/web/items.py +197 -121
- geovisio/web/params.py +44 -31
- geovisio/web/pictures.py +34 -0
- geovisio/web/tokens.py +49 -1
- geovisio/web/upload_set.py +150 -32
- geovisio/web/users.py +4 -4
- geovisio/web/utils.py +2 -2
- geovisio/workers/runner_pictures.py +128 -23
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/METADATA +13 -13
- geovisio-2.10.0.dist-info/RECORD +105 -0
- geovisio-2.9.0.dist-info/RECORD +0 -98
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/WHEEL +0 -0
- {geovisio-2.9.0.dist-info → geovisio-2.10.0.dist-info}/licenses/LICENSE +0 -0
geovisio/web/params.py
CHANGED
@@ -14,6 +14,7 @@ from psycopg import sql
 from geovisio.utils.sequences import STAC_FIELD_MAPPINGS, STAC_FIELD_TO_SQL_FILTER
 from geovisio.utils.fields import SortBy, SQLDirection, SortByField
 from flask_babel import gettext as _
+from geovisio.utils import items as utils_items
 
 from geovisio.utils.cql2 import parse_cql2_filter
 
@@ -43,11 +44,11 @@ def parse_datetime(value, error, fallback_as_UTC=False):
 
     """
     # Hack to parse a date
-    # dateutils know how to parse lots of date, but fail to correctly parse date
+    # dateutils know how to parse lots of date, but fail to correctly parse date formatted by `datetime.isoformat()`
     # (like all the dates returned by the API).
    # datetime.isoformat is like: `2023-06-17T21:22:18.406856+02:00`
-    # dateutils silently fails the parse, and create an
-    # so we first try to parse it like an
+    # dateutils silently fails the parse, and create an incorrect date
+    # so we first try to parse it like an isoformatted date, and if this fails we try the flexible dateutils
     d = None
     try:
         d = datetime.datetime.fromisoformat(value)
@@ -400,7 +401,25 @@ def _alterFilterAst(ast: ast.Node):
     return filtered
 
 
-def
+def _parse_sorty_by(value: Optional[str], field_mapping_func, SortByCls):
+    if not value:
+        return None
+    # Check value pattern
+    if not RGX_SORTBY.match(value):
+        raise errors.InvalidAPIUsage(_("Unsupported sortby parameter: syntax isn't correct"), status_code=400)
+    values = value.split(",")
+    orders = []
+    for v in values:
+        direction = SQLDirection.DESC if v.startswith("-") else SQLDirection.ASC
+        raw_field = v.lstrip("+-")
+        f = field_mapping_func(raw_field, direction)
+
+        orders.append(f)
+
+    return SortByCls(fields=orders)
+
+
+def parse_collection_sortby(value: Optional[str]) -> Optional[SortBy]:
     """Reads STAC/OGC sortby parameter, and sends a SQL ORDER BY string.
 
     Parameters
@@ -416,46 +435,40 @@ def parse_sortby(value: Optional[str]) -> Optional[SortBy]:
 
     None if no sort by is found
 
-    >>>
-    >>>
-    >>>
+    >>> parse_collection_sortby(None)
+    >>> parse_collection_sortby("")
+    >>> parse_collection_sortby('updated')
     SortBy(fields=[SortByField(field=FieldMapping(sql_column=SQL('updated_at'), stac='updated'), direction=<SQLDirection.ASC: SQL('ASC')>)])
-    >>>
+    >>> parse_collection_sortby('+created')
     SortBy(fields=[SortByField(field=FieldMapping(sql_column=SQL('inserted_at'), stac='created'), direction=<SQLDirection.ASC: SQL('ASC')>)])
-    >>>
+    >>> parse_collection_sortby('-created')
     SortBy(fields=[SortByField(field=FieldMapping(sql_column=SQL('inserted_at'), stac='created'), direction=<SQLDirection.DESC: SQL('DESC')>)])
-    >>>
+    >>> parse_collection_sortby('+updated,-created')
     SortBy(fields=[SortByField(field=FieldMapping(sql_column=SQL('updated_at'), stac='updated'), direction=<SQLDirection.ASC: SQL('ASC')>), SortByField(field=FieldMapping(sql_column=SQL('inserted_at'), stac='created'), direction=<SQLDirection.DESC: SQL('DESC')>)])
-    >>>
+    >>> parse_collection_sortby('invalid') # doctest: +IGNORE_EXCEPTION_DETAIL
     Traceback (most recent call last):
     geovisio.errors.InvalidAPIUsage: Unsupported sortby parameter
-    >>>
+    >>> parse_collection_sortby('~nb') # doctest: +IGNORE_EXCEPTION_DETAIL
     Traceback (most recent call last):
     geovisio.errors.InvalidAPIUsage: Unsupported sortby parameter
     """
 
-
-
-
-
-    orders = []
-    for v in values:
-        direction = SQLDirection.DESC if v.startswith("-") else SQLDirection.ASC
-        vOnly = v.replace("+", "").replace("-", "")
+    def mapping(raw_field: str, direction: SQLDirection):
+        if raw_field not in STAC_FIELD_MAPPINGS:
+            raise errors.InvalidAPIUsage(_("Unsupported sortby parameter: invalid column name"), status_code=400)
+        return SortByField(field=STAC_FIELD_MAPPINGS[raw_field], direction=direction)
 
-
-        if vOnly not in STAC_FIELD_MAPPINGS:
-            raise errors.InvalidAPIUsage(_("Unsupported sortby parameter: invalid column name"), status_code=400)
-        field_mapping = STAC_FIELD_MAPPINGS[vOnly]
+    return _parse_sorty_by(value, mapping, SortByCls=SortBy)
 
-        orders.append(SortByField(field=field_mapping, direction=direction))
 
-
-
-
-
-
-    return
+def parse_item_sortby(value: Optional[str]) -> Optional[utils_items.SortBy]:
+    def mapping(raw_field: str, direction: SQLDirection):
+        if raw_field == "distance_to" or raw_field not in utils_items.SortableItemField.__dict__:
+            # distance to is for the moment only an implicit sort when search a point or in a bbox
+            raise errors.InvalidAPIUsage(_("Unsupported sortby parameter: invalid field"), status_code=400)
+        return utils_items.ItemSortByField(field=utils_items.SortableItemField[raw_field], direction=direction)
+
+    return _parse_sorty_by(value, mapping, SortByCls=utils_items.SortBy)
 
 
 def parse_collections_limit(limit: Optional[str]) -> int:
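The change above factors the old parse_sortby into a shared helper (_parse_sorty_by) plus one field-mapping callback per entity: parse_collection_sortby validates against STAC_FIELD_MAPPINGS, while the new parse_item_sortby validates against SortableItemField. Below is a minimal standalone sketch of that pattern; SortSpec, SortField and COLLECTION_FIELDS are simplified stand-ins for GeoVisio's SortBy, SortByField and STAC_FIELD_MAPPINGS, not the real types.

# Sketch of the shared sortby parser + per-entity field-mapping callback.
# All names below are simplified stand-ins, not GeoVisio's real classes.
import re
from dataclasses import dataclass
from typing import Callable, List, Optional

SORTBY_RE = re.compile(r"^[+-]?[A-Za-z_]+(,[+-]?[A-Za-z_]+)*$")

@dataclass
class SortField:
    field: str
    direction: str  # "ASC" or "DESC"

@dataclass
class SortSpec:
    fields: List[SortField]

def parse_sortby(value: Optional[str], map_field: Callable[[str, str], SortField]) -> Optional[SortSpec]:
    """Generic part: check syntax, split the comma-separated list, detect +/-,
    and delegate field validation to the per-entity callback."""
    if not value:
        return None
    if not SORTBY_RE.match(value):
        raise ValueError("Unsupported sortby parameter: syntax isn't correct")
    fields = []
    for raw in value.split(","):
        direction = "DESC" if raw.startswith("-") else "ASC"
        fields.append(map_field(raw.lstrip("+-"), direction))
    return SortSpec(fields=fields)

# Per-entity part: only the allowed-field lookup differs between entities.
COLLECTION_FIELDS = {"created": "inserted_at", "updated": "updated_at"}

def parse_collection_sortby(value: Optional[str]) -> Optional[SortSpec]:
    def map_field(name: str, direction: str) -> SortField:
        if name not in COLLECTION_FIELDS:
            raise ValueError("Unsupported sortby parameter: invalid column name")
        return SortField(field=COLLECTION_FIELDS[name], direction=direction)
    return parse_sortby(value, map_field)

print(parse_collection_sortby("+updated,-created"))
# SortSpec(fields=[SortField(field='updated_at', direction='ASC'), SortField(field='inserted_at', direction='DESC')])

Keeping the syntax check and +/- handling in one place means adding a new sortable entity only requires a new mapping callback, which is exactly how parse_item_sortby is introduced here.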
geovisio/web/pictures.py
CHANGED
@@ -208,3 +208,37 @@ def getPictureTile(pictureId, col, row, format):
         raise errors.InvalidAPIUsage(_("Unable to read picture on filesystem"), status_code=500)
 
     return utils.pictures.sendInFormat(picture, "jpeg", format)
+
+
+@bp.route("/<uuid:pictureId>")
+def getPictureById(pictureId):
+    """Get picture's STAC definition.
+
+    It's the non-stac alias to the `/api/collections/<collectionId>/items/<itemId>` endpoint (but you don't need to know the collection ID here).
+    ---
+    tags:
+        - Pictures
+    parameters:
+        - name: pictureId
+          in: path
+          description: ID of the picture (called item in STAC) to retrieve
+          required: true
+          schema:
+            type: string
+    responses:
+        102:
+            description: the picture (which is still under process)
+            content:
+                application/geo+json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioItem'
+        200:
+            description: the wanted picture
+            content:
+                application/geo+json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioItem'
+    """
+    from geovisio.web.items import getCollectionItem
+
+    return getCollectionItem(collectionId=None, itemId=pictureId)
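A hypothetical client call for this new alias endpoint follows; the base URL and picture id are placeholders. Per the docstring above, the endpoint returns a GeoVisioItem (a STAC item in application/geo+json) with status 200, or 102 while the picture is still under process.

# Hedged example: fetch a picture's STAC item without knowing its collection.
import requests

BASE_URL = "https://panoramax.example.com"  # placeholder instance URL
PICTURE_ID = "00000000-0000-0000-0000-000000000000"  # placeholder picture UUID

resp = requests.get(f"{BASE_URL}/api/pictures/{PICTURE_ID}", timeout=30)
resp.raise_for_status()
item = resp.json()  # STAC item (GeoJSON Feature) as documented above
print(item.get("id"), item.get("collection"))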
geovisio/web/tokens.py
CHANGED
@@ -22,7 +22,7 @@ def list_tokens(account):
 
     The list of tokens will not contain their JWT counterpart (the JWT is the real token used in authentication).
 
-    The JWT counterpart can be
+    The JWT counterpart can be retrieved by providing the token's id to the endpoint [/users/me/tokens/{token_id}](#/Auth/get_api_users_me_tokens__token_id_).
     ---
     tags:
         - Auth
@@ -254,6 +254,54 @@ def claim_non_associated_token(token_id, account):
     return "You are now logged in the CLI, you can upload your pictures", 200
 
 
+@bp.route("/users/me/tokens", methods=["POST"])
+@auth.login_required_with_redirect()
+def generate_associated_token(account: auth.Account):
+    """
+    Generate a new token associated to the current user
+
+    The response contains the JWT token and is directly usable (unlike tokens created by `/auth/tokens/generate` that are not associated to a user by default). This token does not need to be claimed.
+    ---
+    tags:
+        - Auth
+    requestBody:
+        content:
+            application/json:
+                schema:
+                    $ref: '#/components/schemas/GeovisioPostToken'
+    responses:
+        200:
+            description: The newly generated token
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioEncodedToken'
+    """
+    if request.is_json:
+        description = request.json.get("description", "")
+    else:
+        description = None
+
+    token = db.fetchone(
+        current_app,
+        "INSERT INTO tokens (description, account_id) VALUES (%(description)s, %(account_id)s) RETURNING *",
+        {"account_id": account.id, "description": description},
+        row_factory=dict_row,
+    )
+    if not token:
+        raise errors.InternalError(_("Impossible to generate a new token"))
+
+    jwt_token = _generate_jwt_token(token["id"])
+    return flask.jsonify(
+        {
+            "jwt_token": jwt_token,
+            "id": token["id"],
+            "description": token["description"],
+            "generated_at": token["generated_at"].astimezone(tz.gettz("UTC")).isoformat(),
+        }
+    )
+
+
 def _generate_jwt_token(token_id: uuid.UUID) -> str:
     """
     Generate a JWT token from a token's id.
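A hedged sketch of how a client could use the new POST /api/users/me/tokens endpoint is shown below. The base URL and the already-authenticated session are placeholders, and reusing the returned jwt_token as a Bearer header is an assumption about the instance's auth scheme, not something shown in this diff. The response fields (jwt_token, id, description, generated_at) are the ones returned by the handler above.

# Hedged example: create a user-associated token that needs no claim step.
import requests

BASE_URL = "https://panoramax.example.com"  # placeholder instance URL
session = requests.Session()                # assumed to already carry an authenticated session cookie

resp = session.post(
    f"{BASE_URL}/api/users/me/tokens",
    json={"description": "token for my upload script"},
    timeout=30,
)
resp.raise_for_status()
token = resp.json()
print(token["id"], token["generated_at"])
# Assumption: the JWT is then passed as a Bearer token on later API calls.
auth_header = {"Authorization": f"Bearer {token['jwt_token']}"}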
geovisio/web/upload_set.py
CHANGED
@@ -1,6 +1,5 @@
 from copy import deepcopy
 from dataclasses import dataclass
-
 import PIL
 from geovisio.utils import auth, model_query
 from psycopg.rows import class_row, dict_row
@@ -9,7 +8,7 @@ from flask import current_app, request, Blueprint, url_for
 from flask_babel import gettext as _, get_locale
 from geopic_tag_reader import sequence as geopic_sequence
 from geovisio.web.utils import accountIdOrDefault
-from
+from geovisio.utils.fields import parse_relative_heading
 from geovisio.web.params import (
     as_latitude,
     as_longitude,
@@ -20,6 +19,8 @@ from geovisio.utils import db
 from geovisio import utils
 from geopic_tag_reader.writer import writePictureMetadata, PictureMetadata
 from geovisio.utils.params import validation_error
+from geovisio.utils.semantics import SemanticTagUpdate
+from geovisio.utils import semantics
 from geovisio import errors
 from pydantic import BaseModel, ConfigDict, ValidationError, Field, field_validator, model_validator
 from uuid import UUID
@@ -37,7 +38,7 @@ from geovisio.utils.upload_set import (
 import os
 import hashlib
 import sentry_sdk
-from typing import Optional, Any, Dict
+from typing import Optional, Any, Dict, List
 
 
 bp = Blueprint("upload_set", __name__, url_prefix="/api")
@@ -52,67 +53,177 @@ class UploadSetCreationParameter(BaseModel):
     """Estimated number of items that will be sent to the UploadSet"""
     sort_method: Optional[geopic_sequence.SortMethod] = None
     """Strategy used for sorting your pictures. Either by filename or EXIF time, in ascending or descending order."""
+    no_split: Optional[bool] = None
+    """If True, all pictures of this upload set will be grouped in the same sequence. Is incompatible with split_distance / split_time."""
     split_distance: Optional[int] = None
-    """Maximum distance between two pictures to be considered in the same sequence (in meters)."""
+    """Maximum distance between two pictures to be considered in the same sequence (in meters). If not set, the instance default will be used. The instance defaults can be see in /api/configuration."""
     split_time: Optional[timedelta] = None
-    """Maximum time interval between two pictures to be considered in the same sequence.
+    """Maximum time interval between two pictures to be considered in the same sequence.
+    If not set, the instance default will be used. The instance defaults can be see in /api/configuration."""
+    no_deduplication: Optional[bool] = None
+    """If True, no duplication will be done. Is incompatible with duplicate_distance / duplicate_rotation."""
     duplicate_distance: Optional[float] = None
-    """Maximum distance between two pictures to be considered as duplicates (in meters).
+    """Maximum distance between two pictures to be considered as duplicates (in meters).
+    If not set, the instance default will be used. The instance defaults can be see in /api/configuration."""
     duplicate_rotation: Optional[int] = None
-    """Maximum angle of rotation for two too-close-pictures to be considered as duplicates (in degrees).
+    """Maximum angle of rotation for two too-close-pictures to be considered as duplicates (in degrees).
+    If not set, the instance default will be used. The instance defaults can be see in /api/configuration."""
     metadata: Optional[Dict[str, Any]] = None
     """Optional metadata associated to the upload set. Can contain any key-value pair."""
     user_agent: Optional[str] = None
     """Software used by client to create this upload set, in HTTP Header User-Agent format"""
+    semantics: Optional[List[SemanticTagUpdate]] = None
+    """Semantic tags associated to the upload_set. Those tags will be added to all sequences linked to this upload set"""
+    relative_heading: Optional[int] = None
+    """The relative heading (in degrees), offset based on movement path (0° = looking forward, -90° = looking left, 90° = looking right). For single picture upload_sets, 0° is heading north). Headings are unchanged if this parameter is not set."""
 
     model_config = ConfigDict(use_attribute_docstrings=True)
 
+    def validate(self):
+        if self.no_split is True and (self.split_distance is not None or self.split_time is not None):
+            raise errors.InvalidAPIUsage("The `no_split` parameter is incompatible with specifying `split_distance` / `split_duration`")
+        if self.no_deduplication is True and (self.duplicate_distance is not None or self.duplicate_rotation is not None):
+            raise errors.InvalidAPIUsage(
+                "The `no_deduplication` parameter is incompatible with specifying `duplicate_distance` / `duplicate_rotation`"
+            )
+
+    @field_validator("relative_heading", mode="before")
+    @classmethod
+    def parse_relative_heading(cls, value):
+        return parse_relative_heading(value)
+
 
 class UploadSetUpdateParameter(BaseModel):
     """Parameters used to update an UploadSet"""
 
     sort_method: Optional[geopic_sequence.SortMethod] = None
     """Strategy used for sorting your pictures. Either by filename or EXIF time, in ascending or descending order."""
+    no_split: Optional[bool] = None
+    """If True, all pictures of this upload set will be grouped in the same sequence. Is incompatible with split_distance / split_time."""
     split_distance: Optional[int] = None
     """Maximum distance between two pictures to be considered in the same sequence (in meters)."""
     split_time: Optional[timedelta] = None
     """Maximum time interval between two pictures to be considered in the same sequence."""
+    no_deduplication: Optional[bool] = None
+    """If True, no deduplication will be done. Is incompatible with duplicate_distance / duplicate_rotation
+
+    Note that if the upload_set has already been dispatched, the deduplication has already been done so it cannot be deactivated.
+    """
     duplicate_distance: Optional[float] = None
     """Maximum distance between two pictures to be considered as duplicates (in meters)."""
     duplicate_rotation: Optional[int] = None
     """Maximum angle of rotation for two too-close-pictures to be considered as duplicates (in degrees)."""
+    semantics: Optional[List[SemanticTagUpdate]] = None
+    """Semantic tags associated to the upload_set. Those tags will be added to all sequences linked to this upload set.
+    By default each tag will be added to the upload set's tags, but you can change this behavior by setting the `action` parameter to `delete`.
+
+    If you want to replace a tag, you need to first delete it, then add it again.
+
+    Like:
+    [
+        {"key": "some_key", "value": "some_value", "action": "delete"},
+        {"key": "some_key", "value": "some_new_value"}
+    ]
+
+    Note: for the moment it's not possible to update the semantics of an upload set after it has been dispatched.
+    If that is something needed, feel free to open an issue.
+    """
+    relative_heading: Optional[int] = None
+    """The relative heading (in degrees), offset based on movement path (0° = looking forward, -90° = looking left, 90° = looking right). For single picture upload_sets, 0° is heading north). Headings are unchanged if this parameter is not set."""
 
     model_config = ConfigDict(use_attribute_docstrings=True, extra="forbid")
 
+    def validate(self):
+        if self.no_split is True and (self.split_distance is not None or self.split_time is not None):
+            raise errors.InvalidAPIUsage("The `no_split` parameter is incompatible with specifying `split_distance` / `split_duration`")
+        if self.no_deduplication is True and (self.duplicate_distance is not None or self.duplicate_rotation is not None):
+            raise errors.InvalidAPIUsage(
+                "The `no_deduplication` parameter is incompatible with specifying `duplicate_distance` / `duplicate_rotation`"
+            )
+
+    def has_only_semantics_updates(self):
+        return self.model_fields_set == {"semantics"}
+
+    @field_validator("relative_heading", mode="before")
+    @classmethod
+    def parse_relative_heading(cls, value):
+        return parse_relative_heading(value)
+
 
 def create_upload_set(params: UploadSetCreationParameter, accountId: UUID) -> UploadSet:
+    sem = params.semantics
+    params.semantics = None
     db_params = model_query.get_db_params_and_values(params, account_id=accountId)
+    with db.conn(current_app) as conn, conn.transaction():
+
+        with conn.cursor(row_factory=class_row(UploadSet)) as cursor:
+            db_upload_set = cursor.execute(
+                SQL("INSERT INTO upload_sets({fields}) VALUES({values}) RETURNING *").format(
+                    fields=db_params.fields(), values=db_params.placeholders()
+                ),
+                db_params.params_as_dict,
+            ).fetchone()
+
+            if db_upload_set is None:
+                raise Exception("Impossible to insert upload_set in database")
+
+        if sem:
+            with conn.cursor() as cursor:
+                semantics.update_tags(
+                    cursor=cursor,
+                    entity=semantics.Entity(semantics.EntityType.upload_set, db_upload_set.id),
+                    actions=sem,
+                    account=accountId,
+                )
 
-    db_upload_set
-        current_app,
-        SQL("INSERT INTO upload_sets({fields}) VALUES({values}) RETURNING *").format(
-            fields=db_params.fields(), values=db_params.placeholders()
-        ),
-        db_params.params_as_dict,
-        row_factory=class_row(UploadSet),
-    )
+        return db_upload_set
 
-    if db_upload_set is None:
-        raise Exception("Impossible to insert upload_set in database")
 
-
+def update_upload_set(upload_set_id: UUID, params: UploadSetUpdateParameter, account) -> UploadSet:
+    """Update an upload set
+    Since the semantic tags are handled in a separate table, split the update in 2, the semantic update, and the upload_sets table update"""
+    with db.conn(current_app) as conn, conn.transaction():
+        if params.semantics:
+            # update the semantics if needed, and remove the semantic from the params for the other fields update
+            sem = params.semantics
+            params.semantics = None
 
+            with conn.cursor() as cursor:
+                semantics.update_tags(
+                    cursor=cursor,
+                    entity=semantics.Entity(semantics.EntityType.upload_set, upload_set_id),
+                    actions=sem,
+                    account=account.id if account is not None else None,
+                )
+
+                us_dispatched = cursor.execute(
+                    SQL("SELECT dispatched FROM upload_sets WHERE id = %(upload_set_id)s"),
+                    {"upload_set_id": upload_set_id},
+                ).fetchone()
+
+                if us_dispatched[0] is True:
+                    # if the upload set is already dispatched, we propagate the semantic update to all the associated collections
+                    # Note that there is a lock on the `upload_sets` row to avoid updating the semantics while dispatching the upload set
+                    associated_cols = conn.execute("SELECT id FROM sequences WHERE upload_set_id = %s", [upload_set_id]).fetchall()
+                    for c in associated_cols:
+                        col_id = c[0]
+                        semantics.update_tags(
+                            cursor=cursor,
+                            entity=semantics.Entity(semantics.EntityType.seq, col_id),
+                            actions=sem,
+                            account=account.id if account is not None else None,
+                        )
 
-
-
+        if params.model_fields_set != {"semantics"}:
+            # if there was other fields to update
+            db_params = model_query.get_db_params_and_values(params)
 
-
-
-
-
-
-
-    return get_upload_set(upload_set_id)
+            conn.execute(
+                SQL("UPDATE upload_sets SET {fields} WHERE id = %(upload_set_id)s").format(fields=db_params.fields_for_set()),
+                db_params.params_as_dict | {"upload_set_id": upload_set_id},
+            )
+    # we get a full uploadset response
+    return get_upload_set(upload_set_id)
 
 
 @bp.route("/upload_sets", methods=["POST"])
@@ -132,7 +243,7 @@ def postUploadSet(account=None):
         required: false
         schema:
             type: string
-        description: An explicit User-Agent value is
+        description: An explicit User-Agent value is preferred if you create a production-ready tool, formatted like "GeoVisioCLI/1.0"
     requestBody:
         content:
             application/json:
@@ -158,6 +269,7 @@ def postUploadSet(account=None):
     else:
         raise errors.InvalidAPIUsage(_("Parameter for creating an UploadSet should be a valid JSON"), status_code=415)
 
+    params.validate()
    account_id = UUID(accountIdOrDefault(account))
 
     upload_set = create_upload_set(params, account_id)
@@ -178,7 +290,9 @@ def postUploadSet(account=None):
 def patchUploadSet(upload_set_id, account=None):
     """Update an existing UploadSet.
 
-
+    For most fields, only the owner of the UploadSet can update it. The only exception is the `semantics` field, which can be updated by any user.
+
+    Note that the upload set will not be dispatched again, so if you changed the dispatch parameters (like split_distance, split_time, duplicate_distance, duplicate_rotation, relative_heading, ...), you need to call the `POST /api/upload_sets/:id/complete` endpoint to dispatch the upload set afterward.
     ---
     tags:
         - Upload
@@ -215,18 +329,20 @@ def patchUploadSet(upload_set_id, account=None):
     else:
         raise errors.InvalidAPIUsage(_("Parameter for updating an UploadSet should be a valid JSON"), status_code=415)
 
+    params.validate()
     upload_set = get_simple_upload_set(upload_set_id)
     if upload_set is None:
         raise errors.InvalidAPIUsage(_("UploadSet doesn't exist"), status_code=404)
 
     if account and str(upload_set.account_id) != account.id:
-
+        if not params.has_only_semantics_updates():
+            raise errors.InvalidAPIUsage(_("You are not allowed to update this upload set"), status_code=403)
 
     if not params.model_fields_set:
         # nothing to update, return the upload set
         upload_set = get_upload_set(upload_set_id)
     else:
-        upload_set = update_upload_set(upload_set_id, params)
+        upload_set = update_upload_set(upload_set_id, params, account)
 
     return upload_set.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
 
@@ -515,7 +631,7 @@ def mark_upload_set_completed_if_needed(cursor, upload_set_id: UUID) -> bool:
        """WITH nb_items AS (
            SELECT count(*) AS nb, upload_set_id
            FROM files f
-           WHERE upload_set_id = %(id)s
+           WHERE upload_set_id = %(id)s
            GROUP BY upload_set_id
        )
        UPDATE upload_sets
@@ -846,6 +962,8 @@ def deleteUploadSet(upload_set_id: UUID, account=None):
 
     upload_set = get_upload_set(upload_set_id)
 
+    if not upload_set:
+        raise errors.InvalidAPIUsage(_("UploadSet %(u)s does not exist", u=upload_set_id), status_code=404)
     # Account associated to uploadset doesn't match current user
     if account is not None and account.id != str(upload_set.account_id):
         raise errors.InvalidAPIUsage(_("You're not authorized to delete this upload set"), status_code=403)
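The UploadSetUpdateParameter docstring above describes a delete-then-add pattern for replacing a semantic tag. A hedged client example of that pattern follows; the PATCH route /api/upload_sets/{id}, the base URL, the upload set id and the Bearer token are placeholders or assumptions not shown in this diff, while the JSON payload shape comes straight from the docstring.

# Hedged example: replace one semantic tag on an upload set (delete old value, add new one).
import requests

BASE_URL = "https://panoramax.example.com"                 # placeholder instance URL
UPLOAD_SET_ID = "00000000-0000-0000-0000-000000000000"     # placeholder upload set UUID
HEADERS = {"Authorization": "Bearer <jwt_token>"}          # placeholder token

payload = {
    "semantics": [
        {"key": "some_key", "value": "some_value", "action": "delete"},
        {"key": "some_key", "value": "some_new_value"},
    ]
}
# Assumed route; the handler returns the updated UploadSet as JSON.
resp = requests.patch(
    f"{BASE_URL}/api/upload_sets/{UPLOAD_SET_ID}",
    json=payload,
    headers=HEADERS,
    timeout=30,
)
resp.raise_for_status()
print(resp.status_code)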
geovisio/web/users.py
CHANGED
@@ -91,13 +91,13 @@ def _get_user_info(account: auth.Account):
 @bp.route("/me")
 @auth.login_required_with_redirect()
 def getMyUserInfo(account):
-    """Get current logged user
+    """Get current logged user information
     ---
     tags:
         - Users
     responses:
         200:
-            description: Information about the logged account
+            description: Information about the logged in account
             content:
                 application/json:
                     schema:
@@ -108,7 +108,7 @@ def getMyUserInfo(account):
 
 @bp.route("/<uuid:userId>")
 def getUserInfo(userId):
-    """Get user
+    """Get user information
     ---
     tags:
         - Users
@@ -152,7 +152,7 @@ def getMyCatalog(account):
     deprecated: true
     responses:
         200:
-            description: the Catalog listing all sequences associated to given user. Note that it's similar to the user's
+            description: the Catalog listing all sequences associated to given user. Note that it's similar to the user's collection, but with less metadata since a STAC collection is an enhanced STAC catalog.
     content:
                 application/json:
                     schema:
geovisio/web/utils.py
CHANGED
@@ -14,12 +14,12 @@ STAC_VERSION = "1.0.0"
 
 
 def removeNoneInDict(val):
-    """Removes empty values from
+    """Removes empty values from dictionary"""
     return {k: v for k, v in val.items() if v is not None}
 
 
 def cleanNoneInDict(val):
-    """Removes empty values from
+    """Removes empty values from dictionary, and return None if dict is empty"""
     res = removeNoneInDict(val)
     return res if len(res) > 0 else None
 
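For illustration, the behaviour of these two helpers restated outside the package (a re-statement of the code in the diff above, not an excerpt from the package's tests):

# Standalone illustration of removeNoneInDict / cleanNoneInDict behaviour.
def removeNoneInDict(val):
    """Removes empty values from dictionary"""
    return {k: v for k, v in val.items() if v is not None}

def cleanNoneInDict(val):
    """Removes empty values from dictionary, and return None if dict is empty"""
    res = removeNoneInDict(val)
    return res if len(res) > 0 else None

print(removeNoneInDict({"a": 1, "b": None}))  # {'a': 1}
print(cleanNoneInDict({"b": None}))           # None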