geovisio 2.6.0-py3-none-any.whl → 2.7.1-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +36 -7
- geovisio/admin_cli/cleanup.py +2 -2
- geovisio/admin_cli/db.py +1 -4
- geovisio/config_app.py +40 -1
- geovisio/db_migrations.py +24 -3
- geovisio/templates/main.html +13 -13
- geovisio/templates/viewer.html +3 -3
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +804 -0
- geovisio/translations/el/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/el/LC_MESSAGES/messages.po +685 -0
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +738 -0
- geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/es/LC_MESSAGES/messages.po +778 -0
- geovisio/translations/fi/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fi/LC_MESSAGES/messages.po +589 -0
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +814 -0
- geovisio/translations/hu/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/hu/LC_MESSAGES/messages.po +773 -0
- geovisio/translations/ko/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ko/LC_MESSAGES/messages.po +685 -0
- geovisio/translations/messages.pot +694 -0
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +602 -0
- geovisio/utils/__init__.py +1 -1
- geovisio/utils/auth.py +50 -11
- geovisio/utils/db.py +65 -0
- geovisio/utils/excluded_areas.py +83 -0
- geovisio/utils/extent.py +30 -0
- geovisio/utils/fields.py +1 -1
- geovisio/utils/filesystems.py +0 -1
- geovisio/utils/link.py +14 -0
- geovisio/utils/params.py +20 -0
- geovisio/utils/pictures.py +110 -88
- geovisio/utils/reports.py +171 -0
- geovisio/utils/sequences.py +262 -126
- geovisio/utils/tokens.py +37 -42
- geovisio/utils/upload_set.py +642 -0
- geovisio/web/auth.py +37 -37
- geovisio/web/collections.py +304 -304
- geovisio/web/configuration.py +14 -0
- geovisio/web/docs.py +276 -15
- geovisio/web/excluded_areas.py +377 -0
- geovisio/web/items.py +169 -112
- geovisio/web/map.py +104 -36
- geovisio/web/params.py +69 -26
- geovisio/web/pictures.py +14 -31
- geovisio/web/reports.py +399 -0
- geovisio/web/rss.py +13 -7
- geovisio/web/stac.py +129 -134
- geovisio/web/tokens.py +98 -109
- geovisio/web/upload_set.py +771 -0
- geovisio/web/users.py +100 -73
- geovisio/web/utils.py +28 -9
- geovisio/workers/runner_pictures.py +241 -207
- {geovisio-2.6.0.dist-info → geovisio-2.7.1.dist-info}/METADATA +17 -14
- geovisio-2.7.1.dist-info/RECORD +70 -0
- {geovisio-2.6.0.dist-info → geovisio-2.7.1.dist-info}/WHEEL +1 -1
- geovisio-2.6.0.dist-info/RECORD +0 -41
- {geovisio-2.6.0.dist-info → geovisio-2.7.1.dist-info}/LICENSE +0 -0

geovisio/web/upload_set.py (new file, +771 lines)

@@ -0,0 +1,771 @@
+from copy import deepcopy
+from dataclasses import dataclass
+
+import PIL
+from geovisio.utils import auth
+from psycopg.rows import class_row, dict_row
+from psycopg.sql import SQL
+from flask import current_app, request, Blueprint, url_for
+from flask_babel import gettext as _, get_locale
+from geopic_tag_reader import sequence as geopic_sequence
+from geovisio.web.utils import accountIdOrDefault
+from psycopg.types.json import Jsonb
+from geovisio.web.params import (
+    as_latitude,
+    as_longitude,
+    parse_datetime,
+)
+import logging
+from geovisio.utils import db
+from geovisio import utils
+from geopic_tag_reader.writer import writePictureMetadata, PictureMetadata
+from geovisio.utils.params import validation_error
+from geovisio import errors
+from pydantic import BaseModel, ConfigDict, ValidationError, Field, field_validator, model_validator
+from uuid import UUID
+from werkzeug.datastructures import FileStorage
+from datetime import timedelta, datetime
+from geovisio.utils.upload_set import (
+    FileRejectionStatus,
+    FileType,
+    UploadSet,
+    get_simple_upload_set,
+    get_upload_set,
+    get_upload_set_files,
+    list_upload_sets,
+)
+import os
+import hashlib
+import sentry_sdk
+from typing import Optional, Any, Dict
+
+
+bp = Blueprint("upload_set", __name__, url_prefix="/api")
+
+
+class UploadSetCreationParameter(BaseModel):
+    """Parameters used to create an UploadSet"""
+
+    title: str
+    """Title of the upload. The title will be used to generate a name for the collections"""
+    estimated_nb_files: Optional[int] = None
+    """Estimated number of items that will be sent to the UploadSet"""
+    sort_method: Optional[geopic_sequence.SortMethod] = None
+    """Strategy used for sorting your pictures. Either by filename or EXIF time, in ascending or descending order."""
+    split_distance: Optional[int] = None
+    """Maximum distance between two pictures to be considered in the same sequence (in meters)."""
+    split_time: Optional[timedelta] = None
+    """Maximum time interval between two pictures to be considered in the same sequence."""
+    duplicate_distance: Optional[float] = None
+    """Maximum distance between two pictures to be considered as duplicates (in meters)."""
+    duplicate_rotation: Optional[int] = None
+    """Maximum angle of rotation for two too-close-pictures to be considered as duplicates (in degrees)."""
+    metadata: Optional[Dict[str, Any]] = None
+    """Optional metadata associated to the upload set. Can contain any key-value pair."""
+    user_agent: Optional[str] = None
+    """Software used by client to create this upload set, in HTTP Header User-Agent format"""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+
+
+def create_upload_set(params: UploadSetCreationParameter, accountId: UUID) -> UploadSet:
+    params_as_dict = params.model_dump(exclude_none=True) | {"account_id": accountId}
+
+    fields = [SQL(f) for f in params_as_dict.keys()]  # type: ignore (we can ignore psycopg types there as we control those keys since they are the attributes of UploadSetCreationParameter)
+    values = [SQL(f"%({f})s") for f in params_as_dict.keys()]  # type: ignore
+    for k, v in params_as_dict.items():
+        if isinstance(v, Dict):
+            params_as_dict[k] = Jsonb(v)  # convert dict to jsonb in database
+
+    db_upload_set = db.fetchone(
+        current_app,
+        SQL("INSERT INTO upload_sets({fields}) VALUES({values}) RETURNING *").format(
+            fields=SQL(", ").join(fields), values=SQL(", ").join(values)
+        ),
+        params_as_dict,
+        row_factory=class_row(UploadSet),
+    )
+
+    if db_upload_set is None:
+        raise Exception("Impossible to insert sequence in database")
+
+    return db_upload_set
+
+
+@bp.route("/upload_sets", methods=["POST"])
+@auth.login_required_by_setting("API_FORCE_AUTH_ON_UPLOAD")
+def postUploadSet(account=None):
+    """Create a new UploadSet
+
+    The UploadSet are used to group pictures during an upload.
+    The pictures will be dispatch to several collections when the UploadSet will be completed
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - in: header
+          name: User-Agent
+          required: false
+          schema:
+            type: string
+          description: An explicit User-Agent value is prefered if you create a production-ready tool, formatted like "GeoVisioCLI/1.0"
+    requestBody:
+        content:
+            application/json:
+                schema:
+                    $ref: '#/components/schemas/GeoVisioPostUploadSet'
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        200:
+            description: the UploadSet metadata
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSet'
+    """
+
+    if request.is_json and request.json is not None:
+        try:
+            params = UploadSetCreationParameter(user_agent=request.user_agent.string, **request.json)
+        except ValidationError as ve:
+            raise errors.InvalidAPIUsage(_("Impossible to create an UploadSet"), payload=validation_error(ve))
+    else:
+        raise errors.InvalidAPIUsage(_("Parameter for creating an UploadSet should be a valid JSON"), status_code=415)
+
+    account_id = UUID(accountIdOrDefault(account))
+
+    upload_set = create_upload_set(params, account_id)
+
+    return (
+        upload_set.model_dump_json(exclude_none=True),
+        200,
+        {
+            "Content-Type": "application/json",
+            "Access-Control-Expose-Headers": "Location",  # Needed for allowing web browsers access Location header
+            "Location": url_for("upload_set.getUploadSet", _external=True, upload_set_id=upload_set.id),
+        },
+    )
+
+
+@bp.route("/upload_sets/<uuid:upload_set_id>", methods=["GET"])
+def getUploadSet(upload_set_id):
+    """Get an existing UploadSet
+
+    The UploadSet are used to group pictures during an upload.
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - name: upload_set_id
+          in: path
+          description: ID of the UploadSet to retrieve
+          required: true
+          schema:
+            type: string
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        200:
+            description: the UploadSet metadata
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSet'
+    """
+    upload_set = get_upload_set(upload_set_id)
+    if upload_set is None:
+        raise errors.InvalidAPIUsage(_("UploadSet doesn't exist"), status_code=404)
+
+    return upload_set.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
+
+
+@bp.route("/upload_sets/<uuid:upload_set_id>/files", methods=["GET"])
+@auth.login_required_by_setting("API_FORCE_AUTH_ON_UPLOAD")
+def getUploadSetFiles(upload_set_id, account=None):
+    """List the files of an UploadSet
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - name: upload_set_id
+          in: path
+          description: ID of the UploadSet
+          required: true
+          schema:
+            type: string
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        200:
+            description: the UploadSet files list
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSetFiles'
+    """
+    u = get_simple_upload_set(upload_set_id)
+    if u is None:
+        raise errors.InvalidAPIUsage(_("UploadSet doesn't exist"), status_code=404)
+    if account is not None and account.id != str(u.account_id):
+        raise errors.InvalidAPIUsage(_("You're not authorized to list pictures in this upload set"), status_code=403)
+
+    upload_set_files = get_upload_set_files(upload_set_id)
+    return upload_set_files.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
+
+
+class ListUploadSetParameter(BaseModel):
+    """Parameters used to list a user's UploadSet"""
+
+    account_id: UUID
+    limit: int = Field(default=100, ge=0, le=1000)
+    filter: Optional[str] = "dispatched = FALSE"
+    """Filter to apply to the list of UploadSet. The filter should be a valid SQL WHERE clause"""
+
+
+@bp.route("/users/me/upload_sets", methods=["GET"])
+@auth.login_required_with_redirect()
+def listUserUpload(account):
+    """List the upload of a user
+
+    The UploadSet are used to group pictures during an upload.
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - $ref: '#/components/parameters/UploadSetFilter'
+        - name: limit
+          in: query
+          description: limit to the number of upload set to retrieve
+          required: true
+          schema:
+            type: integer
+            minimum: 1
+            maximum: 100
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        200:
+            description: the UploadSet metadata
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSets'
+    """
+    try:
+        params = ListUploadSetParameter(account_id=UUID(account.id), **request.args)
+    except ValidationError as ve:
+        raise errors.InvalidAPIUsage(_("Impossible to parse parameters"), payload=validation_error(ve))
+
+    upload_sets = list_upload_sets(account_id=params.account_id, limit=params.limit, filter=params.filter)
+
+    return upload_sets.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
+
+
+# Note: class used to generate documentation
+class AddFileToUploadSetParameter(BaseModel):
+    """Parameters used to add an item to an UploadSet"""
+
+    override_capture_time: Optional[datetime] = None
+    """Override the capture time of the picture. The new capture time will also be persisted in the picture's exif tags"""
+    override_longitude: Optional[float] = None
+    """Override the longitude of the picture. The new longitude will also be persisted in the picture's exif tags"""
+    override_latitude: Optional[float] = None
+    """Override the latitude of the picture. The new latitude will also be persisted in the picture's exif tags"""
+
+    extra_exif: Optional[Dict[str, str]] = None
+    """Extra Exif metadata can be added to the picture. They need to be named `override_` and have the full exiv2 path of the tag.
+    For example, to override the `Exif.Image.Orientation` tag, you should use `override_Exif.Image.Orientation` as the key"""
+
+    """External metadata to add to the picture"""
+    isBlurred: bool = False
+    """True if the picture is already blurred, False otherwise"""
+
+    file: bytes
+    """File to upload"""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+
+    @field_validator("override_capture_time", mode="before")
+    @classmethod
+    def parse_capture_time(cls, value):
+        if value is None:
+            return None
+        return parse_datetime(
+            value,
+            error=_(
+                "Parameter `override_capture_time` is not a valid datetime, it should be an iso formated datetime (like '2017-07-21T17:32:28Z')."
+            ),
+        )
+
+    @field_validator("override_longitude")
+    @classmethod
+    def parse_longitude(cls, value):
+        return as_longitude(value, error=_("For parameter `override_longitude`, `%(v)s` is not a valid longitude", v=value))
+
+    @field_validator("override_latitude")
+    @classmethod
+    def parse_latitude(cls, value):
+        return as_latitude(value, error=_("For parameter `override_latitude`, `%(v)s` is not a valid latitude", v=value))
+
+    @model_validator(mode="before")
+    @classmethod
+    def parse_extra_exif(cls, values: Dict) -> Dict:
+        # Check if others override elements were given
+        exif = {}
+        override_exif = [k for k in values.keys() if (k.startswith("override_Exif.") or k.startswith("override_Xmp."))]
+        for k in override_exif:
+            v = values.pop(k)
+            exif_tag = k.replace("override_", "")
+            exif[exif_tag] = v
+
+        values["extra_exif"] = exif
+
+        return values
+
+    @model_validator(mode="after")
+    def validate(self):
+        if self.override_latitude is None and self.override_longitude is not None:
+            raise errors.InvalidAPIUsage(_("Longitude cannot be overridden alone, override_latitude also needs to be set"))
+        if self.override_longitude is None and self.override_latitude is not None:
+            raise errors.InvalidAPIUsage(_("Latitude cannot be overridden alone, override_longitude also needs to be set"))
+        return self
+
+
+# Note: class used to store parameters
+@dataclass
+class AddFileToUploadSetParsedParameter:
+    file: FileStorage
+    ext_mtd: Optional[PictureMetadata] = None
+    isBlurred: bool = False
+
+    file_type: FileType = Field(exclude=True)
+
+
+class TrackedFileException(errors.InvalidAPIUsage):
+    def __init__(
+        self,
+        message: str,
+        rejection_status: FileRejectionStatus,
+        payload=None,
+        status_code: int = 400,
+        file: Optional[Dict[str, Any]] = None,
+    ):
+        super().__init__(message=message, status_code=status_code, payload=payload)
+        self.rejection_status = rejection_status
+        self.file = file
+
+
+def _read_add_items_params(form, files) -> AddFileToUploadSetParsedParameter:
+
+    if "file" not in files:
+        # Note: we do not want to track this as it is a bad use of the API
+        raise errors.InvalidAPIUsage(_("No file was sent"), status_code=400)
+    # Note: for the moment we only accept `picture` in files, but later we might accept more kind of files (like gpx traces, video, ...) and autodetect them here
+    file_type = FileType.picture
+
+    file = files["file"]
+    if not (file.filename and "." in file.filename and file.filename.rsplit(".", 1)[1].lower() in ["jpg", "jpeg"]):
+        raise TrackedFileException(
+            _("Picture file is either missing or in an unsupported format (should be jpg)"),
+            rejection_status=FileRejectionStatus.invalid_file,
+            file=dict(file_name=os.path.basename(file.filename), file_type=file_type),
+        )
+
+    try:
+        params = AddFileToUploadSetParameter(file=b"", **form)
+    except ValidationError as ve:
+        raise errors.InvalidAPIUsage(_("Impossible to parse parameters"), payload=validation_error(ve))
+
+    # Check if datetime was given
+    if (
+        params.override_capture_time is not None
+        or params.override_latitude is not None
+        or params.override_longitude is not None
+        or params.extra_exif
+    ):
+        ext_mtd = PictureMetadata(
+            capture_time=params.override_capture_time,
+            latitude=params.override_latitude,
+            longitude=params.override_longitude,
+            additional_exif=params.extra_exif,
+        )
+    else:
+        ext_mtd = None
+
+    return AddFileToUploadSetParsedParameter(ext_mtd=ext_mtd, isBlurred=params.isBlurred, file=file, file_type=file_type)
+
+
+def un_complete_upload_set(cursor, upload_set_id: UUID):
+    """Marks the upload set as uncompleted"""
+    cursor.execute(
+        "UPDATE upload_sets SET completed = FALSE WHERE id = %(id)s",
+        {"id": upload_set_id},
+    )
+
+
+def mark_upload_set_completed_if_needed(cursor, upload_set_id: UUID) -> bool:
+    """
+    Marks the upload set as completed if the number of pictures in the upload set
+    is greater than or equal to the estimated number of files.
+
+    Args:
+        cursor: The database cursor object.
+        upload_set_id: The ID of the upload set.
+
+    Returns:
+        bool: True if the upload set is marked as completed, False otherwise.
+    """
+    r = cursor.execute(
+        """WITH nb_items AS (
+            SELECT count(*) AS nb, upload_set_id
+            FROM files f
+            WHERE upload_set_id = %(id)s
+            GROUP BY upload_set_id
+        )
+        UPDATE upload_sets
+        SET completed = (nb_items.nb = estimated_nb_files)
+        FROM nb_items
+        WHERE id = %(id)s AND estimated_nb_files IS NOT NULL
+        RETURNING completed;""",
+        {"id": upload_set_id},
+    ).fetchone()
+
+    return r is not None and r["completed"]
+
+
+def handle_completion(cursor, upload_set):
+    """
+    At the end of an upload, we need to check if the upload needs to be completed or not
+    * If is not yet completed, we check if we received the expected number of files
+    * If is already completed, we mark it as uncompleted as we don't know if the client will send more pictures
+    """
+    if not upload_set["completed"]:
+        mark_upload_set_completed_if_needed(cursor, upload_set["id"])
+    else:
+        # if the upload set is already completed and some pictures were added, we need to mark it as uncompleted as we don't know if the client will send more pictures
+        un_complete_upload_set(cursor, upload_set["id"])
+
+
+@bp.route("/upload_sets/<uuid:upload_set_id>/files", methods=["POST"])
+@auth.login_required_by_setting("API_FORCE_AUTH_ON_UPLOAD")
+def addFilesToUploadSet(upload_set_id: UUID, account=None):
+    """Add files to an UploadSet
+
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - name: upload_set_id
+          in: path
+          description: ID of the UploadSet
+          required: true
+          schema:
+            type: string
+    requestBody:
+        content:
+            multipart/form-data:
+                schema:
+                    $ref: '#/components/schemas/GeoVisioAddToUploadSet'
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        202:
+            description: The UploadSet metadata
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSetFile'
+        400:
+            description: Error if the request is malformed
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+        401:
+            description: Error if you're not logged in
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+        403:
+            description: Error if you're not authorized to add picture to this upload set
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+        404:
+            description: Error if the UploadSet doesn't exist
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+        409:
+            description: Error if the item has already been added to this upload set or to another upload set
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+        415:
+            description: Error if the content type is not multipart/form-data
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioError'
+    """
+
+    if not request.headers.get("Content-Type", "").startswith("multipart/form-data") or request.form is None:
+        raise errors.InvalidAPIUsage(_("Content type should be multipart/form-data"), status_code=415)
+
+    with db.conn(current_app) as conn:
+        try:
+            with conn.transaction(), conn.cursor(row_factory=dict_row) as cursor:
+                upload_set = cursor.execute("SELECT id, account_id, completed FROM upload_sets WHERE id = %s", [upload_set_id]).fetchone()
+                if not upload_set:
+                    raise errors.InvalidAPIUsage(_("UploadSet %(u)s does not exist", u=upload_set_id), status_code=404)
+
+                # Account associated to uploadset doesn't match current user
+                if account is not None and account.id != str(upload_set["account_id"]):
+                    raise errors.InvalidAPIUsage(_("You're not authorized to add picture to this upload set"), status_code=403)
+
+                # parse params
+                params = _read_add_items_params(request.form, request.files)
+
+                file: Dict[str, Any] = dict(
+                    file_name=os.path.basename(params.file.filename or ""),
+                    file_type=params.file_type,
+                )
+                # Compute various metadata
+                accountId = accountIdOrDefault(account)
+                raw_pic = params.file.read()
+                filesize = len(raw_pic)
+                file["size"] = filesize
+
+                with sentry_sdk.start_span(description="computing md5"):
+                    # we save the content hash md5 as uuid since md5 is 128bit and uuid are efficiently handled in postgres
+                    md5 = hashlib.md5(raw_pic).digest()
+                    md5 = UUID(bytes=md5)
+                    file["content_md5"] = md5
+
+                additionalMetadata = {
+                    "blurredByAuthor": params.isBlurred,
+                    "originalFileName": os.path.basename(params.file.filename),  # type: ignore
+                    "originalFileSize": filesize,
+                    "originalContentMd5": md5,
+                }
+
+                # check if items already exists
+                same_pics = cursor.execute(
+                    "SELECT id AS existing_item_id, upload_set_id FROM pictures WHERE original_content_md5 = %s", [md5]
+                ).fetchall()
+                if same_pics:
+                    same_pics_in_same_upload_set = next(
+                        (p["existing_item_id"] for p in same_pics if p["upload_set_id"] == upload_set_id), None
+                    )
+                    if same_pics_in_same_upload_set:
+                        # same picture sent twice in the same upload set is likely a client error, we don't keep track of it
+                        # it's especially important since for the moment we can't track 2 files with the same name in the same uploadset
+                        raise errors.InvalidAPIUsage(
+                            _("The item has already been added to this upload set"),
+                            status_code=409,
+                            payload={"existing_item": {"id": same_pics_in_same_upload_set}},
+                        )
+                    if current_app.config["API_ACCEPT_DUPLICATE"] is False:
+                        # If the picture has been sent in another upload set, we reject it and track it as file sent (to advance the counter to the completion)
+                        raise TrackedFileException(
+                            _("The same picture has already been sent in a past upload"),
+                            payload={"upload_sets": same_pics},
+                            rejection_status=FileRejectionStatus.file_duplicate,
+                            status_code=409,
+                            file=file,
+                        )
+
+                # Update picture metadata if needed
+                if params.ext_mtd:
+                    with sentry_sdk.start_span(description="overwriting metadata"):
+                        raw_pic = writePictureMetadata(raw_pic, params.ext_mtd)
+
+                # Insert picture into database
+                with sentry_sdk.start_span(description="Insert picture in db"):
+
+                    try:
+                        picId = utils.pictures.insertNewPictureInDatabase(
+                            db=conn,
+                            sequenceId=None,
+                            position=None,
+                            pictureBytes=raw_pic,
+                            associatedAccountID=accountId,
+                            additionalMetadata=additionalMetadata,
+                            uploadSetID=upload_set_id,
+                            lang=get_locale().language,
+                        )
+                    except utils.pictures.MetadataReadingError as e:
+                        raise TrackedFileException(
+                            _("Impossible to parse picture metadata"),
+                            payload={"details": {"error": e.details, "missing_fields": e.missing_mandatory_tags}},
+                            rejection_status=FileRejectionStatus.invalid_metadata,
+                            file=file,
+                        )
+                    except utils.pictures.InvalidMetadataValue as e:
+                        raise TrackedFileException(
+                            _("Picture has invalid metadata"),
+                            payload={"details": {"error": e.details}},
+                            rejection_status=FileRejectionStatus.invalid_metadata,
+                            file=file,
+                        )
+                    except PIL.UnidentifiedImageError as e:
+                        logging.warning("Impossible to open file as an image: " + str(e))
+                        raise TrackedFileException(
+                            _("Impossible to open file as image. The only supported image format is jpg."),
+                            rejection_status=FileRejectionStatus.invalid_file,
+                            file=file,
+                        )
+
+                # persist the file in the database
+                file = utils.upload_set.insertFileInDatabase(
+                    cursor=cursor,
+                    upload_set_id=upload_set_id,
+                    picture_id=picId,
+                    **file,
+                )
+                # Save file into appropriate filesystem
+                with sentry_sdk.start_span(description="Saving picture"):
+                    try:
+                        utils.pictures.saveRawPicture(picId, raw_pic, params.isBlurred)
+                    except:
+                        logging.exception("Picture wasn't correctly saved in filesystem")
+                        raise errors.InvalidAPIUsage(_("Picture wasn't correctly saved in filesystem"), status_code=500)
+
+                handle_completion(cursor, upload_set)
+        except TrackedFileException as e:
+            # something went wrong, we reject the file, but keep track of it
+            with conn.transaction(), conn.cursor(row_factory=dict_row) as cursor:
+                msg = e.message
+                d = None
+                if e.payload and e.payload.get("details", {}).get("error") is not None:
+                    d = deepcopy(e.payload["details"])
+                    msg = d.pop("error")
+
+                utils.upload_set.insertFileInDatabase(
+                    cursor=cursor,
+                    upload_set_id=upload_set_id,
+                    **e.file,
+                    rejection_status=e.rejection_status,
+                    rejection_message=msg,
+                    rejection_details=d,
+                )
+                handle_completion(cursor, upload_set)
+            raise e
+
+    # prepare the picture in the background
+    current_app.background_processor.process_pictures()  # type: ignore
+
+    # Return picture metadata
+    return (
+        file.model_dump_json(exclude_none=True),
+        202,
+        {
+            "Content-Type": "application/json",
+        },
+    )
+
+
+@bp.route("/upload_sets/<uuid:upload_set_id>/complete", methods=["POST"])
+@auth.login_required_by_setting("API_FORCE_AUTH_ON_UPLOAD")
+def completeUploadSet(upload_set_id: UUID, account=None):
+    """Complete an UploadSet
+
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - name: upload_set_id
+          in: path
+          description: ID of the UploadSet
+          required: true
+          schema:
+            type: string
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        200:
+            description: the UploadSet metadata
+            content:
+                application/json:
+                    schema:
+                        $ref: '#/components/schemas/GeoVisioUploadSet'
+    """
+
+    with db.conn(current_app) as conn:
+        with conn.cursor(row_factory=dict_row) as cursor:
+            upload_set = cursor.execute("SELECT account_id, completed FROM upload_sets WHERE id = %s", [upload_set_id]).fetchone()
+            if not upload_set:
+                raise errors.InvalidAPIUsage(_("UploadSet %(u)s does not exist", u=upload_set_id), status_code=404)
+
+            # Account associated to uploadset doesn't match current user
+            if account is not None and account.id != str(upload_set["account_id"]):
+                raise errors.InvalidAPIUsage(_("You're not authorized to complete this upload set"), status_code=403)
+
+            cursor.execute("UPDATE upload_sets SET completed = True WHERE id = %(id)s", {"id": upload_set_id})
+
+    # dispatch the upload_set in the background
+    current_app.background_processor.process_pictures()  # type: ignore
+
+    # query again the upload set, to get the updated status
+    upload_set = get_upload_set(upload_set_id)
+    if upload_set is None:
+        raise errors.InvalidAPIUsage(_("UploadSet doesn't exist"), status_code=404)
+
+    return upload_set.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
+
+
+@bp.route("/upload_sets/<uuid:upload_set_id>", methods=["DELETE"])
+@auth.login_required_by_setting("API_FORCE_AUTH_ON_UPLOAD")
+def deleteUploadSet(upload_set_id: UUID, account=None):
+    """Delete an UploadSet
+
+    Deleting an UploadSet will delete all the pictures of the UploadSet, and all the associated collections will be marked as deleted.
+
+    ---
+    tags:
+        - Upload
+        - UploadSet
+    parameters:
+        - name: upload_set_id
+          in: path
+          description: ID of the UploadSet
+          required: true
+          schema:
+            type: string
+    security:
+        - bearerToken: []
+        - cookieAuth: []
+    responses:
+        204:
+            description: The UploadSet has been correctly deleted
+    """
+
+    upload_set = get_upload_set(upload_set_id)
+
+    # Account associated to uploadset doesn't match current user
+    if account is not None and account.id != str(upload_set.account_id):
+        raise errors.InvalidAPIUsage(_("You're not authorized to delete this upload set"), status_code=403)
+
+    utils.upload_set.delete(upload_set)
+
+    # run background task to delete the associated pictures
+    current_app.background_processor.process_pictures()  # type: ignore
+
+    return "", 204
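
Taken together, the new `upload_set.py` blueprint (mounted under `/api`) defines a three-step upload flow: create an UploadSet, push files to it one by one, then mark it complete so the server can dispatch the pictures into collections. The snippet below is a minimal client-side sketch of that flow using the `requests` library; the instance URL, token and file names are placeholder assumptions, not values taken from this diff.

```python
import requests

API = "https://geovisio.example.com/api"           # hypothetical instance (assumption)
HEADERS = {"Authorization": "Bearer <my-token>"}   # matches the bearerToken security scheme

# 1. Create an UploadSet (POST /upload_sets). estimated_nb_files lets the server
#    auto-complete the set once that many files have been received.
r = requests.post(
    f"{API}/upload_sets",
    json={"title": "City center survey", "estimated_nb_files": 2},
    headers=HEADERS,
)
r.raise_for_status()
upload_set_id = r.json()["id"]

# 2. Send each picture (POST /upload_sets/{id}/files, multipart/form-data).
#    Optional form fields such as isBlurred, override_capture_time,
#    override_latitude/override_longitude or override_Exif.* / override_Xmp.*
#    tags can be sent alongside the file.
for name in ("pic_001.jpg", "pic_002.jpg"):
    with open(name, "rb") as f:
        resp = requests.post(
            f"{API}/upload_sets/{upload_set_id}/files",
            files={"file": (name, f, "image/jpeg")},
            data={"isBlurred": "false"},
            headers=HEADERS,
        )
    # 202 = accepted, 409 = duplicate picture, 400 = invalid file or metadata
    print(name, resp.status_code)

# 3. Close the UploadSet so the pictures get dispatched into collections.
requests.post(f"{API}/upload_sets/{upload_set_id}/complete", headers=HEADERS).raise_for_status()
```

If `estimated_nb_files` was provided and every announced file has arrived, `mark_upload_set_completed_if_needed` closes the set on the server side; the explicit `/complete` call remains the reliable way to close a set whose size was not announced up front.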