geovisio 2.9.0__py3-none-any.whl → 2.11.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82) hide show
  1. geovisio/__init__.py +8 -1
  2. geovisio/admin_cli/user.py +7 -2
  3. geovisio/config_app.py +26 -12
  4. geovisio/translations/ar/LC_MESSAGES/messages.mo +0 -0
  5. geovisio/translations/ar/LC_MESSAGES/messages.po +818 -0
  6. geovisio/translations/be/LC_MESSAGES/messages.mo +0 -0
  7. geovisio/translations/be/LC_MESSAGES/messages.po +886 -0
  8. geovisio/translations/br/LC_MESSAGES/messages.po +1 -1
  9. geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
  10. geovisio/translations/da/LC_MESSAGES/messages.po +96 -4
  11. geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
  12. geovisio/translations/de/LC_MESSAGES/messages.po +214 -122
  13. geovisio/translations/el/LC_MESSAGES/messages.po +1 -1
  14. geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
  15. geovisio/translations/en/LC_MESSAGES/messages.po +234 -157
  16. geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
  17. geovisio/translations/eo/LC_MESSAGES/messages.po +55 -5
  18. geovisio/translations/es/LC_MESSAGES/messages.po +1 -1
  19. geovisio/translations/fi/LC_MESSAGES/messages.po +1 -1
  20. geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
  21. geovisio/translations/fr/LC_MESSAGES/messages.po +92 -3
  22. geovisio/translations/hu/LC_MESSAGES/messages.po +1 -1
  23. geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
  24. geovisio/translations/it/LC_MESSAGES/messages.po +63 -3
  25. geovisio/translations/ja/LC_MESSAGES/messages.po +1 -1
  26. geovisio/translations/ko/LC_MESSAGES/messages.po +1 -1
  27. geovisio/translations/messages.pot +216 -139
  28. geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
  29. geovisio/translations/nl/LC_MESSAGES/messages.po +333 -62
  30. geovisio/translations/oc/LC_MESSAGES/messages.mo +0 -0
  31. geovisio/translations/oc/LC_MESSAGES/messages.po +821 -0
  32. geovisio/translations/pl/LC_MESSAGES/messages.po +1 -1
  33. geovisio/translations/pt/LC_MESSAGES/messages.mo +0 -0
  34. geovisio/translations/pt/LC_MESSAGES/messages.po +944 -0
  35. geovisio/translations/pt_BR/LC_MESSAGES/messages.mo +0 -0
  36. geovisio/translations/pt_BR/LC_MESSAGES/messages.po +942 -0
  37. geovisio/translations/sv/LC_MESSAGES/messages.mo +0 -0
  38. geovisio/translations/sv/LC_MESSAGES/messages.po +4 -3
  39. geovisio/translations/ti/LC_MESSAGES/messages.mo +0 -0
  40. geovisio/translations/ti/LC_MESSAGES/messages.po +762 -0
  41. geovisio/translations/tr/LC_MESSAGES/messages.mo +0 -0
  42. geovisio/translations/tr/LC_MESSAGES/messages.po +927 -0
  43. geovisio/translations/uk/LC_MESSAGES/messages.mo +0 -0
  44. geovisio/translations/uk/LC_MESSAGES/messages.po +920 -0
  45. geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +1 -1
  46. geovisio/utils/annotations.py +21 -21
  47. geovisio/utils/auth.py +47 -13
  48. geovisio/utils/cql2.py +22 -5
  49. geovisio/utils/fields.py +14 -2
  50. geovisio/utils/items.py +44 -0
  51. geovisio/utils/model_query.py +2 -2
  52. geovisio/utils/pic_shape.py +1 -1
  53. geovisio/utils/pictures.py +127 -36
  54. geovisio/utils/semantics.py +32 -3
  55. geovisio/utils/sentry.py +1 -1
  56. geovisio/utils/sequences.py +155 -109
  57. geovisio/utils/upload_set.py +303 -206
  58. geovisio/utils/users.py +18 -0
  59. geovisio/utils/website.py +1 -1
  60. geovisio/web/annotations.py +303 -69
  61. geovisio/web/auth.py +1 -1
  62. geovisio/web/collections.py +194 -97
  63. geovisio/web/configuration.py +36 -4
  64. geovisio/web/docs.py +109 -13
  65. geovisio/web/items.py +319 -186
  66. geovisio/web/map.py +92 -54
  67. geovisio/web/pages.py +48 -4
  68. geovisio/web/params.py +100 -42
  69. geovisio/web/pictures.py +37 -3
  70. geovisio/web/prepare.py +4 -2
  71. geovisio/web/queryables.py +57 -0
  72. geovisio/web/stac.py +8 -2
  73. geovisio/web/tokens.py +49 -1
  74. geovisio/web/upload_set.py +226 -51
  75. geovisio/web/users.py +89 -8
  76. geovisio/web/utils.py +26 -8
  77. geovisio/workers/runner_pictures.py +128 -23
  78. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/METADATA +15 -14
  79. geovisio-2.11.0.dist-info/RECORD +117 -0
  80. geovisio-2.9.0.dist-info/RECORD +0 -98
  81. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/WHEEL +0 -0
  82. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/licenses/LICENSE +0 -0
@@ -308,7 +308,7 @@ msgstr ""
308
308
 
309
309
  #: geovisio/web/items.py:420
310
310
  #, python-format
311
- msgid "Picture with id %(p)s does not exists"
311
+ msgid "Picture with id %(p)s does not exist"
312
312
  msgstr ""
313
313
 
314
314
  #: geovisio/web/items.py:706
@@ -62,31 +62,28 @@ Note that the API will always output geometry as geojson geometry (thus will tra
62
62
  return shape_as_geometry(self.shape)
63
63
 
64
64
 
65
- def creation_annotation(params: AnnotationCreationParameter) -> Annotation:
66
- """Create an annotation in the database"""
65
+ def creation_annotation(params: AnnotationCreationParameter, conn: psycopg.Connection) -> Annotation:
66
+ """Create an annotation in the database.
67
+ Note, this should be called from an autocommit connection"""
67
68
 
68
69
  model = model_query.get_db_params_and_values(
69
70
  AnnotationCreationRow(picture_id=params.picture_id, shape=params.shape_as_geometry()), jsonb_fields={"shape"}
70
71
  )
71
- insert_query = SQL(
72
- """WITH existing_annotations AS (
73
- SELECT * FROM annotations WHERE picture_id = %(picture_id)s AND shape = %(shape)s
74
- )
75
- , new_ones AS (
76
- INSERT INTO annotations (picture_id, shape)
77
- SELECT %(picture_id)s, %(shape)s
78
- WHERE NOT EXISTS (SELECT FROM existing_annotations)
79
- RETURNING *
80
- )
81
- SELECT * FROM existing_annotations UNION ALL SELECT * FROM new_ones
82
- ;"""
83
- )
84
72
 
85
- with db.conn(current_app) as conn, conn.transaction(), conn.cursor(row_factory=class_row(Annotation)) as cursor:
73
+ with conn.transaction(), conn.cursor(row_factory=class_row(Annotation)) as cursor:
86
74
  # we check that the shape is valid
87
75
  check_shape(conn, params)
88
76
 
89
- annotation = cursor.execute(insert_query, model.params_as_dict).fetchone()
77
+ annotation = cursor.execute(
78
+ "SELECT * FROM annotations WHERE picture_id = %(picture_id)s AND shape = %(shape)s", model.params_as_dict
79
+ ).fetchone()
80
+ if annotation is None:
81
+ annotation = cursor.execute(
82
+ """INSERT INTO annotations (picture_id, shape)
83
+ VALUES (%(picture_id)s, %(shape)s)
84
+ RETURNING *""",
85
+ model.params_as_dict,
86
+ ).fetchone()
90
87
 
91
88
  if annotation is None:
92
89
  raise Exception("Impossible to insert annotation in database")
@@ -180,7 +177,10 @@ def update_annotation(annotation: Annotation, tag_updates: List[SemanticTagUpdat
180
177
  return a
181
178
 
182
179
 
183
- def delete_annotation(conn: psycopg.Connection, annotation_id: UUID) -> None:
184
- """Delete an annotation from the database"""
185
- with conn.cursor() as cursor:
186
- cursor.execute("DELETE FROM annotations WHERE id = %(id)s", {"id": annotation_id})
180
+ def delete_annotation(conn: psycopg.Connection, annotation: Annotation, account_id: UUID) -> None:
181
+ """Delete an annotation from the database
182
+ Note: to track the history, we delete each tag separately, and the annotation should be deleted after its last tag is deleted"""
183
+ with conn.cursor(row_factory=dict_row) as cursor:
184
+ actions = [SemanticTagUpdate(action=semantics.TagAction.delete, key=t.key, value=t.value) for t in annotation.semantics]
185
+ entity = semantics.Entity(id=annotation.id, type=semantics.EntityType.annotation)
186
+ semantics.update_tags(cursor, entity, actions, account=account_id, annotation=annotation)
geovisio/utils/auth.py CHANGED
@@ -31,7 +31,7 @@ class OAuthUserAccount(object):
31
31
 
32
32
 
33
33
  class OAuthProvider(ABC):
34
- """Base class for oauth provider. Need so specify how to get user's info"""
34
+ """Base class for oauth provider. Need to specify how to get user's info"""
35
35
 
36
36
  name: str
37
37
  client: Any
@@ -52,7 +52,7 @@ class OAuthProvider(ABC):
52
52
  """
53
53
  URL to a user settings page.
54
54
  This URL should point to a web page where user can edit its password or email address,
55
- if that makes sense regardinz your GeoVisio instance.
55
+ if that makes sense regarding your GeoVisio instance.
56
56
 
57
57
  This is useful if your instance has its own specific identity provider. It may not be used if you rely on third-party auth provider.
58
58
  """
@@ -200,6 +200,25 @@ class Account(BaseModel):
200
200
  """Is account legitimate to edit web pages ?"""
201
201
  return self.role == AccountRole.admin
202
202
 
203
+ def can_edit_item(self, item_account_id: str):
204
+ """Is account legitimate to edit an item owned by `item_account_id` ?
205
+ Admin can edit everything, then the item owner can edit only its own item"""
206
+ return self.role == AccountRole.admin or self.id == item_account_id
207
+
208
+ def can_edit_collection(self, col_account_id: str):
209
+ """Is account legitimate to edit a collection owned by `col_account_id` ?
210
+ Admin can edit everything, then the collection owner can edit only its own collection"""
211
+ return self.role == AccountRole.admin or self.id == col_account_id
212
+
213
+ def can_edit_upload_set(self, us_account_id: str):
214
+ """Is account legitimate to edit an upload set owned by `us_account_id` ?
215
+ Admin can edit everything, then the upload set owner can edit only its own upload set"""
216
+ return self.role == AccountRole.admin or self.id == us_account_id
217
+
218
+ def can_see_all(self):
219
+ """Can the account see all pictures/sequences/upload_sets ?"""
220
+ return self.role == AccountRole.admin
221
+
203
222
  @property
204
223
  def role(self) -> AccountRole:
205
224
  if self.role_ is None:
@@ -235,7 +254,7 @@ class Account(BaseModel):
235
254
 
236
255
 
237
256
  def account_allow_collaborative_editing(account_id: str | UUID):
238
- """An account allow collaborative editing it if has been allow at the account level else we check the instance configuration"""
257
+ """An account allows collaborative editing it if has been allowed at the account level else we check the instance configuration"""
239
258
  r = db.fetchone(
240
259
  current_app,
241
260
  """SELECT COALESCE(accounts.collaborative_metadata, configurations.collaborative_metadata, true) AS collaborative_metadata
@@ -249,15 +268,16 @@ WHERE accounts.id = %s""",
249
268
 
250
269
 
251
270
  def login_required():
252
- """Check that the user is logged, and abort if it's not the case"""
271
+ """Check that the user is logged in, and abort if it's not the case"""
253
272
 
254
273
  def actual_decorator(f):
255
274
  @wraps(f)
256
275
  def decorator(*args, **kwargs):
257
- account = get_current_account()
258
- if not account:
259
- return flask.abort(flask.make_response(flask.jsonify(message=_("Authentication is mandatory")), 401))
260
- kwargs["account"] = account
276
+ if "account" not in kwargs:
277
+ account = get_current_account()
278
+ if not account:
279
+ return flask.abort(flask.make_response(flask.jsonify(message=_("Authentication is mandatory")), 401))
280
+ kwargs["account"] = account
261
281
 
262
282
  return f(*args, **kwargs)
263
283
 
@@ -267,7 +287,7 @@ def login_required():
267
287
 
268
288
 
269
289
  def login_required_by_setting(mandatory_login_param):
270
- """Check that the user is logged, and abort if it's not the case
290
+ """Check that the user is logged in, and abort if it's not the case
271
291
 
272
292
  Args:
273
293
  mandatory_login_param (str): name of the configuration parameter used to decide if the login is mandatory or not
@@ -303,7 +323,7 @@ def login_required_by_setting(mandatory_login_param):
303
323
 
304
324
 
305
325
  def login_required_with_redirect():
306
- """Check that the user is logged, and redirect if it's not the case"""
326
+ """Check that the user is logged in, and redirect if it's not the case"""
307
327
 
308
328
  def actual_decorator(f):
309
329
  @wraps(f)
@@ -346,7 +366,7 @@ class UnknowAccountException(Exception):
346
366
  status_code = 401
347
367
 
348
368
  def __init__(self):
349
- msg = "No account with this oauth id is know, you should login first"
369
+ msg = "No account with this oauth id is known, you should login first"
350
370
  super().__init__(msg)
351
371
 
352
372
 
@@ -358,12 +378,12 @@ class LoginRequiredException(Exception):
358
378
  super().__init__(msg)
359
379
 
360
380
 
361
- def get_current_account():
381
+ def get_current_account() -> Optional[Account]:
362
382
  """Get the authenticated account information.
363
383
 
364
384
  This account is either stored in the flask's session or retrieved with the Bearer token passed with an `Authorization` header.
365
385
 
366
- The flask session is usually used by browser, whereas the bearer token is handly for non interactive uses, like curls or CLI usage.
386
+ The flask session is usually used by browser, whereas the bearer token is handy for non interactive uses, like curls or CLI usage.
367
387
 
368
388
  Returns:
369
389
  Account: the current logged account, None if nobody is logged
@@ -386,6 +406,20 @@ def get_current_account():
386
406
  return None
387
407
 
388
408
 
409
+ def get_current_account_id() -> Optional[UUID]:
410
+ """Get the authenticated account ID.
411
+
412
+ This account is either stored in the flask's session or retrieved with the Bearer token passed with an `Authorization` header.
413
+
414
+ The flask session is usually used by browser, whereas the bearer token is handy for non interactive uses, like curls or CLI usage.
415
+
416
+ Returns:
417
+ The current logged account ID, None if nobody is logged
418
+ """
419
+ account_to_query = get_current_account()
420
+ return account_to_query.id if account_to_query is not None else None
421
+
422
+
389
423
  def _get_bearer_token() -> Optional[str]:
390
424
  """
391
425
  Get the associated bearer token from the `Authorization` header
geovisio/utils/cql2.py CHANGED
@@ -43,6 +43,11 @@ def parse_semantic_filter(value: Optional[str]) -> Optional[sql.SQL]:
43
43
  SQL("((key = 'pouet') AND (value = 'stop'))")
44
44
  >>> parse_semantic_filter("\\"semantics.osm|traffic_sign\\"='stop'")
45
45
  SQL("((key = 'osm|traffic_sign') AND (value = 'stop'))")
46
+ >>> parse_semantic_filter("\\"semantics\\" IS NOT NULL")
47
+ SQL('True')
48
+ >>> parse_semantic_filter("\\"semantics\\" IS NULL") # doctest: +IGNORE_EXCEPTION_DETAIL
49
+ Traceback (most recent call last):
50
+ geovisio.errors.InvalidAPIUsage: Unsupported filter parameter: only `semantics IS NOT NULL` is supported (to express that we want all items with at least one semantic tags)
46
51
  """
47
52
  return parse_cql2_filter(value, SEMANTIC_FIELD_MAPPOING, ast_updater=lambda a: SemanticAttributesAstUpdater().evaluate(a))
48
53
 
@@ -52,6 +57,8 @@ def parse_search_filter(value: Optional[str]) -> Optional[sql.SQL]:
52
57
 
53
58
  Note that, for the moment, only semantics are supported. If more needs to be supported, we should evaluate the
54
59
  non semantic filters separately (likely with a AstEvaluator).
60
+
61
+ Note: if more search filters are added, don't forget to add them to the queryables endpoint (in queryables.py)
55
62
  """
56
63
  s = parse_semantic_filter(value)
57
64
 
@@ -66,12 +73,12 @@ def parse_search_filter(value: Optional[str]) -> Optional[sql.SQL]:
66
73
  UNION
67
74
  SELECT DISTINCT(picture_id)
68
75
  FROM annotations_semantics ans
69
- JOIN annotations a on a.id = ans.annotation_id
76
+ JOIN annotations a ON a.id = ans.annotation_id
70
77
  WHERE {semantic_filter}
71
78
  UNION
72
79
  SELECT sp.pic_id
73
80
  FROM sequences_pictures sp
74
- join sequences_semantics sm on sp.seq_id = sm.sequence_id
81
+ JOIN sequences_semantics sm ON sp.seq_id = sm.sequence_id
75
82
  WHERE {semantic_filter}
76
83
  LIMIT %(limit)s
77
84
  ))"""
@@ -91,6 +98,7 @@ class SemanticAttributesAstUpdater(Evaluator):
91
98
  So
92
99
  * `semantics.some_tag='some_value'` becomes `(key = 'some_tag' AND value = 'some_value')`
93
100
  * `semantics.some_tag IN ('some_value', 'some_other_value')` becomes `(key = 'some_tag' AND value IN ('some_value', 'some_other_value'))`
101
+ * `semantics IS NOT NULL` becomes `True` (to get all elements with some semantics)
94
102
  """
95
103
 
96
104
  @handle(ast.Equal)
@@ -112,14 +120,23 @@ class SemanticAttributesAstUpdater(Evaluator):
112
120
 
113
121
  @handle(ast.IsNull)
114
122
  def is_null(self, node, lhs):
123
+ semantic_attribute = get_semantic_attribute(lhs)
124
+ if semantic_attribute is None:
125
+ if lhs.name == "semantics":
126
+ # semantics IS NOT NULL means we want all elements with some semantics (=> we return True)
127
+ # semantics IS NULL is not yet handled
128
+ if node.not_:
129
+ return True
130
+ raise errors.InvalidAPIUsage(
131
+ "Unsupported filter parameter: only `semantics IS NOT NULL` is supported (to express that we want all items with at least one semantic tags)",
132
+ status_code=400,
133
+ )
134
+ return node
115
135
  if not node.not_:
116
136
  raise errors.InvalidAPIUsage(
117
137
  "Unsupported filter parameter: only `IS NOT NULL` is supported (to express that we want all values of a semantic tags)",
118
138
  status_code=400,
119
139
  )
120
- semantic_attribute = get_semantic_attribute(lhs)
121
- if semantic_attribute is None:
122
- return node
123
140
  return ast.Equal(ast.Attribute("key"), semantic_attribute)
124
141
 
125
142
  @handle(ast.In)
geovisio/utils/fields.py CHANGED
@@ -2,6 +2,8 @@ from enum import Enum
2
2
  from dataclasses import dataclass, field
3
3
  from typing import Any, List, Generic, TypeVar, Protocol
4
4
  from psycopg import sql
5
+ from geovisio import errors
6
+ from gettext import gettext as _
5
7
 
6
8
 
7
9
  @dataclass
@@ -12,8 +14,8 @@ class FieldMapping:
12
14
  stac: str
13
15
 
14
16
  @property
15
- def sql_filter(self) -> sql.Composable:
16
- return sql.SQL("s.{}").format(self.sql_column)
17
+ def sql_filter(self, row_alias="s.") -> sql.Composable:
18
+ return sql.SQL(row_alias + "{}").format(self.sql_column)
17
19
 
18
20
 
19
21
  class SQLDirection(Enum):
@@ -97,3 +99,13 @@ class BBox:
97
99
  maxx: float
98
100
  miny: float
99
101
  maxy: float
102
+
103
+
104
+ def parse_relative_heading(value: str) -> int:
105
+ try:
106
+ relHeading = int(value)
107
+ if relHeading < -180 or relHeading > 180:
108
+ raise ValueError()
109
+ return relHeading
110
+ except (ValueError, TypeError):
111
+ raise errors.InvalidAPIUsage(_("Relative heading is not valid, should be an integer in degrees from -180 to 180"), status_code=400)
@@ -0,0 +1,44 @@
1
+ from .fields import SQLDirection
2
+ from psycopg.sql import SQL, Identifier
3
+ from enum import Enum
4
+ from dataclasses import dataclass, field
5
+ from typing import Optional, List
6
+
7
+
8
+ class SortableItemField(Enum):
9
+ ts = Identifier("ts")
10
+ updated = Identifier("updated_at")
11
+ distance_to = ""
12
+ id = Identifier("id")
13
+
14
+
15
+ @dataclass
16
+ class ItemSortByField:
17
+ field: SortableItemField
18
+ direction: SQLDirection
19
+
20
+ # Note that this obj_to_compare is only used for the `distance_to` field, but we cannot put it in the enum
21
+ obj_to_compare: Optional[SQL] = None
22
+
23
+ def to_sql(self, alias) -> SQL:
24
+ sql_order = None
25
+ if self.obj_to_compare:
26
+ if self.field == SortableItemField.distance_to:
27
+ sql_order = SQL('{alias}."geom" <-> {obj_to_compare} {direction}').format(
28
+ alias=alias, obj_to_compare=self.obj_to_compare, direction=self.direction.value
29
+ )
30
+ else:
31
+ raise InvalidAPIUsage("For the moment only the distance comparison to another item is supported")
32
+ else:
33
+ sql_order = SQL("{alias}.{field} {direction}").format(alias=alias, field=self.field.value, direction=self.direction.value)
34
+ return sql_order
35
+
36
+
37
+ @dataclass
38
+ class SortBy:
39
+ fields: List[ItemSortByField] = field(default_factory=lambda: [])
40
+
41
+ def to_sql(self, alias=Identifier("p")) -> SQL:
42
+ if len(self.fields) == 0:
43
+ return SQL("")
44
+ return SQL("ORDER BY {fields}").format(fields=SQL(", ").join([f.to_sql(alias=alias) for f in self.fields]))
@@ -28,7 +28,7 @@ class ParamsAndValues:
28
28
  return SQL(", ").join([Placeholder(f) for f in self.params_as_dict.keys()])
29
29
 
30
30
  def fields_for_set(self) -> Composed:
31
- """Get the fields and the placeholders formated for an update query like:
31
+ """Get the fields and the placeholders formatted for an update query like:
32
32
  '"a" = %(a)s, "b" = %(b)s'
33
33
 
34
34
  Can be used directly with a query like:
@@ -39,7 +39,7 @@ class ParamsAndValues:
39
39
  return SQL(", ").join(self.fields_for_set_list())
40
40
 
41
41
  def fields_for_set_list(self) -> List[Composed]:
42
- """Get the fields and the placeholders formated for an update query like:
42
+ """Get the fields and the placeholders formatted for an update query like:
43
43
  ['"a" = %(a)s', '"b" = %(b)s']
44
44
 
45
45
  Note that the returned list should be joined with SQL(", ").join()
@@ -15,7 +15,7 @@ class Polygon(BaseModel):
15
15
 
16
16
  @field_validator("coordinates")
17
17
  def check_closure(cls, coordinates: List) -> List:
18
- """Validate that Polygon is closed (first and last coordinate are the same)."""
18
+ """Validate that Polygon is closed (first and last coordinates are the same)."""
19
19
  if any(ring[-1] != ring[0] for ring in coordinates):
20
20
  raise ValueError("All linear rings have the same start and end coordinates")
21
21
 
@@ -1,6 +1,6 @@
1
1
  import json
2
2
  import math
3
- from typing import Dict, Optional
3
+ from typing import Any, Dict, List, Optional, Tuple
4
4
  from uuid import UUID
5
5
  from attr import dataclass
6
6
  from flask import current_app, redirect, send_file
@@ -15,12 +15,16 @@ import logging
15
15
  from dataclasses import asdict
16
16
  from fs.path import dirname
17
17
  from psycopg.errors import UniqueViolation, InvalidParameterValue
18
+ from psycopg.types.json import Jsonb
19
+ from psycopg import sql, Connection
18
20
  import sentry_sdk
19
21
  from geovisio import utils, errors
20
22
  from geopic_tag_reader import reader
21
23
  import re
22
24
  import multipart
23
25
 
26
+ from geovisio.utils import db
27
+
24
28
  log = logging.getLogger(__name__)
25
29
 
26
30
 
@@ -51,7 +55,6 @@ def createBlurredHDPicture(fs, blurApi, pictureBytes, outputFilename, keep_unblu
51
55
  PIL.Image
52
56
  The blurred version of the image
53
57
  """
54
-
55
58
  if blurApi is None:
56
59
  return None
57
60
  # Call blur API, asking for multipart response if available
@@ -434,25 +437,23 @@ def checkPictureStatus(fses, pictureId):
434
437
  if current_app.config["DEBUG_PICTURES_SKIP_FS_CHECKS_WITH_PUBLIC_URL"]:
435
438
  return {"status": "ready"}
436
439
 
437
- account = utils.auth.get_current_account()
438
- accountId = account.id if account is not None else None
440
+ accountId = utils.auth.get_current_account_id()
439
441
  # Check picture availability + status
440
442
  picMetadata = utils.db.fetchone(
441
443
  current_app,
442
- """
443
- SELECT
444
- p.status,
445
- (p.metadata->>'cols')::int AS cols,
446
- (p.metadata->>'rows')::int AS rows,
447
- p.metadata->>'type' AS type,
448
- p.account_id,
449
- s.status AS seq_status
450
- FROM pictures p
451
- JOIN sequences_pictures sp ON sp.pic_id = p.id
452
- JOIN sequences s ON s.id = sp.seq_id
453
- WHERE p.id = %s
454
- """,
455
- [pictureId],
444
+ """SELECT
445
+ p.status,
446
+ (p.metadata->>'cols')::int AS cols,
447
+ (p.metadata->>'rows')::int AS rows,
448
+ p.metadata->>'type' AS type,
449
+ p.account_id,
450
+ s.status AS seq_status,
451
+ COALESCE(p.visibility, s.visibility) AS visibility
452
+ FROM pictures p
453
+ JOIN sequences_pictures sp ON sp.pic_id = p.id
454
+ JOIN sequences s ON s.id = sp.seq_id
455
+ WHERE p.id = %(pic_id)s AND is_picture_visible_by_user(p, %(account)s) AND is_sequence_visible_by_user(s, %(account)s)""",
456
+ {"pic_id": pictureId, "account": accountId},
456
457
  row_factory=dict_row,
457
458
  )
458
459
 
@@ -460,7 +461,7 @@ def checkPictureStatus(fses, pictureId):
460
461
  raise errors.InvalidAPIUsage(_("Picture can't be found, you may check its ID"), status_code=404)
461
462
 
462
463
  if (picMetadata["status"] != "ready" or picMetadata["seq_status"] != "ready") and accountId != str(picMetadata["account_id"]):
463
- raise errors.InvalidAPIUsage(_("Picture is not available (either hidden by admin or processing)"), status_code=403)
464
+ raise errors.InvalidAPIUsage(_("Picture is not available (currently in processing)"), status_code=403)
464
465
 
465
466
  if current_app.config.get("PICTURE_PROCESS_DERIVATES_STRATEGY") == "PREPROCESS":
466
467
  # if derivates are always generated, not need for other checks
@@ -498,7 +499,7 @@ def sendThumbnail(pictureId, format):
498
499
  metadata = checkPictureStatus(fses, pictureId)
499
500
 
500
501
  external_url = getPublicDerivatePictureExternalUrl(pictureId, format, "thumb.jpg")
501
- if external_url and metadata["status"] == "ready":
502
+ if external_url and metadata["status"] == "ready" and metadata["visibility"] in ("anyone", None):
502
503
  return redirect(external_url)
503
504
 
504
505
  try:
@@ -553,6 +554,25 @@ class MetadataReadingError(Exception):
553
554
  self.missing_mandatory_tags = missing_mandatory_tags
554
555
 
555
556
 
557
+ def get_lighter_metadata(metadata):
558
+ """Create a lighter metadata field to remove duplicates fields"""
559
+ lighterMetadata = dict(
560
+ filter(
561
+ lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source", "gps_accuracy"],
562
+ metadata.items(),
563
+ )
564
+ )
565
+ if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
566
+ del lighterMetadata["tagreader_warnings"]
567
+ lighterMetadata["tz"] = metadata["ts"].tzname()
568
+ if metadata.get("ts_by_source", {}).get("gps") is not None:
569
+ lighterMetadata["ts_gps"] = metadata["ts_by_source"]["gps"].isoformat()
570
+ if metadata.get("ts_by_source", {}).get("camera") is not None:
571
+ lighterMetadata["ts_camera"] = metadata["ts_by_source"]["camera"].isoformat()
572
+
573
+ return lighterMetadata
574
+
575
+
556
576
  def insertNewPictureInDatabase(
557
577
  db, sequenceId, position, pictureBytes, associatedAccountID, additionalMetadata, uploadSetID=None, lang="en"
558
578
  ):
@@ -579,11 +599,10 @@ def insertNewPictureInDatabase(
579
599
  -------
580
600
  uuid : The uuid of the new picture entry in the database
581
601
  """
582
- from psycopg.types.json import Jsonb
583
602
 
584
603
  # Create a fully-featured metadata object
585
- picturePillow = Image.open(io.BytesIO(pictureBytes))
586
- metadata = readPictureMetadata(pictureBytes, lang) | utils.pictures.getPictureSizing(picturePillow) | additionalMetadata
604
+ with Image.open(io.BytesIO(pictureBytes)) as picturePillow:
605
+ metadata = readPictureMetadata(pictureBytes, lang) | utils.pictures.getPictureSizing(picturePillow) | additionalMetadata
587
606
 
588
607
  # Remove cols/rows information for flat pictures
589
608
  if metadata["type"] == "flat":
@@ -591,19 +610,7 @@ def insertNewPictureInDatabase(
591
610
  metadata.pop("rows")
592
611
 
593
612
  # Create a lighter metadata field to remove duplicates fields
594
- lighterMetadata = dict(
595
- filter(
596
- lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source", "gps_accuracy"],
597
- metadata.items(),
598
- )
599
- )
600
- if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
601
- del lighterMetadata["tagreader_warnings"]
602
- lighterMetadata["tz"] = metadata["ts"].tzname()
603
- if metadata.get("ts_by_source", {}).get("gps") is not None:
604
- lighterMetadata["ts_gps"] = metadata["ts_by_source"]["gps"].isoformat()
605
- if metadata.get("ts_by_source", {}).get("camera") is not None:
606
- lighterMetadata["ts_camera"] = metadata["ts_by_source"]["camera"].isoformat()
613
+ lighterMetadata = get_lighter_metadata(metadata)
607
614
 
608
615
  exif = cleanupExif(metadata["exif"])
609
616
 
@@ -639,6 +646,90 @@ def insertNewPictureInDatabase(
639
646
  return picId
640
647
 
641
648
 
649
+ def _get_metadata_to_update(db_picture: Dict, new_reader_metadata: reader.GeoPicTags) -> Tuple[List[str], Dict[str, Any]]:
650
+ fields_to_update = []
651
+ params = {}
652
+
653
+ if new_reader_metadata.ts != db_picture["ts"]:
654
+ fields_to_update.append(sql.SQL("ts = %(ts)s"))
655
+ params["ts"] = new_reader_metadata.ts.isoformat()
656
+ if db_picture["heading_computed"] is False and new_reader_metadata.heading != db_picture["heading"]:
657
+ fields_to_update.append(sql.SQL("heading = %(heading)s"))
658
+ params["heading"] = new_reader_metadata.heading
659
+ if new_reader_metadata.gps_accuracy != db_picture["gps_accuracy_m"]:
660
+ fields_to_update.append(sql.SQL("gps_accuracy_m = %(gps_accuracy_m)s"))
661
+ params["gps_accuracy_m"] = new_reader_metadata.gps_accuracy
662
+
663
+ # Note: The db metadata can have more stuff (like originalFileName, size, ...), so we only check if the new value is different from the old one
664
+ # we cannot check directly for dict equality
665
+ new_lighterMetadata = get_lighter_metadata(asdict(new_reader_metadata))
666
+ metadata_updates = {}
667
+ for k, v in new_lighterMetadata.items():
668
+ if v != db_picture["metadata"].get(k):
669
+ metadata_updates[k] = v
670
+
671
+ # if the position has been updated (by more than ~10cm)
672
+ lon, lat = db_picture["lon"], db_picture["lat"]
673
+ new_lon, new_lat = new_reader_metadata.lon, new_reader_metadata.lat
674
+ if not math.isclose(lon, new_lon, abs_tol=0.0000001) or not math.isclose(lat, new_lat, abs_tol=0.0000001):
675
+ fields_to_update.append(sql.SQL("geom = ST_SetSRID(ST_MakePoint(%(lon)s, %(lat)s), 4326)"))
676
+ params["lon"] = new_reader_metadata.lon
677
+ params["lat"] = new_reader_metadata.lat
678
+
679
+ if metadata_updates:
680
+ fields_to_update.append(sql.SQL("metadata = metadata || %(new_metadata)s"))
681
+ params["new_metadata"] = Jsonb(metadata_updates)
682
+
683
+ return fields_to_update, params
684
+
685
+
686
+ def ask_for_metadata_update(picture_id: UUID, read_file=False):
687
+ """Enqueue an async job to reread the picture's metadata"""
688
+ args = Jsonb({"read_file": True}) if read_file else None
689
+ with db.conn(current_app) as conn:
690
+ conn.execute(
691
+ "INSERT INTO job_queue(picture_id, task, args) VALUES (%s, 'read_metadata', %s)",
692
+ [picture_id, args],
693
+ )
694
+
695
+
696
+ def update_picture_metadata(conn: Connection, picture_id: UUID, read_file=False) -> bool:
697
+ """Update picture metadata in database, using either the stored metadata or the original file
698
+
699
+ Only updates metadata that have changed.
700
+ Returns True if some metadata have been updated, False otherwise
701
+ """
702
+
703
+ with conn.cursor(row_factory=dict_row) as cursor:
704
+ db_picture = cursor.execute(
705
+ "SELECT ts, heading, metadata, ST_X(geom) as lon, ST_Y(geom) as lat, account_id, exif, gps_accuracy_m, heading_computed FROM pictures WHERE id = %s",
706
+ [picture_id],
707
+ ).fetchone()
708
+ if db_picture is None:
709
+ raise Exception(f"Picture {picture_id} not found")
710
+
711
+ if read_file:
712
+ pic_path = getHDPicturePath(picture_id)
713
+
714
+ with current_app.config["FILESYSTEMS"].permanent.openbin(pic_path) as picture_bytes:
715
+ new_metadata = reader.readPictureMetadata(picture_bytes.read())
716
+ else:
717
+ new_metadata = reader.getPictureMetadata(db_picture["exif"], db_picture["metadata"]["width"], db_picture["metadata"]["height"])
718
+
719
+ # we only want to update values that have changed
720
+ fields_to_update, params = _get_metadata_to_update(db_picture, new_metadata)
721
+
722
+ if not fields_to_update:
723
+ logging.debug(f"No metadata update needed for picture {picture_id}")
724
+ return False
725
+
726
+ conn.execute(
727
+ sql.SQL("UPDATE pictures SET {f} WHERE id = %(pic_id)s").format(f=sql.SQL(", ").join(fields_to_update)),
728
+ params | {"pic_id": picture_id},
729
+ )
730
+ return True
731
+
732
+
642
733
  # Note: we don't want to store and expose exif binary fields as they are difficult to use and take a lot of storage in the database (~20% for maker notes only)
643
734
  # This list has been queried from real data (cf [this comment](https://gitlab.com/panoramax/server/api/-/merge_requests/241#note_1790580636)).
644
735
  # Update this list (and do a sql migration) if new binary fields are added