geovisio 2.7.1__py3-none-any.whl → 2.8.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (60)
  1. geovisio/__init__.py +10 -2
  2. geovisio/admin_cli/__init__.py +3 -1
  3. geovisio/admin_cli/user.py +75 -0
  4. geovisio/config_app.py +87 -4
  5. geovisio/templates/main.html +2 -2
  6. geovisio/templates/viewer.html +3 -3
  7. geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
  8. geovisio/translations/da/LC_MESSAGES/messages.po +850 -0
  9. geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
  10. geovisio/translations/de/LC_MESSAGES/messages.po +97 -1
  11. geovisio/translations/el/LC_MESSAGES/messages.mo +0 -0
  12. geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
  13. geovisio/translations/en/LC_MESSAGES/messages.po +210 -127
  14. geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
  15. geovisio/translations/eo/LC_MESSAGES/messages.po +790 -0
  16. geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
  17. geovisio/translations/fi/LC_MESSAGES/messages.mo +0 -0
  18. geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
  19. geovisio/translations/fr/LC_MESSAGES/messages.po +39 -2
  20. geovisio/translations/hu/LC_MESSAGES/messages.mo +0 -0
  21. geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
  22. geovisio/translations/it/LC_MESSAGES/messages.po +875 -0
  23. geovisio/translations/ja/LC_MESSAGES/messages.mo +0 -0
  24. geovisio/translations/ja/LC_MESSAGES/messages.po +719 -0
  25. geovisio/translations/ko/LC_MESSAGES/messages.mo +0 -0
  26. geovisio/translations/messages.pot +191 -122
  27. geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
  28. geovisio/translations/pl/LC_MESSAGES/messages.mo +0 -0
  29. geovisio/translations/pl/LC_MESSAGES/messages.po +727 -0
  30. geovisio/translations/zh_Hant/LC_MESSAGES/messages.mo +0 -0
  31. geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +719 -0
  32. geovisio/utils/auth.py +80 -8
  33. geovisio/utils/link.py +3 -2
  34. geovisio/utils/model_query.py +55 -0
  35. geovisio/utils/pictures.py +12 -43
  36. geovisio/utils/semantics.py +120 -0
  37. geovisio/utils/sequences.py +10 -1
  38. geovisio/utils/tokens.py +5 -3
  39. geovisio/utils/upload_set.py +50 -15
  40. geovisio/utils/website.py +50 -0
  41. geovisio/web/annotations.py +17 -0
  42. geovisio/web/auth.py +9 -5
  43. geovisio/web/collections.py +217 -61
  44. geovisio/web/configuration.py +17 -1
  45. geovisio/web/docs.py +64 -53
  46. geovisio/web/items.py +220 -96
  47. geovisio/web/map.py +48 -18
  48. geovisio/web/pages.py +240 -0
  49. geovisio/web/params.py +17 -0
  50. geovisio/web/prepare.py +165 -0
  51. geovisio/web/stac.py +17 -4
  52. geovisio/web/tokens.py +14 -4
  53. geovisio/web/upload_set.py +10 -4
  54. geovisio/web/users.py +176 -44
  55. geovisio/workers/runner_pictures.py +61 -22
  56. {geovisio-2.7.1.dist-info → geovisio-2.8.0.dist-info}/METADATA +5 -4
  57. geovisio-2.8.0.dist-info/RECORD +89 -0
  58. geovisio-2.7.1.dist-info/RECORD +0 -70
  59. {geovisio-2.7.1.dist-info → geovisio-2.8.0.dist-info}/LICENSE +0 -0
  60. {geovisio-2.7.1.dist-info → geovisio-2.8.0.dist-info}/WHEEL +0 -0
geovisio/utils/auth.py CHANGED
@@ -1,3 +1,6 @@
+from ast import Dict
+from uuid import UUID
+from click import Option
 import flask
 from flask import current_app, url_for, session, redirect, request
 from flask_babel import gettext as _
@@ -8,9 +11,10 @@ from abc import ABC, abstractmethod
 from typing import Any
 from typing import Optional
 from enum import Enum
-from pydantic import BaseModel, ConfigDict, Field
+from pydantic import BaseModel, ConfigDict, Field, ValidationError, field_validator
 import sentry_sdk
 from psycopg.rows import dict_row
+from geovisio import errors
 from geovisio.utils import db
 
 
@@ -159,16 +163,30 @@ class Account(BaseModel):
     name: str
     oauth_provider: Optional[str] = None
     oauth_id: Optional[str] = None
-
-    model_config = ConfigDict(extra="forbid")
+    tos_accepted: Optional[bool] = None
 
     def __init__(self, role: Optional[AccountRole] = None, **kwargs) -> None:
+        # Note: since it's a valid state for the collaborative_metadata to be None,
+        # we need to only set it if provided, this way we can check the `model_fields_set` to know if the collaborative_metadata is set
+        collaborative_metadata_set = "collaborative_metadata" in kwargs
+        collaborative_metadata = kwargs.pop("collaborative_metadata", None)
         super().__init__(**kwargs)
         self.role = role
+        if collaborative_metadata_set:
+            self.collaborative_metadata = collaborative_metadata
 
-    # Note: this field is excluded since we do not want to persist it in the cookie. It will be fetched from the database if needed
-    # and accessed though the `role` property
+    # Note: those fields are excluded since we do not want to persist it in the cookie. It will be fetched from the database if needed
     role_: Optional[AccountRole] = Field(default=None, exclude=True)
+    collaborative_metadata_: Optional[bool] = Field(default=None, exclude=True)
+
+    @field_validator("id", mode="before")
+    @classmethod
+    def check_id(cls, value) -> str:
+        if isinstance(value, UUID):
+            return str(value)
+        if isinstance(value, str):
+            return value
+        raise ValidationError("Invalid account id type")
 
     def can_check_reports(self):
         """Is account legitimate to read any report ?"""
@@ -178,17 +196,57 @@ class Account(BaseModel):
         """Is account legitimate to read and edit excluded areas ?"""
         return self.role == AccountRole.admin
 
+    def can_edit_pages(self):
+        """Is account legitimate to edit web pages ?"""
+        return self.role == AccountRole.admin
+
     @property
     def role(self) -> AccountRole:
         if self.role_ is None:
-            role = db.fetchone(current_app, "SELECT role FROM accounts WHERE id = %s", (self.id,), row_factory=dict_row)
-            self.role_ = AccountRole(role["role"])
+            self._fetch_database_info()
         return self.role_
 
     @role.setter
-    def role(self, r: AccountRole) -> None:
+    def role(self, r: AccountRole | str) -> None:
+        if isinstance(r, str):
+            r = AccountRole(r)
         self.role_ = r
 
+    @property
+    def collaborative_metadata(self) -> Optional[bool]:
+        if "collaborative_metadata_" not in self.model_fields_set:
+            self._fetch_database_info()
+        return self.collaborative_metadata_
+
+    @collaborative_metadata.setter
+    def collaborative_metadata(self, b: Optional[bool]) -> None:
+        self.collaborative_metadata_ = b
+
+    def _fetch_database_info(self):
+        """Fetch the missing database metadata for this account"""
+        r = db.fetchone(
+            current_app,
+            "SELECT role, collaborative_metadata FROM accounts WHERE id = %s",
+            (self.id,),
+            row_factory=dict_row,
+        )
+        self.role = AccountRole(r["role"])
+        self.collaborative_metadata = r["collaborative_metadata"]
+
+
+def account_allow_collaborative_editing(account_id: str | UUID):
+    """An account allow collaborative editing it if has been allow at the account level else we check the instance configuration"""
+    r = db.fetchone(
+        current_app,
+        """SELECT COALESCE(accounts.collaborative_metadata, configurations.collaborative_metadata, true) AS collaborative_metadata
+        FROM accounts
+        JOIN configurations ON TRUE
+        WHERE accounts.id = %s""",
+        [account_id],
+        row_factory=dict_row,
+    )
+    return r["collaborative_metadata"]
+
 
 def login_required():
     """Check that the user is logged, and abort if it's not the case"""
@@ -221,6 +279,20 @@ def login_required_by_setting(mandatory_login_param):
             account = get_current_account()
             if not account and current_app.config[mandatory_login_param]:
                 return flask.abort(flask.make_response(flask.jsonify(message="Authentication is mandatory"), 401))
+            if account and account.tos_accepted is False and current_app.config["API_ENFORCE_TOS_ACCEPTANCE"]:
+                tos_acceptance_page = current_app.config["API_WEBSITE_URL"].tos_validation_page()
+                raise errors.InvalidAPIUsage(
+                    message=_(
+                        "You need to accept the terms of service before uploading any pictures. You can do so by validating them here: %(url)s",
+                        url=tos_acceptance_page,
+                    ),
+                    status_code=401,
+                    payload={
+                        "details": {
+                            "validation_page": tos_acceptance_page,
+                        }
+                    },
+                )
             kwargs["account"] = account
 
             return f(*args, **kwargs)
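As a standalone illustration (not geovisio code, names are made up), this is the fallback the new account_allow_collaborative_editing query expresses with COALESCE: the account-level setting wins, then the instance configuration, and editing defaults to allowed.

from typing import Optional

def allow_collaborative_editing(account_setting: Optional[bool], instance_setting: Optional[bool]) -> bool:
    # Mirrors COALESCE(accounts.collaborative_metadata, configurations.collaborative_metadata, true)
    if account_setting is not None:
        return account_setting
    if instance_setting is not None:
        return instance_setting
    return True

assert allow_collaborative_editing(None, None) is True      # nothing set: editing allowed
assert allow_collaborative_editing(False, True) is False    # account opt-out wins over the instance setting
assert allow_collaborative_editing(None, False) is False    # instance-wide opt-out applies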
geovisio/utils/link.py CHANGED
@@ -6,9 +6,10 @@ from flask import url_for
 class Link(BaseModel):
     rel: str
     type: str
-    title: Optional[str]
+    title: Optional[str] = None
     href: str
 
 
 def make_link(rel: str, route: str, title: Optional[str] = None, type: str = "application/json", **args):
-    return Link(rel=rel, type=type, title=title, href=url_for(route, **args, _external=True))
+    kwargs = {"title": title} if title else {}  # do not pass none title, to know if it has been set or not
+    return Link(rel=rel, type=type, href=url_for(route, **args, _external=True), **kwargs)
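A minimal standalone pydantic sketch (placeholder values, mirroring the Link model above) of why make_link now omits a None title: pydantic v2 records explicitly passed fields in model_fields_set, so an unset title can later be dropped from serialized links.

from typing import Optional
from pydantic import BaseModel

class Link(BaseModel):
    rel: str
    type: str
    title: Optional[str] = None
    href: str

with_title = Link(rel="next", type="application/json", title="Next page", href="https://example.com/?page=2")
without_title = Link(rel="next", type="application/json", href="https://example.com/?page=2")

assert "title" in with_title.model_fields_set
assert "title" not in without_title.model_fields_set
# e.g. without_title.model_dump(exclude_unset=True) drops the title key entirely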
geovisio/utils/model_query.py ADDED
@@ -0,0 +1,55 @@
+from typing import Any, Dict, List
+from pydantic import BaseModel
+from psycopg.sql import SQL, Identifier, Placeholder, Composed
+from psycopg.types.json import Jsonb
+
+
+class ParamsAndValues:
+    """Simple wrapper used to help building a query with the right psycopg types"""
+
+    params_as_dict: Dict[str, Any]
+
+    def __init__(self, model: BaseModel, **kwargs):
+        self.params_as_dict = model.model_dump(exclude_none=True) | kwargs
+
+        for k, v in self.params_as_dict.items():
+            if isinstance(v, Dict):
+                self.params_as_dict[k] = Jsonb(v)  # convert dict to jsonb in database
+
+    def has_updates(self):
+        return bool(self.params_as_dict)
+
+    def fields(self) -> Composed:
+        """Get the database fields identifiers"""
+        return SQL(", ").join([Identifier(f) for f in self.params_as_dict.keys()])
+
+    def placeholders(self) -> Composed:
+        """Get the placeholders for the query"""
+        return SQL(", ").join([Placeholder(f) for f in self.params_as_dict.keys()])
+
+    def fields_for_set(self) -> Composed:
+        """Get the fields and the placeholders formated for an update query like:
+        '"a" = %(a)s, "b" = %(b)s'
+
+        Can be used directly with a query like:
+        ```python
+        SQL("UPDATE some_table SET {fields}").format(fields=fields)
+        ```
+        """
+        return SQL(", ").join(self.fields_for_set_list())
+
+    def fields_for_set_list(self) -> List[Composed]:
+        """Get the fields and the placeholders formated for an update query like:
+        ['"a" = %(a)s', '"b" = %(b)s']
+
+        Note that the returned list should be joined with SQL(", ").join()
+        """
+        return [SQL("{f} = {p}").format(f=Identifier(f), p=Placeholder(f)) for f in self.params_as_dict.keys()]
+
+
+def get_db_params_and_values(model: BaseModel, **kwargs):
+    """Get a simple wrapper to help building a query with the right psycopg types
+
+    check the unit tests in test_model_query.py for examples
+    """
+    return ParamsAndValues(model, **kwargs)
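A hypothetical usage sketch of the new helper; the SequencePatch model and the column names below are illustrative, not geovisio's actual API models.

from typing import Optional
from pydantic import BaseModel
from psycopg.sql import SQL
from geovisio.utils.model_query import get_db_params_and_values

class SequencePatch(BaseModel):
    title: Optional[str] = None
    status: Optional[str] = None

patch = get_db_params_and_values(SequencePatch(title="My street"))
if patch.has_updates():
    # fields_for_set() renders as '"title" = %(title)s', ready to be formatted into an UPDATE
    query = SQL("UPDATE sequences SET {fields} WHERE id = %(id)s").format(fields=patch.fields_for_set())
    # cursor.execute(query, patch.params_as_dict | {"id": sequence_id})  # within an open psycopg cursor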
geovisio/utils/pictures.py CHANGED
@@ -550,7 +550,10 @@ def insertNewPictureInDatabase(
 
     # Create a lighter metadata field to remove duplicates fields
     lighterMetadata = dict(
-        filter(lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source"], metadata.items())
+        filter(
+            lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif", "originalContentMd5", "ts_by_source", "gps_accuracy"],
+            metadata.items(),
+        )
     )
     if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
         del lighterMetadata["tagreader_warnings"]
@@ -566,11 +569,9 @@
     # Add picture metadata to database
     try:
         picId = db.execute(
-            """
-            INSERT INTO pictures (ts, heading, metadata, geom, account_id, exif, original_content_md5, upload_set_id)
-            VALUES (%s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s, %s, %s, %s)
-            RETURNING id
-            """,
+            """INSERT INTO pictures (ts, heading, metadata, geom, account_id, exif, original_content_md5, upload_set_id, gps_accuracy_m)
+            VALUES (%s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s, %s, %s, %s, %s)
+            RETURNING id""",
             (
                 metadata["ts"].isoformat(),
                 metadata["heading"],
@@ -581,45 +582,12 @@
                 Jsonb(exif),
                 metadata.get("originalContentMd5"),
                 uploadSetID,
+                metadata.get("gps_accuracy"),
             ),
         ).fetchone()[0]
     except InvalidParameterValue as e:
         raise InvalidMetadataValue(e.diag.message_primary) from e
 
-    # Process field of view for each pictures
-    # Flat pictures = variable fov
-    if metadata["type"] == "flat":
-        make, model = metadata.get("make"), metadata.get("model")
-        if make is not None and model is not None and metadata["focal_length"] != 0:
-            db.execute("SET pg_trgm.similarity_threshold = 0.9")
-            db.execute(
-                """
-                UPDATE pictures
-                SET metadata = jsonb_set(metadata, '{field_of_view}'::text[], COALESCE(
-                    (
-                        SELECT ROUND(DEGREES(2 * ATAN(sensor_width / (2 * (metadata->>'focal_length')::float))))::varchar
-                        FROM cameras
-                        WHERE model %% CONCAT(%(make)s::text, ' ', %(model)s::text)
-                        ORDER BY model <-> CONCAT(%(make)s::text, ' ', %(model)s::text)
-                        LIMIT 1
-                    ),
-                    'null'
-                )::jsonb)
-                WHERE id = %(id)s
-                """,
-                {"id": picId, "make": make, "model": model},
-            )
-
-    # 360 pictures = 360° fov
-    else:
-        db.execute(
-            """
-            UPDATE pictures
-            SET metadata = jsonb_set(metadata, '{field_of_view}'::text[], '360'::jsonb)
-            WHERE id = %s
-            """,
-            [picId],
-        )
     if sequenceId is not None:
         try:
             db.execute("INSERT INTO sequences_pictures(seq_id, rank, pic_id) VALUES(%s, %s, %s)", [sequenceId, position, picId])
@@ -681,11 +649,11 @@ def readPictureMetadata(picture: bytes, lang: Optional[str] = "en") -> dict:
         try:
             if isinstance(v, bytes):
                 try:
-                    cleanedExif[k] = v.decode("utf-8").replace("\x00", "")
+                    cleanedExif[k] = v.decode("utf-8").replace("\x00", "").replace("\u0000", "")
                 except UnicodeDecodeError:
-                    cleanedExif[k] = str(v).replace("\x00", "")
+                    cleanedExif[k] = str(v).replace("\x00", "").replace("\u0000", "")
             elif isinstance(v, str):
-                cleanedExif[k] = v.replace("\x00", "")
+                cleanedExif[k] = v.replace("\x00", "").replace("\u0000", "")
             else:
                 try:
                     cleanedExif[k] = str(v)
@@ -694,6 +662,7 @@ def readPictureMetadata(picture: bytes, lang: Optional[str] = "en") -> dict:
         except:
             logging.exception("Can't read EXIF tag: " + k + " " + str(type(v)))
 
+    metadata["exif"] = cleanedExif
     return metadata
 
 
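As a standalone sketch (illustrative only) of the NUL stripping applied above, presumably needed because PostgreSQL rejects NUL characters in text and jsonb values:

def clean_exif_value(v):
    # Strip NUL bytes so the value can be stored in a PostgreSQL text/jsonb column
    if isinstance(v, bytes):
        try:
            return v.decode("utf-8").replace("\x00", "")
        except UnicodeDecodeError:
            return str(v).replace("\x00", "")
    if isinstance(v, str):
        return v.replace("\x00", "")
    return str(v)

assert clean_exif_value(b"GoPro\x00Max") == "GoProMax"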
geovisio/utils/semantics.py ADDED
@@ -0,0 +1,120 @@
+from dataclasses import dataclass
+from uuid import UUID
+from psycopg import Cursor
+from psycopg.sql import SQL, Identifier
+from psycopg.types.json import Jsonb
+from psycopg.errors import UniqueViolation
+from pydantic import BaseModel, ConfigDict, Field
+from typing import List
+from enum import Enum
+
+from geovisio import errors
+
+
+class TagAction(str, Enum):
+    """Actions to perform on a tag list"""
+
+    add = "add"
+    delete = "delete"
+
+
+class SemanticTagUpdate(BaseModel):
+    """Parameters used to update a tag list"""
+
+    action: TagAction = Field(default=TagAction.add)
+    """Action to perform on the tag list. The default action is `add` which will add the given tag to the list.
+    The action can also be to `delete` the key/value"""
+    key: str = Field(max_length=256)
+    """Key of the tag to update limited to 256 characters"""
+    value: str = Field(max_length=2048)
+    """Value of the tag to update limited ot 2048 characters"""
+
+    model_config = ConfigDict(use_attribute_docstrings=True)
+
+
+class SemanticTag(BaseModel):
+    key: str
+    """Key of the tag"""
+    value: str
+    """Value of the tag"""
+
+
+class EntityType(Enum):
+
+    pic = "picture_id"
+    seq = "sequence_id"
+    annotation = "annotation_id"
+
+    def entitiy_id_field(self) -> Identifier:
+        return Identifier(self.value)
+
+
+@dataclass
+class Entity:
+    type: EntityType
+    id: UUID
+
+    def get_table(self) -> Identifier:
+        match self.type:
+            case EntityType.pic:
+                return Identifier("pictures_semantics")
+            case EntityType.seq:
+                return Identifier("sequences_semantics")
+            case EntityType.annotation:
+                return Identifier("annotations_semantics")
+            case _:
+                raise ValueError(f"Unknown entity type: {self.type}")
+
+    def get_history_table(self) -> Identifier:
+        match self.type:
+            case EntityType.pic:
+                return Identifier("pictures_semantics_history")
+            case EntityType.seq:
+                return Identifier("sequences_semantics_history")
+            case EntityType.annotation:
+                return Identifier("annotations_semantics_history")
+            case _:
+                raise ValueError(f"Unknown entity type: {self.type}")
+
+
+def update_tags(cursor: Cursor, entity: Entity, actions: List[SemanticTagUpdate], account: UUID) -> SemanticTag:
+    """Update tags for an entity
+    Note: this should be done inside an autocommit transaction
+    """
+    table_name = entity.get_table()
+    fields = [entity.type.entitiy_id_field(), Identifier("key"), Identifier("value")]
+    tag_to_add = [t for t in actions if t.action == TagAction.add]
+    tag_to_delete = [t for t in actions if t.action == TagAction.delete]
+    try:
+        if tag_to_delete:
+            cursor.execute(SQL("CREATE TEMPORARY TABLE tags_to_delete(key TEXT, value TEXT) ON COMMIT DROP"))
+            with cursor.copy(SQL("COPY tags_to_delete (key, value) FROM STDIN")) as copy:
+                for tag in tag_to_delete:
+                    copy.write_row((tag.key, tag.value))
+            cursor.execute(
+                SQL(
+                    """DELETE FROM {table}
+                    WHERE {entity_id} = %(entity)s
+                    AND (key, value) IN (
+                        SELECT key, value FROM tags_to_delete
+                    )"""
+                ).format(table=table_name, entity_id=entity.type.entitiy_id_field()),
+                {"entity": entity.id, "key_values": [(t.key, t.value) for t in tag_to_delete]},
+            )
+        if tag_to_add:
+            with cursor.copy(SQL("COPY {table} ({fields}) FROM STDIN").format(table=table_name, fields=SQL(",").join(fields))) as copy:
+                for tag in tag_to_add:
+                    copy.write_row((entity.id, tag.key, tag.value))
+        if tag_to_add or tag_to_delete:
+            # we track the history changes of the semantic tags
+            cursor.execute(
+                SQL("INSERT INTO {history_table} ({entity_id_field}, account_id, updates) VALUES (%(id)s, %(account)s, %(tags)s)").format(
+                    history_table=entity.get_history_table(), entity_id_field=entity.type.entitiy_id_field()
+                ),
+                {"id": entity.id, "account": account, "tags": Jsonb([t.model_dump() for t in tag_to_add + tag_to_delete])},
+            )
+    except UniqueViolation as e:
+        # if the tag already exists, we don't want to add it again
+        raise errors.InvalidAPIUsage(
+            "Impossible to add semantic tags because of duplicates", payload={"details": {"duplicate": e.diag.message_detail}}
+        )
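A hypothetical calling sketch for update_tags (the connection string, UUIDs and tag keys are placeholders; per its docstring the call is expected to run inside a transaction):

from uuid import UUID
import psycopg
from geovisio.utils.semantics import Entity, EntityType, SemanticTagUpdate, TagAction, update_tags

actions = [
    SemanticTagUpdate(key="traffic_sign", value="yes"),                    # default action is "add"
    SemanticTagUpdate(action=TagAction.delete, key="draft", value="yes"),  # remove an existing key/value
]

with psycopg.connect("postgresql://localhost/geovisio") as conn:  # commits the transaction on success
    with conn.cursor() as cur:
        update_tags(
            cur,
            Entity(type=EntityType.pic, id=UUID("11111111-1111-1111-1111-111111111111")),
            actions,
            account=UUID("22222222-2222-2222-2222-222222222222"),
        )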
geovisio/utils/sequences.py CHANGED
@@ -158,9 +158,18 @@ def get_collections(request: CollectionsRequest) -> Collections:
            s.user_agent,
            ROUND(ST_Length(s.geom::geography)) / 1000 AS length_km,
            s.computed_h_pixel_density,
-           s.computed_gps_accuracy
+           s.computed_gps_accuracy,
+           t.semantics
        FROM sequences s
        LEFT JOIN accounts on s.account_id = accounts.id
+       LEFT JOIN (
+           SELECT sequence_id, json_agg(json_strip_nulls(json_build_object(
+               'key', key,
+               'value', value
+           ))) AS semantics
+           FROM sequences_semantics
+           GROUP BY sequence_id
+       ) t ON t.sequence_id = s.id
        WHERE {filter}
        ORDER BY {order1}
        LIMIT {limit}
geovisio/utils/tokens.py CHANGED
@@ -46,7 +46,7 @@ def get_account_from_jwt_token(jwt_token: str) -> auth.Account:
         # check token existence
         records = cursor.execute(
             """SELECT
-    t.account_id AS id, a.name, a.oauth_provider, a.oauth_id, a.role
+    t.account_id AS id, a.name, a.oauth_provider, a.oauth_id, a.role, a.collaborative_metadata, a.tos_accepted
 FROM tokens t
 LEFT OUTER JOIN accounts a ON t.account_id = a.id
 WHERE t.id = %(token)s""",
@@ -61,11 +61,13 @@ WHERE t.id = %(token)s""",
         )
 
     return auth.Account(
-        id=str(records["id"]),
+        id=records["id"],
         name=records["name"],
         oauth_provider=records["oauth_provider"],
         oauth_id=records["oauth_id"],
-        role=auth.AccountRole[records["role"]],
+        role=auth.AccountRole(records["role"]),
+        collaborative_metadata=records["collaborative_metadata"],
+        tos_accepted=records["tos_accepted"],
     )
 
 
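For reference, a hypothetical sketch (placeholder values, assuming geovisio 2.8.0 is importable) of building the richer Account that get_account_from_jwt_token now returns: role strings are coerced by the new setter, and tos_accepted / collaborative_metadata travel with the account.

from geovisio.utils.auth import Account

account = Account(
    id="0bb25ce7-35a7-4a47-a4b0-2b4b0dbc36d5",  # UUID instances are also accepted and coerced to str
    name="some_user",
    role="admin",                                # strings are now coerced to AccountRole
    collaborative_metadata=None,                 # None = defer to the instance configuration
    tos_accepted=True,
)
assert account.tos_accepted is True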
geovisio/utils/upload_set.py CHANGED
@@ -147,6 +147,7 @@ class UploadSetFile(BaseModel):
     """File uploaded in an UploadSet"""
 
     picture_id: Optional[UUID] = None
+    """ID of the picture this file belongs to. Can only be seen by the owner of the File"""
     file_name: str
     content_md5: Optional[UUID] = None
     inserted_at: datetime
@@ -448,16 +449,25 @@ def dispatch(upload_set_id: UUID):
                p.heading as heading,
                p.metadata->>'originalFileName' as file_name,
                p.metadata,
-               s.id as sequence_id
+               s.id as sequence_id,
+               f is null as has_no_file
            FROM pictures p
            LEFT JOIN sequences_pictures sp ON sp.pic_id = p.id
            LEFT JOIN sequences s ON s.id = sp.seq_id
+           LEFT JOIN files f ON f.picture_id = p.id
            WHERE p.upload_set_id = %(upload_set_id)s"""
        ),
        {"upload_set_id": upload_set_id},
    ).fetchall()
 
+    # there is currently a bug where 2 pictures can be uploaded for the same file, so only 1 is associated to it.
+    # we want to delete one of them
+    # Those duplicates happen when a client send an upload that timeouts, but the client retries the upload and the server is not aware of this timeout (the connection is not closed).
+    # Note: later, if we are confident the bug has been removed, we might clean this code.
+    pics_to_delete_bug = [p["id"] for p in db_pics if p["has_no_file"]]
+    db_pics = [p for p in db_pics if p["has_no_file"] is False]  # pictures without files will be deleted, we don't need them
    pics_by_filename = {p["file_name"]: p for p in db_pics}
+
    pics = [
        geopic_sequence.Picture(
            p["file_name"],
@@ -487,9 +497,12 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
    )
    reused_sequence = set()
 
-   pics_to_delete = [pics_by_filename[p.filename]["id"] for p in report.duplicate_pictures or []]
+   pics_to_delete_duplicates = [pics_by_filename[p.filename]["id"] for p in report.duplicate_pictures or []]
+   pics_to_delete = pics_to_delete_duplicates + pics_to_delete_bug
    if pics_to_delete:
-       logging.debug(f"For uploadset '{upload_set_id}', nb duplicate pictures {len(pics_to_delete)}")
+       logging.debug(
+           f"For uploadset '{upload_set_id}', nb duplicate pictures {len(pics_to_delete_duplicates)} {f' and {len(pics_to_delete_bug)} pictures without files' if pics_to_delete_bug else ''}"
+       )
        logging.debug(
            f"For uploadset '{upload_set_id}', duplicate pictures {[p.filename for p in report.duplicate_pictures or []]}"
        )
@@ -507,7 +520,8 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
        # delete all pictures (the DB triggers will also add background jobs to delete the associated files)
        cursor.execute(SQL("DELETE FROM pictures WHERE id IN (select picture_id FROM tmp_duplicates)"))
 
-   for s in report.sequences:
+   number_title = len(report.sequences) > 1
+   for i, s in enumerate(report.sequences, start=1):
        existing_sequence = next(
            (seq for p in s.pictures if (seq := pics_by_filename[p.filename]["sequence_id"]) not in reused_sequence),
            None,
@@ -525,6 +539,7 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
            )
            reused_sequence.add(seq_id)
        else:
+           new_title = f"{db_upload_set.title}{f'-{i}' if number_title else ''}"
            seq_id = cursor.execute(
                SQL(
                    """INSERT INTO sequences(account_id, metadata, user_agent)
@@ -533,7 +548,7 @@ RETURNING id"""
                ),
                {
                    "account_id": db_upload_set.account_id,
-                   "metadata": Jsonb({"title": db_upload_set.title}),
+                   "metadata": Jsonb({"title": new_title}),
                    "user_agent": db_upload_set.user_agent,
                },
            ).fetchone()
@@ -567,19 +582,39 @@ def insertFileInDatabase(
 ) -> UploadSetFile:
     """Insert a file linked to an UploadSet into the database"""
 
+    # we check if there is already a file with this name in the upload set with an associated picture.
+    # If there is no picture (because the picture has been rejected), we accept that the file is overridden
+    existing_file = cursor.execute(
+        SQL(
+            """SELECT picture_id, rejection_status
+            FROM files
+            WHERE upload_set_id = %(upload_set_id)s AND file_name = %(file_name)s AND picture_id IS NOT NULL"""
+        ),
+        params={
+            "upload_set_id": upload_set_id,
+            "file_name": file_name,
+        },
+    ).fetchone()
+    if existing_file:
+        raise errors.InvalidAPIUsage(
+            _("A different picture with the same name has already been added to this uploadset"),
+            status_code=409,
+            payload={"existing_item": {"id": existing_file["picture_id"]}},
+        )
+
     f = cursor.execute(
         SQL(
             """INSERT INTO files(
-            upload_set_id, picture_id, file_type, file_name,
-            size, content_md5, rejection_status, rejection_message, rejection_details)
-            VALUES (
-            %(upload_set_id)s, %(picture_id)s, %(type)s, %(file_name)s,
-            %(size)s, %(content_md5)s, %(rejection_status)s, %(rejection_message)s, %(rejection_details)s)
-            ON CONFLICT (upload_set_id, file_name)
-            DO UPDATE SET picture_id = %(picture_id)s, size = %(size)s, content_md5 = %(content_md5)s,
-            rejection_status = %(rejection_status)s, rejection_message = %(rejection_message)s, rejection_details = %(rejection_details)s
-            RETURNING *
-            """
+                upload_set_id, picture_id, file_type, file_name,
+                size, content_md5, rejection_status, rejection_message, rejection_details)
+            VALUES (
+                %(upload_set_id)s, %(picture_id)s, %(type)s, %(file_name)s,
+                %(size)s, %(content_md5)s, %(rejection_status)s, %(rejection_message)s, %(rejection_details)s)
+            ON CONFLICT (upload_set_id, file_name)
+            DO UPDATE SET picture_id = %(picture_id)s, size = %(size)s, content_md5 = %(content_md5)s,
+                rejection_status = %(rejection_status)s, rejection_message = %(rejection_message)s, rejection_details = %(rejection_details)s
+            WHERE files.picture_id IS NULL -- check again that we do not override an existing picture
+            RETURNING *"""
         ),
         params={
            "upload_set_id": upload_set_id,
geovisio/utils/website.py ADDED
@@ -0,0 +1,50 @@
+from typing import Optional, Dict
+
+from flask import url_for
+
+from geovisio import web
+
+WEBSITE_UNDER_SAME_HOST = "same-host"
+
+TOKEN_ACCEPTED_PAGE = "token-accepted"
+TOS_VALIDATION_PAGE = "tos-validation"
+
+
+class Website:
+    """Website associated to the API.
+    This wrapper will define the routes we expect from the website.
+
+    We should limit the interraction from the api to the website, but for some flow (especially auth flows), it's can be useful to redirect to website's page
+
+    If the url is:
+    * set to `false`, there is no associated website
+    * set to `same-host`, the website is assumed to be on the same host as the API (and will respect the host of the current request)
+    * else it should be a valid url
+    """
+
+    def __init__(self, website_url: str):
+        if website_url == WEBSITE_UNDER_SAME_HOST:
+            self.url = WEBSITE_UNDER_SAME_HOST
+        elif website_url == "false":
+            self.url = None
+        elif website_url.startswith("http"):
+            self.url = website_url
+            if not self.url.endswith("/"):
+                self.url += "/"
+        else:
+            raise Exception(
+                "API_WEBSITE_URL should either be `same-host` (and the website will be assumed to be on the same host), set to `false` if there is no website, or a valid URL"
+            )
+
+    def _to_url(self, route: str, params: Optional[Dict[str, str]] = None):
+        base_url = self.url if self.url != WEBSITE_UNDER_SAME_HOST else url_for("index", _external=True)
+
+        from urllib.parse import urlencode
+
+        return f"{base_url}{route}{f'?{urlencode(params)}' if params else ''}"
+
+    def tos_validation_page(self, params: Optional[Dict[str, str]] = None):
+        return self._to_url(TOS_VALIDATION_PAGE, params)
+
+    def cli_token_accepted_page(self, params: Optional[Dict[str, str]] = None):
+        return self._to_url(TOKEN_ACCEPTED_PAGE, params)
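A hypothetical usage sketch of the new Website wrapper (the URL and parameters are placeholders; assumes geovisio 2.8.0 is importable). With `same-host` the base URL would instead be derived from the current request via url_for("index").

from geovisio.utils.website import Website

website = Website("https://panoramax.example.com")
print(website.tos_validation_page({"next": "/my-settings"}))
# -> https://panoramax.example.com/tos-validation?next=%2Fmy-settings
print(website.cli_token_accepted_page())
# -> https://panoramax.example.com/token-accepted

assert Website("false").url is None  # no associated website configured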
geovisio/web/annotations.py ADDED
@@ -0,0 +1,17 @@
+from geovisio.utils import auth
+from psycopg.rows import dict_row, class_row
+from psycopg.sql import SQL
+from geovisio.utils.semantics import Entity, EntityType, SemanticTagUpdate, update_tags
+from geovisio.web.utils import accountIdOrDefault
+from psycopg.types.json import Jsonb
+from geovisio.utils import db
+from geovisio.utils.params import validation_error
+from geovisio import errors
+from pydantic import BaseModel, ConfigDict, ValidationError
+from uuid import UUID
+from typing import List, Optional
+from flask import Blueprint, request, current_app
+from flask_babel import gettext as _
+
+
+bp = Blueprint("annotations", __name__, url_prefix="/api")