geovisio 2.5.0__py3-none-any.whl → 2.7.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (59)
  1. geovisio/__init__.py +38 -8
  2. geovisio/admin_cli/__init__.py +2 -2
  3. geovisio/admin_cli/db.py +8 -0
  4. geovisio/config_app.py +64 -0
  5. geovisio/db_migrations.py +24 -3
  6. geovisio/templates/main.html +14 -14
  7. geovisio/templates/viewer.html +3 -3
  8. geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
  9. geovisio/translations/de/LC_MESSAGES/messages.po +667 -0
  10. geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
  11. geovisio/translations/en/LC_MESSAGES/messages.po +730 -0
  12. geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
  13. geovisio/translations/es/LC_MESSAGES/messages.po +778 -0
  14. geovisio/translations/fi/LC_MESSAGES/messages.mo +0 -0
  15. geovisio/translations/fi/LC_MESSAGES/messages.po +589 -0
  16. geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
  17. geovisio/translations/fr/LC_MESSAGES/messages.po +814 -0
  18. geovisio/translations/ko/LC_MESSAGES/messages.mo +0 -0
  19. geovisio/translations/ko/LC_MESSAGES/messages.po +685 -0
  20. geovisio/translations/messages.pot +686 -0
  21. geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
  22. geovisio/translations/nl/LC_MESSAGES/messages.po +594 -0
  23. geovisio/utils/__init__.py +1 -1
  24. geovisio/utils/auth.py +50 -11
  25. geovisio/utils/db.py +65 -0
  26. geovisio/utils/excluded_areas.py +83 -0
  27. geovisio/utils/extent.py +30 -0
  28. geovisio/utils/fields.py +1 -1
  29. geovisio/utils/filesystems.py +0 -1
  30. geovisio/utils/link.py +14 -0
  31. geovisio/utils/params.py +20 -0
  32. geovisio/utils/pictures.py +94 -69
  33. geovisio/utils/reports.py +171 -0
  34. geovisio/utils/sequences.py +288 -126
  35. geovisio/utils/tokens.py +37 -42
  36. geovisio/utils/upload_set.py +654 -0
  37. geovisio/web/auth.py +50 -37
  38. geovisio/web/collections.py +305 -319
  39. geovisio/web/configuration.py +14 -0
  40. geovisio/web/docs.py +288 -12
  41. geovisio/web/excluded_areas.py +377 -0
  42. geovisio/web/items.py +203 -151
  43. geovisio/web/map.py +322 -106
  44. geovisio/web/params.py +69 -26
  45. geovisio/web/pictures.py +14 -31
  46. geovisio/web/reports.py +399 -0
  47. geovisio/web/rss.py +13 -7
  48. geovisio/web/stac.py +129 -121
  49. geovisio/web/tokens.py +105 -112
  50. geovisio/web/upload_set.py +768 -0
  51. geovisio/web/users.py +100 -73
  52. geovisio/web/utils.py +38 -9
  53. geovisio/workers/runner_pictures.py +278 -183
  54. geovisio-2.7.0.dist-info/METADATA +95 -0
  55. geovisio-2.7.0.dist-info/RECORD +66 -0
  56. geovisio-2.5.0.dist-info/METADATA +0 -115
  57. geovisio-2.5.0.dist-info/RECORD +0 -41
  58. {geovisio-2.5.0.dist-info → geovisio-2.7.0.dist-info}/LICENSE +0 -0
  59. {geovisio-2.5.0.dist-info → geovisio-2.7.0.dist-info}/WHEEL +0 -0
@@ -0,0 +1,171 @@
1
+ from enum import Enum
2
+ from uuid import UUID
3
+ from typing import Optional, List
4
+ from typing_extensions import Self
5
+ from datetime import datetime
6
+ from pydantic import BaseModel, ConfigDict
7
+ from geovisio.utils import db
8
+ from geovisio.errors import InvalidAPIUsage
9
+ from flask import current_app
10
+ from psycopg.sql import SQL
11
+ from psycopg.rows import class_row
12
+
13
+
14
class ReportType(Enum):
    """Kind of problem a third-party can report about a picture or sequence.

    Values are stored as-is in the database (`reports.issue`).
    """

    blur_missing = "blur_missing"  # a face/plate should have been blurred but wasn't
    blur_excess = "blur_excess"  # too much of the picture was blurred
    inappropriate = "inappropriate"
    privacy = "privacy"
    picture_low_quality = "picture_low_quality"
    mislocated = "mislocated"  # wrong GPS position
    copyright = "copyright"
    other = "other"
23
+
24
+
25
class ReportStatus(Enum):
    """Lifecycle state of a report.

    Values are stored as-is in the database (`reports.status`).
    """

    open = "open"
    open_autofix = "open_autofix"  # open, with an automatic fix attempted/possible
    waiting = "waiting"  # waiting for more information
    closed_solved = "closed_solved"
    closed_ignored = "closed_ignored"
31
+
32
+
33
class Report(BaseModel):
    """A Report is a problem reported from a third-party about a picture or a sequence."""

    id: UUID
    issue: ReportType
    status: ReportStatus
    picture_id: Optional[UUID]  # set when the report targets a single picture
    sequence_id: Optional[UUID]  # set when the report targets a whole sequence
    ts_opened: datetime
    ts_closed: Optional[datetime]
    reporter_account_id: Optional[UUID]  # None for anonymous reports
    reporter_email: Optional[str]  # private: stripped by for_public()
    resolver_account_id: Optional[UUID]
    reporter_comments: Optional[str]
    resolver_comments: Optional[str]  # private: stripped by for_public()

    # use_enum_values so the model serializes enum members as their string values
    model_config = ConfigDict(use_enum_values=True, ser_json_timedelta="float")

    def for_public(self) -> Self:
        """Report version for public display (without report email and admin comments)"""
        return Report(
            id=self.id,
            issue=self.issue,
            status=self.status,
            picture_id=self.picture_id,
            sequence_id=self.sequence_id,
            ts_opened=self.ts_opened,
            ts_closed=self.ts_closed,
            reporter_account_id=self.reporter_account_id,
            reporter_email=None,  # deliberately hidden from public view
            resolver_account_id=self.resolver_account_id,
            reporter_comments=self.reporter_comments,
            resolver_comments=None,  # deliberately hidden from public view
        )
67
+
68
+
69
class Reports(BaseModel):
    """Wrapper model holding a list of reports (used as an API response payload)."""

    reports: List[Report]
71
+
72
+
73
def get_report(id: UUID) -> Optional[Report]:
    """Fetch a single report by its identifier.

    Returns None when no report with this id exists.
    """
    # class_row maps the full `reports` row directly onto the Report model
    return db.fetchone(
        current_app,
        SQL("SELECT * FROM reports WHERE id = %(id)s"),
        {"id": id},
        row_factory=class_row(Report),
    )
83
+
84
+
85
def is_picture_owner(report: Report, account_id: UUID):
    """Check if given account is owner of picture concerned by report"""

    # A report targets either a picture or a sequence; check ownership of
    # whichever one is set. If neither is set, nobody "owns" the report target.
    if report.picture_id is not None:
        matching_pic = db.fetchone(
            current_app,
            SQL("SELECT id FROM pictures WHERE id = %(id)s AND account_id = %(uid)s"),
            {"id": report.picture_id, "uid": account_id},
        )
        return matching_pic is not None

    if report.sequence_id is not None:
        matching_seq = db.fetchone(
            current_app,
            SQL("SELECT id FROM sequences WHERE id = %(id)s AND account_id = %(uid)s"),
            {"id": report.sequence_id, "uid": account_id},
        )
        return matching_seq is not None

    return False
104
+
105
+
106
def _parse_filter(filter: Optional[str]) -> SQL:
    """
    Parse a filter string and return a SQL expression

    Raises InvalidAPIUsage (HTTP 400) when the filter cannot be parsed or
    uses unsupported fields.

    >>> _parse_filter('')
    SQL('TRUE')
    >>> _parse_filter(None)
    SQL('TRUE')
    >>> _parse_filter("status = 'open'")
    SQL("(r.status = 'open')")
    >>> _parse_filter("status IN ('open_autofix', 'waiting')")
    SQL("r.status IN ('open_autofix', 'waiting')")
    >>> _parse_filter("reporter = 'me'")
    SQL('(reporter_account_id = %(account_id)s)')
    >>> _parse_filter("owner = 'me'")
    SQL('(COALESCE(p.account_id, s.account_id) = %(account_id)s)')
    >>> _parse_filter("status IN ('open', 'open_autofix', 'waiting') AND (owner = 'me' OR reporter = 'me')")
    SQL("(r.status IN ('open', 'open_autofix', 'waiting') AND ((COALESCE(p.account_id, s.account_id) = %(account_id)s) OR (reporter_account_id = %(account_id)s)))")
    """
    if not filter:
        return SQL("TRUE")
    from pygeofilter.backends.sql import to_sql_where
    from pygeofilter.parsers.ecql import parse as ecql_parser

    try:
        filterAst = ecql_parser(filter)
        # only these public field names may be filtered on; they are mapped to
        # the actual SQL columns used by list_reports' query
        fieldsToFilter = {
            "status": "r.status",
            "reporter": "reporter_account_id",
            "owner": "COALESCE(p.account_id, s.account_id)",
        }

        # 'me' is a placeholder for the authenticated account; it becomes a
        # bound query parameter to avoid injecting the id into the SQL text
        f = to_sql_where(filterAst, fieldsToFilter).replace('"', "").replace("'me'", "%(account_id)s")  # type: ignore
        return SQL(f)  # type: ignore
    except Exception as e:
        # fix: `_` (gettext) was used without being imported in this module,
        # which turned any bad filter into a NameError instead of a 400
        from flask_babel import gettext as _

        # fix: log the parse failure instead of print()-ing to stdout
        current_app.logger.warning("Unsupported reports filter %r: %s", filter, e)
        raise InvalidAPIUsage(_("Unsupported filter parameter"), status_code=400)
143
+
144
+
145
def list_reports(account_id: UUID, limit: int = 100, filter: Optional[str] = None, forceAccount: bool = True) -> Reports:
    """List reports, most recently opened first.

    Parameters
    ----------
    account_id : UUID
        Account used to resolve the 'me' placeholder in the filter.
    limit : int
        Maximum number of reports returned.
    filter : Optional[str]
        ECQL filter string (see _parse_filter).
    forceAccount : bool
        When True, results are restricted to reports the account either
        reported itself or whose target picture/sequence it owns
        (typically False only for admins).

    Returns
    -------
    Reports
        The matching reports.
    """
    filter_sql = _parse_filter(filter)
    if forceAccount:
        # prepend the ownership/reporter restriction so the user filter can
        # only further narrow the visible set, never widen it
        filter_sql = SQL(" ").join(
            [SQL("(COALESCE(p.account_id, s.account_id) = %(account_id)s OR reporter_account_id = %(account_id)s) AND "), filter_sql]
        )

    # fix: renamed ambiguous variable `l` (easily confused with 1/I, flake8 E741)
    records = db.fetchall(
        current_app,
        SQL(
            """
        SELECT
            r.*,
            COALESCE(p.account_id, s.account_id) AS owner_account_id
        FROM reports r
        LEFT JOIN pictures p ON r.picture_id = p.id
        LEFT JOIN sequences s ON r.sequence_id = s.id
        WHERE {filter}
        ORDER BY ts_opened DESC
        LIMIT %(limit)s
        """
        ).format(filter=filter_sql),
        {"account_id": account_id, "limit": limit},
        row_factory=class_row(Report),
    )

    return Reports(reports=records)
@@ -1,44 +1,44 @@
1
1
  import psycopg
2
2
  from flask import current_app, url_for
3
+ from flask_babel import gettext as _
3
4
  from psycopg.types.json import Jsonb
4
- from psycopg import sql
5
- from psycopg.sql import SQL
5
+ from psycopg.sql import SQL, Composable
6
6
  from psycopg.rows import dict_row
7
7
  from dataclasses import dataclass, field
8
8
  from typing import Any, List, Dict, Optional
9
9
  import datetime
10
10
  from uuid import UUID
11
11
  from enum import Enum
12
+ from geovisio.utils import db
13
+ from geovisio.utils.auth import Account
12
14
  from geovisio.utils.fields import FieldMapping, SortBy, SQLDirection, BBox, Bounds
13
15
  from geopic_tag_reader import reader
14
16
  from pathlib import PurePath
15
- from geovisio import errors
17
+ from geovisio import errors, utils
18
+ import logging
19
+ import sentry_sdk
16
20
 
17
21
 
18
- def createSequence(metadata, accountId) -> str:
19
- with psycopg.connect(current_app.config["DB_URL"]) as conn:
20
- with conn.cursor() as cursor:
21
- # Add sequence in database
22
- seqId = cursor.execute(
23
- "INSERT INTO sequences(account_id, metadata) VALUES(%s, %s) RETURNING id", [accountId, Jsonb(metadata)]
24
- ).fetchone()
25
-
26
- # Make changes definitive in database
27
- conn.commit()
28
-
29
- if seqId is None:
30
- raise Exception(f"impossible to insert sequence in database")
31
- return seqId[0]
22
def createSequence(metadata, accountId, user_agent: Optional[str] = None) -> UUID:
    """Insert a new sequence for the given account and return its id.

    The uploader's user agent is stored alongside for provenance.
    """
    insert_query = "INSERT INTO sequences(account_id, metadata, user_agent) VALUES(%s, %s, %s) RETURNING id"
    with db.execute(current_app, insert_query, [accountId, Jsonb(metadata), user_agent]) as res:
        row = res.fetchone()
        if row is None:
            raise Exception("impossible to insert sequence in database")
        return row[0]
32
32
 
33
33
 
34
34
# Mappings from stac name to SQL names
# Maps the STAC-facing sort/filter field names to the underlying SQL columns
# of the `sequences` table.
STAC_FIELD_MAPPINGS = {
    p.stac: p
    for p in [
        FieldMapping(sql_column=SQL("inserted_at"), stac="created"),
        FieldMapping(sql_column=SQL("updated_at"), stac="updated"),
        FieldMapping(sql_column=SQL("computed_capture_date"), stac="datetime"),
        FieldMapping(sql_column=SQL("status"), stac="status"),
    ]
}
# Pre-rendered SQL filter text per STAC field, used when building WHERE clauses
STAC_FIELD_TO_SQL_FILTER = {p.stac: p.sql_filter.as_string(None) for p in STAC_FIELD_MAPPINGS.values()}
@@ -64,8 +64,8 @@ class CollectionsRequest:
64
64
  created_before: Optional[datetime.datetime] = None
65
65
  user_id: Optional[UUID] = None
66
66
  bbox: Optional[BBox] = None
67
- user_filter: Optional[sql.SQL] = None
68
- pagination_filter: Optional[sql.SQL] = None
67
+ user_filter: Optional[SQL] = None
68
+ pagination_filter: Optional[SQL] = None
69
69
  limit: int = 100
70
70
  userOwnsAllCollections: bool = False # bool to represent that the user's asking for the collections is the owner of them
71
71
 
@@ -75,9 +75,8 @@ class CollectionsRequest:
75
75
 
76
76
  def get_collections(request: CollectionsRequest) -> Collections:
77
77
  # Check basic parameters
78
- seq_filter: List[sql.Composable] = []
78
+ seq_filter: List[Composable] = []
79
79
  seq_params: dict = {}
80
- pic_filter = [SQL("sp.seq_id = s.id")]
81
80
 
82
81
  # Sort-by parameter
83
82
  # Note for review: I'm not sure I understand this non nullity constraint, but if so, shouldn't all sortby fields be added ?
@@ -90,25 +89,25 @@ def get_collections(request: CollectionsRequest) -> Collections:
90
89
  seq_filter.append(SQL("s.account_id = %(account)s"))
91
90
  seq_params["account"] = request.user_id
92
91
 
93
- if request.user_filter is None or "status" not in request.user_filter.as_string(None):
92
+ user_filter_str = request.user_filter.as_string(None) if request.user_filter is not None else None
93
+ if user_filter_str is None or "status" not in user_filter_str:
94
94
  # if the filter does not contains any `status` condition, we want to show only 'ready' collection to the general users, and non deleted one for the owner
95
95
  if not request.userOwnsAllCollections:
96
- seq_filter.append(SQL("s.status = 'ready'"))
97
- pic_filter.append(SQL("p.status = 'ready'"))
96
+ seq_filter.append(SQL("status = 'ready'"))
98
97
  else:
99
- seq_filter.append(SQL("s.status != 'deleted'"))
98
+ seq_filter.append(SQL("status != 'deleted'"))
100
99
  else:
101
- # else, even if there are status filter, we make sure not to show hidden pictures/sequence to non owner
102
- if not request.userOwnsAllCollections:
103
- seq_filter.append(SQL("s.status <> 'hidden'"))
104
- pic_filter.append(SQL("p.status <> 'hidden'"))
100
+ if not request.userOwnsAllCollections and "'deleted'" not in user_filter_str:
101
+ # if there are status filter and we ask for deleted sequence, we also include hidden one and consider them as deleted
102
+ seq_filter.append(SQL("status <> 'hidden'"))
105
103
 
106
104
  status_field = None
107
105
  if request.userOwnsAllCollections:
108
106
  # only logged users can see detailed status
109
107
  status_field = SQL("s.status AS status")
110
108
  else:
111
- status_field = SQL("CASE WHEN s.status = 'deleted' THEN s.status ELSE NULL END AS status")
109
+ # hidden sequence are marked as deleted, this way crawler can update their catalog
110
+ status_field = SQL("CASE WHEN s.status IN ('hidden', 'deleted') THEN 'deleted' ELSE s.status END AS status")
112
111
 
113
112
  # Datetime
114
113
  if request.min_dt is not None:
@@ -134,102 +133,90 @@ def get_collections(request: CollectionsRequest) -> Collections:
134
133
  seq_filter.append(SQL("s.inserted_at < %(created_before)s::timestamp with time zone"))
135
134
  seq_params["created_before"] = request.created_before
136
135
 
137
- with psycopg.connect(current_app.config["DB_URL"], row_factory=dict_row) as conn:
138
- with conn.cursor() as cursor:
139
- sqlSequencesRaw = SQL(
140
- """
141
- SELECT * FROM (
142
- SELECT
143
- s.id,
144
- s.status,
145
- s.metadata->>'title' AS name,
146
- s.inserted_at AS created,
147
- s.updated_at AS updated,
148
- ST_XMin(s.bbox) AS minx,
149
- ST_YMin(s.bbox) AS miny,
150
- ST_XMax(s.bbox) AS maxx,
151
- ST_YMax(s.bbox) AS maxy,
152
- accounts.name AS account_name,
153
- ST_X(ST_PointN(s.geom, 1)) AS x1,
154
- ST_Y(ST_PointN(s.geom, 1)) AS y1,
155
- {status},
156
- s.computed_capture_date AS datetime
157
- FROM sequences s
158
- LEFT JOIN accounts on s.account_id = accounts.id
159
- WHERE {filter}
160
- ORDER BY {order1}
161
- LIMIT {limit}
162
- ) s
163
- LEFT JOIN LATERAL (
164
- SELECT MIN(p.ts) as mints,
165
- MAX(p.ts) as maxts,
166
- COUNT(p.*) AS nbpic
167
- FROM sequences_pictures sp
168
- JOIN pictures p ON sp.pic_id = p.id
169
- WHERE {pic_filter}
170
- GROUP BY sp.seq_id
171
- ) sub ON true
136
+ with utils.db.cursor(current_app, row_factory=dict_row) as cursor:
137
+ sqlSequencesRaw = SQL(
172
138
  """
139
+ SELECT
140
+ s.id,
141
+ s.status,
142
+ s.metadata->>'title' AS name,
143
+ s.inserted_at AS created,
144
+ s.updated_at AS updated,
145
+ ST_XMin(s.bbox) AS minx,
146
+ ST_YMin(s.bbox) AS miny,
147
+ ST_XMax(s.bbox) AS maxx,
148
+ ST_YMax(s.bbox) AS maxy,
149
+ accounts.name AS account_name,
150
+ s.account_id AS account_id,
151
+ ST_X(ST_PointN(ST_GeometryN(s.geom, 1), 1)) AS x1,
152
+ ST_Y(ST_PointN(ST_GeometryN(s.geom, 1), 1)) AS y1,
153
+ s.min_picture_ts AS mints,
154
+ s.max_picture_ts AS maxts,
155
+ s.nb_pictures AS nbpic,
156
+ {status},
157
+ s.computed_capture_date AS datetime,
158
+ s.user_agent,
159
+ ROUND(ST_Length(s.geom::geography)) / 1000 AS length_km
160
+ FROM sequences s
161
+ LEFT JOIN accounts on s.account_id = accounts.id
162
+ WHERE {filter}
163
+ ORDER BY {order1}
164
+ LIMIT {limit}
165
+ """
166
+ )
167
+ sqlSequences = sqlSequencesRaw.format(
168
+ filter=SQL(" AND ").join(seq_filter),
169
+ order1=request.sort_by.as_sql(),
170
+ limit=request.limit,
171
+ status=status_field,
172
+ )
173
+
174
+ # Different request if we want the last n sequences
175
+ # Useful for paginating from last page to first
176
+ if request.pagination_filter and (
177
+ (
178
+ request.sort_by.fields[0].direction == SQLDirection.ASC
179
+ and request.pagination_filter.as_string(None).startswith(f"({request.sort_by.fields[0].field.sql_filter.as_string(None)} <")
173
180
  )
174
- sqlSequences = sqlSequencesRaw.format(
181
+ or (
182
+ request.sort_by.fields[0].direction == SQLDirection.DESC
183
+ and request.pagination_filter.as_string(None).startswith(f"({request.sort_by.fields[0].field.sql_filter.as_string(None)} >")
184
+ )
185
+ ):
186
+ base_query = sqlSequencesRaw.format(
175
187
  filter=SQL(" AND ").join(seq_filter),
176
- order1=request.sort_by.as_sql(),
188
+ order1=request.sort_by.revert(),
177
189
  limit=request.limit,
178
- pic_filter=SQL(" AND ").join(pic_filter),
179
190
  status=status_field,
180
191
  )
181
-
182
- # Different request if we want the last n sequences
183
- # Useful for paginating from last page to first
184
- if request.pagination_filter and (
185
- (
186
- request.sort_by.fields[0].direction == SQLDirection.ASC
187
- and request.pagination_filter.as_string(None).startswith(
188
- f"({request.sort_by.fields[0].field.sql_filter.as_string(None)} <"
189
- )
190
- )
191
- or (
192
- request.sort_by.fields[0].direction == SQLDirection.DESC
193
- and request.pagination_filter.as_string(None).startswith(
194
- f"({request.sort_by.fields[0].field.sql_filter.as_string(None)} >"
195
- )
196
- )
197
- ):
198
- base_query = sqlSequencesRaw.format(
199
- filter=SQL(" AND ").join(seq_filter),
200
- order1=request.sort_by.revert(),
201
- limit=request.limit,
202
- pic_filter=SQL(" AND ").join(pic_filter),
203
- status=status_field,
204
- )
205
- sqlSequences = SQL(
206
- """
207
- SELECT *
208
- FROM ({base_query}) s
209
- ORDER BY {order2}
192
+ sqlSequences = SQL(
210
193
  """
211
- ).format(
212
- order2=request.sort_by.as_sql(),
213
- base_query=base_query,
214
- )
215
-
216
- records = cursor.execute(sqlSequences, seq_params).fetchall()
217
-
218
- query_bounds = None
219
- for s in records:
220
- first_order_val = s.get(request.sort_by.fields[0].field.stac)
221
- if first_order_val is None:
222
- continue
223
- if query_bounds is None:
224
- query_bounds = Bounds(first_order_val, first_order_val)
225
- else:
226
- query_bounds.update(first_order_val)
227
-
228
- return Collections(
229
- collections=records,
230
- query_first_order_bounds=query_bounds,
194
+ SELECT *
195
+ FROM ({base_query}) s
196
+ ORDER BY {order2}
197
+ """
198
+ ).format(
199
+ order2=request.sort_by.as_sql(),
200
+ base_query=base_query,
231
201
  )
232
202
 
203
+ records = cursor.execute(sqlSequences, seq_params).fetchall()
204
+
205
+ query_bounds = None
206
+ for s in records:
207
+ first_order_val = s.get(request.sort_by.fields[0].field.stac)
208
+ if first_order_val is None:
209
+ continue
210
+ if query_bounds is None:
211
+ query_bounds = Bounds(first_order_val, first_order_val)
212
+ else:
213
+ query_bounds.update(first_order_val)
214
+
215
+ return Collections(
216
+ collections=records,
217
+ query_first_order_bounds=query_bounds,
218
+ )
219
+
233
220
 
234
221
  def get_pagination_links(
235
222
  route: str,
@@ -387,17 +374,23 @@ def sort_collection(db, collectionId: UUID, sortby: CollectionSort):
387
374
 
388
375
  if usedDateField is None:
389
376
  raise errors.InvalidAPIUsage(
390
- "Sort by file date is not possible on this sequence (no file date information available on pictures)",
377
+ _("Sort by file date is not possible on this sequence (no file date information available on pictures)"),
391
378
  status_code=422,
392
379
  )
393
380
 
394
381
  for pm in picMetas:
395
382
  # Find value for wanted sort
396
383
  if sortby.order == CollectionSortOrder.GPS_DATE:
397
- pm["sort"] = reader.decodeGPSDateTime(pm["exif"], "Exif.GPSInfo")[0]
384
+ if "ts_gps" in pm["metadata"]:
385
+ pm["sort"] = pm["metadata"]["ts_gps"]
386
+ else:
387
+ pm["sort"] = reader.decodeGPSDateTime(pm["exif"], "Exif.GPSInfo", _)[0]
398
388
  elif sortby.order == CollectionSortOrder.FILE_DATE:
399
- assert usedDateField # nullity has been checked before
400
- pm["sort"] = reader.decodeDateTimeOriginal(pm["exif"], usedDateField)[0]
389
+ if "ts_camera" in pm["metadata"]:
390
+ pm["sort"] = pm["metadata"]["ts_camera"]
391
+ else:
392
+ assert usedDateField # nullity has been checked before
393
+ pm["sort"] = reader.decodeDateTimeOriginal(pm["exif"], usedDateField, _)[0]
401
394
  elif sortby.order == CollectionSortOrder.FILE_NAME:
402
395
  pm["sort"] = pm["metadata"].get("originalFileName")
403
396
  if isFileNameNumeric:
@@ -406,7 +399,11 @@ def sort_collection(db, collectionId: UUID, sortby: CollectionSort):
406
399
  # Fail if sort value is missing
407
400
  if pm["sort"] is None:
408
401
  raise errors.InvalidAPIUsage(
409
- f"Sort using {sortby} is not possible on this sequence, picture {pm['id']} is missing mandatory metadata",
402
+ _(
403
+ "Sort using %(sort)s is not possible on this sequence, picture %(pic)s is missing mandatory metadata",
404
+ sort=sortby,
405
+ pic=pm["id"],
406
+ ),
410
407
  status_code=422,
411
408
  )
412
409
 
@@ -480,3 +477,168 @@ def update_headings(
480
477
  ), # lots of camera have heading set to 0 for unset heading, so we recompute the heading when it's 0 too, even if this could be a valid value
481
478
  {"seq": sequenceId, "diff": relativeHeading, "account": editingAccount},
482
479
  )
480
+
481
+
482
+ def add_finalization_job(cursor, seqId: UUID):
483
+ """
484
+ Add a sequence finalization job in the queue.
485
+ If there is already a finalization job, do nothing (changing it might cause a deadlock, since a worker could be processing this job)
486
+ """
487
+ cursor.execute(
488
+ """INSERT INTO
489
+ job_queue(sequence_id, task)
490
+ VALUES (%(seq_id)s, 'finalize')
491
+ ON CONFLICT (sequence_id) DO NOTHING""",
492
+ {"seq_id": seqId},
493
+ )
494
+
495
+
496
+ def finalize(cursor, seqId: UUID, logger: logging.Logger = logging.getLogger()):
497
+ """
498
+ Finalize a sequence, by updating its status and computed fields.
499
+ """
500
+ with sentry_sdk.start_span(description="Finalizing sequence") as span:
501
+ span.set_data("sequence_id", seqId)
502
+ logger.debug(f"Finalizing sequence {seqId}")
503
+
504
+ with utils.time.log_elapsed(f"Finalizing sequence {seqId}"):
505
+ # Complete missing headings in pictures
506
+ update_headings(cursor, seqId)
507
+
508
+ # Change sequence database status in DB
509
+ # Also generates data in computed columns
510
+ cursor.execute(
511
+ """WITH
512
+ aggregated_pictures AS (
513
+ SELECT
514
+ sp.seq_id,
515
+ MIN(p.ts::DATE) AS day,
516
+ ARRAY_AGG(DISTINCT TRIM(
517
+ CONCAT(p.metadata->>'make', ' ', p.metadata->>'model')
518
+ )) AS models,
519
+ ARRAY_AGG(DISTINCT p.metadata->>'type') AS types
520
+ FROM sequences_pictures sp
521
+ JOIN pictures p ON sp.pic_id = p.id
522
+ WHERE sp.seq_id = %(seq)s
523
+ GROUP BY sp.seq_id
524
+ )
525
+ UPDATE sequences
526
+ SET
527
+ status = CASE WHEN status = 'hidden' THEN 'hidden'::sequence_status ELSE 'ready'::sequence_status END, -- we don't want to change status if it's hidden
528
+ geom = compute_sequence_geom(id),
529
+ bbox = compute_sequence_bbox(id),
530
+ computed_type = CASE WHEN array_length(types, 1) = 1 THEN types[1] ELSE NULL END,
531
+ computed_model = CASE WHEN array_length(models, 1) = 1 THEN models[1] ELSE NULL END,
532
+ computed_capture_date = day
533
+ FROM aggregated_pictures
534
+ WHERE id = %(seq)s
535
+ """,
536
+ {"seq": seqId},
537
+ )
538
+
539
+ logger.info(f"Sequence {seqId} is ready")
540
+
541
+
542
+ def update_pictures_grid() -> Optional[datetime.datetime]:
543
+ """Refreshes the pictures_grid materialized view for an up-to-date view of pictures availability on map.
544
+
545
+ Parameters
546
+ ----------
547
+ db : psycopg.Connection
548
+ Database connection
549
+
550
+ Returns
551
+ -------
552
+ bool : True if the view has been updated else False
553
+ """
554
+ from geovisio.utils import db
555
+
556
+ logger = logging.getLogger("geovisio.picture_grid")
557
+
558
+ # get a connection outside of the connection pool in order to avoid
559
+ # the default statement timeout as this query can be very long
560
+ with db.long_queries_conn(current_app) as conn, conn.transaction():
561
+ try:
562
+ conn.execute("SELECT refreshed_at FROM refresh_database FOR UPDATE NOWAIT").fetchone()
563
+ except psycopg.errors.LockNotAvailable:
564
+ logger.info("Database refresh already in progress, nothing to do")
565
+ return False
566
+
567
+ with sentry_sdk.start_span(description="Refreshing database"):
568
+ with utils.time.log_elapsed("Refreshing database", logger=logger):
569
+ logger.info("Refreshing database")
570
+ conn.execute("UPDATE refresh_database SET refreshed_at = NOW()")
571
+ conn.execute("REFRESH MATERIALIZED VIEW CONCURRENTLY pictures_grid")
572
+
573
+ return True
574
+
575
+
576
+ def delete_collection(collectionId: UUID, account: Optional[Account]) -> int:
577
+ """
578
+ Mark a collection as deleted and delete all it's pictures.
579
+
580
+ Note that since the deletion as asynchronous, some workers need to be run in order for the deletion to be effective.
581
+ """
582
+ with db.conn(current_app) as conn:
583
+ with conn.transaction(), conn.cursor() as cursor:
584
+ sequence = cursor.execute(
585
+ "SELECT status, account_id FROM sequences WHERE id = %s AND status != 'deleted'", [collectionId]
586
+ ).fetchone()
587
+
588
+ # sequence not found
589
+ if not sequence:
590
+ raise errors.InvalidAPIUsage(_("Collection %(c)s wasn't found in database", c=collectionId), status_code=404)
591
+
592
+ # Account associated to sequence doesn't match current user
593
+ if account is not None and account.id != str(sequence[1]):
594
+ raise errors.InvalidAPIUsage("You're not authorized to edit this sequence", status_code=403)
595
+
596
+ logging.info(f"Asking for deletion of sequence {collectionId} and all its pictures")
597
+
598
+ # mark all the pictures as waiting for deletion for async removal as this can be quite long if the storage is slow if there are lots of pictures
599
+ # Note: To avoid a deadlock if some workers are currently also working on those picture to prepare them,
600
+ # the SQL queries are split in 2:
601
+ # - First a query to add the async deletion task to the queue.
602
+ # - Then a query changing the status of the picture to `waiting-for-delete`
603
+ #
604
+ # The trick there is that there can only be one task for a given picture (either preparing or deleting it)
605
+ # And the first query do a `ON CONFLICT DO UPDATE` to change the remaining `prepare` task to `delete`.
606
+ # So at the end of this query, we know that there are no more workers working on those pictures, so we can change their status
607
+ # without fearing a deadlock.
608
+ nb_updated = cursor.execute(
609
+ """
610
+ WITH pic2rm AS (
611
+ SELECT pic_id FROM sequences_pictures WHERE seq_id = %(seq)s
612
+ ),
613
+ picWithoutOtherSeq AS (
614
+ SELECT pic_id FROM pic2rm
615
+ EXCEPT
616
+ SELECT pic_id FROM sequences_pictures WHERE pic_id IN (SELECT pic_id FROM pic2rm) AND seq_id != %(seq)s
617
+ )
618
+ INSERT INTO job_queue(picture_id, task)
619
+ SELECT pic_id, 'delete' FROM picWithoutOtherSeq
620
+ ON CONFLICT (picture_id) DO UPDATE SET task = 'delete'
621
+ """,
622
+ {"seq": collectionId},
623
+ ).rowcount
624
+ # if there was a finalize task for this collection in the queue, we remove it, it's useless
625
+ cursor.execute("""DELETE FROM job_queue WHERE sequence_id = %(seq)s""", {"seq": collectionId})
626
+
627
+ # after the task have been added to the queue, we mark all picture for deletion
628
+ cursor.execute(
629
+ """
630
+ WITH pic2rm AS (
631
+ SELECT pic_id FROM sequences_pictures WHERE seq_id = %(seq)s
632
+ ),
633
+ picWithoutOtherSeq AS (
634
+ SELECT pic_id FROM pic2rm
635
+ EXCEPT
636
+ SELECT pic_id FROM sequences_pictures WHERE pic_id IN (SELECT pic_id FROM pic2rm) AND seq_id != %(seq)s
637
+ )
638
+ UPDATE pictures SET status = 'waiting-for-delete' WHERE id IN (SELECT pic_id FROM picWithoutOtherSeq)
639
+ """,
640
+ {"seq": collectionId},
641
+ ).rowcount
642
+
643
+ cursor.execute("UPDATE sequences SET status = 'deleted' WHERE id = %s", [collectionId])
644
+ return nb_updated