geovisio-2.9.0-py3-none-any.whl → geovisio-2.11.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (82)
  1. geovisio/__init__.py +8 -1
  2. geovisio/admin_cli/user.py +7 -2
  3. geovisio/config_app.py +26 -12
  4. geovisio/translations/ar/LC_MESSAGES/messages.mo +0 -0
  5. geovisio/translations/ar/LC_MESSAGES/messages.po +818 -0
  6. geovisio/translations/be/LC_MESSAGES/messages.mo +0 -0
  7. geovisio/translations/be/LC_MESSAGES/messages.po +886 -0
  8. geovisio/translations/br/LC_MESSAGES/messages.po +1 -1
  9. geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
  10. geovisio/translations/da/LC_MESSAGES/messages.po +96 -4
  11. geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
  12. geovisio/translations/de/LC_MESSAGES/messages.po +214 -122
  13. geovisio/translations/el/LC_MESSAGES/messages.po +1 -1
  14. geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
  15. geovisio/translations/en/LC_MESSAGES/messages.po +234 -157
  16. geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
  17. geovisio/translations/eo/LC_MESSAGES/messages.po +55 -5
  18. geovisio/translations/es/LC_MESSAGES/messages.po +1 -1
  19. geovisio/translations/fi/LC_MESSAGES/messages.po +1 -1
  20. geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
  21. geovisio/translations/fr/LC_MESSAGES/messages.po +92 -3
  22. geovisio/translations/hu/LC_MESSAGES/messages.po +1 -1
  23. geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
  24. geovisio/translations/it/LC_MESSAGES/messages.po +63 -3
  25. geovisio/translations/ja/LC_MESSAGES/messages.po +1 -1
  26. geovisio/translations/ko/LC_MESSAGES/messages.po +1 -1
  27. geovisio/translations/messages.pot +216 -139
  28. geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
  29. geovisio/translations/nl/LC_MESSAGES/messages.po +333 -62
  30. geovisio/translations/oc/LC_MESSAGES/messages.mo +0 -0
  31. geovisio/translations/oc/LC_MESSAGES/messages.po +821 -0
  32. geovisio/translations/pl/LC_MESSAGES/messages.po +1 -1
  33. geovisio/translations/pt/LC_MESSAGES/messages.mo +0 -0
  34. geovisio/translations/pt/LC_MESSAGES/messages.po +944 -0
  35. geovisio/translations/pt_BR/LC_MESSAGES/messages.mo +0 -0
  36. geovisio/translations/pt_BR/LC_MESSAGES/messages.po +942 -0
  37. geovisio/translations/sv/LC_MESSAGES/messages.mo +0 -0
  38. geovisio/translations/sv/LC_MESSAGES/messages.po +4 -3
  39. geovisio/translations/ti/LC_MESSAGES/messages.mo +0 -0
  40. geovisio/translations/ti/LC_MESSAGES/messages.po +762 -0
  41. geovisio/translations/tr/LC_MESSAGES/messages.mo +0 -0
  42. geovisio/translations/tr/LC_MESSAGES/messages.po +927 -0
  43. geovisio/translations/uk/LC_MESSAGES/messages.mo +0 -0
  44. geovisio/translations/uk/LC_MESSAGES/messages.po +920 -0
  45. geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +1 -1
  46. geovisio/utils/annotations.py +21 -21
  47. geovisio/utils/auth.py +47 -13
  48. geovisio/utils/cql2.py +22 -5
  49. geovisio/utils/fields.py +14 -2
  50. geovisio/utils/items.py +44 -0
  51. geovisio/utils/model_query.py +2 -2
  52. geovisio/utils/pic_shape.py +1 -1
  53. geovisio/utils/pictures.py +127 -36
  54. geovisio/utils/semantics.py +32 -3
  55. geovisio/utils/sentry.py +1 -1
  56. geovisio/utils/sequences.py +155 -109
  57. geovisio/utils/upload_set.py +303 -206
  58. geovisio/utils/users.py +18 -0
  59. geovisio/utils/website.py +1 -1
  60. geovisio/web/annotations.py +303 -69
  61. geovisio/web/auth.py +1 -1
  62. geovisio/web/collections.py +194 -97
  63. geovisio/web/configuration.py +36 -4
  64. geovisio/web/docs.py +109 -13
  65. geovisio/web/items.py +319 -186
  66. geovisio/web/map.py +92 -54
  67. geovisio/web/pages.py +48 -4
  68. geovisio/web/params.py +100 -42
  69. geovisio/web/pictures.py +37 -3
  70. geovisio/web/prepare.py +4 -2
  71. geovisio/web/queryables.py +57 -0
  72. geovisio/web/stac.py +8 -2
  73. geovisio/web/tokens.py +49 -1
  74. geovisio/web/upload_set.py +226 -51
  75. geovisio/web/users.py +89 -8
  76. geovisio/web/utils.py +26 -8
  77. geovisio/workers/runner_pictures.py +128 -23
  78. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/METADATA +15 -14
  79. geovisio-2.11.0.dist-info/RECORD +117 -0
  80. geovisio-2.9.0.dist-info/RECORD +0 -98
  81. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/WHEEL +0 -0
  82. {geovisio-2.9.0.dist-info → geovisio-2.11.0.dist-info}/licenses/LICENSE +0 -0
geovisio/utils/semantics.py CHANGED
@@ -22,6 +22,7 @@ class EntityType(Enum):
  pic = "picture_id"
  seq = "sequence_id"
  annotation = "annotation_id"
+ upload_set = "upload_set_id"


  @dataclass
@@ -37,10 +38,12 @@ class Entity:
  return Identifier("sequences_semantics")
  case EntityType.annotation:
  return Identifier("annotations_semantics")
+ case EntityType.upload_set:
+ return Identifier("upload_sets_semantics")
  case _:
  raise ValueError(f"Unknown entity type: {self.type}")

- def get_history_table(self) -> Identifier:
+ def get_history_table(self) -> Optional[Identifier]:
  match self.type:
  case EntityType.pic:
  return Identifier("pictures_semantics_history")
@@ -48,6 +51,8 @@ class Entity:
  return Identifier("sequences_semantics_history")
  case EntityType.annotation:
  return Identifier("pictures_semantics_history")
+ case EntityType.upload_set:
+ return None
  case _:
  raise ValueError(f"Unknown entity type: {self.type}")

@@ -77,14 +82,34 @@ AND ({filter})"""
  params,
  )
  if tag_to_add:
+ fields = [Identifier(entity.type.value), Identifier("key"), Identifier("value")]
+ if entity.type == EntityType.upload_set:
+ # upload_set semantics have no history, the account is directly stored in the table
+ fields.append(Identifier("account_id"))
+
  with cursor.copy(
  SQL("COPY {table} ({fields}) FROM STDIN").format(
  table=table_name,
- fields=SQL(",").join([Identifier(entity.type.value), Identifier("key"), Identifier("value")]),
+ fields=SQL(",").join(fields),
  )
  ) as copy:
  for tag in tag_to_add:
- copy.write_row((entity.id, tag.key, tag.value))
+ row = [entity.id, tag.key, tag.value]
+ if entity.type == EntityType.upload_set:
+ row.append(account)
+ copy.write_row(row)
+ if tag_to_delete and entity.type == EntityType.annotation and not tag_to_add:
+ # if tags have been deleted, we check if some annotations are now empty and need to be deleted
+ cursor.execute(
+ """DELETE FROM annotations
+ WHERE id = %(annotation_id)s AND
+ (
+ SELECT count(*) AS nb_semantics
+ FROM annotations_semantics
+ WHERE annotation_id = %(annotation_id)s
+ ) = 0""",
+ {"annotation_id": entity.id},
+ )
  if tag_to_add or tag_to_delete:
  # we track the history changes of the semantic tags
  track_semantic_history(cursor, entity, actions, account, annotation)
@@ -100,6 +125,10 @@ class SemanticTagUpdateOnAnnotation(SemanticTagUpdate):


  def track_semantic_history(cursor: Cursor, entity: Entity, actions: List[SemanticTagUpdate], account: UUID, annotation):
+ history_table = entity.get_history_table()
+ if history_table is None:
+ # no history for upload_set semantics
+ return
  params = {
  "account_id": account,
  }
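
The semantics changes above all follow from one rule: upload_set tags are not historized, so get_history_table() becomes Optional and track_semantic_history returns early for upload sets. Below is a minimal standalone sketch of that control flow for orientation only; it uses simplified names and plain strings instead of psycopg.sql.Identifier and is not the package's actual module.

from dataclasses import dataclass
from enum import Enum
from typing import Optional
from uuid import UUID


class EntityType(Enum):
    pic = "picture_id"
    seq = "sequence_id"
    annotation = "annotation_id"
    upload_set = "upload_set_id"  # new entity type in 2.11.0


@dataclass
class Entity:
    type: EntityType
    id: UUID

    def get_history_table(self) -> Optional[str]:
        # upload_set semantics have no history table, hence the Optional return type
        return {
            EntityType.pic: "pictures_semantics_history",
            EntityType.seq: "sequences_semantics_history",
            EntityType.annotation: "pictures_semantics_history",
            EntityType.upload_set: None,
        }[self.type]


def track_semantic_history(entity: Entity) -> bool:
    """Return True when a history row would be written, mirroring the early return added above."""
    return entity.get_history_table() is not None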
geovisio/utils/sentry.py CHANGED
@@ -73,7 +73,7 @@ def _wrap_cursor_execute(f):


  class FileSystemIntegration(Integration):
- """Add metrics to the 2 most usefull filesystem, the 'os file' filesystem and the s3 filesystem"""
+ """Add metrics to the 2 most useful filesystem, the 'os file' filesystem and the s3 filesystem"""

  identifier = "filesystem"

geovisio/utils/sequences.py CHANGED
@@ -1,20 +1,17 @@
- from operator import ne
- from click import Option
- from numpy import sort
  import psycopg
- from flask import current_app, g, url_for
+ from flask import current_app, url_for
  from flask_babel import gettext as _
  from psycopg.types.json import Jsonb
  from psycopg.sql import SQL, Composable
  from psycopg.rows import dict_row
  from dataclasses import dataclass, field
- from typing import Any, List, Dict, Optional
+ from typing import Any, List, Dict, Optional, Tuple
  import datetime
  from uuid import UUID
  from enum import Enum
  from geovisio.utils import db
- from geovisio.utils.auth import Account
- from geovisio.utils.fields import FieldMapping, SortBy, SQLDirection, BBox, Bounds, SortByField
+ from geovisio.utils.auth import Account, get_current_account
+ from geovisio.utils.fields import FieldMapping, SortBy, SQLDirection, BBox, Bounds
  from geopic_tag_reader import reader
  from pathlib import PurePath
  from geovisio import errors, utils
@@ -22,11 +19,22 @@ import logging
  import sentry_sdk


- def createSequence(metadata, accountId, user_agent: Optional[str] = None) -> UUID:
+ def createSequence(
+ metadata, accountId, user_agent: Optional[str] = None, upload_set_id: Optional[UUID] = None, visibility: Optional[str] = None
+ ):
  with db.execute(
  current_app,
- "INSERT INTO sequences(account_id, metadata, user_agent) VALUES(%s, %s, %s) RETURNING id",
- [accountId, Jsonb(metadata), user_agent],
+ """INSERT INTO sequences(account_id, metadata, user_agent, upload_set_id, visibility)
+ VALUES(%(account_id)s, %(metadata)s, %(user_agent)s, %(upload_set_id)s,
+ COALESCE(%(visibility)s, (SELECT default_visibility FROM accounts WHERE id = %(account_id)s), (SELECT default_visibility FROM configurations LIMIT 1)))
+ RETURNING id""",
+ {
+ "account_id": accountId,
+ "metadata": Jsonb(metadata),
+ "user_agent": user_agent,
+ "upload_set_id": upload_set_id,
+ "visibility": visibility,
+ },
  ) as r:
  seqId = r.fetchone()
  if seqId is None:
@@ -41,7 +49,7 @@ STAC_FIELD_MAPPINGS = {
  FieldMapping(sql_column=SQL("inserted_at"), stac="created"),
  FieldMapping(sql_column=SQL("updated_at"), stac="updated"),
  FieldMapping(sql_column=SQL("computed_capture_date"), stac="datetime"),
- FieldMapping(sql_column=SQL("status"), stac="status"),
+ FieldMapping(sql_column=SQL("visibility"), stac="visibility"),
  FieldMapping(sql_column=SQL("id"), stac="id"),
  ]
  }
@@ -55,7 +63,7 @@ class Collections:
  """

  collections: List[Dict[Any, Any]] = field(default_factory=lambda: [])
- # Bounds of the field used by the first field of the `ORDER BY` (usefull especially for pagination)
+ # Bounds of the field used by the first field of the `ORDER BY` (useful especially for pagination)
  query_bounds: Optional[Bounds] = None


@@ -72,77 +80,90 @@ class CollectionsRequest:
  pagination_filter: Optional[SQL] = None
  limit: int = 100
  userOwnsAllCollections: bool = False # bool to represent that the user's asking for the collections is the owner of them
+ show_deleted: bool = False
+ """Do we want to return deleted collections that respect the other filters in a separate field"""

  def filters(self):
  return [f for f in (self.user_filter, self.pagination_filter) if f is not None]

+ def to_sql_filters_and_params_without_permissions(self) -> Tuple[List[Composable], dict]:
+ """Transform the request to a list of SQL filters and a dict of parameters
+ Note: the filters do not contain any filter on permission/status, they need to be added afterward"""
+ seq_filter: List[Composable] = []
+ seq_params: dict = {}
+
+ # Sort-by parameter
+ seq_filter.append(SQL("{field} IS NOT NULL").format(field=self.sort_by.fields[0].field.sql_filter))
+ seq_filter.extend(self.filters())
+
+ if self.user_id is not None:
+ seq_filter.append(SQL("s.account_id = %(account)s"))
+ seq_params["account"] = self.user_id
+
+ # Datetime
+ if self.min_dt is not None:
+ seq_filter.append(SQL("s.computed_capture_date >= %(cmindate)s::date"))
+ seq_params["cmindate"] = self.min_dt
+ if self.max_dt is not None:
+ seq_filter.append(SQL("s.computed_capture_date <= %(cmaxdate)s::date"))
+ seq_params["cmaxdate"] = self.max_dt
+
+ if self.bbox is not None:
+ seq_filter.append(SQL("ST_Intersects(s.geom, ST_MakeEnvelope(%(minx)s, %(miny)s, %(maxx)s, %(maxy)s, 4326))"))
+ seq_params["minx"] = self.bbox.minx
+ seq_params["miny"] = self.bbox.miny
+ seq_params["maxx"] = self.bbox.maxx
+ seq_params["maxy"] = self.bbox.maxy
+
+ # Created after/before
+ if self.created_after is not None:
+ seq_filter.append(SQL("s.inserted_at > %(created_after)s::timestamp with time zone"))
+ seq_params["created_after"] = self.created_after
+
+ if self.created_before:
+ seq_filter.append(SQL("s.inserted_at < %(created_before)s::timestamp with time zone"))
+ seq_params["created_before"] = self.created_before
+
+ return seq_filter, seq_params
+

  def get_collections(request: CollectionsRequest) -> Collections:
  # Check basic parameters
- seq_filter: List[Composable] = []
- seq_params: dict = {}
-
- # Sort-by parameter
- # Note for review: I'm not sure I understand this non nullity constraint, but if so, shouldn't all sortby fields be added ?
- # for s in request.sort_by.fields:
- # sqlConditionsSequences.append(SQL("{field} IS NOT NULL").format(field=s.field.sql_filter))
- seq_filter.append(SQL("{field} IS NOT NULL").format(field=request.sort_by.fields[0].field.sql_filter))
- seq_filter.extend(request.filters())
-
- if request.user_id is not None:
- seq_filter.append(SQL("s.account_id = %(account)s"))
- seq_params["account"] = request.user_id
-
- user_filter_str = request.user_filter.as_string(None) if request.user_filter is not None else None
- if user_filter_str is None or "status" not in user_filter_str:
- # if the filter does not contains any `status` condition, we want to show only 'ready' collection to the general users, and non deleted one for the owner
+ seq_filter, seq_params = request.to_sql_filters_and_params_without_permissions()
+
+ # Only the owner of an account can view sequences not 'ready' (and we don't want to show the deleted even to the owner)
+ account_to_query = get_current_account()
+ if not request.show_deleted:
  if not request.userOwnsAllCollections:
  seq_filter.append(SQL("status = 'ready'"))
  else:
  seq_filter.append(SQL("status != 'deleted'"))
  else:
- if not request.userOwnsAllCollections and "'deleted'" not in user_filter_str:
- # if there are status filter and we ask for deleted sequence, we also include hidden one and consider them as deleted
- seq_filter.append(SQL("status <> 'hidden'"))
+ seq_filter.append(SQL("status IN ('deleted', 'ready')"))
+
+ seq_params["account_to_query"] = account_to_query.id if account_to_query is not None else None
+
+ if account_to_query is not None and account_to_query.can_see_all():
+ # if the account querying is an admin, we also do not filter, and we consider that the admin can see all sequences
+ visible_by_user = SQL("TRUE")
+ elif request.show_deleted:
+ # if asked to show deletion, we do not filter using the rights, but we'll output only the id of the non visible sequence
+ visible_by_user = SQL("is_sequence_visible_by_user(s, %(account_to_query)s)")
+ else:
+ visible_by_user = SQL("is_sequence_visible_by_user(s, %(account_to_query)s)")
+ seq_filter.append(SQL("is_sequence_visible_by_user(s, %(account_to_query)s)"))

- status_field = None
+ status_field = SQL("s.status AS status")
  if request.userOwnsAllCollections:
- # only logged users can see detailed status
- status_field = SQL("s.status AS status")
+ # only show detailed visibility if the user querying owns all the collections (so on /api/users/me/collection)
+ visibility_field = SQL("s.visibility")
  else:
- # hidden sequence are marked as deleted, this way crawler can update their catalog
- status_field = SQL("CASE WHEN s.status IN ('hidden', 'deleted') THEN 'deleted' ELSE s.status END AS status")
-
- # Datetime
- if request.min_dt is not None:
- seq_filter.append(SQL("s.computed_capture_date >= %(cmindate)s::date"))
- seq_params["cmindate"] = request.min_dt
- if request.max_dt is not None:
- seq_filter.append(SQL("s.computed_capture_date <= %(cmaxdate)s::date"))
- seq_params["cmaxdate"] = request.max_dt
-
- if request.bbox is not None:
- seq_filter.append(SQL("ST_Intersects(s.geom, ST_MakeEnvelope(%(minx)s, %(miny)s, %(maxx)s, %(maxy)s, 4326))"))
- seq_params["minx"] = request.bbox.minx
- seq_params["miny"] = request.bbox.miny
- seq_params["maxx"] = request.bbox.maxx
- seq_params["maxy"] = request.bbox.maxy
-
- # Created after/before
- if request.created_after is not None:
- seq_filter.append(SQL("s.inserted_at > %(created_after)s::timestamp with time zone"))
- seq_params["created_after"] = request.created_after
-
- if request.created_before:
- seq_filter.append(SQL("s.inserted_at < %(created_before)s::timestamp with time zone"))
- seq_params["created_before"] = request.created_before
+ visibility_field = SQL("NULL AS visibility")

  with utils.db.cursor(current_app, row_factory=dict_row) as cursor:
  sqlSequencesRaw = SQL(
- """
- SELECT
+ """SELECT
  s.id,
- s.status,
  s.metadata->>'title' AS name,
  s.inserted_at AS created,
  s.updated_at AS updated,
@@ -157,33 +178,37 @@ def get_collections(request: CollectionsRequest) -> Collections:
  s.min_picture_ts AS mints,
  s.max_picture_ts AS maxts,
  s.nb_pictures AS nbpic,
+ s.upload_set_id,
  {status},
+ {visibility},
+ {visible_by_user} as is_sequence_visible_by_user,
  s.computed_capture_date AS datetime,
  s.user_agent,
  ROUND(ST_Length(s.geom::geography)) / 1000 AS length_km,
  s.computed_h_pixel_density,
  s.computed_gps_accuracy,
- t.semantics
+ COALESCE(seq_sem.semantics, '[]'::json) AS semantics
  FROM sequences s
  LEFT JOIN accounts on s.account_id = accounts.id
  LEFT JOIN (
- SELECT sequence_id, json_agg(json_strip_nulls(json_build_object(
- 'key', key,
- 'value', value
- )) ORDER BY key, value) AS semantics
- FROM sequences_semantics
- GROUP BY sequence_id
- ) t ON t.sequence_id = s.id
+ SELECT sequence_id, json_agg(json_strip_nulls(json_build_object(
+ 'key', key,
+ 'value', value
+ )) ORDER BY key, value) AS semantics
+ FROM sequences_semantics
+ GROUP BY sequence_id
+ ) seq_sem ON seq_sem.sequence_id = s.id
  WHERE {filter}
  ORDER BY {order1}
- LIMIT {limit}
- """
+ LIMIT {limit}"""
  )
  sqlSequences = sqlSequencesRaw.format(
  filter=SQL(" AND ").join(seq_filter),
  order1=request.sort_by.as_sql(),
  limit=request.limit,
  status=status_field,
+ visibility=visibility_field,
+ visible_by_user=visible_by_user,
  )

  # Different request if we want the last n sequences
@@ -200,13 +225,13 @@ def get_collections(request: CollectionsRequest) -> Collections:
  order1=request.sort_by.revert(),
  limit=request.limit,
  status=status_field,
+ visibility=visibility_field,
+ visible_by_user=visible_by_user,
  )
  sqlSequences = SQL(
- """
- SELECT *
- FROM ({base_query}) s
- ORDER BY {order2}
- """
+ """SELECT *
+ FROM ({base_query}) s
+ ORDER BY {order2}"""
  ).format(
  order2=request.sort_by.as_sql(),
  base_query=base_query,
@@ -253,6 +278,7 @@ def get_dataset_bounds(
  sortBy: SortBy,
  additional_filters: Optional[SQL] = None,
  additional_filters_params: Optional[Dict[str, Any]] = None,
+ account_to_query_id: Optional[UUID] = None,
  ) -> Optional[Bounds]:
  """Computes the dataset bounds from the sortBy field (using lexicographic order)

@@ -277,7 +303,7 @@ SELECT * FROM min_bounds, max_bounds;
  reverse_fields=sortBy.revert_non_aliased_sql(),
  filters=additional_filters or SQL("TRUE"),
  ),
- params=additional_filters_params or {},
+ params=(additional_filters_params or {}) | {"account_to_query": account_to_query_id},
  ).fetchone()
  if not sql_bounds:
  return None
@@ -322,6 +348,7 @@ def get_pagination_links(
  datasetBounds: Bounds,
  dataBounds: Optional[Bounds],
  additional_filters: Optional[str],
+ showDeleted: Optional[bool] = None,
  ) -> List:
  """Computes STAC links to handle pagination"""

@@ -336,7 +363,7 @@ def get_pagination_links(
  {
  "rel": "first",
  "type": "application/json",
- "href": url_for(route, _external=True, **routeArgs, filter=additional_filters, sortby=sortby),
+ "href": url_for(route, _external=True, **routeArgs, filter=additional_filters, sortby=sortby, show_deleted=showDeleted),
  }
  )

@@ -351,6 +378,7 @@ def get_pagination_links(
  _external=True,
  **routeArgs,
  sortby=sortby,
+ show_deleted=showDeleted,
  filter=additional_filters,
  page=page_filter,
  ),
@@ -369,6 +397,7 @@ def get_pagination_links(
  _external=True,
  **routeArgs,
  sortby=sortby,
+ show_deleted=showDeleted,
  filter=additional_filters,
  page=next_filter,
  ),
@@ -387,6 +416,7 @@ def get_pagination_links(
  _external=True,
  **routeArgs,
  sortby=sortby,
+ show_deleted=showDeleted,
  filter=additional_filters,
  page=last_filter,
  ),
@@ -427,7 +457,7 @@ def sort_collection(db, collectionId: UUID, sortby: CollectionSort):
  """
  Sort a collection by a given parameter

- Note: the transaction is not commited at the end, you need to commit it or use an autocommit connection
+ Note: the transaction is not committed at the end, you need to commit it or use an autocommit connection
  """

  # Remove existing order, and keep list of pictures IDs
@@ -519,8 +549,8 @@ def update_headings(
  db,
  sequenceId: UUID,
  editingAccount: Optional[UUID] = None,
- relativeHeading: int = 0,
- updateOnlyMissing: bool = True,
+ relativeHeading: Optional[int] = None,
+ updateOnlyMissing: Optional[bool] = None,
  ):
  """Defines pictures heading according to sequence path.
  Database is not committed in this function, to make entry definitively stored
@@ -532,27 +562,42 @@ def update_headings(
  Database connection
  sequenceId : uuid
  The sequence's uuid, as stored in the database
- relativeHeading : int
+ relativeHeading : Optional[int]
  Camera relative orientation compared to path, in degrees clockwise.
  Example: 0° = looking forward, 90° = looking to right, 180° = looking backward, -90° = looking left.
- updateOnlyMissing : bool
+ If not provided, will first use the relative_heading stored in the sequence's metadata, then the relative_heading of its upload_set (if if none is set, default to 0).
+ updateOnlyMissing : Optional[bool]
  If true, doesn't change existing heading values in database
+ if not provided, we check if some relative heading has been set (either in the sequence or in its upload_set), and if so, we recompute all
  """
-
  db.execute(
  SQL(
- """
- WITH h AS (
+ """WITH
+ relative_heading AS (
+ SELECT COALESCE(
+ %(relativeHeading)s,
+ (SELECT (metadata->>'relative_heading')::int FROM sequences WHERE id = %(seq)s),
+ (SELECT upload_sets.relative_heading FROM sequences JOIN upload_sets ON sequences.upload_set_id = upload_sets.id WHERE sequences.id = %(seq)s),
+ 0
+ ) AS heading,
+ COALESCE(
+ %(update_only_missing)s,
+ (SELECT metadata->'relative_heading' IS NULL FROM sequences WHERE id = %(seq)s and metadata ? 'relative_heading'),
+ (SELECT upload_sets.relative_heading IS NULL FROM sequences JOIN upload_sets ON sequences.upload_set_id = upload_sets.id WHERE sequences.id = %(seq)s)
+ ) AS update_only_missing
+ )
+ , h AS (
  SELECT
  p.id,
  p.heading AS old_heading,
  CASE
  WHEN LEAD(sp.rank) OVER othpics IS NULL AND LAG(sp.rank) OVER othpics IS NULL
- THEN NULL
+ -- if there is a single picture, we take the relative heading directly
+ THEN (SELECT heading FROM relative_heading)
  WHEN LEAD(sp.rank) OVER othpics IS NULL
- THEN (360 + FLOOR(DEGREES(ST_Azimuth(LAG(p.geom) OVER othpics, p.geom)))::int + (%(diff)s %% 360)) %% 360
+ THEN (360 + FLOOR(DEGREES(ST_Azimuth(LAG(p.geom) OVER othpics, p.geom)))::int + ((SELECT heading FROM relative_heading) %% 360)) %% 360
  ELSE
- (360 + FLOOR(DEGREES(ST_Azimuth(p.geom, LEAD(p.geom) OVER othpics)))::int + (%(diff)s %% 360)) %% 360
+ (360 + FLOOR(DEGREES(ST_Azimuth(p.geom, LEAD(p.geom) OVER othpics)))::int + ((SELECT heading FROM relative_heading) %% 360)) %% 360
  END AS heading
  FROM pictures p
  JOIN sequences_pictures sp ON sp.pic_id = p.id AND sp.seq_id = %(seq)s
@@ -561,13 +606,15 @@ def update_headings(
  UPDATE pictures p
  SET heading = h.heading, heading_computed = true {editing_account}
  FROM h
- WHERE h.id = p.id {update_missing}
+ WHERE h.id = p.id AND (
+ (SELECT NOT update_only_missing FROM relative_heading)
+ OR (p.heading IS NULL OR p.heading = 0 OR p.heading_computed) -- # lots of camera have heading set to 0 for unset heading, so we recompute the heading when it's 0 too, even if this could be a valid value
+ )
  """
  ).format(
- update_missing=SQL(" AND (p.heading IS NULL OR p.heading = 0 OR p.heading_computed)") if updateOnlyMissing else SQL(""),
  editing_account=SQL(", last_account_to_edit = %(account)s") if editingAccount is not None else SQL(""),
- ), # lots of camera have heading set to 0 for unset heading, so we recompute the heading when it's 0 too, even if this could be a valid value
- {"seq": sequenceId, "diff": relativeHeading, "account": editingAccount},
+ ),
+ {"seq": sequenceId, "relativeHeading": relativeHeading, "account": editingAccount, "update_only_missing": updateOnlyMissing},
  )


@@ -593,14 +640,13 @@ def finalize(cursor, seqId: UUID, logger: logging.Logger = logging.getLogger()):
  span.set_data("sequence_id", seqId)
  logger.debug(f"Finalizing sequence {seqId}")

- with utils.time.log_elapsed(f"Finalizing sequence {seqId}"):
- # Complete missing headings in pictures
- update_headings(cursor, seqId)
+ # Complete missing headings in pictures
+ update_headings(cursor, seqId)

- # Change sequence database status in DB
- # Also generates data in computed columns
- cursor.execute(
- """WITH
+ # Change sequence database status in DB
+ # Also generates data in computed columns
+ cursor.execute(
+ """WITH
  aggregated_pictures AS (
  SELECT
  sp.seq_id,
@@ -618,7 +664,7 @@ GROUP BY sp.seq_id
  )
  UPDATE sequences
  SET
- status = CASE WHEN status = 'hidden' THEN 'hidden'::sequence_status ELSE 'ready'::sequence_status END, -- we don't want to change status if it's hidden
+ status = 'ready'::sequence_status,
  geom = compute_sequence_geom(id),
  bbox = compute_sequence_bbox(id),
  computed_type = CASE WHEN array_length(types, 1) = 1 THEN types[1] ELSE NULL END,
@@ -629,13 +675,13 @@ computed_gps_accuracy = gpsacc
  FROM aggregated_pictures
  WHERE id = %(seq)s
  """,
- {"seq": seqId},
- )
+ {"seq": seqId},
+ )

- logger.info(f"Sequence {seqId} is ready")
+ logger.info(f"Sequence {seqId} is ready")


- def update_pictures_grid() -> Optional[datetime.datetime]:
+ def update_pictures_grid() -> bool:
  """Refreshes the pictures_grid materialized view for an up-to-date view of pictures availability on map.

  Parameters
@@ -686,7 +732,7 @@ def delete_collection(collectionId: UUID, account: Optional[Account]) -> int:
  raise errors.InvalidAPIUsage(_("Collection %(c)s wasn't found in database", c=collectionId), status_code=404)

  # Account associated to sequence doesn't match current user
- if account is not None and account.id != str(sequence[1]):
+ if account is not None and not account.can_edit_collection(str(sequence[1])):
  raise errors.InvalidAPIUsage("You're not authorized to edit this sequence", status_code=403)

  logging.info(f"Asking for deletion of sequence {collectionId} and all its pictures")