geovisio-2.8.0-py3-none-any.whl → geovisio-2.9.0-py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (61)
  1. geovisio/__init__.py +16 -3
  2. geovisio/config_app.py +11 -1
  3. geovisio/translations/br/LC_MESSAGES/messages.mo +0 -0
  4. geovisio/translations/br/LC_MESSAGES/messages.po +762 -0
  5. geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
  6. geovisio/translations/da/LC_MESSAGES/messages.po +10 -1
  7. geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
  8. geovisio/translations/de/LC_MESSAGES/messages.po +10 -1
  9. geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
  10. geovisio/translations/en/LC_MESSAGES/messages.po +9 -7
  11. geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
  12. geovisio/translations/eo/LC_MESSAGES/messages.po +67 -1
  13. geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
  14. geovisio/translations/es/LC_MESSAGES/messages.po +4 -3
  15. geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
  16. geovisio/translations/fr/LC_MESSAGES/messages.po +37 -4
  17. geovisio/translations/hu/LC_MESSAGES/messages.mo +0 -0
  18. geovisio/translations/hu/LC_MESSAGES/messages.po +4 -3
  19. geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
  20. geovisio/translations/it/LC_MESSAGES/messages.po +10 -1
  21. geovisio/translations/ja/LC_MESSAGES/messages.mo +0 -0
  22. geovisio/translations/ja/LC_MESSAGES/messages.po +242 -154
  23. geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
  24. geovisio/translations/nl/LC_MESSAGES/messages.po +131 -25
  25. geovisio/translations/pl/LC_MESSAGES/messages.mo +0 -0
  26. geovisio/translations/pl/LC_MESSAGES/messages.po +4 -3
  27. geovisio/translations/sv/LC_MESSAGES/messages.mo +0 -0
  28. geovisio/translations/sv/LC_MESSAGES/messages.po +822 -0
  29. geovisio/utils/annotations.py +186 -0
  30. geovisio/utils/cql2.py +134 -0
  31. geovisio/utils/db.py +7 -0
  32. geovisio/utils/fields.py +24 -7
  33. geovisio/utils/loggers.py +14 -0
  34. geovisio/utils/model_query.py +2 -2
  35. geovisio/utils/params.py +7 -4
  36. geovisio/utils/pic_shape.py +63 -0
  37. geovisio/utils/pictures.py +54 -12
  38. geovisio/utils/reports.py +10 -17
  39. geovisio/utils/semantics.py +165 -55
  40. geovisio/utils/sentry.py +0 -1
  41. geovisio/utils/sequences.py +141 -60
  42. geovisio/utils/tags.py +31 -0
  43. geovisio/utils/upload_set.py +26 -21
  44. geovisio/utils/website.py +3 -0
  45. geovisio/web/annotations.py +205 -9
  46. geovisio/web/auth.py +3 -2
  47. geovisio/web/collections.py +49 -34
  48. geovisio/web/configuration.py +2 -1
  49. geovisio/web/docs.py +55 -16
  50. geovisio/web/items.py +55 -54
  51. geovisio/web/map.py +25 -13
  52. geovisio/web/params.py +11 -21
  53. geovisio/web/stac.py +19 -12
  54. geovisio/web/upload_set.py +92 -11
  55. geovisio/web/users.py +31 -4
  56. geovisio/workers/runner_pictures.py +71 -10
  57. {geovisio-2.8.0.dist-info → geovisio-2.9.0.dist-info}/METADATA +24 -22
  58. geovisio-2.9.0.dist-info/RECORD +98 -0
  59. {geovisio-2.8.0.dist-info → geovisio-2.9.0.dist-info}/WHEEL +1 -1
  60. geovisio-2.8.0.dist-info/RECORD +0 -89
  61. {geovisio-2.8.0.dist-info → geovisio-2.9.0.dist-info/licenses}/LICENSE +0 -0
geovisio/utils/upload_set.py CHANGED
@@ -6,7 +6,7 @@ from geovisio.utils.extent import TemporalExtent
  from uuid import UUID
  from typing import Optional, List, Dict, Any
  from datetime import datetime, timedelta
- from geovisio.utils import db, sequences
+ from geovisio.utils import cql2, db, sequences
  from geovisio import errors
  from geovisio.utils.link import make_link, Link
  import psycopg
@@ -17,6 +17,8 @@ from flask import current_app
  from flask_babel import gettext as _
  from geopic_tag_reader import sequence as geopic_sequence, reader
 
+ from geovisio.utils.loggers import getLoggerWithExtra
+
 
  class AggregatedStatus(BaseModel):
  """Aggregated status"""
@@ -355,16 +357,7 @@ def _parse_filter(filter: Optional[str]) -> SQL:
  """
  if not filter:
  return SQL("TRUE")
- from pygeofilter.backends.sql import to_sql_where
- from pygeofilter.parsers.cql2_text import parse as cql_parser
-
- try:
- filterAst = cql_parser(filter)
- f = to_sql_where(filterAst, FIELD_TO_SQL_FILTER).replace('"', "") # type: ignore
- return SQL(f) # type: ignore
- except Exception:
- logging.error(f"Unsupported filter parameter: {filter}")
- raise errors.InvalidAPIUsage(_("Unsupported filter parameter"), status_code=400)
+ return cql2.parse_cql2_filter(filter, FIELD_TO_SQL_FILTER)
 
 
  def list_upload_sets(account_id: UUID, limit: int = 100, filter: Optional[str] = None) -> UploadSets:
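The `_parse_filter` refactor above moves CQL2 handling into the new `geovisio/utils/cql2.py` helper, whose implementation is not part of this hunk. The removed lines document the underlying approach: pygeofilter parses the CQL2 text into an AST, and `to_sql_where` renders it against a field-to-column mapping. A minimal standalone sketch of that approach, with an illustrative mapping rather than GeoVisio's real `FIELD_TO_SQL_FILTER`:

```python
# Sketch of the CQL2-text -> SQL translation that was inlined before this refactor.
# FIELD_MAPPING is a made-up example; GeoVisio keeps its own FIELD_TO_SQL_FILTER.
from pygeofilter.backends.sql import to_sql_where
from pygeofilter.parsers.cql2_text import parse as cql_parser

FIELD_MAPPING = {"status": "status", "created": "created_at"}

def cql2_text_to_sql_where(filter_text: str) -> str:
    ast = cql_parser(filter_text)  # CQL2 text -> filter AST
    return to_sql_where(ast, FIELD_MAPPING).replace('"', "")  # AST -> SQL WHERE clause

print(cql2_text_to_sql_where("status = 'completed' AND created >= '2024-01-01'"))
```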
@@ -435,6 +428,7 @@ def dispatch(upload_set_id: UUID):
  if not db_upload_set:
  raise Exception(f"Upload set {upload_set_id} not found")
 
+ logger = getLoggerWithExtra("geovisio.upload_set", {"upload_set_id": str(upload_set_id)})
  with db.conn(current_app) as conn:
  with conn.transaction(), conn.cursor(row_factory=dict_row) as cursor:
 
@@ -493,19 +487,19 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
  maxDistance=db_upload_set.duplicate_distance, maxRotationAngle=db_upload_set.duplicate_rotation
  ),
  sortMethod=db_upload_set.sort_method,
- splitParams=geopic_sequence.SplitParams(maxDistance=db_upload_set.split_distance, maxTime=db_upload_set.split_time.seconds),
+ splitParams=geopic_sequence.SplitParams(
+ maxDistance=db_upload_set.split_distance, maxTime=db_upload_set.split_time.total_seconds()
+ ),
  )
  reused_sequence = set()
 
  pics_to_delete_duplicates = [pics_by_filename[p.filename]["id"] for p in report.duplicate_pictures or []]
  pics_to_delete = pics_to_delete_duplicates + pics_to_delete_bug
  if pics_to_delete:
- logging.debug(
- f"For uploadset '{upload_set_id}', nb duplicate pictures {len(pics_to_delete_duplicates)} {f' and {len(pics_to_delete_bug)} pictures without files' if pics_to_delete_bug else ''}"
- )
- logging.debug(
- f"For uploadset '{upload_set_id}', duplicate pictures {[p.filename for p in report.duplicate_pictures or []]}"
+ logger.debug(
+ f"nb duplicate pictures {len(pics_to_delete_duplicates)} {f' and {len(pics_to_delete_bug)} pictures without files' if pics_to_delete_bug else ''}"
  )
+ logger.debug(f"duplicate pictures {[p.filename for p in report.duplicate_pictures or []]}")
 
  cursor.execute(SQL("CREATE TEMPORARY TABLE tmp_duplicates(picture_id UUID) ON COMMIT DROP"))
  with cursor.copy("COPY tmp_duplicates(picture_id) FROM stdin;") as copy:
@@ -521,6 +515,8 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
  cursor.execute(SQL("DELETE FROM pictures WHERE id IN (select picture_id FROM tmp_duplicates)"))
 
  number_title = len(report.sequences) > 1
+ existing_sequences = set(p["sequence_id"] for p in db_pics if p["sequence_id"])
+ new_sequence_ids = set()
  for i, s in enumerate(report.sequences, start=1):
  existing_sequence = next(
  (seq for p in s.pictures if (seq := pics_by_filename[p.filename]["sequence_id"]) not in reused_sequence),
@@ -528,9 +524,7 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
  )
  # if some of the pictures were already in a sequence, we should not create a new one
  if existing_sequence:
- logging.info(
- f"For uploadset '{upload_set_id}', sequence {existing_sequence} already contains pictures, we will not create a new one"
- )
+ logger.info(f"sequence {existing_sequence} already contains pictures, we will not create a new one")
  # we should wipe the sequences_pictures though
  seq_id = existing_sequence
  cursor.execute(
@@ -554,6 +548,8 @@ RETURNING id"""
  ).fetchone()
  seq_id = seq_id["id"]
 
+ new_sequence_ids.add(seq_id)
+
  with cursor.copy("COPY sequences_pictures(seq_id, pic_id, rank) FROM stdin;") as copy:
  for i, p in enumerate(s.pictures, 1):
  copy.write_row(
@@ -562,8 +558,17 @@ RETURNING id"""
 
  sequences.add_finalization_job(cursor=cursor, seqId=seq_id)
 
+ # we can delete all the old sequences
+ sequences_to_delete = existing_sequences - new_sequence_ids
+ if sequences_to_delete:
+ logger.debug(f"sequences to delete = {sequences_to_delete} (existing = {existing_sequences}, new = {new_sequence_ids})")
+ conn.execute(SQL("DELETE FROM sequences_pictures WHERE seq_id = ANY(%(seq_ids)s)"), {"seq_ids": list(sequences_to_delete)})
+ conn.execute(
+ SQL("UPDATE sequences SET status = 'deleted' WHERE id = ANY(%(seq_ids)s)"), {"seq_ids": list(sequences_to_delete)}
+ )
+
  for s in report.sequences_splits or []:
- logging.debug(f"For uploadset '{upload_set_id}', split = {s.prevPic.filename} -> {s.nextPic.filename} : {s.reason}")
+ logger.debug(f"split = {s.prevPic.filename} -> {s.nextPic.filename} : {s.reason}")
  conn.execute(SQL("UPDATE upload_sets SET dispatched = true WHERE id = %(upload_set_id)s"), {"upload_set_id": db_upload_set.id})
 
 
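Note on the `splitParams` change above: `timedelta.seconds` only returns the seconds component of the interval (days are silently dropped), while `total_seconds()` returns the whole duration, which is what the dispatch code now passes as `maxTime`. A quick illustration:

```python
from datetime import timedelta

split_time = timedelta(days=1, minutes=5)
print(split_time.seconds)          # 300 -- only the seconds part, the day is lost
print(split_time.total_seconds())  # 86700.0 -- the full duration
```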
geovisio/utils/website.py CHANGED
@@ -37,6 +37,9 @@ class Website:
  )
 
  def _to_url(self, route: str, params: Optional[Dict[str, str]] = None):
+ if not self.url:
+ return None
+
  base_url = self.url if self.url != WEBSITE_UNDER_SAME_HOST else url_for("index", _external=True)
 
  from urllib.parse import urlencode
geovisio/web/annotations.py CHANGED
@@ -1,17 +1,213 @@
- from geovisio.utils import auth
- from psycopg.rows import dict_row, class_row
- from psycopg.sql import SQL
- from geovisio.utils.semantics import Entity, EntityType, SemanticTagUpdate, update_tags
+ from typing import List, Optional
+ from geovisio.utils import auth, db
+ from geovisio.utils.annotations import AnnotationCreationParameter, creation_annotation, get_annotation, update_annotation
+ from geovisio.utils.tags import SemanticTagUpdate
  from geovisio.web.utils import accountIdOrDefault
- from psycopg.types.json import Jsonb
- from geovisio.utils import db
  from geovisio.utils.params import validation_error
  from geovisio import errors
- from pydantic import BaseModel, ConfigDict, ValidationError
+ from pydantic import BaseModel, ValidationError
  from uuid import UUID
- from typing import List, Optional
- from flask import Blueprint, request, current_app
+ from flask import Blueprint, current_app, request, url_for
  from flask_babel import gettext as _
 
 
  bp = Blueprint("annotations", __name__, url_prefix="/api")
+
+
+ @bp.route("/collections/<uuid:collectionId>/items/<uuid:itemId>/annotations", methods=["POST"])
+ @auth.login_required()
+ def postAnnotation(collectionId, itemId, account):
+ """Create an annotation on a picture.
+
+ The geometry can be provided as a bounding box (a list of 4 integers, minx, miny, maxx, maxy) or as a geojson geometry.
+ All coordinates must be in pixel, starting from the top left of the picture.
+
+ If an annotation already exists on the picture with the same shape, it will be used.
+ ---
+ tags:
+ - Editing
+ - Semantics
+ parameters:
+ - name: collectionId
+ in: path
+ description: ID of collection to retrieve
+ required: true
+ schema:
+ type: string
+ - name: itemId
+ in: path
+ description: ID of item to retrieve
+ required: true
+ schema:
+ type: string
+ requestBody:
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/GeoVisioPostAnnotation'
+ security:
+ - bearerToken: []
+ - cookieAuth: []
+ responses:
+ 200:
+ description: the annotation metadata
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/GeoVisioAnnotation'
+ """
+
+ account_id = UUID(accountIdOrDefault(account))
+
+ pic = db.fetchone(
+ current_app,
+ "SELECT 1 FROM sequences_pictures WHERE seq_id = %(seq)s AND pic_id = %(pic)s",
+ {"seq": collectionId, "pic": itemId},
+ )
+ if not pic:
+ raise errors.InvalidAPIUsage(_("Picture %(p)s wasn't found in database", p=itemId), status_code=404)
+
+ if request.is_json and request.json is not None:
+ try:
+ params = AnnotationCreationParameter(**request.json, account_id=account_id, picture_id=itemId)
+ except ValidationError as ve:
+ raise errors.InvalidAPIUsage(_("Impossible to create an annotation"), payload=validation_error(ve))
+ else:
+ raise errors.InvalidAPIUsage(_("Parameter for creating an annotation should be a valid JSON"), status_code=415)
+
+ annotation = creation_annotation(params)
+
+ return (
+ annotation.model_dump_json(exclude_none=True),
+ 200,
+ {
+ "Content-Type": "application/json",
+ "Access-Control-Expose-Headers": "Location", # Needed for allowing web browsers access Location header
+ "Location": url_for(
+ "annotations.getAnnotation", _external=True, annotationId=annotation.id, collectionId=collectionId, itemId=itemId
+ ),
+ },
+ )
+
+
+ @bp.route("/collections/<uuid:collectionId>/items/<uuid:itemId>/annotations/<uuid:annotationId>", methods=["GET"])
+ def getAnnotation(collectionId, itemId, annotationId):
+ """Get an annotation
+
+ ---
+ tags:
+ - Semantics
+ parameters:
+ - name: collectionId
+ in: path
+ description: ID of collection
+ required: true
+ schema:
+ type: string
+ - name: itemId
+ in: path
+ description: ID of item
+ required: true
+ schema:
+ type: string
+ - name: annotationId
+ in: path
+ description: ID of annotation
+ required: true
+ schema:
+ type: string
+ security:
+ - bearerToken: []
+ - cookieAuth: []
+ responses:
+ 200:
+ description: the annotation metadata
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/GeoVisioAnnotation'
+ """
+ with db.conn(current_app) as conn:
+
+ annotation = get_annotation(conn, annotationId)
+ if not annotation or annotation.picture_id != itemId:
+ raise errors.InvalidAPIUsage(_("Annotation %(p)s not found", p=itemId), status_code=404)
+
+ return annotation.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
+
+
+ class AnnotationPatchParameter(BaseModel):
+ """Parameters used to update an annotation"""
+
+ semantics: Optional[List[SemanticTagUpdate]] = None
+ """Tags to update on the annotation. By default each tag will be added to the annotation's tags, but you can change this behavior by setting the `action` parameter to `delete`.
+
+ If you want to replace a tag, you need to first delete it, then add it again.
+
+ Like:
+ [
+ {"key": "some_key", "value": "some_value", "action": "delete"},
+ {"key": "some_key", "value": "some_new_value"}
+ ]
+ """
+
+
+ @bp.route("/collections/<uuid:collectionId>/items/<uuid:itemId>/annotations/<uuid:annotationId>", methods=["PATCH"])
+ @auth.login_required()
+ def patchAnnotation(collectionId, itemId, annotationId, account):
+ """Patch an annotation
+
+ Note that if the annotation has no associated tags anymore, it will be deleted.
+ ---
+ tags:
+ - Semantics
+ parameters:
+ - name: collectionId
+ in: path
+ description: ID of collection
+ required: true
+ schema:
+ type: string
+ - name: itemId
+ in: path
+ description: ID of item
+ required: true
+ schema:
+ type: string
+ - name: annotationId
+ in: path
+ description: ID of annotation
+ required: true
+ schema:
+ type: string
+ security:
+ - bearerToken: []
+ - cookieAuth: []
+ responses:
+ 200:
+ description: the annotation metadata
+ content:
+ application/json:
+ schema:
+ $ref: '#/components/schemas/GeoVisioAnnotation'
+ 204:
+ description: The annotation was empty, it has been correctly deleted
+ """
+ if request.is_json and request.json is not None:
+ try:
+ params = AnnotationPatchParameter(**request.json)
+ except ValidationError as ve:
+ raise errors.InvalidAPIUsage(_("Impossible to patch annotation, invalid parameters"), payload=validation_error(ve))
+ else:
+ raise errors.InvalidAPIUsage(_("Parameter for updating an annotation should be a valid JSON"), status_code=415)
+
+ with db.conn(current_app) as conn:
+
+ annotation = get_annotation(conn, annotationId)
+ if not annotation or annotation.picture_id != itemId:
+ raise errors.InvalidAPIUsage(_("Annotation %(p)s not found", p=itemId), status_code=404)
+
+ a = update_annotation(annotation, params.semantics, account.id)
+ if a is None:
+ return "", 204
+ return a.model_dump_json(exclude_none=True), 200, {"Content-Type": "application/json"}
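The `AnnotationPatchParameter` docstring above defines the `semantics` payload: each entry is added unless its `action` is `delete`, and replacing a value means deleting the old tag and adding the new one in the same request. A hypothetical client call against this endpoint (the instance URL, IDs, token and tag keys are placeholders, not values from this release):

```python
import requests

# Hypothetical instance and IDs; the payload shape follows the
# AnnotationPatchParameter docstring above.
url = (
    "https://panoramax.example.com/api/collections/<collectionId>"
    "/items/<itemId>/annotations/<annotationId>"
)
payload = {
    "semantics": [
        {"key": "some_key", "value": "some_value", "action": "delete"},  # drop the old value
        {"key": "some_key", "value": "some_new_value"},                  # then add the new one
    ]
}
r = requests.patch(url, json=payload, headers={"Authorization": "Bearer <token>"})
if r.status_code == 204:
    print("annotation has no tags left, it was deleted")
else:
    print(r.json())  # updated annotation metadata
```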
geovisio/web/auth.py CHANGED
@@ -94,8 +94,9 @@ def auth():
  if not tos_accepted and current_app.config["API_ENFORCE_TOS_ACCEPTANCE"]:
  args = {"next_url": next_url} if next_url else None
  next_url = current_app.config["API_WEBSITE_URL"].tos_validation_page(args)
- else:
- next_url = next_url or "/"
+
+ if next_url is None:
+ next_url = "/"
 
  response = flask.make_response(redirect(next_url))
 
geovisio/web/collections.py CHANGED
@@ -1,14 +1,16 @@
+ from copy import deepcopy
  from enum import Enum
  from attr import dataclass
  from geovisio import errors, utils, db
  from geovisio.utils import auth, sequences
  from geovisio.utils.params import validation_error
- from geovisio.utils.semantics import SemanticTagUpdate, Entity, EntityType, update_tags
+ from geovisio.utils.semantics import Entity, EntityType, update_tags
+ from geovisio.utils.tags import SemanticTagUpdate
  from geovisio.web.params import (
  parse_datetime,
  parse_datetime_interval,
  parse_bbox,
- parse_filter,
+ parse_collection_filter,
  parse_sortby,
  parse_collections_limit,
  )
@@ -16,6 +18,7 @@ from geovisio.utils.sequences import (
  STAC_FIELD_MAPPINGS,
  CollectionsRequest,
  get_collections,
+ get_dataset_bounds,
  )
  from geovisio.utils.fields import SortBy, SortByField, SQLDirection, Bounds, BBox
  from geovisio.web.rss import dbSequencesToGeoRSS
@@ -248,12 +251,18 @@ def getAllCollections():
  if not sortBy:
  direction = SQLDirection.DESC if format == "rss" else SQLDirection.ASC
  sortBy = SortBy(fields=[SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=direction)])
+ # we always add the creation date fields in the sort list (after the selected ones), this will we'll get the `created` bounds of the dataset
+ # we'll also get
+ if not any(s.field == STAC_FIELD_MAPPINGS["created"] for s in sortBy.fields):
+ sortBy.fields.append(SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=SQLDirection.ASC))
+ if not any(s.field == STAC_FIELD_MAPPINGS["id"] for s in sortBy.fields):
+ sortBy.fields.append(SortByField(field=STAC_FIELD_MAPPINGS["id"], direction=SQLDirection.ASC))
 
  collection_request = CollectionsRequest(sort_by=sortBy)
 
  # Filter parameter
- collection_request.user_filter = parse_filter(request.args.get("filter"))
- collection_request.pagination_filter = parse_filter(request.args.get("page"))
+ collection_request.user_filter = parse_collection_filter(request.args.get("filter"))
+ collection_request.pagination_filter = parse_collection_filter(request.args.get("page"))
 
  # Limit parameter
  collection_request.limit = parse_collections_limit(request.args.get("limit"))
@@ -298,14 +307,14 @@ def getAllCollections():
  },
  ]
 
- with db.cursor(current_app, row_factory=dict_row) as cursor:
- stats = cursor.execute("SELECT min(inserted_at) as min, max(inserted_at) as max FROM sequences").fetchone()
- if stats is None:
+ with db.conn(current_app) as conn:
+ datasetBounds = get_dataset_bounds(conn, collection_request.sort_by, additional_filters=collection_request.user_filter)
+ if datasetBounds is None:
  return ({"collections": [], "links": links}, 200, {"Content-Type": "application/json"})
- datasetBounds = Bounds(min=stats["min"], max=stats["max"])
- if collection_request.created_after and collection_request.created_after > datasetBounds.max:
+ creation_date_index = collection_request.sort_by.get_field_index("created")
+ if collection_request.created_after and collection_request.created_after > datasetBounds.last[creation_date_index]:
  raise errors.InvalidAPIUsage(_("There is no collection created after %(d)s", d=collection_request.created_after))
- if collection_request.created_before and collection_request.created_before < datasetBounds.min:
+ if collection_request.created_before and collection_request.created_before < datasetBounds.first[creation_date_index]:
  raise errors.InvalidAPIUsage(_("There is no collection created before %(d)s", d=collection_request.created_before))
 
  db_collections = get_collections(collection_request)
@@ -322,10 +331,9 @@ def getAllCollections():
  pagination_links = sequences.get_pagination_links(
  route="stac_collections.getAllCollections",
  routeArgs={"limit": collection_request.limit},
- field=sortBy.fields[0].field.stac,
- direction=sortBy.fields[0].direction,
+ sortBy=sortBy,
  datasetBounds=datasetBounds,
- dataBounds=db_collections.query_first_order_bounds,
+ dataBounds=db_collections.query_bounds,
  additional_filters=additional_filters,
  )
 
@@ -402,7 +410,7 @@ def getCollection(collectionId):
  SELECT sequence_id, json_agg(json_strip_nulls(json_build_object(
  'key', key,
  'value', value
- ))) AS semantics
+ )) ORDER BY key, value) AS semantics
  FROM sequences_semantics
  GROUP BY sequence_id
  ) t ON t.sequence_id = s.id
@@ -607,7 +615,7 @@ If unset, sort order is unchanged."""
  if relHeading < -180 or relHeading > 180:
  raise ValueError()
  return relHeading
- except ValueError:
+ except (ValueError, TypeError):
  raise errors.InvalidAPIUsage(
  _("Relative heading is not valid, should be an integer in degrees from -180 to 180"), status_code=400
  )
@@ -629,7 +637,7 @@ def patchCollection(collectionId, account):
  ---
  tags:
  - Editing
- - Tags
+ - Semantics
  parameters:
  - name: collectionId
  in: path
@@ -749,13 +757,7 @@ def patchCollection(collectionId, account):
  sqlUpdates.append(SQL("last_account_to_edit = %(account)s"))
 
  cursor.execute(
- SQL(
- """
- UPDATE sequences
- SET {updates}
- WHERE id = %(id)s
- """
- ).format(updates=SQL(", ").join(sqlUpdates)),
+ SQL("UPDATE sequences SET {updates} WHERE id = %(id)s").format(updates=SQL(", ").join(sqlUpdates)),
  sqlParams,
  )
 
@@ -924,7 +926,12 @@ def send_collections_as_csv(collection_request: CollectionsRequest):
  raise errors.InvalidAPIUsage(_("CSV export does not support pagination"), status_code=400)
  if collection_request.filters():
  raise errors.InvalidAPIUsage(_("CSV export does not support filters"), status_code=400)
- if collection_request.sort_by != SortBy(fields=[SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=SQLDirection.DESC)]):
+ if collection_request.sort_by != SortBy(
+ fields=[
+ SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=SQLDirection.DESC),
+ SortByField(field=STAC_FIELD_MAPPINGS["id"], direction=SQLDirection.ASC),
+ ]
+ ):
  raise errors.InvalidAPIUsage(_("CSV export does not support sorting by anything but creation date"), status_code=400)
 
  def generate_csv():
@@ -957,7 +964,7 @@ SELECT
  s.computed_gps_accuracy AS computed_gps_accuracy
  FROM sequences s
  WHERE {filter}
- ORDER BY s.inserted_at DESC
+ ORDER BY s.inserted_at DESC, id ASC
  ) TO STDOUT CSV HEADER"""
  ).format(filter=SQL(" AND ").join(filters)),
  params,
@@ -966,7 +973,7 @@ ORDER BY s.inserted_at DESC
  for a in copy:
  yield bytes(a)
 
- return stream_with_context(generate_csv()), {"Content-Disposition": "attachment"}
+ return stream_with_context(generate_csv()), {"Content-Type": "text/csv", "Content-Disposition": "attachment"}
 
 
  @bp.route("/users/<uuid:userId>/collection")
@@ -1033,13 +1040,17 @@ def getUserCollection(userId, userIdMatchesAccount=False):
  if not sortBy:
  sortBy = SortBy(fields=[SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=SQLDirection.DESC)])
 
+ if not any(s.field == STAC_FIELD_MAPPINGS["created"] for s in sortBy.fields):
+ sortBy.fields.append(SortByField(field=STAC_FIELD_MAPPINGS["created"], direction=SQLDirection.ASC))
+ if not any(s.field == STAC_FIELD_MAPPINGS["id"] for s in sortBy.fields):
+ sortBy.fields.append(SortByField(field=STAC_FIELD_MAPPINGS["id"], direction=SQLDirection.ASC))
  collection_request = CollectionsRequest(sort_by=sortBy, userOwnsAllCollections=userIdMatchesAccount)
 
  # Filter parameter
- collection_request.user_filter = parse_filter(request.args.get("filter"))
+ collection_request.user_filter = parse_collection_filter(request.args.get("filter"))
 
  # Filters added by the pagination
- collection_request.pagination_filter = parse_filter(request.args.get("page"))
+ collection_request.pagination_filter = parse_collection_filter(request.args.get("page"))
 
  # Limit parameter
  # if not specified, the default with CSV it 1000. if there are more, the paginated API should be used
@@ -1095,8 +1106,6 @@ def getUserCollection(userId, userIdMatchesAccount=False):
  MAX(LEAST(90, ST_YMax(s.bbox))) AS maxy,
  MIN(s.inserted_at) AS created,
  MAX(s.updated_at) AS updated,
- MIN({order_column}) AS min_order,
- MAX({order_column}) AS max_order,
  ROUND(SUM(ST_Length(s.geom::geography))) / 1000 AS length_km
  FROM sequences s
  WHERE {filter}
@@ -1115,6 +1124,13 @@ def getUserCollection(userId, userIdMatchesAccount=False):
  else:
  raise errors.InvalidAPIUsage(_("No matching sequences found"), 404)
 
+ datasetBounds = get_dataset_bounds(
+ cursor.connection,
+ collection_request.sort_by,
+ additional_filters=SQL(" AND ").join(meta_filter),
+ additional_filters_params={"account": userId},
+ )
+
  collections = get_collections(collection_request)
 
  sequences_links = [
@@ -1164,10 +1180,9 @@ def getUserCollection(userId, userIdMatchesAccount=False):
  pagination_links = sequences.get_pagination_links(
  route="stac_collections.getUserCollection",
  routeArgs={"userId": str(userId), "limit": collection_request.limit},
- field=sortBy.fields[0].field.stac,
- direction=sortBy.fields[0].direction,
- datasetBounds=Bounds(min=meta_collection["min_order"], max=meta_collection["max_order"]),
- dataBounds=collections.query_first_order_bounds,
+ sortBy=sortBy,
+ datasetBounds=datasetBounds,
+ dataBounds=collections.query_bounds,
  additional_filters=additional_filters,
  )
 
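A side note on the `created` and `id` sort fields that `getAllCollections` and `getUserCollection` now append to `sortBy`: keyset pagination resumes "after the last sort key seen", which is only well defined when the sort key is unique. With `created` alone, two sequences inserted at the same timestamp could be skipped or duplicated across pages; appending `id` as a tiebreaker gives a total order. A minimal sketch of the idea, independent of GeoVisio's own SortBy/Bounds types:

```python
# Why a unique tiebreaker (id) matters for keyset pagination: the "resume after
# the last (created, id) seen" bookmark is only unambiguous with a total order.
rows = [
    {"id": "b", "created": "2024-05-01T10:00:00Z"},
    {"id": "a", "created": "2024-05-01T10:00:00Z"},  # same timestamp as "b"
    {"id": "c", "created": "2024-05-02T08:00:00Z"},
]
rows.sort(key=lambda r: (r["created"], r["id"]))  # sort by created, then id

last_of_page_1 = (rows[1]["created"], rows[1]["id"])  # bookmark after a 2-item page
page_2 = [r for r in rows if (r["created"], r["id"]) > last_of_page_1]
print([r["id"] for r in page_2])  # ['c'] -- nothing skipped, nothing repeated
```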
geovisio/web/configuration.py CHANGED
@@ -29,7 +29,7 @@ def configuration():
  "name": _get_translated(apiSum.name, userLang),
  "description": _get_translated(apiSum.description, userLang),
  "geo_coverage": _get_translated(apiSum.geo_coverage, userLang),
- "logo": apiSum.logo,
+ "logo": str(apiSum.logo),
  "color": str(apiSum.color),
  "email": apiSum.email,
  "auth": _auth_configuration(),
@@ -53,6 +53,7 @@ def _auth_configuration():
  return {
  "enabled": True,
  "user_profile": {"url": auth.oauth_provider.user_profile_page_url()},
+ "registration_is_open": flask.current_app.config["API_REGISTRATION_IS_OPEN"],
  "enforce_tos_acceptance": flask.current_app.config["API_ENFORCE_TOS_ACCEPTANCE"],
  }