geovisio 2.7.0-py3-none-any.whl → 2.8.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +11 -3
- geovisio/admin_cli/__init__.py +3 -1
- geovisio/admin_cli/cleanup.py +2 -2
- geovisio/admin_cli/user.py +75 -0
- geovisio/config_app.py +87 -4
- geovisio/templates/main.html +2 -2
- geovisio/templates/viewer.html +3 -3
- geovisio/translations/da/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/da/LC_MESSAGES/messages.po +850 -0
- geovisio/translations/de/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/de/LC_MESSAGES/messages.po +235 -2
- geovisio/translations/el/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/el/LC_MESSAGES/messages.po +685 -0
- geovisio/translations/en/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/en/LC_MESSAGES/messages.po +244 -153
- geovisio/translations/eo/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/eo/LC_MESSAGES/messages.po +790 -0
- geovisio/translations/es/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fi/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/fr/LC_MESSAGES/messages.po +40 -3
- geovisio/translations/hu/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/hu/LC_MESSAGES/messages.po +773 -0
- geovisio/translations/it/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/it/LC_MESSAGES/messages.po +875 -0
- geovisio/translations/ja/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/ja/LC_MESSAGES/messages.po +719 -0
- geovisio/translations/ko/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/messages.pot +225 -148
- geovisio/translations/nl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/nl/LC_MESSAGES/messages.po +24 -16
- geovisio/translations/pl/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/pl/LC_MESSAGES/messages.po +727 -0
- geovisio/translations/zh_Hant/LC_MESSAGES/messages.mo +0 -0
- geovisio/translations/zh_Hant/LC_MESSAGES/messages.po +719 -0
- geovisio/utils/auth.py +80 -8
- geovisio/utils/link.py +3 -2
- geovisio/utils/model_query.py +55 -0
- geovisio/utils/pictures.py +29 -62
- geovisio/utils/semantics.py +120 -0
- geovisio/utils/sequences.py +30 -23
- geovisio/utils/tokens.py +5 -3
- geovisio/utils/upload_set.py +87 -64
- geovisio/utils/website.py +50 -0
- geovisio/web/annotations.py +17 -0
- geovisio/web/auth.py +9 -5
- geovisio/web/collections.py +235 -63
- geovisio/web/configuration.py +17 -1
- geovisio/web/docs.py +99 -54
- geovisio/web/items.py +233 -100
- geovisio/web/map.py +129 -31
- geovisio/web/pages.py +240 -0
- geovisio/web/params.py +17 -0
- geovisio/web/prepare.py +165 -0
- geovisio/web/stac.py +17 -4
- geovisio/web/tokens.py +14 -4
- geovisio/web/upload_set.py +19 -10
- geovisio/web/users.py +176 -44
- geovisio/workers/runner_pictures.py +75 -50
- {geovisio-2.7.0.dist-info → geovisio-2.8.0.dist-info}/METADATA +6 -5
- geovisio-2.8.0.dist-info/RECORD +89 -0
- {geovisio-2.7.0.dist-info → geovisio-2.8.0.dist-info}/WHEEL +1 -1
- geovisio-2.7.0.dist-info/RECORD +0 -66
- {geovisio-2.7.0.dist-info → geovisio-2.8.0.dist-info}/LICENSE +0 -0
geovisio/utils/upload_set.py
CHANGED
@@ -126,16 +126,35 @@ class FileRejectionStatus(Enum):
     """other_error means there was an error that is not related to the picture itself"""


+class FileRejectionDetails(BaseModel):
+
+    missing_fields: List[str]
+    """Mandatory metadata missing from the file. Metadata can be `datetime` or `location`."""
+
+
+class FileRejection(BaseModel):
+    """Details about a file rejection"""
+
+    reason: str
+    severity: FileRejectionStatusSeverity
+    message: Optional[str]
+    details: Optional[FileRejectionDetails]
+
+    model_config = ConfigDict(use_enum_values=True, use_attribute_docstrings=True)
+
+
 class UploadSetFile(BaseModel):
     """File uploaded in an UploadSet"""

     picture_id: Optional[UUID] = None
+    """ID of the picture this file belongs to. Can only be seen by the owner of the File"""
     file_name: str
     content_md5: Optional[UUID] = None
     inserted_at: datetime
     upload_set_id: UUID = Field(..., exclude=True)
     rejection_status: Optional[FileRejectionStatus] = Field(None, exclude=True)
     rejection_message: Optional[str] = Field(None, exclude=True)
+    rejection_details: Optional[Dict[str, Any]] = Field(None, exclude=True)
     file_type: Optional[FileType] = None
     size: Optional[int] = None

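The new `FileRejection`/`FileRejectionDetails` models combine `use_attribute_docstrings` (the docstring written under an attribute becomes that field's description in the generated schema) with `use_enum_values` (enum members are stored and serialized as their values). A minimal sketch of both options, assuming Pydantic 2.7 or later; the `Color`/`Example` names are made up and not part of geovisio:

```python
from enum import Enum

from pydantic import BaseModel, ConfigDict


class Color(Enum):
    red = "red"
    blue = "blue"


class Example(BaseModel):
    color: Color
    note: str
    """Free-form note attached to the item."""

    model_config = ConfigDict(use_enum_values=True, use_attribute_docstrings=True)


# The attribute docstring becomes the field's description in the JSON schema.
print(Example.model_json_schema()["properties"]["note"]["description"])
# With use_enum_values, the enum member is kept as its plain value.
print(Example(color=Color.red, note="hi").model_dump())  # {'color': 'red', 'note': 'hi'}
```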
@@ -148,7 +167,7 @@ class UploadSetFile(BaseModel):

     @computed_field
     @property
-    def rejected(self) -> Optional[
+    def rejected(self) -> Optional[FileRejection]:
         if self.rejection_status is None:
             return None
         msg = None
@@ -168,11 +187,7 @@ class UploadSetFile(BaseModel):
             severity = FileRejectionStatusSeverity.error
         else:
             msg = self.rejection_message
-        return {
-            "reason": self.rejection_status,
-            "severity": severity,
-            "message": msg,
-        }
+        return FileRejection(reason=self.rejection_status, severity=severity, message=msg, details=self.rejection_details)

     @field_serializer("content_md5")
     def serialize_md5(self, md5: UUID, _info):
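`rejected` stays a `@computed_field` property, so the typed `FileRejection` it now returns is included whenever the model is dumped, exactly like the plain dict was before. A stripped-down sketch of that serialization pattern (illustrative `File` class, not the geovisio model):

```python
from typing import Optional

from pydantic import BaseModel, computed_field


class File(BaseModel):
    rejection_status: Optional[str] = None

    @computed_field  # exposed in model_dump()/JSON output, read-only
    @property
    def rejected(self) -> Optional[dict]:
        if self.rejection_status is None:
            return None
        return {"reason": self.rejection_status}


print(File(rejection_status="invalid_file").model_dump())
# {'rejection_status': 'invalid_file', 'rejected': {'reason': 'invalid_file'}}
```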
@@ -199,7 +214,7 @@ def get_simple_upload_set(id: UUID) -> Optional[UploadSet]:
     """Get the DB representation of an UploadSet, without associated collections and statuses"""
     u = db.fetchone(
         current_app,
-        SQL("SELECT * FROM upload_sets WHERE id = %(id)s
+        SQL("SELECT * FROM upload_sets WHERE id = %(id)s"),
         {"id": id},
         row_factory=class_row(UploadSet),
     )
@@ -310,7 +325,7 @@ SELECT u.*,
     ) AS associated_collections
 FROM upload_sets u
 LEFT JOIN upload_set_statuses us on us.upload_set_id = u.id
-WHERE u.id = %(id)s
+WHERE u.id = %(id)s"""
         ),
         {"id": id},
         row_factory=class_row(UploadSet),
@@ -384,7 +399,7 @@ def list_upload_sets(account_id: UUID, limit: int = 100, filter: Optional[str] =
         WHERE p.upload_set_id = u.id
     ) AS nb_items
 FROM upload_sets u
-WHERE account_id = %(account_id)s AND
+WHERE account_id = %(account_id)s AND {filter}
 ORDER BY created_at ASC
 LIMIT %(limit)s
 """
@@ -434,16 +449,25 @@ def dispatch(upload_set_id: UUID):
             p.heading as heading,
             p.metadata->>'originalFileName' as file_name,
             p.metadata,
-            s.id as sequence_id
+            s.id as sequence_id,
+            f is null as has_no_file
         FROM pictures p
         LEFT JOIN sequences_pictures sp ON sp.pic_id = p.id
         LEFT JOIN sequences s ON s.id = sp.seq_id
+        LEFT JOIN files f ON f.picture_id = p.id
         WHERE p.upload_set_id = %(upload_set_id)s"""
         ),
         {"upload_set_id": upload_set_id},
     ).fetchall()

+    # there is currently a bug where 2 pictures can be uploaded for the same file, so only 1 is associated to it.
+    # we want to delete one of them
+    # Those duplicates happen when a client send an upload that timeouts, but the client retries the upload and the server is not aware of this timeout (the connection is not closed).
+    # Note: later, if we are confident the bug has been removed, we might clean this code.
+    pics_to_delete_bug = [p["id"] for p in db_pics if p["has_no_file"]]
+    db_pics = [p for p in db_pics if p["has_no_file"] is False]  # pictures without files will be deleted, we don't need them
     pics_by_filename = {p["file_name"]: p for p in db_pics}
+
     pics = [
         geopic_sequence.Picture(
             p["file_name"],
@@ -473,9 +497,12 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
     )
     reused_sequence = set()

-
+    pics_to_delete_duplicates = [pics_by_filename[p.filename]["id"] for p in report.duplicate_pictures or []]
+    pics_to_delete = pics_to_delete_duplicates + pics_to_delete_bug
     if pics_to_delete:
-        logging.debug(
+        logging.debug(
+            f"For uploadset '{upload_set_id}', nb duplicate pictures {len(pics_to_delete_duplicates)} {f' and {len(pics_to_delete_bug)} pictures without files' if pics_to_delete_bug else ''}"
+        )
         logging.debug(
             f"For uploadset '{upload_set_id}', duplicate pictures {[p.filename for p in report.duplicate_pictures or []]}"
         )
@@ -490,18 +517,11 @@ WHERE p.upload_set_id = %(upload_set_id)s"""
                     "UPDATE files SET rejection_status = 'capture_duplicate' WHERE picture_id IN (select picture_id from tmp_duplicates)"
                 )
             )
-
-
-            """INSERT INTO job_queue (picture_id, task)
-            SELECT picture_id, 'delete'
-            FROM tmp_duplicates
-            ON CONFLICT(picture_id) DO UPDATE SET task = 'delete'"""
-            )
-            )
+            # delete all pictures (the DB triggers will also add background jobs to delete the associated files)
+            cursor.execute(SQL("DELETE FROM pictures WHERE id IN (select picture_id FROM tmp_duplicates)"))

-
-
-        for s in report.sequences:
+        number_title = len(report.sequences) > 1
+        for i, s in enumerate(report.sequences, start=1):
             existing_sequence = next(
                 (seq for p in s.pictures if (seq := pics_by_filename[p.filename]["sequence_id"]) not in reused_sequence),
                 None,
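The new `LEFT JOIN files f ... f is null as has_no_file` is a plain anti-join: pictures with no matching `files` row come back with a NULL joined record, and that flag drives the cleanup above. geovisio tests the whole row (`f is null`, a PostgreSQL feature); the self-contained sketch below tests a column instead so it can run on SQLite, and its table names are illustrative only:

```python
import sqlite3

conn = sqlite3.connect(":memory:")
conn.executescript(
    """
    CREATE TABLE pictures (id INTEGER PRIMARY KEY, name TEXT);
    CREATE TABLE files (picture_id INTEGER REFERENCES pictures(id));
    INSERT INTO pictures VALUES (1, 'a.jpg'), (2, 'b.jpg');
    INSERT INTO files VALUES (1);  -- picture 2 has no file row
    """
)

# LEFT JOIN + IS NULL flags the pictures that have no associated file (anti-join)
rows = conn.execute(
    """
    SELECT p.id, p.name, f.picture_id IS NULL AS has_no_file
    FROM pictures p
    LEFT JOIN files f ON f.picture_id = p.id
    ORDER BY p.id
    """
).fetchall()
print(rows)  # [(1, 'a.jpg', 0), (2, 'b.jpg', 1)]
```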
@@ -519,6 +539,7 @@ ON CONFLICT(picture_id) DO UPDATE SET task = 'delete'"""
                 )
                 reused_sequence.add(seq_id)
             else:
+                new_title = f"{db_upload_set.title}{f'-{i}' if number_title else ''}"
                 seq_id = cursor.execute(
                     SQL(
                         """INSERT INTO sequences(account_id, metadata, user_agent)
@@ -527,7 +548,7 @@ RETURNING id"""
                     ),
                     {
                         "account_id": db_upload_set.account_id,
-                        "metadata": Jsonb({"title":
+                        "metadata": Jsonb({"title": new_title}),
                         "user_agent": db_upload_set.user_agent,
                     },
                 ).fetchone()
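When dispatch splits an upload set into several sequences, each newly created sequence now takes the upload set title suffixed with its 1-based index, while a single sequence keeps the bare title. A quick illustration of that naming rule (helper and values are mine, not geovisio code):

```python
def sequence_titles(upload_set_title: str, nb_sequences: int) -> list[str]:
    # mirrors the number_title / f"{title}-{i}" logic added in dispatch()
    number_title = nb_sequences > 1
    return [f"{upload_set_title}{f'-{i}' if number_title else ''}" for i in range(1, nb_sequences + 1)]


print(sequence_titles("Main street", 1))  # ['Main street']
print(sequence_titles("Main street", 3))  # ['Main street-1', 'Main street-2', 'Main street-3']
```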
@@ -557,23 +578,43 @@ def insertFileInDatabase(
     picture_id: Optional[UUID] = None,
     rejection_status: Optional[FileRejectionStatus] = None,
     rejection_message: Optional[str] = None,
+    rejection_details: Optional[Dict[str, Any]] = None,
 ) -> UploadSetFile:
     """Insert a file linked to an UploadSet into the database"""

+    # we check if there is already a file with this name in the upload set with an associated picture.
+    # If there is no picture (because the picture has been rejected), we accept that the file is overridden
+    existing_file = cursor.execute(
+        SQL(
+            """SELECT picture_id, rejection_status
+            FROM files
+            WHERE upload_set_id = %(upload_set_id)s AND file_name = %(file_name)s AND picture_id IS NOT NULL"""
+        ),
+        params={
+            "upload_set_id": upload_set_id,
+            "file_name": file_name,
+        },
+    ).fetchone()
+    if existing_file:
+        raise errors.InvalidAPIUsage(
+            _("A different picture with the same name has already been added to this uploadset"),
+            status_code=409,
+            payload={"existing_item": {"id": existing_file["picture_id"]}},
+        )
+
     f = cursor.execute(
         SQL(
-            """
-
-
-
-
-
-
-
-
-
-            """
+            """INSERT INTO files(
+                upload_set_id, picture_id, file_type, file_name,
+                size, content_md5, rejection_status, rejection_message, rejection_details)
+            VALUES (
+                %(upload_set_id)s, %(picture_id)s, %(type)s, %(file_name)s,
+                %(size)s, %(content_md5)s, %(rejection_status)s, %(rejection_message)s, %(rejection_details)s)
+            ON CONFLICT (upload_set_id, file_name)
+            DO UPDATE SET picture_id = %(picture_id)s, size = %(size)s, content_md5 = %(content_md5)s,
+                rejection_status = %(rejection_status)s, rejection_message = %(rejection_message)s, rejection_details = %(rejection_details)s
+            WHERE files.picture_id IS NULL -- check again that we do not override an existing picture
+            RETURNING *"""
         ),
         params={
             "upload_set_id": upload_set_id,
@@ -584,6 +625,7 @@ def insertFileInDatabase(
             "content_md5": content_md5,
             "rejection_status": rejection_status,
             "rejection_message": rejection_message,
+            "rejection_details": Jsonb(rejection_details),
         },
     )
     return UploadSetFile(**f.fetchone())
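The upsert relies on PostgreSQL's conditional conflict action: `ON CONFLICT ... DO UPDATE ... WHERE` only applies the update when the predicate holds, and `RETURNING` yields no row when it is skipped, so the overwrite of an already-linked file is refused even if the pre-check above raced with a concurrent insert. A stripped-down sketch of that behaviour with psycopg 3 (toy table, placeholder connection string, a reachable PostgreSQL is assumed):

```python
import psycopg  # psycopg 3; "dbname=demo" is a placeholder connection string

with psycopg.connect("dbname=demo") as conn, conn.cursor() as cur:
    cur.execute("CREATE TEMP TABLE files (file_name TEXT PRIMARY KEY, picture_id INT)")
    cur.execute("INSERT INTO files VALUES ('a.jpg', 1)")

    # Conflict on an already-linked file: the WHERE clause blocks the update,
    # RETURNING produces no row, so fetchone() is None.
    cur.execute(
        """INSERT INTO files VALUES ('a.jpg', 2)
           ON CONFLICT (file_name) DO UPDATE SET picture_id = EXCLUDED.picture_id
           WHERE files.picture_id IS NULL
           RETURNING *"""
    )
    print(cur.fetchone())  # None

    # Conflict on an unlinked file: the update goes through and the row is returned.
    cur.execute("INSERT INTO files VALUES ('b.jpg', NULL)")
    cur.execute(
        """INSERT INTO files VALUES ('b.jpg', 3)
           ON CONFLICT (file_name) DO UPDATE SET picture_id = EXCLUDED.picture_id
           WHERE files.picture_id IS NULL
           RETURNING *"""
    )
    print(cur.fetchone())  # ('b.jpg', 3)
```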
@@ -602,6 +644,7 @@ def get_upload_set_files(upload_set_id: UUID) -> UploadSetFiles:
         content_md5,
         rejection_status,
         rejection_message,
+        rejection_details,
         picture_id,
         inserted_at
     FROM files
@@ -618,37 +661,17 @@ def delete(upload_set: UploadSet):
     """Delete an UploadSet"""
     logging.info(f"Asking for deletion of uploadset {upload_set.id}")
     with db.conn(current_app) as conn:
+        # clean job queue, to ensure no async runner are currently processing pictures/sequences/upload_sets
+        # Done outside the real deletion transaction to not trigger deadlock
+        conn.execute(SQL("DELETE FROM job_queue WHERE picture_id IN (SELECT id FROM pictures where upload_set_id = %s)"), [upload_set.id])
+        for c in upload_set.associated_collections:
+            conn.execute(SQL("DELETE FROM job_queue WHERE sequence_id = %s"), [c.id])
+
         with conn.transaction(), conn.cursor() as cursor:
             for c in upload_set.associated_collections:
                 # Mark all collections as deleted, but do not delete them
                 # Note: we do not use utils.sequences.delete_collection here, since we also want to remove the pictures not associated to any collection
-                cursor.execute(SQL("DELETE FROM job_queue WHERE sequence_id = %s"), [c.id])
                 cursor.execute(SQL("UPDATE sequences SET status = 'deleted' WHERE id = %s"), [c.id])

-            #
-            cursor.execute(
-                """
-                INSERT INTO job_queue(picture_id, task)
-                SELECT id, 'delete'
-                FROM pictures
-                WHERE upload_set_id = %(upload_set_id)s
-                ON CONFLICT (picture_id) DO UPDATE SET task = 'delete'""",
-                {"upload_set_id": upload_set.id},
-            )
-
-            # after the task have been added to the queue, we mark all picture for deletion
-            cursor.execute(
-                SQL(
-                    "UPDATE pictures SET status = 'waiting-for-delete' WHERE id IN (SELECT id FROM pictures WHERE upload_set_id = %(upload_set_id)s)"
-                ),
-                {"upload_set_id": upload_set.id},
-            )
-            # we insert the upload set deletion task in the queue after the rest, it will be done once all pictures are deleted
-            cursor.execute(
-                """INSERT INTO job_queue(upload_set_id, task) VALUES (%(upload_set_id)s, 'delete')
-                ON CONFLICT (upload_set_id) DO UPDATE SET task = 'delete'""",
-                {"upload_set_id": upload_set.id},
-            )
-
-            # and we mark it as deleted so it will disapear from the responses even if all the pictures are not yet deleted
-            cursor.execute("UPDATE upload_sets SET deleted = true WHERE id = %(upload_set_id)s", {"upload_set_id": upload_set.id})
+            # after the task have been added to the queue, we delete the upload set, and this will delete all pictures associated to it
+            cursor.execute(SQL("DELETE FROM upload_sets WHERE id = %(upload_set_id)s"), {"upload_set_id": upload_set.id})
geovisio/utils/website.py
ADDED
@@ -0,0 +1,50 @@
+from typing import Optional, Dict
+
+from flask import url_for
+
+from geovisio import web
+
+WEBSITE_UNDER_SAME_HOST = "same-host"
+
+TOKEN_ACCEPTED_PAGE = "token-accepted"
+TOS_VALIDATION_PAGE = "tos-validation"
+
+
+class Website:
+    """Website associated to the API.
+    This wrapper will define the routes we expect from the website.
+
+    We should limit the interraction from the api to the website, but for some flow (especially auth flows), it's can be useful to redirect to website's page
+
+    If the url is:
+    * set to `false`, there is no associated website
+    * set to `same-host`, the website is assumed to be on the same host as the API (and will respect the host of the current request)
+    * else it should be a valid url
+    """
+
+    def __init__(self, website_url: str):
+        if website_url == WEBSITE_UNDER_SAME_HOST:
+            self.url = WEBSITE_UNDER_SAME_HOST
+        elif website_url == "false":
+            self.url = None
+        elif website_url.startswith("http"):
+            self.url = website_url
+            if not self.url.endswith("/"):
+                self.url += "/"
+        else:
+            raise Exception(
+                "API_WEBSITE_URL should either be `same-host` (and the website will be assumed to be on the same host), set to `false` if there is no website, or a valid URL"
+            )
+
+    def _to_url(self, route: str, params: Optional[Dict[str, str]] = None):
+        base_url = self.url if self.url != WEBSITE_UNDER_SAME_HOST else url_for("index", _external=True)
+
+        from urllib.parse import urlencode
+
+        return f"{base_url}{route}{f'?{urlencode(params)}' if params else ''}"
+
+    def tos_validation_page(self, params: Optional[Dict[str, str]] = None):
+        return self._to_url(TOS_VALIDATION_PAGE, params)
+
+    def cli_token_accepted_page(self, params: Optional[Dict[str, str]] = None):
+        return self._to_url(TOKEN_ACCEPTED_PAGE, params)
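A short usage sketch of the new helper, based only on the class above (the website URL is a made-up example, and the `API_WEBSITE_URL` wiring in `config_app.py` is not shown in this excerpt):

```python
from geovisio.utils.website import Website

# An explicit website URL: pages are resolved against it, query parameters appended.
site = Website("https://panoramax.example.org")  # hypothetical website
print(site.tos_validation_page({"next_url": "/my-settings"}))
# https://panoramax.example.org/tos-validation?next_url=%2Fmy-settings

# "false" means no website is associated with the API.
print(Website("false").url)  # None

# "same-host" defers to the host of the current request,
# so resolving a page needs an active Flask request context (url_for is used).
same_host = Website("same-host")
```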
geovisio/web/annotations.py
ADDED
@@ -0,0 +1,17 @@
+from geovisio.utils import auth
+from psycopg.rows import dict_row, class_row
+from psycopg.sql import SQL
+from geovisio.utils.semantics import Entity, EntityType, SemanticTagUpdate, update_tags
+from geovisio.web.utils import accountIdOrDefault
+from psycopg.types.json import Jsonb
+from geovisio.utils import db
+from geovisio.utils.params import validation_error
+from geovisio import errors
+from pydantic import BaseModel, ConfigDict, ValidationError
+from uuid import UUID
+from typing import List, Optional
+from flask import Blueprint, request, current_app
+from flask_babel import gettext as _
+
+
+bp = Blueprint("annotations", __name__, url_prefix="/api")
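The excerpt above only declares the blueprint; its routes become reachable under `/api` once the blueprint is registered on the Flask app. A generic illustration of that mechanism (the `hello` route is made up, not part of geovisio):

```python
from flask import Blueprint, Flask

bp = Blueprint("annotations", __name__, url_prefix="/api")


@bp.route("/hello")  # hypothetical route, only to show URL resolution
def hello():
    return {"message": "hi"}


app = Flask(__name__)
app.register_blueprint(bp)  # every route on bp is now served under the /api prefix

with app.test_client() as client:
    print(client.get("/api/hello").get_json())  # {'message': 'hi'}
```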
geovisio/web/auth.py
CHANGED
@@ -70,7 +70,7 @@ def auth():
     oauth_info = utils.auth.oauth_provider.get_user_oauth_info(tokenResponse)
     with db.cursor(current_app) as cursor:
         res = cursor.execute(
-            "INSERT INTO accounts (name, oauth_provider, oauth_id) VALUES (%(name)s, %(provider)s, %(id)s) ON CONFLICT (oauth_provider, oauth_id) DO UPDATE SET name = %(name)s RETURNING id, name",
+            "INSERT INTO accounts (name, oauth_provider, oauth_id) VALUES (%(name)s, %(provider)s, %(id)s) ON CONFLICT (oauth_provider, oauth_id) DO UPDATE SET name = %(name)s RETURNING id, name, tos_accepted",
             {
                 "provider": utils.auth.oauth_provider.name,
                 "id": oauth_info.id,
@@ -79,21 +79,25 @@ def auth():
         ).fetchone()
         if res is None:
             raise Exception("Impossible to insert user in database")
-        id, name = res
+        id, name, tos_accepted = res
         account = Account(
             id=str(id),  # convert uuid to string for serialization
             name=name,
             oauth_provider=utils.auth.oauth_provider.name,
             oauth_id=oauth_info.id,
+            tos_accepted=tos_accepted,
         )
         session[ACCOUNT_KEY] = account.model_dump(exclude_none=True)
         session.permanent = True

         next_url = session.pop(NEXT_URL_KEY, None)
-        if
-
+        if not tos_accepted and current_app.config["API_ENFORCE_TOS_ACCEPTANCE"]:
+            args = {"next_url": next_url} if next_url else None
+            next_url = current_app.config["API_WEBSITE_URL"].tos_validation_page(args)
         else:
-
+            next_url = next_url or "/"
+
+        response = flask.make_response(redirect(next_url))

         # also store id/name in cookies for the front end to use those
         max_age = current_app.config["PERMANENT_SESSION_LIFETIME"]