geovisio 2.5.0__py3-none-any.whl → 2.6.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- geovisio/__init__.py +3 -2
- geovisio/admin_cli/__init__.py +2 -2
- geovisio/admin_cli/db.py +11 -0
- geovisio/config_app.py +25 -0
- geovisio/templates/main.html +2 -2
- geovisio/utils/pictures.py +4 -3
- geovisio/utils/sequences.py +56 -32
- geovisio/web/auth.py +15 -2
- geovisio/web/collections.py +38 -36
- geovisio/web/docs.py +51 -2
- geovisio/web/items.py +49 -45
- geovisio/web/map.py +307 -91
- geovisio/web/stac.py +13 -0
- geovisio/web/tokens.py +7 -3
- geovisio/web/utils.py +10 -0
- geovisio/workers/runner_pictures.py +47 -0
- geovisio-2.6.0.dist-info/METADATA +92 -0
- {geovisio-2.5.0.dist-info → geovisio-2.6.0.dist-info}/RECORD +20 -20
- geovisio-2.5.0.dist-info/METADATA +0 -115
- {geovisio-2.5.0.dist-info → geovisio-2.6.0.dist-info}/LICENSE +0 -0
- {geovisio-2.5.0.dist-info → geovisio-2.6.0.dist-info}/WHEEL +0 -0
geovisio/__init__.py
CHANGED

@@ -1,6 +1,6 @@
 """GeoVisio API - Main"""

-__version__ = "2.5.0"
+__version__ = "2.6.0"

 import os
 from flask import Flask, jsonify, stream_template, send_from_directory, redirect

@@ -60,7 +60,8 @@ def create_app(test_config=None, app=None):
     app.config["FILESYSTEMS"] = filesystems.openFilesystemsFromConfig(app.config)

     # Check database connection and update its schema if needed
-    db_migrations.update_db_schema(app.config["DB_URL"])
+    if app.config.get("DB_CHECK_SCHEMA"):
+        db_migrations.update_db_schema(app.config["DB_URL"])

     if app.config.get("OAUTH_PROVIDER"):
         utils.auth.make_auth(app)
geovisio/admin_cli/__init__.py
CHANGED

@@ -58,8 +58,8 @@ def cleanup_cmd(sequencesids, full, database, cache, permanent_pictures):
 @bp.cli.command("process-sequences")
 @with_appcontext
 def process_sequences():
-    """Deprecated entry point, use https://gitlab.com/…
-    logging.error("This function has been deprecated, use https://gitlab.com/…
+    """Deprecated entry point, use https://gitlab.com/panoramax/clients/cli to upload a sequence instead"""
+    logging.error("This function has been deprecated, use https://gitlab.com/panoramax/clients/cli to upload a sequence instead.")
     logging.error(
         "To upload a sequence with this tool, install it with `pip install geovisio_cli`, then run:\ngeovisio upload --path <directory> --api-url <api-url>"
     )
geovisio/admin_cli/db.py
CHANGED

@@ -1,7 +1,9 @@
 from flask import Blueprint, current_app
 from flask.cli import with_appcontext
 import click
+import psycopg
 from geovisio import db_migrations
+from geovisio.utils import sequences

 bp = Blueprint("db", __name__)

@@ -25,3 +27,12 @@ def upgrade():
 def rollback(all):
     """Rollbacks the latest database migration"""
     db_migrations.rollback_db_schema(current_app.config["DB_URL"], all)
+
+
+@bp.cli.command("refresh")
+@with_appcontext
+def refresh():
+    """Refresh cached data (pictures_grid)"""
+    with psycopg.connect(current_app.config["DB_URL"]) as db:
+        sequences.update_pictures_grid(db)
+        db.commit()
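Since Flask groups a blueprint's CLI commands under the blueprint name, the new command above should be reachable as `db refresh` on the application's CLI. Below is a minimal sketch of invoking it through Flask's test CLI runner; it assumes `create_app()` can be built with a reachable, configured database.

```python
# Sketch only: exercises the new `db refresh` command through Flask's CLI
# test runner, assuming the app is configured with a reachable database.
from geovisio import create_app

app = create_app()                      # builds the Flask app as in geovisio/__init__.py
runner = app.test_cli_runner()          # Click runner bound to this app
result = runner.invoke(args=["db", "refresh"])
print(result.exit_code, result.output)  # 0 on success
```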
geovisio/config_app.py
CHANGED

@@ -3,6 +3,8 @@ import os.path
 from urllib.parse import urlparse
 import datetime
 import logging
+from typing import Optional
+import croniter


 class DefaultConfig:

@@ -14,9 +16,14 @@
     PICTURE_PROCESS_DERIVATES_STRATEGY = "ON_DEMAND"
     API_BLUR_URL = None
     PICTURE_PROCESS_THREADS_LIMIT = 1
+    DB_CHECK_SCHEMA = True  # If True check the database schema, and do not start the api if not up to date
     API_PICTURES_LICENSE_SPDX_ID = None
     API_PICTURES_LICENSE_URL = None
     DEBUG_PICTURES_SKIP_FS_CHECKS_WITH_PUBLIC_URL = False
+    SESSION_COOKIE_HTTPONLY = False
+    PICTURE_PROCESS_REFRESH_CRON = (
+        "0 2 * * *"  # Background worker will refresh by default some stats at 2 o'clock in the night (local time of the server)
+    )


 def read_config(app, test_config):

@@ -38,6 +45,7 @@
         "DB_USERNAME",
         "DB_PASSWORD",
         "DB_NAME",
+        "DB_CHECK_SCHEMA",
         # API
         "API_BLUR_URL",
         "API_VIEWER_PAGE",

@@ -51,6 +59,7 @@
         # Picture process
         "PICTURE_PROCESS_DERIVATES_STRATEGY",
         "PICTURE_PROCESS_THREADS_LIMIT",
+        "PICTURE_PROCESS_REFRESH_CRON",
         # OAUTH
         "OAUTH_PROVIDER",
         "OAUTH_OIDC_URL",

@@ -106,6 +115,8 @@

     app.config["DB_URL"] = f"postgres://{username}:{passw}@{host}:{port}/{dbname}"

+    app.config["DB_CHECK_SCHEMA"] = _read_bool(app.config, "DB_CHECK_SCHEMA")
+
     if app.config.get("API_BLUR_URL") is not None and len(app.config.get("API_BLUR_URL")) > 0:
         try:
             urlparse(app.config.get("API_BLUR_URL"))

@@ -155,6 +166,9 @@
     if app.config.get("API_PICTURES_LICENSE_SPDX_ID") is None:
         app.config["API_PICTURES_LICENSE_SPDX_ID"] = "proprietary"

+    cron_val = app.config["PICTURE_PROCESS_REFRESH_CRON"]
+    if not croniter.croniter.is_valid(cron_val):
+        raise Exception(f"PICTURE_PROCESS_REFRESH_CRON should be a valid cron syntax, got '{cron_val}'")
     #
     # Add generated config vars
     #

@@ -164,6 +178,17 @@
     app.config["EXECUTOR_PROPAGATE_EXCEPTIONS"] = True  # propagate the excecutor's exceptions, to be able to trace them


+def _read_bool(config, value_name: str) -> Optional[bool]:
+    value = config.get(value_name)
+    if value is None:
+        return value
+    if type(value) == bool:
+        return value
+    if type(value) == str:
+        return value.lower() == "true"
+    raise Exception(f"Configuration {value_name} should either be a boolean or a string, got '{value}'")
+
+
 def _get_threads_limit(param: str) -> int:
     """Computes maximum thread limit depending on environment variables and available CPU.
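As a reading aid, here is a minimal sketch of how the two new validations behave. `croniter` is the dependency imported above; the standalone `read_bool` helper and the sample values are illustrative only and simply mirror the private `_read_bool` shown in the diff.

```python
# Illustrative sketch only; mirrors the validation logic added above.
import croniter

# PICTURE_PROCESS_REFRESH_CRON must be a valid cron expression.
assert croniter.croniter.is_valid("0 2 * * *")        # the new default: every night at 02:00
assert not croniter.croniter.is_valid("every night")  # such a value makes read_config raise at startup


def read_bool(value):
    # Same coercion rule as _read_bool: None passes through, booleans are
    # kept as-is, strings are compared case-insensitively to "true".
    if value is None or isinstance(value, bool):
        return value
    if isinstance(value, str):
        return value.lower() == "true"
    raise Exception(f"expected a boolean or a string, got {value!r}")


assert read_bool("True") is True  # e.g. DB_CHECK_SCHEMA=True coming from the environment
assert read_bool("no") is False   # anything other than "true" disables the schema check
assert read_bool(None) is None    # unset values pass through unchanged
```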
geovisio/templates/main.html
CHANGED

@@ -67,7 +67,7 @@
 +'<div id="viewer" style="width: 500px; height: 300px"></div>\n\n'
 +'<script>\n'
 +'\t// All options available are listed here\n'
-+'\t// https://gitlab.com/…
++'\t// https://gitlab.com/panoramax/clients/web-viewer/-/blob/develop/docs/02_Usage.md\n'
 +'\tvar instance = new GeoVisio.default(\n'
 +'\t\t"viewer",\n'
 +'\t\t"'+baseUrl+'/api",\n'

@@ -85,7 +85,7 @@
 -
 <a href="/api/docs/swagger">API docs</a>
 -
-<a href="https://gitlab.com/…
+<a href="https://gitlab.com/panoramax/clients/web-viewer/-/tree/develop/docs">JS library docs</a>
 -
 <a href="https://gitlab.com/geovisio">Repositories</a>
 </p>
geovisio/utils/pictures.py
CHANGED

@@ -537,6 +537,7 @@ def insertNewPictureInDatabase(db, sequenceId, position, pictureBytes, associate…
     lighterMetadata = dict(filter(lambda v: v[0] not in ["ts", "heading", "lon", "lat", "exif"], metadata.items()))
     if lighterMetadata.get("tagreader_warnings") is not None and len(lighterMetadata["tagreader_warnings"]) == 0:
         del lighterMetadata["tagreader_warnings"]
+    lighterMetadata["tz"] = metadata["ts"].tzname()

     exif = cleanupExif(metadata["exif"])

@@ -545,11 +546,11 @@
     picId = db.execute(
         """
         INSERT INTO pictures (ts, heading, metadata, geom, account_id, exif)
-        VALUES (…
+        VALUES (%s, %s, %s, ST_SetSRID(ST_MakePoint(%s, %s), 4326), %s, %s)
         RETURNING id
         """,
         (
-            metadata["ts"],
+            metadata["ts"].isoformat(),
             metadata["heading"],
             Jsonb(lighterMetadata),
             metadata["lon"],

@@ -603,7 +604,7 @@


 # Note: we don't want to store and expose exif binary fields as they are difficult to use and take a lot of storage in the database (~20% for maker notes only)
-# This list has been queried from real data (cf [this comment](https://gitlab.com/…
+# This list has been queried from real data (cf [this comment](https://gitlab.com/panoramax/server/api/-/merge_requests/241#note_1790580636)).
 # Update this list (and do a sql migration) if new binary fields are added
 BLACK_LISTED_BINARY_EXIF_FIELDS = set(
     [
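For context on the timestamp change: binding the timestamp as an ISO-8601 string and recording `tzname()` keeps the original timezone label alongside the value. A small stdlib-only sketch with made-up values:

```python
# Sketch with made-up values: what the two changed expressions evaluate to
# for a timezone-aware capture timestamp.
from datetime import datetime
from zoneinfo import ZoneInfo

ts = datetime(2023, 7, 1, 14, 30, 12, tzinfo=ZoneInfo("Europe/Paris"))
print(ts.isoformat())  # "2023-07-01T14:30:12+02:00" -> value bound to the ts column
print(ts.tzname())     # "CEST"                      -> stored in the picture metadata as "tz"
```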
geovisio/utils/sequences.py
CHANGED

@@ -12,7 +12,9 @@ from enum import Enum
 from geovisio.utils.fields import FieldMapping, SortBy, SQLDirection, BBox, Bounds
 from geopic_tag_reader import reader
 from pathlib import PurePath
-from geovisio import errors
+from geovisio import errors, utils
+import logging
+import sentry_sdk


 def createSequence(metadata, accountId) -> str:

@@ -138,37 +140,29 @@ def get_collections(request: CollectionsRequest) -> Collections:
         with conn.cursor() as cursor:
             sqlSequencesRaw = SQL(
                 """
-                SELECT
-                […]
-                    SELECT MIN(p.ts) as mints,
-                        MAX(p.ts) as maxts,
-                        COUNT(p.*) AS nbpic
-                    FROM sequences_pictures sp
-                    JOIN pictures p ON sp.pic_id = p.id
-                    WHERE {pic_filter}
-                    GROUP BY sp.seq_id
-                ) sub ON true
+                SELECT
+                    s.id,
+                    s.status,
+                    s.metadata->>'title' AS name,
+                    s.inserted_at AS created,
+                    s.updated_at AS updated,
+                    ST_XMin(s.bbox) AS minx,
+                    ST_YMin(s.bbox) AS miny,
+                    ST_XMax(s.bbox) AS maxx,
+                    ST_YMax(s.bbox) AS maxy,
+                    accounts.name AS account_name,
+                    ST_X(ST_PointN(s.geom, 1)) AS x1,
+                    ST_Y(ST_PointN(s.geom, 1)) AS y1,
+                    s.min_picture_ts AS mints,
+                    s.max_picture_ts AS maxts,
+                    s.nb_pictures AS nbpic,
+                    {status},
+                    s.computed_capture_date AS datetime
+                FROM sequences s
+                LEFT JOIN accounts on s.account_id = accounts.id
+                WHERE {filter}
+                ORDER BY {order1}
+                LIMIT {limit}
                 """
             )
             sqlSequences = sqlSequencesRaw.format(

@@ -480,3 +474,33 @@ def update_headings(
         ), # lots of camera have heading set to 0 for unset heading, so we recompute the heading when it's 0 too, even if this could be a valid value
         {"seq": sequenceId, "diff": relativeHeading, "account": editingAccount},
     )
+
+
+def update_pictures_grid(db) -> bool:
+    """Refreshes the pictures_grid materialized view for an up-to-date view of pictures availability on map.
+
+    Note: the transaction is not commited at the end, you need to commit it or use an autocommit connection.
+
+    Parameters
+    ----------
+    db : psycopg.Connection
+        Database connection
+
+    Returns
+    -------
+    bool : True if the view has been updated else False
+    """
+    logger = logging.getLogger("geovisio.picture_grid")
+    with db.transaction():
+        try:
+            db.execute("SELECT refreshed_at FROM refresh_database FOR UPDATE NOWAIT").fetchone()
+        except psycopg.errors.LockNotAvailable:
+            logger.info("Database refresh already in progress, nothing to do")
+            return False
+
+        with sentry_sdk.start_span(description="Refreshing database") as span:
+            with utils.time.log_elapsed(f"Refreshing database", logger=logger):
+                logger.info("Refreshing database")
+                db.execute("UPDATE refresh_database SET refreshed_at = NOW()")
+                db.execute("REFRESH MATERIALIZED VIEW pictures_grid")
+    return True
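Per the docstring above, the refresh only takes effect once the caller commits. A minimal sketch of a standalone caller follows; the connection string is a placeholder, and autocommit is used instead of the explicit commit done by the new `db refresh` CLI command.

```python
# Sketch only: run the pictures_grid refresh outside the Flask CLI.
# DB_URL is a placeholder DSN pointing at the GeoVisio database.
import psycopg

from geovisio.utils import sequences

DB_URL = "postgres://geovisio:password@localhost:5432/geovisio"  # placeholder

with psycopg.connect(DB_URL, autocommit=True) as db:
    refreshed = sequences.update_pictures_grid(db)
    print("pictures_grid refreshed" if refreshed else "a refresh was already in progress")
```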
geovisio/web/auth.py
CHANGED

@@ -3,8 +3,9 @@ from flask import current_app, url_for, session, redirect, request, jsonify
 import psycopg
 from typing import Any
 from urllib.parse import quote
-from geovisio import utils
+from geovisio import utils, errors
 from geovisio.utils.auth import Account, ACCOUNT_KEY
+from authlib.integrations.base_client.errors import MismatchingStateError

 bp = flask.Blueprint("auth", __name__, url_prefix="/api/auth")

@@ -52,7 +53,19 @@ def auth():
         schema:
           type: string
     """
-    tokenResponse = utils.auth.oauth_provider.client.authorize_access_token()
+    try:
+        tokenResponse = utils.auth.oauth_provider.client.authorize_access_token()
+    except MismatchingStateError as e:
+        raise errors.InternalError(
+            "Impossible to finish authentication flow",
+            payload={
+                "details": {
+                    "error": str(e),
+                    "tips": "You can try to clear your cookies and retry. If the problem persists, contact your instance administrator.",
+                }
+            },
+            status_code=403,
+        )

     oauth_info = utils.auth.oauth_provider.get_user_oauth_info(tokenResponse)
     with psycopg.connect(current_app.config["DB_URL"]) as conn:
geovisio/web/collections.py
CHANGED

@@ -340,19 +340,19 @@ def getCollection(collectionId):
                 s.inserted_at AS created,
                 s.updated_at AS updated,
                 s.current_sort AS current_sort,
-                a…
+                a.*,
+                min_picture_ts AS mints,
+                max_picture_ts AS maxts,
+                nb_pictures AS nbpic
             FROM sequences s
             JOIN accounts ON s.account_id = accounts.id, (
                 SELECT
-                    MIN(ts) as mints,
-                    MAX(ts) as maxts,
                     array_agg(DISTINCT jsonb_build_object(
                         'make', metadata->>'make',
                         'model', metadata->>'model',
                         'focal_length', metadata->>'focal_length',
                         'field_of_view', metadata->>'field_of_view'
-                    )) AS metas,
-                    COUNT(*) AS nbpic
+                    )) AS metas
                 FROM pictures p
                 JOIN sequences_pictures sp ON sp.seq_id = %(id)s AND sp.pic_id = p.id
             ) a

@@ -770,6 +770,18 @@ def getCollectionImportStatus(collectionId):
     with psycopg.connect(current_app.config["DB_URL"], row_factory=dict_row) as conn:
         with conn.cursor() as cursor:
             sequence_status = cursor.execute(
+                SQL(
+                    """SELECT status
+                    FROM sequences
+                    WHERE id = %(seq_id)s
+                    AND (status != 'hidden' OR account_id = %(account)s)-- show deleted sequence here"""
+                ),
+                params,
+            ).fetchone()
+            if sequence_status is None:
+                raise errors.InvalidAPIUsage("Sequence doesn't exists", status_code=404)
+
+            pics_status = cursor.execute(
                 """WITH
 pic_jobs_stats AS (
     SELECT

@@ -796,47 +808,37 @@ pic_jobs_stats AS (
     pic_jobs_stats.nb_errors,
     pic_jobs_stats.last_job_finished_at
   FROM sequences s
-  […]
-  […]
+  JOIN sequences_pictures sp ON sp.seq_id = s.id
+  JOIN pictures p ON sp.pic_id = p.id
   LEFT JOIN pic_jobs_stats ON pic_jobs_stats.picture_id = p.id
   WHERE
     s.id = %(seq_id)s
     AND (p IS NULL OR p.status != 'hidden' OR p.account_id = %(account)s)
-    AND (s.status != 'hidden' OR s.account_id = %(account)s) -- show deleted sequence here
   ORDER BY s.id, sp.rank
 )
-SELECT
-[…]
-        'nb_errors', i.nb_errors,
-        'processed_at', i.last_job_finished_at,
-        'rank', i.rank
-    )
-)
+SELECT json_strip_nulls(
+    json_build_object(
+        'id', i.id,
+        -- status is a bit deprecated, we'll split this field in more fields (like `processing_in_progress`, `hidden`, ...)
+        -- but we maintain it for retrocompatibility
+        'status', CASE
+            WHEN i.is_job_running IS TRUE THEN 'preparing'
+            WHEN i.last_job_error IS NOT NULL THEN 'broken'
+            ELSE i.status
+        END,
+        'processing_in_progress', i.is_job_running,
+        'process_error', i.last_job_error,
+        'nb_errors', i.nb_errors,
+        'processed_at', i.last_job_finished_at,
+        'rank', i.rank
     )
-)
-FROM items i
-JOIN sequences s on i.seq_id = s.id
-GROUP by s.id;""",
+) as pic_status
+FROM items i;""",
                 params,
             ).fetchall()
+            pics = [p["pic_status"] for p in pics_status if len(p["pic_status"]) > 0]

-[…]
-                raise errors.InvalidAPIUsage("Sequence doesn't exists", status_code=404)
-[…]
-            return sequence_status[0]["sequence"]
+            return {"status": sequence_status["status"], "items": pics}


 @bp.route("/users/<uuid:userId>/collection")
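For readers of the API change: the endpoint now returns the sequence status plus one entry per picture built by the `json_build_object` above, with null fields stripped. An illustrative, made-up response value:

```python
# Illustrative only: shape of the value returned by getCollectionImportStatus
# after this change. UUIDs, timestamps and error messages are made up.
example_import_status = {
    "status": "ready",  # value of sequences.status for the collection
    "items": [
        {
            "id": "00000000-0000-0000-0000-000000000001",
            "status": "ready",  # per-picture status, or 'preparing'/'broken' per the CASE above
            "processing_in_progress": False,
            "processed_at": "2023-07-01T14:35:02+00:00",
            "rank": 1,
        },
        {
            "id": "00000000-0000-0000-0000-000000000002",
            "status": "broken",
            "process_error": "picture preparing process failed",
            "nb_errors": 1,
            "rank": 2,
        },
    ],
}
```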
geovisio/web/docs.py
CHANGED

@@ -120,6 +120,50 @@ API_CONFIG = {
         "STACItemSearchBody": {
             "$ref": f"https://api.stacspec.org/v{utils.STAC_VERSION}/item-search/openapi.yaml#/components/schemas/searchBody"
         },
+        "MapLibreStyleJSON": {
+            "type": "object",
+            "description": """
+MapLibre Style JSON, see https://maplibre.org/maplibre-style-spec/ for reference.
+
+Source ID is either \"geovisio\" or \"geovisio_\{userId\}\".
+
+Layers ID are \"geovisio_grid\", \"geovisio_sequences\" and \"geovisio_pictures\", or with user UUID included (\"geovisio_\{userId\}_sequences\" and \"geovisio_\{userId\}_pictures\").
+
+Note that you may not rely only on these ID that could change through time.
+""",
+            "properties": {
+                "version": {"type": "integer", "example": 8},
+                "name": {"type": "string", "example": "GeoVisio Vector Tiles"},
+                "sources": {
+                    "type": "object",
+                    "properties": {
+                        "geovisio": {
+                            "type": "object",
+                            "properties": {
+                                "type": {"type": "string", "example": "vector"},
+                                "minzoom": {"type": "integer", "example": "0"},
+                                "maxzoom": {"type": "integer", "example": "15"},
+                                "tiles": {"type": "array", "items": {"type": "string"}},
+                            },
+                        }
+                    },
+                },
+                "layers": {
+                    "type": "array",
+                    "items": {
+                        "type": "object",
+                        "properties": {
+                            "id": {"type": "string"},
+                            "source": {"type": "string"},
+                            "source-layer": {"type": "string"},
+                            "type": {"type": "string"},
+                            "paint": {"type": "object"},
+                            "layout": {"type": "object"},
+                        },
+                    },
+                },
+            },
+        },
         "GeoVisioLanding": {
             "allOf": [
                 {"$ref": "#/components/schemas/STACLanding"},
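To make the schema above concrete, here is a hand-written example instance; the tile URL and the source-layer names are assumptions for illustration, not the exact payload served by the API.

```python
# Illustrative instance of the MapLibreStyleJSON schema documented above.
# The tile URL and the source-layer values are made-up placeholders.
example_style = {
    "version": 8,
    "name": "GeoVisio Vector Tiles",
    "sources": {
        "geovisio": {
            "type": "vector",
            "minzoom": 0,
            "maxzoom": 15,
            "tiles": ["https://panoramax.example.org/api/map/{z}/{x}/{y}.mvt"],  # placeholder URL
        }
    },
    "layers": [
        # layer IDs follow the naming described above; source-layer values are assumed
        {"id": "geovisio_sequences", "type": "line", "source": "geovisio", "source-layer": "sequences", "paint": {}},
        {"id": "geovisio_pictures", "type": "circle", "source": "geovisio", "source-layer": "pictures", "paint": {}},
    ],
}
```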
@@ -312,6 +356,11 @@ If unset, sort order is unchanged.
                 "properties": {
                     "type": "object",
                     "properties": {
+                        "datetimetz": {
+                            "type": "string",
+                            "format": "date-time",
+                            "title": "Date & time with original timezone information",
+                        },
                         "geovisio:status": {"$ref": "#/components/schemas/GeoVisioItemStatus"},
                         "geovisio:producer": {"type": "string"},
                         "geovisio:image": {"type": "string", "format": "uri"},

@@ -558,7 +607,7 @@ Available properties are:
             },
         },
         },
-        "…
+        "GeoVisioEncodedToken": {
            "type": "object",
            "properties": {
                "id": {"type": "string"},

@@ -572,7 +621,7 @@
        },
        "JWTokenClaimable": {
            "allOf": [
-               {"$ref": "#/components/schemas/…
+               {"$ref": "#/components/schemas/GeoVisioEncodedToken"},
                {
                    "type": "object",
                    "properties": {
|