c2cgeoportal-geoportal 2.3.5.80__py3-none-any.whl → 2.9rc44__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- c2cgeoportal_geoportal/__init__.py +960 -0
- c2cgeoportal_geoportal/lib/__init__.py +256 -0
- c2cgeoportal_geoportal/lib/authentication.py +250 -0
- c2cgeoportal_geoportal/lib/bashcolor.py +46 -0
- c2cgeoportal_geoportal/lib/cacheversion.py +77 -0
- c2cgeoportal_geoportal/lib/caching.py +176 -0
- c2cgeoportal_geoportal/lib/check_collector.py +80 -0
- c2cgeoportal_geoportal/lib/checker.py +295 -0
- c2cgeoportal_geoportal/lib/common_headers.py +172 -0
- c2cgeoportal_geoportal/lib/dbreflection.py +266 -0
- c2cgeoportal_geoportal/lib/filter_capabilities.py +360 -0
- c2cgeoportal_geoportal/lib/fulltextsearch.py +50 -0
- c2cgeoportal_geoportal/lib/functionality.py +166 -0
- c2cgeoportal_geoportal/lib/headers.py +62 -0
- c2cgeoportal_geoportal/lib/i18n.py +38 -0
- c2cgeoportal_geoportal/lib/layers.py +132 -0
- c2cgeoportal_geoportal/lib/lingva_extractor.py +937 -0
- c2cgeoportal_geoportal/lib/loader.py +57 -0
- c2cgeoportal_geoportal/lib/metrics.py +117 -0
- c2cgeoportal_geoportal/lib/oauth2.py +1186 -0
- c2cgeoportal_geoportal/lib/oidc.py +304 -0
- c2cgeoportal_geoportal/lib/wmstparsing.py +353 -0
- c2cgeoportal_geoportal/lib/xsd.py +166 -0
- c2cgeoportal_geoportal/py.typed +0 -0
- c2cgeoportal_geoportal/resources.py +49 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/ci/config.yaml +26 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.dockerignore +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.eslintrc.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.prospector.yaml +30 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Makefile +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.ini +58 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/development.ini +121 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/gunicorn.conf.py +139 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/language_mapping +3 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-client.cfg +5 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-server.cfg +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/production.ini +38 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/setup.py +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.api.js +41 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.apps.js +64 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.commons.js +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.config.js +22 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/__init__.py +42 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/authentication.py +10 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/dev.py +14 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/models.py +8 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/multi_organization.py +7 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/resources.py +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/api/index.js +12 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/js/{{cookiecutter.package}}module.js +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/subscribers.py +39 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/views/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/{{cookiecutter.project}}/geoportal/CONST_Makefile +121 -0
- c2cgeoportal_geoportal/scaffolds/create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.dockerignore +14 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.editorconfig +17 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/main.yaml +73 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/rebuild.yaml +50 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/update_l10n.yaml +66 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.gitignore +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.pre-commit-config.yaml +35 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierignore +1 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierrc.yaml +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Makefile +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/README.rst +29 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/build +179 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/config.yaml +22 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/docker-compose-check +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-db.yaml +24 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-lib.yaml +513 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-qgis.yaml +21 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.override.sample.yaml +65 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.yaml +121 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.default +102 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.project +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/vars.yaml +430 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/locale/en/LC_MESSAGES/{{cookiecutter.package}}_geoportal-client.po +6 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/desktop.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/iframe_api.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/mobile.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_left.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_right.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/blank.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-blue.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-gold.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-green.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/robot.txt.tmpl +3 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/Readme.txt +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/TM_EUROPE_BORDERS-0.3.sql +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/demo.map.tmpl +224 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arial.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbd.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbi.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Ariali.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Bold.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-BoldItalic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Italic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Regular.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdana.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanab.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanai.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanaz.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts.conf +12 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.conf +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.map.tmpl +87 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/tinyows.xml.tmpl +36 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Landscape.jrxml +207 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Portrait.jrxml +185 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Landscape.jrxml +200 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Portrait.jrxml +170 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/config.yaml.tmpl +175 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/legend.jrxml +109 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation_fr.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/logo.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/north.svg +93 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/results.jrxml +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/project.yaml +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/pyproject.toml +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/qgisserver/pg_service.conf.tmpl +15 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/run_alembic.sh +11 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-backup +126 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-restore +132 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/setup.cfg +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/spell-ignore-words.txt +5 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/test_app.py +78 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tilegeneration/config.yaml.tmpl +195 -0
- c2cgeoportal_geoportal/scaffolds/update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/.upgrade.yaml +67 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_CHANGELOG.txt +304 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_create_template/tests/test_testapp.py +48 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/.CONST_vars.yaml.swp +0 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_config-schema.yaml +927 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_vars.yaml +1503 -0
- c2cgeoportal_geoportal/scripts/__init__.py +64 -0
- c2cgeoportal_geoportal/scripts/c2cupgrade.py +879 -0
- c2cgeoportal_geoportal/scripts/create_demo_theme.py +83 -0
- c2cgeoportal_geoportal/scripts/manage_users.py +140 -0
- c2cgeoportal_geoportal/scripts/pcreate.py +296 -0
- c2cgeoportal_geoportal/scripts/theme2fts.py +347 -0
- c2cgeoportal_geoportal/scripts/urllogin.py +81 -0
- c2cgeoportal_geoportal/templates/login.html +90 -0
- c2cgeoportal_geoportal/templates/notlogin.html +62 -0
- c2cgeoportal_geoportal/templates/testi18n.html +12 -0
- c2cgeoportal_geoportal/views/__init__.py +59 -0
- c2cgeoportal_geoportal/views/dev.py +57 -0
- c2cgeoportal_geoportal/views/dynamic.py +208 -0
- c2cgeoportal_geoportal/views/entry.py +174 -0
- c2cgeoportal_geoportal/views/fulltextsearch.py +189 -0
- c2cgeoportal_geoportal/views/geometry_processing.py +75 -0
- c2cgeoportal_geoportal/views/i18n.py +129 -0
- c2cgeoportal_geoportal/views/layers.py +713 -0
- c2cgeoportal_geoportal/views/login.py +684 -0
- c2cgeoportal_geoportal/views/mapserverproxy.py +234 -0
- c2cgeoportal_geoportal/views/memory.py +90 -0
- c2cgeoportal_geoportal/views/ogcproxy.py +120 -0
- c2cgeoportal_geoportal/views/pdfreport.py +245 -0
- c2cgeoportal_geoportal/views/printproxy.py +143 -0
- c2cgeoportal_geoportal/views/profile.py +192 -0
- c2cgeoportal_geoportal/views/proxy.py +261 -0
- c2cgeoportal_geoportal/views/raster.py +233 -0
- c2cgeoportal_geoportal/views/resourceproxy.py +73 -0
- c2cgeoportal_geoportal/views/shortener.py +152 -0
- c2cgeoportal_geoportal/views/theme.py +1322 -0
- c2cgeoportal_geoportal/views/tinyowsproxy.py +189 -0
- c2cgeoportal_geoportal/views/vector_tiles.py +83 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc44.dist-info}/METADATA +21 -24
- c2cgeoportal_geoportal-2.9rc44.dist-info/RECORD +193 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc44.dist-info}/WHEEL +1 -1
- c2cgeoportal_geoportal-2.9rc44.dist-info/entry_points.txt +28 -0
- c2cgeoportal_geoportal-2.9rc44.dist-info/top_level.txt +2 -0
- tests/__init__.py +100 -0
- tests/test_cachebuster.py +71 -0
- tests/test_caching.py +275 -0
- tests/test_checker.py +85 -0
- tests/test_decimaljson.py +47 -0
- tests/test_headerstween.py +64 -0
- tests/test_i18n.py +31 -0
- tests/test_init.py +193 -0
- tests/test_locale_negociator.py +69 -0
- tests/test_mapserverproxy_route_predicate.py +64 -0
- tests/test_raster.py +267 -0
- tests/test_wmstparsing.py +238 -0
- tests/xmlstr.py +103 -0
- c2cgeoportal_geoportal-2.3.5.80.dist-info/DESCRIPTION.rst +0 -8
- c2cgeoportal_geoportal-2.3.5.80.dist-info/RECORD +0 -7
- c2cgeoportal_geoportal-2.3.5.80.dist-info/entry_points.txt +0 -22
- c2cgeoportal_geoportal-2.3.5.80.dist-info/metadata.json +0 -1
- c2cgeoportal_geoportal-2.3.5.80.dist-info/top_level.txt +0 -1
c2cgeoportal_geoportal/views/theme.py
@@ -0,0 +1,1322 @@
|
|
1
|
+
# Copyright (c) 2011-2024, Camptocamp SA
|
2
|
+
# All rights reserved.
|
3
|
+
|
4
|
+
# Redistribution and use in source and binary forms, with or without
|
5
|
+
# modification, are permitted provided that the following conditions are met:
|
6
|
+
|
7
|
+
# 1. Redistributions of source code must retain the above copyright notice, this
|
8
|
+
# list of conditions and the following disclaimer.
|
9
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
10
|
+
# this list of conditions and the following disclaimer in the documentation
|
11
|
+
# and/or other materials provided with the distribution.
|
12
|
+
|
13
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
14
|
+
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
15
|
+
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
16
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
17
|
+
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
18
|
+
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
19
|
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
20
|
+
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
21
|
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
22
|
+
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
23
|
+
|
24
|
+
# The views and conclusions contained in the software and documentation are those
|
25
|
+
# of the authors and should not be interpreted as representing official policies,
|
26
|
+
# either expressed or implied, of the FreeBSD Project.
|
27
|
+
|
28
|
+
|
29
|
+
import asyncio
|
30
|
+
import gc
|
31
|
+
import logging
|
32
|
+
import os
|
33
|
+
import re
|
34
|
+
import sys
|
35
|
+
import time
|
36
|
+
from collections import Counter
|
37
|
+
from math import sqrt
|
38
|
+
from typing import Any, Optional, Union, cast
|
39
|
+
|
40
|
+
import dogpile.cache.api
|
41
|
+
import pyramid.httpexceptions
|
42
|
+
import pyramid.request
|
43
|
+
import requests
|
44
|
+
import sqlalchemy
|
45
|
+
import sqlalchemy.orm.query
|
46
|
+
from c2cwsgiutils.auth import auth_view
|
47
|
+
from defusedxml import lxml
|
48
|
+
from lxml import etree # nosec
|
49
|
+
from owslib.wms import WebMapService
|
50
|
+
from pyramid.view import view_config
|
51
|
+
from sqlalchemy.orm import subqueryload
|
52
|
+
from sqlalchemy.orm.exc import NoResultFound # type: ignore[attr-defined]
|
53
|
+
|
54
|
+
from c2cgeoportal_commons import models
|
55
|
+
from c2cgeoportal_commons.lib.url import Url, get_url2
|
56
|
+
from c2cgeoportal_commons.models import cache_invalidate_cb, main
|
57
|
+
from c2cgeoportal_geoportal import is_allowed_host, is_allowed_url
|
58
|
+
from c2cgeoportal_geoportal.lib import get_roles_id, get_typed, get_types_map, is_intranet
|
59
|
+
from c2cgeoportal_geoportal.lib.caching import get_region
|
60
|
+
from c2cgeoportal_geoportal.lib.common_headers import Cache, set_common_headers
|
61
|
+
from c2cgeoportal_geoportal.lib.functionality import get_mapserver_substitution_params
|
62
|
+
from c2cgeoportal_geoportal.lib.layers import (
|
63
|
+
get_private_layers,
|
64
|
+
get_protected_layers,
|
65
|
+
get_protected_layers_query,
|
66
|
+
)
|
67
|
+
from c2cgeoportal_geoportal.lib.wmstparsing import TimeInformation, parse_extent
|
68
|
+
from c2cgeoportal_geoportal.views.layers import get_layer_metadata
|
69
|
+
|
70
|
+
_LOG = logging.getLogger(__name__)
|
71
|
+
_CACHE_REGION = get_region("std")
|
72
|
+
_CACHE_OGC_SERVER_REGION = get_region("ogc-server")
|
73
|
+
_TIMEOUT = int(os.environ.get("C2CGEOPORTAL_THEME_TIMEOUT", "300"))
|
74
|
+
|
75
|
+
Metadata = Union[str, int, float, bool, list[Any], dict[str, Any]]
|
76
|
+
|
77
|
+
|
78
|
+
async def get_http_cached(
|
79
|
+
http_options: dict[str, Any], url: str, headers: dict[str, str], cache: bool = True
|
80
|
+
) -> tuple[bytes, str]:
|
81
|
+
"""Get the content of the URL with a cache (dogpile)."""
|
82
|
+
|
83
|
+
@_CACHE_OGC_SERVER_REGION.cache_on_arguments()
|
84
|
+
def do_get_http_cached(url: str) -> tuple[bytes, str]:
|
85
|
+
# This function is just used to create a cache entry
|
86
|
+
raise NotImplementedError()
|
87
|
+
|
88
|
+
# Use the cache
|
89
|
+
if cache:
|
90
|
+
result = cast(tuple[bytes, str], do_get_http_cached.get(url)) # type: ignore[attr-defined]
|
91
|
+
if result != dogpile.cache.api.NO_VALUE: # type: ignore[comparison-overlap]
|
92
|
+
return result
|
93
|
+
|
94
|
+
response = await asyncio.to_thread(
|
95
|
+
requests.get, url.strip(), headers=headers, timeout=_TIMEOUT, **http_options
|
96
|
+
)
|
97
|
+
response.raise_for_status()
|
98
|
+
_LOG.info("Get url '%s' in %.1fs.", url, response.elapsed.total_seconds())
|
99
|
+
result = (response.content, response.headers.get("Content-Type", ""))
|
100
|
+
# Set the result in the cache
|
101
|
+
do_get_http_cached.set(result, url) # type: ignore[attr-defined]
|
102
|
+
return result
|
103
|
+
|
104
|
+
|
105
|
+
class DimensionInformation:
|
106
|
+
"""Used to collect the dimensions information."""
|
107
|
+
|
108
|
+
URL_PART_RE = re.compile(r"[a-zA-Z0-9_\-\+~\.]*$")
|
109
|
+
|
110
|
+
def __init__(self) -> None:
|
111
|
+
self._dimensions: dict[str, str] = {}
|
112
|
+
|
113
|
+
def merge(self, layer: main.Layer, layer_node: dict[str, Any], mixed: bool) -> set[str]:
|
114
|
+
errors = set()
|
115
|
+
|
116
|
+
dimensions: dict[str, str] = {}
|
117
|
+
dimensions_filters = {}
|
118
|
+
for dimension in layer.dimensions:
|
119
|
+
if (
|
120
|
+
not isinstance(layer, main.LayerWMS)
|
121
|
+
and dimension.value is not None
|
122
|
+
and not self.URL_PART_RE.match(dimension.value)
|
123
|
+
):
|
124
|
+
errors.add(
|
125
|
+
f"The layer '{layer.name}' has an unsupported dimension value "
|
126
|
+
f"'{dimension.value}' ('{dimension.name}')."
|
127
|
+
)
|
128
|
+
elif dimension.name in dimensions: # pragma: nocover
|
129
|
+
errors.add(f"The layer '{layer.name}' has a duplicated dimension name '{dimension.name}'.")
|
130
|
+
else:
|
131
|
+
if dimension.field:
|
132
|
+
dimensions_filters[dimension.name] = {"field": dimension.field, "value": dimension.value}
|
133
|
+
else:
|
134
|
+
dimensions[dimension.name] = dimension.value
|
135
|
+
|
136
|
+
if dimensions_filters:
|
137
|
+
layer_node["dimensionsFilters"] = dimensions_filters
|
138
|
+
if mixed:
|
139
|
+
layer_node["dimensions"] = dimensions
|
140
|
+
else:
|
141
|
+
for name, value in list(dimensions.items()):
|
142
|
+
if name not in self._dimensions or self._dimensions[name] is None:
|
143
|
+
self._dimensions[name] = value
|
144
|
+
elif self._dimensions[name] != value and value is not None:
|
145
|
+
errors.add(
|
146
|
+
f"The layer '{layer.name}' has a wrong dimension value '{value}' for '{name}', "
|
147
|
+
f"expected '{self._dimensions[name]}' or empty."
|
148
|
+
)
|
149
|
+
return errors
|
150
|
+
|
151
|
+
def get_dimensions(self) -> dict[str, str]:
|
152
|
+
return self._dimensions
|
153
|
+
|
154
|
+
|
155
|
+
class Theme:
|
156
|
+
"""All the views concerning the themes."""
|
157
|
+
|
158
|
+
def __init__(self, request: pyramid.request.Request):
|
159
|
+
self.request = request
|
160
|
+
self.settings = request.registry.settings
|
161
|
+
self.http_options = self.settings.get("http_options", {})
|
162
|
+
self.metadata_type = get_types_map(
|
163
|
+
self.settings.get("admin_interface", {}).get("available_metadata", [])
|
164
|
+
)
|
165
|
+
|
166
|
+
self._ogcservers_cache: list[main.OGCServer] | None = None
|
167
|
+
self._treeitems_cache: list[main.TreeItem] | None = None
|
168
|
+
self._layerswms_cache: list[main.LayerWMS] | None = None
|
169
|
+
self._layerswmts_cache: list[main.LayerWMTS] | None = None
|
170
|
+
self._layergroup_cache: list[main.LayerGroup] | None = None
|
171
|
+
self._themes_cache: list[main.Theme] | None = None
|
172
|
+
|
173
|
+
def _get_metadata(
|
174
|
+
self, item: main.TreeItem, metadata: str, errors: set[str]
|
175
|
+
) -> None | str | int | float | bool | list[Any] | dict[str, Any]:
|
176
|
+
metadatas = item.get_metadata(metadata)
|
177
|
+
return (
|
178
|
+
None
|
179
|
+
if not metadatas
|
180
|
+
else get_typed(
|
181
|
+
metadata, metadatas[0].value, self.metadata_type, self.request, errors, layer_name=item.name
|
182
|
+
)
|
183
|
+
)
|
184
|
+
|
185
|
+
def _get_metadata_list(self, item: main.TreeItem, errors: set[str]) -> dict[str, Metadata]:
|
186
|
+
metadatas: dict[str, Metadata] = {}
|
187
|
+
metadata: main.Metadata
|
188
|
+
for metadata in item.metadatas:
|
189
|
+
value = get_typed(metadata.name, metadata.value, self.metadata_type, self.request, errors)
|
190
|
+
if value is not None:
|
191
|
+
metadatas[metadata.name] = value
|
192
|
+
|
193
|
+
return metadatas
|
194
|
+
|
195
|
+
async def _wms_getcap(
|
196
|
+
self, ogc_server: main.OGCServer, preload: bool = False, cache: bool = True
|
197
|
+
) -> tuple[dict[str, dict[str, Any]] | None, set[str]]:
|
198
|
+
_LOG.debug("Get the WMS Capabilities of %s, preload: %s, cache: %s", ogc_server.name, preload, cache)
|
199
|
+
|
200
|
+
@_CACHE_OGC_SERVER_REGION.cache_on_arguments()
|
201
|
+
def build_web_map_service(ogc_server_id: int) -> tuple[dict[str, dict[str, Any]] | None, set[str]]:
|
202
|
+
del ogc_server_id # Just for cache
|
203
|
+
|
204
|
+
if url is None:
|
205
|
+
raise RuntimeError("URL is None")
|
206
|
+
|
207
|
+
version = url.query.get("VERSION", "1.1.1")
|
208
|
+
layers = {}
|
209
|
+
try:
|
210
|
+
wms = WebMapService(None, xml=content, version=version)
|
211
|
+
except Exception as e: # pylint: disable=broad-exception-caught
|
212
|
+
error = (
|
213
|
+
f"WARNING! an error '{e!s}' occurred while trying to read the mapfile and "
|
214
|
+
"recover the themes."
|
215
|
+
f"\nURL: {url}\n{content.decode() if content else None}"
|
216
|
+
)
|
217
|
+
_LOG.error(error, exc_info=True)
|
218
|
+
return None, {error}
|
219
|
+
wms_layers_name = list(wms.contents)
|
220
|
+
for layer_name in wms_layers_name:
|
221
|
+
wms_layer = wms[layer_name]
|
222
|
+
resolution = self._get_layer_resolution_hint(wms_layer)
|
223
|
+
info = {
|
224
|
+
"name": wms_layer.name,
|
225
|
+
"minResolutionHint": float(f"{resolution[0]:0.2f}"),
|
226
|
+
"maxResolutionHint": float(f"{resolution[1]:0.2f}"),
|
227
|
+
}
|
228
|
+
if hasattr(wms_layer, "queryable"):
|
229
|
+
info["queryable"] = wms_layer.queryable == 1
|
230
|
+
|
231
|
+
layers[layer_name] = {
|
232
|
+
"info": info,
|
233
|
+
"timepositions": wms_layer.timepositions,
|
234
|
+
"defaulttimeposition": wms_layer.defaulttimeposition,
|
235
|
+
"children": [layer.name for layer in wms_layer.layers],
|
236
|
+
}
|
237
|
+
|
238
|
+
del wms
|
239
|
+
|
240
|
+
return {"layers": layers}, set()
|
241
|
+
|
242
|
+
if cache:
|
243
|
+
result = build_web_map_service.get(ogc_server.id) # type: ignore[attr-defined]
|
244
|
+
if result != dogpile.cache.api.NO_VALUE:
|
245
|
+
return result # type: ignore[no-any-return]
|
246
|
+
|
247
|
+
try:
|
248
|
+
url, content, errors = await self._wms_getcap_cached(ogc_server, cache=cache)
|
249
|
+
except requests.exceptions.RequestException as exception:
|
250
|
+
if exception.response is None:
|
251
|
+
error = (
|
252
|
+
f"Unable to get the WMS Capabilities for OGC server '{ogc_server.name}', "
|
253
|
+
f"return the error: {exception}"
|
254
|
+
)
|
255
|
+
else:
|
256
|
+
error = (
|
257
|
+
f"Unable to get the WMS Capabilities for OGC server '{ogc_server.name}', "
|
258
|
+
f"return the error: {exception.response.status_code} {exception.response.reason}"
|
259
|
+
)
|
260
|
+
_LOG.exception(error)
|
261
|
+
return None, {error}
|
262
|
+
if errors or preload:
|
263
|
+
return None, errors
|
264
|
+
|
265
|
+
return build_web_map_service.refresh(ogc_server.id) # type: ignore
|
266
|
+
|
267
|
+
async def _wms_getcap_cached(
|
268
|
+
self, ogc_server: main.OGCServer, cache: bool = True
|
269
|
+
) -> tuple[Url | None, bytes | None, set[str]]:
|
270
|
+
errors: set[str] = set()
|
271
|
+
url = get_url2(f"The OGC server '{ogc_server.name}'", ogc_server.url, self.request, errors)
|
272
|
+
if errors or url is None:
|
273
|
+
return url, None, errors
|
274
|
+
|
275
|
+
# Add functionality params
|
276
|
+
if (
|
277
|
+
ogc_server.auth == main.OGCSERVER_AUTH_STANDARD
|
278
|
+
and ogc_server.type == main.OGCSERVER_TYPE_MAPSERVER
|
279
|
+
):
|
280
|
+
url.add_query(get_mapserver_substitution_params(self.request))
|
281
|
+
|
282
|
+
url.add_query(
|
283
|
+
{
|
284
|
+
"SERVICE": "WMS",
|
285
|
+
"VERSION": "1.1.1",
|
286
|
+
"REQUEST": "GetCapabilities",
|
287
|
+
"ROLE_IDS": "0",
|
288
|
+
"USER_ID": "0",
|
289
|
+
},
|
290
|
+
)
|
291
|
+
|
292
|
+
_LOG.debug("Get WMS GetCapabilities for URL: %s", url)
|
293
|
+
|
294
|
+
headers = {}
|
295
|
+
|
296
|
+
# Add headers for Geoserver
|
297
|
+
if ogc_server.auth == main.OGCSERVER_AUTH_GEOSERVER:
|
298
|
+
headers["sec-username"] = "root"
|
299
|
+
headers["sec-roles"] = "root"
|
300
|
+
|
301
|
+
try:
|
302
|
+
content, content_type = await get_http_cached(self.http_options, url.url(), headers, cache=cache)
|
303
|
+
except Exception: # pylint: disable=broad-exception-caught
|
304
|
+
error = f"Unable to GetCapabilities from URL {url}"
|
305
|
+
errors.add(error)
|
306
|
+
_LOG.error(error, exc_info=True)
|
307
|
+
return url, None, errors
|
308
|
+
|
309
|
+
# With wms 1.3 it returns text/xml also in case of error :-(
|
310
|
+
if content_type.split(";")[0].strip() not in [
|
311
|
+
"application/vnd.ogc.wms_xml",
|
312
|
+
"text/xml",
|
313
|
+
]:
|
314
|
+
error = (
|
315
|
+
f"GetCapabilities from URL '{url}' returns a wrong Content-Type: {content_type}\n"
|
316
|
+
f"{content.decode()}"
|
317
|
+
)
|
318
|
+
errors.add(error)
|
319
|
+
_LOG.error(error)
|
320
|
+
return url, None, errors
|
321
|
+
|
322
|
+
return url, content, errors
|
323
|
+
|
324
|
+
def _create_layer_query(self, interface: str) -> sqlalchemy.orm.query.RowReturningQuery[tuple[str]]:
|
325
|
+
"""Create an SQLAlchemy query for Layer and for the role identified to by ``role_id``."""
|
326
|
+
|
327
|
+
assert models.DBSession is not None
|
328
|
+
|
329
|
+
query: sqlalchemy.orm.query.RowReturningQuery[tuple[str]] = models.DBSession.query(
|
330
|
+
main.Layer.name
|
331
|
+
).filter(main.Layer.public.is_(True))
|
332
|
+
|
333
|
+
if interface is not None:
|
334
|
+
query = query.join(main.Layer.interfaces)
|
335
|
+
query = query.filter(main.Interface.name == interface)
|
336
|
+
|
337
|
+
query2 = get_protected_layers_query(self.request, None, what=main.LayerWMS.name) # type: ignore[arg-type]
|
338
|
+
if interface is not None:
|
339
|
+
query2 = query2.join(main.Layer.interfaces)
|
340
|
+
query2 = query2.filter(main.Interface.name == interface)
|
341
|
+
query = query.union(query2)
|
342
|
+
query3 = get_protected_layers_query(self.request, None, what=main.LayerWMTS.name) # type: ignore[arg-type]
|
343
|
+
if interface is not None:
|
344
|
+
query3 = query3.join(main.Layer.interfaces)
|
345
|
+
query3 = query3.filter(main.Interface.name == interface)
|
346
|
+
query = query.union(query3)
|
347
|
+
|
348
|
+
return query
|
349
|
+
|
350
|
+
def _get_layer_metadata_urls(self, layer: main.Layer) -> list[str]:
|
351
|
+
metadata_urls: list[str] = []
|
352
|
+
if layer.metadataUrls:
|
353
|
+
metadata_urls = layer.metadataUrls
|
354
|
+
for child_layer in layer.layers:
|
355
|
+
metadata_urls.extend(self._get_layer_metadata_urls(child_layer))
|
356
|
+
return metadata_urls
|
357
|
+
|
358
|
+
def _get_layer_resolution_hint_raw(self, layer: main.Layer) -> tuple[float | None, float | None]:
|
359
|
+
resolution_hint_min = None
|
360
|
+
resolution_hint_max = None
|
361
|
+
if layer.scaleHint:
|
362
|
+
# scaleHint is based upon a pixel diagonal length whereas we use
|
363
|
+
# resolutions based upon a pixel edge length. There is a sqrt(2)
|
364
|
+
# ratio between edge and diagonal of a square.
|
365
|
+
resolution_hint_min = float(layer.scaleHint["min"]) / sqrt(2)
|
366
|
+
resolution_hint_max = (
|
367
|
+
float(layer.scaleHint["max"]) / sqrt(2)
|
368
|
+
if layer.scaleHint["max"] not in ("0", "Infinity")
|
369
|
+
else 999999999
|
370
|
+
)
|
371
|
+
for child_layer in layer.layers:
|
372
|
+
resolution = self._get_layer_resolution_hint_raw(child_layer)
|
373
|
+
resolution_hint_min = (
|
374
|
+
resolution[0]
|
375
|
+
if resolution_hint_min is None
|
376
|
+
else (
|
377
|
+
resolution_hint_min if resolution[0] is None else min(resolution_hint_min, resolution[0])
|
378
|
+
)
|
379
|
+
)
|
380
|
+
resolution_hint_max = (
|
381
|
+
resolution[1]
|
382
|
+
if resolution_hint_max is None
|
383
|
+
else (
|
384
|
+
resolution_hint_max if resolution[1] is None else max(resolution_hint_max, resolution[1])
|
385
|
+
)
|
386
|
+
)
|
387
|
+
|
388
|
+
return (resolution_hint_min, resolution_hint_max)
|
389
|
+
|
390
|
+
def _get_layer_resolution_hint(self, layer: main.Layer) -> tuple[float, float]:
|
391
|
+
resolution_hint_min, resolution_hint_max = self._get_layer_resolution_hint_raw(layer)
|
392
|
+
return (
|
393
|
+
0.0 if resolution_hint_min is None else resolution_hint_min,
|
394
|
+
999999999 if resolution_hint_max is None else resolution_hint_max,
|
395
|
+
)
|
396
|
+
|
397
|
+
async def _layer(
|
398
|
+
self,
|
399
|
+
layer: main.Layer,
|
400
|
+
time_: TimeInformation | None = None,
|
401
|
+
dim: DimensionInformation | None = None,
|
402
|
+
mixed: bool = True,
|
403
|
+
) -> tuple[dict[str, Any] | None, set[str]]:
|
404
|
+
errors: set[str] = set()
|
405
|
+
layer_info = {"id": layer.id, "name": layer.name, "metadata": self._get_metadata_list(layer, errors)}
|
406
|
+
if re.search("[/?#]", layer.name):
|
407
|
+
errors.add(f"The layer has an unsupported name '{layer.name}'.")
|
408
|
+
if layer.geo_table:
|
409
|
+
errors |= self._fill_editable(layer_info, layer)
|
410
|
+
if mixed:
|
411
|
+
assert time_ is None
|
412
|
+
time_ = TimeInformation()
|
413
|
+
assert time_ is not None
|
414
|
+
assert dim is not None
|
415
|
+
|
416
|
+
if not isinstance(layer, main.LayerCOG):
|
417
|
+
errors |= dim.merge(layer, layer_info, mixed)
|
418
|
+
|
419
|
+
if isinstance(layer, main.LayerWMS):
|
420
|
+
wms, wms_errors = await self._wms_layers(layer.ogc_server)
|
421
|
+
errors |= wms_errors
|
422
|
+
if wms is None:
|
423
|
+
return None if errors else layer_info, errors
|
424
|
+
if layer.layer is None or layer.layer == "":
|
425
|
+
errors.add(f"The layer '{layer.name}' do not have any layers")
|
426
|
+
return None, errors
|
427
|
+
layer_info["type"] = "WMS"
|
428
|
+
layer_info["layers"] = layer.layer
|
429
|
+
await self._fill_wms(layer_info, layer, errors, mixed=mixed)
|
430
|
+
errors |= self._merge_time(time_, layer_info, layer, wms)
|
431
|
+
|
432
|
+
elif isinstance(layer, main.LayerWMTS):
|
433
|
+
layer_info["type"] = "WMTS"
|
434
|
+
self._fill_wmts(layer_info, layer, errors)
|
435
|
+
|
436
|
+
elif isinstance(layer, main.LayerVectorTiles):
|
437
|
+
layer_info["type"] = "VectorTiles"
|
438
|
+
self._vectortiles_layers(layer_info, layer, errors)
|
439
|
+
|
440
|
+
elif isinstance(layer, main.LayerCOG):
|
441
|
+
layer_info["type"] = "COG"
|
442
|
+
self._cog_layers(layer_info, layer, errors)
|
443
|
+
|
444
|
+
return None if errors else layer_info, errors
|
445
|
+
|
446
|
+
@staticmethod
|
447
|
+
def _merge_time(
|
448
|
+
time_: TimeInformation, layer_theme: dict[str, Any], layer: main.Layer, wms: dict[str, dict[str, Any]]
|
449
|
+
) -> set[str]:
|
450
|
+
errors = set()
|
451
|
+
wmslayer = layer.layer
|
452
|
+
|
453
|
+
def merge_time(wms_layer_obj: dict[str, Any]) -> None:
|
454
|
+
extent = parse_extent(wms_layer_obj["timepositions"], wms_layer_obj["defaulttimeposition"])
|
455
|
+
time_.merge(layer_theme, extent, layer.time_mode, layer.time_widget)
|
456
|
+
|
457
|
+
try:
|
458
|
+
if wmslayer in wms["layers"]:
|
459
|
+
wms_layer_obj = wms["layers"][wmslayer]
|
460
|
+
|
461
|
+
if layer.time_mode != "disabled":
|
462
|
+
has_time = False
|
463
|
+
if wms_layer_obj["timepositions"]:
|
464
|
+
merge_time(wms_layer_obj)
|
465
|
+
has_time = True
|
466
|
+
|
467
|
+
else:
|
468
|
+
# For wms layer group, get time from the chldren.
|
469
|
+
for child_layer_name in wms_layer_obj["children"]:
|
470
|
+
child_layer = wms["layers"][child_layer_name]
|
471
|
+
if child_layer["timepositions"]:
|
472
|
+
merge_time(child_layer) # The time mode comes from the wms layer group
|
473
|
+
has_time = True
|
474
|
+
|
475
|
+
if not has_time:
|
476
|
+
errors.add(
|
477
|
+
f"Error: time layer '{layer.name}' has no time information in capabilities"
|
478
|
+
)
|
479
|
+
|
480
|
+
except ValueError: # pragma no cover
|
481
|
+
errors.add(f"Error while handling time for layer '{layer.name}': {sys.exc_info()[1]}")
|
482
|
+
|
483
|
+
return errors
|
484
|
+
|
485
|
+
def _fill_editable(self, layer_theme: dict[str, Any], layer: main.Layer) -> set[str]:
|
486
|
+
assert models.DBSession is not None
|
487
|
+
|
488
|
+
errors = set()
|
489
|
+
try:
|
490
|
+
if self.request.user:
|
491
|
+
count = (
|
492
|
+
models.DBSession.query(main.RestrictionArea)
|
493
|
+
.join(main.RestrictionArea.roles)
|
494
|
+
.filter(main.Role.id.in_(get_roles_id(self.request)))
|
495
|
+
.filter(main.RestrictionArea.layers.any(main.Layer.id == layer.id))
|
496
|
+
.filter(main.RestrictionArea.readwrite.is_(True))
|
497
|
+
.count()
|
498
|
+
)
|
499
|
+
if count > 0:
|
500
|
+
layer_theme["edit_columns"] = get_layer_metadata(layer)
|
501
|
+
layer_theme["editable"] = True
|
502
|
+
except Exception as exception: # pylint: disable=broad-exception-caught
|
503
|
+
_LOG.exception(str(exception))
|
504
|
+
errors.add(str(exception))
|
505
|
+
return errors
|
506
|
+
|
507
|
+
def _fill_child_layer(
|
508
|
+
self,
|
509
|
+
layer_theme: dict[str, Any],
|
510
|
+
layer_name: str,
|
511
|
+
wms: dict[str, dict[str, Any]],
|
512
|
+
) -> None:
|
513
|
+
wms_layer_obj = wms["layers"][layer_name]
|
514
|
+
if not wms_layer_obj["children"]:
|
515
|
+
layer_theme["childLayers"].append(wms["layers"][layer_name]["info"])
|
516
|
+
else:
|
517
|
+
for child_layer in wms_layer_obj["children"]:
|
518
|
+
self._fill_child_layer(layer_theme, child_layer, wms)
|
519
|
+
|
520
|
+
async def _fill_wms(
|
521
|
+
self, layer_theme: dict[str, Any], layer: main.Layer, errors: set[str], mixed: bool
|
522
|
+
) -> None:
|
523
|
+
wms, wms_errors = await self._wms_layers(layer.ogc_server)
|
524
|
+
errors |= wms_errors
|
525
|
+
if wms is None:
|
526
|
+
return
|
527
|
+
|
528
|
+
layer_theme["imageType"] = layer.ogc_server.image_type
|
529
|
+
if layer.style:
|
530
|
+
layer_theme["style"] = layer.style
|
531
|
+
|
532
|
+
# now look at what is in the WMS capabilities doc
|
533
|
+
layer_theme["childLayers"] = []
|
534
|
+
for layer_name in layer.layer.split(","):
|
535
|
+
if layer_name in wms["layers"]:
|
536
|
+
self._fill_child_layer(layer_theme, layer_name, wms)
|
537
|
+
else:
|
538
|
+
errors.add(
|
539
|
+
f"The layer '{layer_name}' ({layer.name}) is not defined in WMS capabilities "
|
540
|
+
f"from '{layer.ogc_server.name}'"
|
541
|
+
)
|
542
|
+
|
543
|
+
if "minResolutionHint" not in layer_theme:
|
544
|
+
resolution_min = self._get_metadata(layer, "minResolution", errors)
|
545
|
+
|
546
|
+
if resolution_min is not None:
|
547
|
+
layer_theme["minResolutionHint"] = resolution_min
|
548
|
+
else:
|
549
|
+
min_resolutions_hint = [
|
550
|
+
l_["minResolutionHint"] for l_ in layer_theme["childLayers"] if "minResolutionHint" in l_
|
551
|
+
]
|
552
|
+
if min_resolutions_hint:
|
553
|
+
layer_theme["minResolutionHint"] = min(min_resolutions_hint)
|
554
|
+
if "maxResolutionHint" not in layer_theme:
|
555
|
+
resolution_max = self._get_metadata(layer, "maxResolution", errors)
|
556
|
+
|
557
|
+
if resolution_max is not None:
|
558
|
+
layer_theme["maxResolutionHint"] = resolution_max
|
559
|
+
else:
|
560
|
+
max_resolutions_hint = [
|
561
|
+
l_["maxResolutionHint"] for l_ in layer_theme["childLayers"] if "maxResolutionHint" in l_
|
562
|
+
]
|
563
|
+
if max_resolutions_hint:
|
564
|
+
layer_theme["maxResolutionHint"] = max(max_resolutions_hint)
|
565
|
+
|
566
|
+
if mixed:
|
567
|
+
layer_theme["ogcServer"] = layer.ogc_server.name
|
568
|
+
|
569
|
+
def _fill_wmts(self, layer_theme: dict[str, Any], layer: main.Layer, errors: set[str]) -> None:
|
570
|
+
url = get_url2(f"The WMTS layer '{layer.name}'", layer.url, self.request, errors=errors)
|
571
|
+
layer_theme["url"] = url.url() if url is not None else None
|
572
|
+
|
573
|
+
if layer.style:
|
574
|
+
layer_theme["style"] = layer.style
|
575
|
+
if layer.matrix_set:
|
576
|
+
layer_theme["matrixSet"] = layer.matrix_set
|
577
|
+
|
578
|
+
layer_theme["layer"] = layer.layer
|
579
|
+
layer_theme["imageType"] = layer.image_type
|
580
|
+
|
581
|
+
def _vectortiles_layers(
|
582
|
+
self, layer_theme: dict[str, Any], layer: main.LayerVectorTiles, errors: set[str]
|
583
|
+
) -> None:
|
584
|
+
style = get_url2(f"The VectorTiles layer '{layer.name}'", layer.style, self.request, errors=errors)
|
585
|
+
layer_theme["style"] = style.url() if style is not None else None
|
586
|
+
if layer.xyz:
|
587
|
+
layer_theme["xyz"] = layer.xyz
|
588
|
+
|
589
|
+
def _cog_layers(self, layer_theme: dict[str, Any], layer: main.LayerCOG, errors: set[str]) -> None:
|
590
|
+
url = get_url2(f"The COG layer '{layer.name}'", layer.url, self.request, errors=errors)
|
591
|
+
layer_theme["url"] = url.url() if url is not None else None
|
592
|
+
|
593
|
+
@staticmethod
|
594
|
+
def _layer_included(tree_item: main.TreeItem) -> bool:
|
595
|
+
return isinstance(tree_item, main.Layer)
|
596
|
+
|
597
|
+
def _get_ogc_servers(self, group: main.LayerGroup, depth: int) -> set[str | bool]:
|
598
|
+
"""Get unique identifier for each child by recursing on all the children."""
|
599
|
+
|
600
|
+
ogc_servers: set[str | bool] = set()
|
601
|
+
|
602
|
+
# escape loop
|
603
|
+
if depth > 30:
|
604
|
+
_LOG.error("Error: too many recursions with group '%s'", group.name)
|
605
|
+
return ogc_servers
|
606
|
+
|
607
|
+
# recurse on children
|
608
|
+
if isinstance(group, main.LayerGroup) and group.children:
|
609
|
+
for tree_item in group.children:
|
610
|
+
ogc_servers.update(self._get_ogc_servers(tree_item, depth + 1))
|
611
|
+
|
612
|
+
if isinstance(group, main.LayerWMS):
|
613
|
+
ogc_servers.add(group.ogc_server.name)
|
614
|
+
|
615
|
+
if isinstance(group, main.LayerWMTS):
|
616
|
+
ogc_servers.add(False)
|
617
|
+
|
618
|
+
return ogc_servers
|
619
|
+
|
620
|
+
@staticmethod
|
621
|
+
def is_mixed(ogc_servers: list[str | bool]) -> bool:
|
622
|
+
return len(ogc_servers) != 1 or ogc_servers[0] is False
|
623
|
+
|
624
|
+
async def _group(
|
625
|
+
self,
|
626
|
+
path: str,
|
627
|
+
group: main.LayerGroup,
|
628
|
+
layers: list[str],
|
629
|
+
depth: int = 1,
|
630
|
+
min_levels: int = 1,
|
631
|
+
mixed: bool = True,
|
632
|
+
time_: TimeInformation | None = None,
|
633
|
+
dim: DimensionInformation | None = None,
|
634
|
+
wms_layers: list[str] | None = None,
|
635
|
+
layers_name: list[str] | None = None,
|
636
|
+
**kwargs: Any,
|
637
|
+
) -> tuple[dict[str, Any] | None, set[str]]:
|
638
|
+
if wms_layers is None:
|
639
|
+
wms_layers = []
|
640
|
+
if layers_name is None:
|
641
|
+
layers_name = []
|
642
|
+
children = []
|
643
|
+
errors = set()
|
644
|
+
|
645
|
+
if re.search("[/?#]", group.name):
|
646
|
+
errors.add(f"The group has an unsupported name '{group.name}'.")
|
647
|
+
|
648
|
+
# escape loop
|
649
|
+
if depth > 30:
|
650
|
+
errors.add(f"Too many recursions with group '{group.name}'")
|
651
|
+
return None, errors
|
652
|
+
|
653
|
+
ogc_servers = None
|
654
|
+
org_depth = depth
|
655
|
+
if depth == 1:
|
656
|
+
ogc_servers = list(self._get_ogc_servers(group, 1))
|
657
|
+
# check if mixed content
|
658
|
+
mixed = self.is_mixed(ogc_servers)
|
659
|
+
if not mixed:
|
660
|
+
time_ = TimeInformation()
|
661
|
+
dim = DimensionInformation()
|
662
|
+
|
663
|
+
for tree_item in group.children:
|
664
|
+
if isinstance(tree_item, main.LayerGroup):
|
665
|
+
group_theme, gp_errors = await self._group(
|
666
|
+
f"{path}/{tree_item.name}",
|
667
|
+
tree_item,
|
668
|
+
layers,
|
669
|
+
depth=depth + 1,
|
670
|
+
min_levels=min_levels,
|
671
|
+
mixed=mixed,
|
672
|
+
time_=time_,
|
673
|
+
dim=dim,
|
674
|
+
wms_layers=wms_layers,
|
675
|
+
layers_name=layers_name,
|
676
|
+
**kwargs,
|
677
|
+
)
|
678
|
+
errors |= gp_errors
|
679
|
+
if group_theme is not None:
|
680
|
+
children.append(group_theme)
|
681
|
+
elif self._layer_included(tree_item):
|
682
|
+
if tree_item.name in layers:
|
683
|
+
layers_name.append(tree_item.name)
|
684
|
+
if isinstance(tree_item, main.LayerWMS):
|
685
|
+
wms_layers.extend(tree_item.layer.split(","))
|
686
|
+
|
687
|
+
layer_theme, l_errors = await self._layer(tree_item, mixed=mixed, time_=time_, dim=dim)
|
688
|
+
errors |= l_errors
|
689
|
+
if layer_theme is not None:
|
690
|
+
if depth < min_levels:
|
691
|
+
errors.add(
|
692
|
+
f"The Layer '{path + '/' + tree_item.name}' is under indented "
|
693
|
+
f"({depth:d}/{min_levels:d})."
|
694
|
+
)
|
695
|
+
else:
|
696
|
+
children.append(layer_theme)
|
697
|
+
|
698
|
+
if children:
|
699
|
+
group_theme = {
|
700
|
+
"id": group.id,
|
701
|
+
"name": group.name,
|
702
|
+
"children": children,
|
703
|
+
"metadata": self._get_metadata_list(group, errors),
|
704
|
+
"mixed": False,
|
705
|
+
}
|
706
|
+
if not mixed:
|
707
|
+
name: str
|
708
|
+
for name, nb in Counter(layers_name).items():
|
709
|
+
if nb > 1:
|
710
|
+
errors.add(
|
711
|
+
f"The GeoMapFish layer name '{name}', cannot be two times "
|
712
|
+
"in the same block (first level group)."
|
713
|
+
)
|
714
|
+
|
715
|
+
group_theme["mixed"] = mixed
|
716
|
+
if org_depth == 1:
|
717
|
+
if not mixed:
|
718
|
+
assert time_ is not None
|
719
|
+
assert dim is not None
|
720
|
+
group_theme["ogcServer"] = cast(list[Any], ogc_servers)[0]
|
721
|
+
if time_.has_time() and time_.layer is None:
|
722
|
+
group_theme["time"] = time_.to_dict()
|
723
|
+
|
724
|
+
group_theme["dimensions"] = dim.get_dimensions()
|
725
|
+
|
726
|
+
return group_theme, errors
|
727
|
+
return None, errors
|
728
|
+
|
729
|
+
def _layers(self, interface: str) -> list[str]:
|
730
|
+
query = self._create_layer_query(interface=interface)
|
731
|
+
return [name for (name,) in query.all()]
|
732
|
+
|
733
|
+
async def _wms_layers(
|
734
|
+
self, ogc_server: main.OGCServer
|
735
|
+
) -> tuple[dict[str, dict[str, Any]] | None, set[str]]:
|
736
|
+
# retrieve layers metadata via GetCapabilities
|
737
|
+
wms, wms_errors = await self._wms_getcap(ogc_server)
|
738
|
+
if wms_errors:
|
739
|
+
return None, wms_errors
|
740
|
+
|
741
|
+
return wms, set()
|
742
|
+
|
743
|
+
def _load_tree_items(self) -> None:
|
744
|
+
assert models.DBSession is not None
|
745
|
+
|
746
|
+
# Populate sqlalchemy session.identity_map to reduce the number of database requests.
|
747
|
+
self._ogcservers_cache = models.DBSession.query(main.OGCServer).all()
|
748
|
+
self._treeitems_cache = models.DBSession.query(main.TreeItem).all()
|
749
|
+
self._layerswms_cache = (
|
750
|
+
models.DBSession.query(main.LayerWMS)
|
751
|
+
.options(subqueryload(main.LayerWMS.dimensions), subqueryload(main.LayerWMS.metadatas))
|
752
|
+
.all()
|
753
|
+
)
|
754
|
+
self._layerswmts_cache = (
|
755
|
+
models.DBSession.query(main.LayerWMTS)
|
756
|
+
.options(subqueryload(main.LayerWMTS.dimensions), subqueryload(main.LayerWMTS.metadatas))
|
757
|
+
.all()
|
758
|
+
)
|
759
|
+
self._layergroup_cache = (
|
760
|
+
models.DBSession.query(main.LayerGroup)
|
761
|
+
.options(subqueryload(main.LayerGroup.metadatas), subqueryload(main.LayerGroup.children_relation))
|
762
|
+
.all()
|
763
|
+
)
|
764
|
+
self._themes_cache = (
|
765
|
+
models.DBSession.query(main.Theme)
|
766
|
+
.options(
|
767
|
+
subqueryload(main.Theme.functionalities),
|
768
|
+
subqueryload(main.Theme.metadatas),
|
769
|
+
subqueryload(main.Theme.children_relation),
|
770
|
+
)
|
771
|
+
.all()
|
772
|
+
)
|
773
|
+
|
774
|
+
async def _themes(
|
775
|
+
self, interface: str = "desktop", filter_themes: bool = True, min_levels: int = 1
|
776
|
+
) -> tuple[list[dict[str, Any]], set[str]]:
|
777
|
+
"""Return theme information for the role identified by ``role_id``."""
|
778
|
+
|
779
|
+
assert models.DBSession is not None
|
780
|
+
|
781
|
+
self._load_tree_items()
|
782
|
+
errors = set()
|
783
|
+
layers = self._layers(interface)
|
784
|
+
|
785
|
+
themes = models.DBSession.query(main.Theme)
|
786
|
+
themes = themes.filter(main.Theme.public.is_(True))
|
787
|
+
auth_themes = models.DBSession.query(main.Theme)
|
788
|
+
auth_themes = auth_themes.filter(main.Theme.public.is_(False))
|
789
|
+
auth_themes = auth_themes.join(main.Theme.restricted_roles)
|
790
|
+
auth_themes = auth_themes.filter(main.Role.id.in_(get_roles_id(self.request)))
|
791
|
+
|
792
|
+
themes = themes.union(auth_themes)
|
793
|
+
|
794
|
+
themes = themes.order_by(main.Theme.ordering.asc())
|
795
|
+
|
796
|
+
if filter_themes and interface is not None:
|
797
|
+
themes = themes.join(main.Theme.interfaces)
|
798
|
+
themes = themes.filter(main.Interface.name == interface)
|
799
|
+
|
800
|
+
export_themes = []
|
801
|
+
for theme in themes.all():
|
802
|
+
if re.search("[/?#]", theme.name):
|
803
|
+
errors.add(f"The theme has an unsupported name '{theme.name}'.")
|
804
|
+
continue
|
805
|
+
|
806
|
+
children, children_errors = await self._get_children(theme, layers, min_levels)
|
807
|
+
errors |= children_errors
|
808
|
+
|
809
|
+
# Test if the theme is visible for the current user
|
810
|
+
if children:
|
811
|
+
url = (
|
812
|
+
get_url2(f"The Theme '{theme.name}'", theme.icon, self.request, errors)
|
813
|
+
if theme.icon is not None and theme.icon
|
814
|
+
else None
|
815
|
+
)
|
816
|
+
icon = (
|
817
|
+
url.url()
|
818
|
+
if url is not None
|
819
|
+
else self.request.static_url("/etc/geomapfish/static/images/blank.png")
|
820
|
+
)
|
821
|
+
|
822
|
+
theme_theme = {
|
823
|
+
"id": theme.id,
|
824
|
+
"name": theme.name,
|
825
|
+
"icon": icon,
|
826
|
+
"children": children,
|
827
|
+
"functionalities": self._get_functionalities(theme),
|
828
|
+
"metadata": self._get_metadata_list(theme, errors),
|
829
|
+
}
|
830
|
+
export_themes.append(theme_theme)
|
831
|
+
|
832
|
+
return export_themes, errors
|
833
|
+
|
834
|
+
@staticmethod
|
835
|
+
def _get_functionalities(theme: main.Theme) -> dict[str, list[str]]:
|
836
|
+
result: dict[str, list[str]] = {}
|
837
|
+
for functionality in theme.functionalities:
|
838
|
+
if functionality.name in result:
|
839
|
+
result[functionality.name].append(functionality.value)
|
840
|
+
else:
|
841
|
+
result[functionality.name] = [functionality.value]
|
842
|
+
return result
|
843
|
+
|
844
|
+
@view_config(route_name="invalidate", renderer="json") # type: ignore[misc]
|
845
|
+
def invalidate_cache(self) -> dict[str, bool]:
|
846
|
+
auth_view(self.request)
|
847
|
+
models.cache_invalidate_cb()
|
848
|
+
return {"success": True}
|
849
|
+
|
850
|
+
async def _get_children(
|
851
|
+
self, theme: main.Theme, layers: list[str], min_levels: int
|
852
|
+
) -> tuple[list[dict[str, Any]], set[str]]:
|
853
|
+
children = []
|
854
|
+
errors: set[str] = set()
|
855
|
+
for item in theme.children:
|
856
|
+
if isinstance(item, main.LayerGroup):
|
857
|
+
group_theme, gp_errors = await self._group(
|
858
|
+
f"{theme.name}/{item.name}", item, layers, min_levels=min_levels
|
859
|
+
)
|
860
|
+
errors |= gp_errors
|
861
|
+
if group_theme is not None:
|
862
|
+
children.append(group_theme)
|
863
|
+
elif self._layer_included(item):
|
864
|
+
if min_levels > 0:
|
865
|
+
errors.add(
|
866
|
+
f"The Layer '{item.name}' cannot be directly in the theme '{theme.name}' "
|
867
|
+
f"(0/{min_levels:d})."
|
868
|
+
)
|
869
|
+
elif item.name in layers:
|
870
|
+
layer_theme, l_errors = await self._layer(item, dim=DimensionInformation())
|
871
|
+
errors |= l_errors
|
872
|
+
if layer_theme is not None:
|
873
|
+
children.append(layer_theme)
|
874
|
+
return children, errors
|
875
|
+
|
876
|
+
@_CACHE_REGION.cache_on_arguments()
|
877
|
+
def _get_layers_enum(self) -> dict[str, dict[str, str]]:
|
878
|
+
layers_enum = {}
|
879
|
+
if "enum" in self.settings.get("layers", {}):
|
880
|
+
for layer_name, layer in list(self.settings["layers"]["enum"].items()):
|
881
|
+
layer_enum: dict[str, str] = {}
|
882
|
+
layers_enum[layer_name] = layer_enum
|
883
|
+
for attribute in list(layer["attributes"].keys()):
|
884
|
+
layer_enum[attribute] = self.request.route_url(
|
885
|
+
"layers_enumerate_attribute_values",
|
886
|
+
layer_name=layer_name,
|
887
|
+
field_name=attribute,
|
888
|
+
path="",
|
889
|
+
)
|
890
|
+
return layers_enum
|
891
|
+
|
892
|
+
def _get_role_ids(self) -> set[int] | None:
|
893
|
+
return None if self.request.user is None else {role.id for role in self.request.user.roles}
|
894
|
+
|
895
|
+
async def _wfs_get_features_type(
|
896
|
+
self, wfs_url: Url, ogc_server: main.OGCServer, preload: bool = False, cache: bool = True
|
897
|
+
) -> tuple[Optional[etree.Element], set[str]]: # pylint: disable=c-extension-no-member
|
898
|
+
errors = set()
|
899
|
+
|
900
|
+
wfs_url.add_query(
|
901
|
+
{
|
902
|
+
"SERVICE": "WFS",
|
903
|
+
"VERSION": "1.0.0",
|
904
|
+
"REQUEST": "DescribeFeatureType",
|
905
|
+
"ROLE_IDS": "0",
|
906
|
+
"USER_ID": "0",
|
907
|
+
}
|
908
|
+
)
|
909
|
+
|
910
|
+
_LOG.debug("WFS DescribeFeatureType for the URL: %s", wfs_url.url())
|
911
|
+
|
912
|
+
headers = {}
|
913
|
+
|
914
|
+
# Add headers for Geoserver
|
915
|
+
if ogc_server.auth == main.OGCSERVER_AUTH_GEOSERVER:
|
916
|
+
headers["sec-username"] = "root"
|
917
|
+
headers["sec-roles"] = "root"
|
918
|
+
|
919
|
+
try:
|
920
|
+
content, _ = await get_http_cached(self.http_options, wfs_url.url(), headers, cache)
|
921
|
+
except requests.exceptions.RequestException as exception:
|
922
|
+
error = (
|
923
|
+
f"Unable to get WFS DescribeFeatureType from the URL '{wfs_url.url()}' for "
|
924
|
+
f"OGC server {ogc_server.name}, "
|
925
|
+
+ (
|
926
|
+
f"return the error: {exception.response.status_code} {exception.response.reason}"
|
927
|
+
if exception.response is not None
|
928
|
+
else f"{exception}"
|
929
|
+
)
|
930
|
+
)
|
931
|
+
errors.add(error)
|
932
|
+
_LOG.exception(error)
|
933
|
+
return None, errors
|
934
|
+
except Exception: # pylint: disable=broad-exception-caught
|
935
|
+
error = (
|
936
|
+
f"Unable to get WFS DescribeFeatureType from the URL {wfs_url} for "
|
937
|
+
f"OGC server {ogc_server.name}"
|
938
|
+
)
|
939
|
+
errors.add(error)
|
940
|
+
_LOG.exception(error)
|
941
|
+
return None, errors
|
942
|
+
|
943
|
+
if preload:
|
944
|
+
return None, errors
|
945
|
+
|
946
|
+
try:
|
947
|
+
return lxml.XML(content), errors
|
948
|
+
except Exception as e: # pylint: disable=broad-except
|
949
|
+
errors.add(
|
950
|
+
f"Error '{e!s}' on reading DescribeFeatureType from URL {wfs_url}:\n{content.decode()}"
|
951
|
+
)
|
952
|
+
return None, errors
|
953
|
+
|
954
|
+
    def get_url_internal_wfs(
        self, ogc_server: main.OGCServer, errors: set[str]
    ) -> tuple[Url | None, Url | None, Url | None]:
        # Required on every call, to validate the URL.
        if ogc_server.auth != main.OGCSERVER_AUTH_NOAUTH:
            url: Url | None = Url(
                self.request.route_url("mapserverproxy", _query={"ogcserver": ogc_server.name})
            )
            url_wfs: Url | None = url
            url_internal_wfs = get_url2(
                f"The OGC server (WFS) '{ogc_server.name}'",
                ogc_server.url_wfs or ogc_server.url,
                self.request,
                errors=errors,
            )
        else:
            url = get_url2(f"The OGC server '{ogc_server.name}'", ogc_server.url, self.request, errors=errors)
            url_wfs = (
                get_url2(
                    f"The OGC server (WFS) '{ogc_server.name}'",
                    ogc_server.url_wfs,
                    self.request,
                    errors=errors,
                )
                if ogc_server.url_wfs is not None
                else url
            )
            url_internal_wfs = url_wfs
        return url_internal_wfs, url, url_wfs

    async def _preload(self, errors: set[str]) -> None:
        assert models.DBSession is not None
        tasks = set()

        for ogc_server, nb_layers in (
            models.DBSession.query(
                main.OGCServer, sqlalchemy.func.count(main.LayerWMS.id)  # pylint: disable=not-callable
            )
            .filter(main.LayerWMS.ogc_server_id == main.OGCServer.id)
            .group_by(main.OGCServer.id)
            .all()
        ):
            # Don't preload OGC servers that have no layers (e.g. the one required only for the
            # QGIS Server landing page), because they would be reported as being in error here.
            _LOG.debug("%i layers for OGC server '%s'", nb_layers, ogc_server.name)
            if nb_layers > 0:
                _LOG.debug("Preload OGC server '%s'", ogc_server.name)
                url_internal_wfs, _, _ = self.get_url_internal_wfs(ogc_server, errors)
                if url_internal_wfs is not None:
                    tasks.add(self.preload_ogc_server(ogc_server, url_internal_wfs))

        await asyncio.gather(*tasks)

    async def preload_ogc_server(
        self, ogc_server: main.OGCServer, url_internal_wfs: Url, cache: bool = True
    ) -> None:
        if ogc_server.wfs_support:
            await self._get_features_attributes(url_internal_wfs, ogc_server, cache=cache)
        await self._wms_getcap(ogc_server, False, cache=cache)

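A minimal, standalone sketch of the concurrency pattern used by `_preload()` above: one coroutine per OGC server, awaited together with `asyncio.gather()`. The names here are illustrative, not part of the project.

```python
import asyncio

async def warm(name: str) -> None:
    # Stand-in for preload_ogc_server(): warm the caches for one OGC server.
    print(f"preloading {name}")

async def preload_all(names: list[str]) -> None:
    # Fan out one task per server and wait for all of them, as _preload() does.
    await asyncio.gather(*(warm(name) for name in names))

asyncio.run(preload_all(["main", "secondary"]))
```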
    async def _get_features_attributes(
        self, url_internal_wfs: Url, ogc_server: main.OGCServer, cache: bool = True
    ) -> tuple[dict[str, dict[Any, dict[str, Any]]] | None, str | None, set[str]]:
        @_CACHE_OGC_SERVER_REGION.cache_on_arguments()
        def _get_features_attributes_cache(
            url_internal_wfs: Url, ogc_server_name: str
        ) -> tuple[dict[str, dict[Any, dict[str, Any]]] | None, str | None, set[str]]:
            del url_internal_wfs  # Only part of the cache key
            all_errors: set[str] = set()
            if errors:
                all_errors |= errors
                return None, None, all_errors
            assert feature_type is not None
            namespace: str = feature_type.attrib.get("targetNamespace")
            types: dict[Any, dict[str, Any]] = {}
            elements = {}
            for child in feature_type.getchildren():
                if child.tag == "{http://www.w3.org/2001/XMLSchema}element":
                    name = child.attrib["name"]
                    type_namespace, type_ = child.attrib["type"].split(":")
                    if type_namespace not in child.nsmap:
                        _LOG.info(
                            "The namespace '%s' of the type '%s' is not found in the "
                            "available namespaces: %s (OGC server: %s)",
                            type_namespace,
                            name,
                            ", ".join([str(k) for k in child.nsmap.keys()]),
                            ogc_server_name,
                        )
                    elif child.nsmap[type_namespace] != namespace:
                        _LOG.info(
                            "The namespace '%s' of the type '%s' should be '%s' (OGC server: %s).",
                            child.nsmap[type_namespace],
                            name,
                            namespace,
                            ogc_server_name,
                        )
                    elements[name] = type_

                if child.tag == "{http://www.w3.org/2001/XMLSchema}complexType":
                    sequence = child.find(".//{http://www.w3.org/2001/XMLSchema}sequence")
                    attrib = {}
                    for children in sequence.getchildren():
                        type_namespace = None
                        type_ = children.attrib["type"]
                        if len(type_.split(":")) == 2:
                            type_namespace, type_ = type_.split(":")
                        name = children.attrib["name"]
                        attrib[name] = {"type": type_}
                        if type_namespace in children.nsmap:
                            type_namespace = children.nsmap[type_namespace]
                            attrib[name]["namespace"] = type_namespace
                        else:
                            _LOG.info(
                                "The namespace '%s' of the type '%s' is not found in the "
                                "available namespaces: %s (OGC server: %s)",
                                type_namespace,
                                name,
                                ", ".join([str(k) for k in child.nsmap.keys()]),
                                ogc_server_name,
                            )
                        for key, value in children.attrib.items():
                            if key not in ("name", "type", "namespace"):
                                attrib[name][key] = value
                    types[child.attrib["name"]] = attrib
            attributes: dict[str, dict[Any, dict[str, Any]]] = {}
            for name, type_ in elements.items():
                if type_ in types:
                    attributes[name] = types[type_]
                elif (type_ == "Character") and (name + "Type") in types:
                    _LOG.debug(
                        'Due to MapServer weird behavior when using METADATA "gml_types" "auto" '
                        "the type 'ms:Character' is returned as type '%sType' for feature '%s'.",
                        name,
                        name,
                    )
                    attributes[name] = types[name + "Type"]
                else:
                    _LOG.warning(
                        "The provided type '%s' does not exist, available types are %s.",
                        type_,
                        ", ".join(types.keys()),
                    )

            return attributes, namespace, all_errors

        if cache:
            result = _get_features_attributes_cache.get(  # type: ignore[attr-defined]
                url_internal_wfs,
                ogc_server.name,
            )
            if result != dogpile.cache.api.NO_VALUE:
                return result  # type: ignore[no-any-return]

        feature_type, errors = await self._wfs_get_features_type(url_internal_wfs, ogc_server, False, cache)

        return _get_features_attributes_cache.refresh(  # type: ignore[attr-defined,no-any-return]
            url_internal_wfs,
            ogc_server.name,
        )

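A minimal, self-contained sketch of the dogpile.cache `get()`/`refresh()` pattern used above, with an in-memory region standing in for the project's configured `_CACHE_OGC_SERVER_REGION`: on a cache miss `get()` returns `NO_VALUE` without running the function, and `refresh()` recomputes the value and stores it in the region.

```python
from dogpile.cache import make_region
from dogpile.cache.api import NO_VALUE

# In-memory region for illustration; the project configures its own regions.
region = make_region().configure("dogpile.cache.memory")

@region.cache_on_arguments()
def expensive(key: str) -> str:
    # Stand-in for parsing a DescribeFeatureType document.
    return key.upper()

value = expensive.get("demo")          # NO_VALUE on a cache miss, no computation done
if value is NO_VALUE:
    value = expensive.refresh("demo")  # compute now and store the result in the region
print(value)
```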
@view_config(route_name="themes", renderer="json") # type: ignore[misc]
|
1116
|
+
def themes(self) -> dict[str, dict[str, dict[str, Any]] | list[str]]:
|
1117
|
+
|
1118
|
+
is_allowed_host(self.request)
|
1119
|
+
|
1120
|
+
interface = self.request.params.get("interface", "desktop")
|
1121
|
+
sets = self.request.params.get("set", "all")
|
1122
|
+
min_levels = int(self.request.params.get("min_levels", 1))
|
1123
|
+
group = self.request.params.get("group")
|
1124
|
+
background_layers_group = self.request.params.get("background")
|
1125
|
+
|
1126
|
+
set_common_headers(self.request, "themes", Cache.PRIVATE)
|
1127
|
+
|
1128
|
+
async def get_theme() -> dict[str, dict[str, Any] | list[str]]:
|
1129
|
+
assert models.DBSession is not None
|
1130
|
+
|
1131
|
+
export_themes = sets in ("all", "themes")
|
1132
|
+
export_group = group is not None and sets in ("all", "group")
|
1133
|
+
export_background = background_layers_group is not None and sets in ("all", "background")
|
1134
|
+
|
1135
|
+
result: dict[str, dict[str, Any] | list[Any]] = {}
|
1136
|
+
all_errors: set[str] = set()
|
1137
|
+
_LOG.debug("Start preload")
|
1138
|
+
start_time = time.time()
|
1139
|
+
await self._preload(all_errors)
|
1140
|
+
_LOG.debug("End preload")
|
1141
|
+
# Don't log if it looks to be already preloaded.
|
1142
|
+
if (time.time() - start_time) > 1:
|
1143
|
+
_LOG.info("Do preload in %.3fs.", time.time() - start_time)
|
1144
|
+
_LOG.debug("Run garbage collection: %s", ", ".join([str(gc.collect(n)) for n in range(3)]))
|
1145
|
+
result["ogcServers"] = {}
|
1146
|
+
for ogc_server, nb_layers in (
|
1147
|
+
models.DBSession.query(
|
1148
|
+
main.OGCServer, sqlalchemy.func.count(main.LayerWMS.id) # pylint: disable=not-callable
|
1149
|
+
)
|
1150
|
+
.filter(main.LayerWMS.ogc_server_id == main.OGCServer.id)
|
1151
|
+
.group_by(main.OGCServer.id)
|
1152
|
+
.all()
|
1153
|
+
):
|
1154
|
+
if nb_layers == 0:
|
1155
|
+
# QGIS Server landing page requires an OGC server that can't be used here.
|
1156
|
+
continue
|
1157
|
+
|
1158
|
+
_LOG.debug("Process OGC server '%s'", ogc_server.name)
|
1159
|
+
|
1160
|
+
url_internal_wfs, url, url_wfs = self.get_url_internal_wfs(ogc_server, all_errors)
|
1161
|
+
|
1162
|
+
attributes = None
|
1163
|
+
namespace = None
|
1164
|
+
if ogc_server.wfs_support and not url_internal_wfs:
|
1165
|
+
all_errors.add(
|
1166
|
+
f"The OGC server '{ogc_server.name}' is configured to support WFS "
|
1167
|
+
"but no internal WFS URL is found."
|
1168
|
+
)
|
1169
|
+
if ogc_server.wfs_support and url_internal_wfs:
|
1170
|
+
attributes, namespace, errors = await self._get_features_attributes(
|
1171
|
+
url_internal_wfs, ogc_server
|
1172
|
+
)
|
1173
|
+
# Create a local copy (don't modify the cache)
|
1174
|
+
if attributes is not None:
|
1175
|
+
attributes = dict(attributes)
|
1176
|
+
all_errors |= errors
|
1177
|
+
|
1178
|
+
all_private_layers = get_private_layers([ogc_server.id]).values()
|
1179
|
+
protected_layers_name = [
|
1180
|
+
layer.name for layer in get_protected_layers(self.request, [ogc_server.id]).values()
|
1181
|
+
]
|
1182
|
+
private_layers_name: list[str] = []
|
1183
|
+
for layers in [
|
1184
|
+
v.layer for v in all_private_layers if v.name not in protected_layers_name
|
1185
|
+
]:
|
1186
|
+
private_layers_name.extend(layers.split(","))
|
1187
|
+
|
1188
|
+
if attributes is not None:
|
1189
|
+
for name in private_layers_name:
|
1190
|
+
if name in attributes:
|
1191
|
+
del attributes[name]
|
1192
|
+
|
1193
|
+
result["ogcServers"][ogc_server.name] = {
|
1194
|
+
"url": url.url() if url else None,
|
1195
|
+
"urlWfs": url_wfs.url() if url_wfs else None,
|
1196
|
+
"type": ogc_server.type,
|
1197
|
+
"credential": ogc_server.auth != main.OGCSERVER_AUTH_NOAUTH,
|
1198
|
+
"imageType": ogc_server.image_type,
|
1199
|
+
"wfsSupport": ogc_server.wfs_support,
|
1200
|
+
"isSingleTile": ogc_server.is_single_tile,
|
1201
|
+
"namespace": namespace,
|
1202
|
+
"attributes": attributes,
|
1203
|
+
}
|
1204
|
+
if export_themes:
|
1205
|
+
themes, errors = await self._themes(interface, True, min_levels)
|
1206
|
+
|
1207
|
+
result["themes"] = themes
|
1208
|
+
all_errors |= errors
|
1209
|
+
|
1210
|
+
if export_group:
|
1211
|
+
exported_group, errors = await self._get_group(group, interface)
|
1212
|
+
if exported_group is not None:
|
1213
|
+
result["group"] = exported_group
|
1214
|
+
all_errors |= errors
|
1215
|
+
|
1216
|
+
if export_background:
|
1217
|
+
exported_group, errors = await self._get_group(background_layers_group, interface)
|
1218
|
+
result["background_layers"] = exported_group["children"] if exported_group is not None else []
|
1219
|
+
all_errors |= errors
|
1220
|
+
|
1221
|
+
result["errors"] = list(all_errors)
|
1222
|
+
if all_errors:
|
1223
|
+
_LOG.info("Theme errors:\n%s", "\n".join(all_errors))
|
1224
|
+
return result
|
1225
|
+
|
1226
|
+
@_CACHE_REGION.cache_on_arguments()
|
1227
|
+
def get_theme_anonymous(
|
1228
|
+
intranet: bool,
|
1229
|
+
interface: str,
|
1230
|
+
sets: str,
|
1231
|
+
min_levels: str,
|
1232
|
+
group: str,
|
1233
|
+
background_layers_group: str,
|
1234
|
+
host: str,
|
1235
|
+
) -> dict[str, dict[str, dict[str, Any]] | list[str]]:
|
1236
|
+
# Only for cache key
|
1237
|
+
del intranet, interface, sets, min_levels, group, background_layers_group, host
|
1238
|
+
return asyncio.run(get_theme())
|
1239
|
+
|
1240
|
+
if self.request.user is None:
|
1241
|
+
return cast(
|
1242
|
+
dict[str, Union[dict[str, dict[str, Any]], list[str]]],
|
1243
|
+
get_theme_anonymous(
|
1244
|
+
is_intranet(self.request),
|
1245
|
+
interface,
|
1246
|
+
sets,
|
1247
|
+
min_levels,
|
1248
|
+
group,
|
1249
|
+
background_layers_group,
|
1250
|
+
self.request.headers.get("Host"),
|
1251
|
+
),
|
1252
|
+
)
|
1253
|
+
return asyncio.run(get_theme())
|
1254
|
+
|
1255
|
+
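For context, a hypothetical client call to this view. The host, path, and group name are placeholders; only the query parameter names and the top-level response keys come from the code above.

```python
import requests

# "interface", "set", "min_levels", "group" and "background" are the query parameters
# read by the view; the group name "background" used here is an assumption.
response = requests.get(
    "https://geoportal.example.com/themes",
    params={"interface": "desktop", "set": "all", "min_levels": "1", "background": "background"},
)
data = response.json()
# Per the code above, the result contains "ogcServers", "errors" and, depending on the
# "set" parameter, "themes", "group" and/or "background_layers".
print(sorted(data.keys()))
```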
    async def _get_group(
        self, group: main.LayerGroup, interface: main.Interface
    ) -> tuple[dict[str, Any] | None, set[str]]:
        assert models.DBSession is not None

        layers = self._layers(interface)
        try:
            group_db = models.DBSession.query(main.LayerGroup).filter(main.LayerGroup.name == group).one()  # type: ignore[arg-type]
            assert isinstance(group_db, main.LayerGroup)
            return await self._group(group_db.name, group_db, layers, depth=2, dim=DimensionInformation())
        except NoResultFound:
            return (
                None,
                {
                    f"Unable to find the Group named: {group}, Available Groups: "
                    f"{', '.join([i[0] for i in models.DBSession.query(main.LayerGroup.name).all()])}"
                },
            )

@view_config(route_name="ogc_server_clear_cache", renderer="json") # type: ignore[misc]
|
1275
|
+
def ogc_server_clear_cache_view(self) -> dict[str, Any]:
|
1276
|
+
assert models.DBSession is not None
|
1277
|
+
|
1278
|
+
if not self.request.user:
|
1279
|
+
raise pyramid.httpexceptions.HTTPForbidden()
|
1280
|
+
|
1281
|
+
admin_roles = [r for r in self.request.user.roles if r.name == ("role_admin")]
|
1282
|
+
if not admin_roles:
|
1283
|
+
raise pyramid.httpexceptions.HTTPForbidden()
|
1284
|
+
|
1285
|
+
self._ogc_server_clear_cache(
|
1286
|
+
models.DBSession.query(main.OGCServer).filter_by(id=self.request.matchdict.get("id")).one()
|
1287
|
+
)
|
1288
|
+
came_from = self.request.params.get("came_from")
|
1289
|
+
allowed_hosts = self.request.registry.settings.get("admin_interface", {}).get("allowed_hosts", [])
|
1290
|
+
came_from_hostname, ok = is_allowed_url(self.request, came_from, allowed_hosts)
|
1291
|
+
if not ok:
|
1292
|
+
message = (
|
1293
|
+
f"Invalid hostname '{came_from_hostname}' in 'came_from' parameter, "
|
1294
|
+
f"is not the current host '{self.request.host}' "
|
1295
|
+
f"or part of allowed hosts: {', '.join(allowed_hosts)}"
|
1296
|
+
)
|
1297
|
+
_LOG.debug(message)
|
1298
|
+
raise pyramid.httpexceptions.HTTPBadRequest(message)
|
1299
|
+
if came_from:
|
1300
|
+
raise pyramid.httpexceptions.HTTPFound(location=came_from)
|
1301
|
+
return {"success": True}
|
1302
|
+
|
1303
|
+
def _ogc_server_clear_cache(self, ogc_server: main.OGCServer) -> None:
|
1304
|
+
errors: set[str] = set()
|
1305
|
+
url_internal_wfs, _, _ = self.get_url_internal_wfs(ogc_server, errors)
|
1306
|
+
if errors:
|
1307
|
+
_LOG.error(
|
1308
|
+
"Error while getting the URL of the OGC Server %s:\n%s", ogc_server.id, "\n".join(errors)
|
1309
|
+
)
|
1310
|
+
return
|
1311
|
+
if url_internal_wfs is None:
|
1312
|
+
return
|
1313
|
+
|
1314
|
+
asyncio.run(self._async_cache_invalidate_ogc_server_cb(ogc_server, url_internal_wfs))
|
1315
|
+
|
1316
|
+
async def _async_cache_invalidate_ogc_server_cb(
|
1317
|
+
self, ogc_server: main.OGCServer, url_internal_wfs: Url
|
1318
|
+
) -> None:
|
1319
|
+
# Fill the cache
|
1320
|
+
await self.preload_ogc_server(ogc_server, url_internal_wfs, False)
|
1321
|
+
|
1322
|
+
cache_invalidate_cb()
|
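For illustration, a hypothetical way to trigger this cache-clearing view. The host, path, and session handling are assumptions; what comes from the code above is the route name "ogc_server_clear_cache", the optional "came_from" parameter (validated against the admin_interface allowed_hosts), and the requirement that the caller has the role_admin role.

```python
import requests

# Placeholder URL; the concrete path depends on how the route is configured.
response = requests.get(
    "https://geoportal.example.com/ogc-server-clear-cache/1",
    params={"came_from": "https://geoportal.example.com/admin/"},
    cookies={"session": "an-authenticated-admin-session"},  # assumption: cookie-based admin session
)
# Per the code above, the view either redirects to the validated came_from URL
# or returns {"success": true}.
print(response.status_code)
```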