c2cgeoportal-geoportal 2.3.5.80__py3-none-any.whl → 2.9rc45__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- c2cgeoportal_geoportal/__init__.py +960 -0
- c2cgeoportal_geoportal/lib/__init__.py +256 -0
- c2cgeoportal_geoportal/lib/authentication.py +250 -0
- c2cgeoportal_geoportal/lib/bashcolor.py +46 -0
- c2cgeoportal_geoportal/lib/cacheversion.py +77 -0
- c2cgeoportal_geoportal/lib/caching.py +176 -0
- c2cgeoportal_geoportal/lib/check_collector.py +80 -0
- c2cgeoportal_geoportal/lib/checker.py +295 -0
- c2cgeoportal_geoportal/lib/common_headers.py +172 -0
- c2cgeoportal_geoportal/lib/dbreflection.py +266 -0
- c2cgeoportal_geoportal/lib/filter_capabilities.py +360 -0
- c2cgeoportal_geoportal/lib/fulltextsearch.py +50 -0
- c2cgeoportal_geoportal/lib/functionality.py +166 -0
- c2cgeoportal_geoportal/lib/headers.py +62 -0
- c2cgeoportal_geoportal/lib/i18n.py +38 -0
- c2cgeoportal_geoportal/lib/layers.py +132 -0
- c2cgeoportal_geoportal/lib/lingva_extractor.py +937 -0
- c2cgeoportal_geoportal/lib/loader.py +57 -0
- c2cgeoportal_geoportal/lib/metrics.py +117 -0
- c2cgeoportal_geoportal/lib/oauth2.py +1186 -0
- c2cgeoportal_geoportal/lib/oidc.py +304 -0
- c2cgeoportal_geoportal/lib/wmstparsing.py +353 -0
- c2cgeoportal_geoportal/lib/xsd.py +166 -0
- c2cgeoportal_geoportal/py.typed +0 -0
- c2cgeoportal_geoportal/resources.py +49 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/ci/config.yaml +26 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.dockerignore +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.eslintrc.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.prospector.yaml +30 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Makefile +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.ini +58 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/development.ini +121 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/gunicorn.conf.py +139 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/language_mapping +3 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-client.cfg +5 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-server.cfg +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/production.ini +38 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/setup.py +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.api.js +41 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.apps.js +64 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.commons.js +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.config.js +22 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/__init__.py +42 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/authentication.py +10 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/dev.py +14 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/models.py +8 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/multi_organization.py +7 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/resources.py +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/api/index.js +12 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/js/{{cookiecutter.package}}module.js +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/subscribers.py +39 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/views/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/{{cookiecutter.project}}/geoportal/CONST_Makefile +121 -0
- c2cgeoportal_geoportal/scaffolds/create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.dockerignore +14 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.editorconfig +17 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/main.yaml +73 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/rebuild.yaml +50 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/update_l10n.yaml +66 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.gitignore +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.pre-commit-config.yaml +35 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierignore +1 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierrc.yaml +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Makefile +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/README.rst +29 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/build +179 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/config.yaml +22 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/docker-compose-check +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-db.yaml +24 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-lib.yaml +513 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-qgis.yaml +21 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.override.sample.yaml +65 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.yaml +121 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.default +102 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.project +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/vars.yaml +430 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/locale/en/LC_MESSAGES/{{cookiecutter.package}}_geoportal-client.po +6 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/desktop.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/iframe_api.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/mobile.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_left.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_right.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/blank.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-blue.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-gold.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-green.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/robot.txt.tmpl +3 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/Readme.txt +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/TM_EUROPE_BORDERS-0.3.sql +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/demo.map.tmpl +224 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arial.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbd.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbi.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Ariali.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Bold.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-BoldItalic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Italic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Regular.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdana.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanab.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanai.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanaz.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts.conf +12 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.conf +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.map.tmpl +87 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/tinyows.xml.tmpl +36 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Landscape.jrxml +207 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Portrait.jrxml +185 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Landscape.jrxml +200 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Portrait.jrxml +170 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/config.yaml.tmpl +175 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/legend.jrxml +109 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation_fr.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/logo.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/north.svg +93 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/results.jrxml +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/project.yaml +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/pyproject.toml +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/qgisserver/pg_service.conf.tmpl +15 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/run_alembic.sh +11 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-backup +126 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-restore +132 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/setup.cfg +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/spell-ignore-words.txt +5 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/test_app.py +78 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tilegeneration/config.yaml.tmpl +195 -0
- c2cgeoportal_geoportal/scaffolds/update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/.upgrade.yaml +67 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_CHANGELOG.txt +304 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_create_template/tests/test_testapp.py +48 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/.CONST_vars.yaml.swp +0 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_config-schema.yaml +927 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_vars.yaml +1503 -0
- c2cgeoportal_geoportal/scripts/__init__.py +64 -0
- c2cgeoportal_geoportal/scripts/c2cupgrade.py +879 -0
- c2cgeoportal_geoportal/scripts/create_demo_theme.py +83 -0
- c2cgeoportal_geoportal/scripts/manage_users.py +140 -0
- c2cgeoportal_geoportal/scripts/pcreate.py +296 -0
- c2cgeoportal_geoportal/scripts/theme2fts.py +347 -0
- c2cgeoportal_geoportal/scripts/urllogin.py +81 -0
- c2cgeoportal_geoportal/templates/login.html +90 -0
- c2cgeoportal_geoportal/templates/notlogin.html +62 -0
- c2cgeoportal_geoportal/templates/testi18n.html +12 -0
- c2cgeoportal_geoportal/views/__init__.py +59 -0
- c2cgeoportal_geoportal/views/dev.py +57 -0
- c2cgeoportal_geoportal/views/dynamic.py +209 -0
- c2cgeoportal_geoportal/views/entry.py +174 -0
- c2cgeoportal_geoportal/views/fulltextsearch.py +189 -0
- c2cgeoportal_geoportal/views/geometry_processing.py +75 -0
- c2cgeoportal_geoportal/views/i18n.py +129 -0
- c2cgeoportal_geoportal/views/layers.py +713 -0
- c2cgeoportal_geoportal/views/login.py +684 -0
- c2cgeoportal_geoportal/views/mapserverproxy.py +234 -0
- c2cgeoportal_geoportal/views/memory.py +90 -0
- c2cgeoportal_geoportal/views/ogcproxy.py +120 -0
- c2cgeoportal_geoportal/views/pdfreport.py +245 -0
- c2cgeoportal_geoportal/views/printproxy.py +143 -0
- c2cgeoportal_geoportal/views/profile.py +192 -0
- c2cgeoportal_geoportal/views/proxy.py +261 -0
- c2cgeoportal_geoportal/views/raster.py +233 -0
- c2cgeoportal_geoportal/views/resourceproxy.py +73 -0
- c2cgeoportal_geoportal/views/shortener.py +152 -0
- c2cgeoportal_geoportal/views/theme.py +1322 -0
- c2cgeoportal_geoportal/views/tinyowsproxy.py +189 -0
- c2cgeoportal_geoportal/views/vector_tiles.py +83 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc45.dist-info}/METADATA +21 -24
- c2cgeoportal_geoportal-2.9rc45.dist-info/RECORD +193 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc45.dist-info}/WHEEL +1 -1
- c2cgeoportal_geoportal-2.9rc45.dist-info/entry_points.txt +28 -0
- c2cgeoportal_geoportal-2.9rc45.dist-info/top_level.txt +2 -0
- tests/__init__.py +100 -0
- tests/test_cachebuster.py +71 -0
- tests/test_caching.py +275 -0
- tests/test_checker.py +85 -0
- tests/test_decimaljson.py +47 -0
- tests/test_headerstween.py +64 -0
- tests/test_i18n.py +31 -0
- tests/test_init.py +193 -0
- tests/test_locale_negociator.py +69 -0
- tests/test_mapserverproxy_route_predicate.py +64 -0
- tests/test_raster.py +267 -0
- tests/test_wmstparsing.py +238 -0
- tests/xmlstr.py +103 -0
- c2cgeoportal_geoportal-2.3.5.80.dist-info/DESCRIPTION.rst +0 -8
- c2cgeoportal_geoportal-2.3.5.80.dist-info/RECORD +0 -7
- c2cgeoportal_geoportal-2.3.5.80.dist-info/entry_points.txt +0 -22
- c2cgeoportal_geoportal-2.3.5.80.dist-info/metadata.json +0 -1
- c2cgeoportal_geoportal-2.3.5.80.dist-info/top_level.txt +0 -1
@@ -0,0 +1,937 @@
# Copyright (c) 2011-2025, Camptocamp SA
# All rights reserved.

# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:

# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.

# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.

# The views and conclusions contained in the software and documentation are those
# of the authors and should not be interpreted as representing official policies,
# either expressed or implied, of the FreeBSD Project.


import json
import os
import re
import subprocess
import traceback
from collections.abc import Callable
from typing import TYPE_CHECKING, Any, Optional, cast
from xml.dom import Node
from xml.parsers.expat import ExpatError

import pyramid.threadlocal
import requests
import sqlalchemy.orm
import yaml
from bottle import MakoTemplate, template
from c2c.template.config import config
from defusedxml.minidom import parseString
from geoalchemy2.types import Geometry
from lingva.extractors import Extractor, Message
from mako.lookup import TemplateLookup
from mako.template import Template
from owslib.wms import WebMapService
from sqlalchemy.exc import NoSuchTableError, OperationalError, ProgrammingError
from sqlalchemy.orm.exc import NoResultFound  # type: ignore[attr-defined]
from sqlalchemy.orm.properties import ColumnProperty
from sqlalchemy.orm.util import class_mapper

import c2cgeoportal_geoportal
from c2cgeoportal_commons.lib.url import Url, get_url2
from c2cgeoportal_geoportal.lib.bashcolor import Color, colorize
from c2cgeoportal_geoportal.lib.caching import init_region
from c2cgeoportal_geoportal.views.layers import Layers, get_layer_class

if TYPE_CHECKING:
    from c2cgeoportal_commons.models import main  # pylint: disable=ungrouped-imports,useless-suppression


class LinguaExtractorException(Exception):
    """Exception raised when an error occurs during the extraction."""


def _get_config(key: str, default: str | None = None) -> str | None:
    """
    Return the config value for passed key.

    Passed throw environment variable for the command line,
    or throw the query string on HTTP request.
    """
    request = pyramid.threadlocal.get_current_request()
    if request is not None:
        return cast(Optional[str], request.params.get(key.lower(), default))

    return os.environ.get(key, default)


def _get_config_str(key: str, default: str) -> str:
    result = _get_config(key, default)
    assert result is not None
    return result


class _Registry:
    settings = None

    def __init__(self, settings: dict[str, Any] | None) -> None:
        self.settings = settings


class _Request:
    params: dict[str, str] = {}
    matchdict: dict[str, str] = {}
    GET: dict[str, str] = {}

    def __init__(self, settings: dict[str, Any] | None = None) -> None:
        self.registry: _Registry = _Registry(settings)

    @staticmethod
    def static_url(*args: Any, **kwargs: Any) -> str:
        del args
        del kwargs
        return ""

    @staticmethod
    def static_path(*args: Any, **kwargs: Any) -> str:
        del args
        del kwargs
        return ""

    @staticmethod
    def route_url(*args: Any, **kwargs: Any) -> str:
        del args
        del kwargs
        return ""

    @staticmethod
    def current_route_url(*args: Any, **kwargs: Any) -> str:
        del args
        del kwargs
        return ""


class GeomapfishAngularExtractor(Extractor):  # type: ignore
    """GeoMapFish angular extractor."""

    extensions = [".js", ".html"]

    def __init__(self) -> None:
        super().__init__()
        if os.path.exists("/etc/geomapfish/config.yaml"):
            config.init("/etc/geomapfish/config.yaml")
            conf = config.get_config()
            assert conf is not None
            self.config = conf
        else:
            self.config = {}
        self.tpl = None

    @staticmethod
    def get_message_cleaner(filename: str) -> Callable[[str], str]:
        """Return a function for cleaning messages according to input file format."""
        ext = os.path.splitext(filename)[1]

        if ext in [".html", ".ejs"]:
            # Remove \n in HTML multi-line strings
            pattern = re.compile("\n *")
            return lambda s: re.sub(pattern, " ", s)

        return lambda s: s

    def __call__(
        self,
        filename: str,
        options: dict[str, Any],
        fileobj: dict[str, Any] | None = None,
        lineno: int = 0,
    ) -> list[Message]:
        del fileobj, lineno

        print(f"Running {self.__class__.__name__} on {filename}")

        cleaner = self.get_message_cleaner(filename)

        init_region({"backend": "dogpile.cache.memory"}, "std")
        init_region({"backend": "dogpile.cache.memory"}, "obj")

        int_filename = filename
        if re.match("^" + re.escape(f"./{self.config['package']}/templates"), filename):
            try:
                empty_template = Template("")  # nosec

                class Lookup(TemplateLookup):  # type: ignore
                    def get_template(self, uri: str) -> Template:
                        del uri  # unused
                        return empty_template

                class MyTemplate(MakoTemplate):  # type: ignore
                    tpl = None

                    def prepare(self, **kwargs: Any) -> None:
                        options.update({"input_encoding": self.encoding})
                        lookup = Lookup(**kwargs)
                        if self.source:
                            self.tpl = Template(self.source, lookup=lookup, **kwargs)  # nosec
                        else:
                            self.tpl = Template(  # nosec
                                uri=self.name, filename=self.filename, lookup=lookup, **kwargs
                            )

                try:
                    request = pyramid.threadlocal.get_current_request()
                    request = _Request() if request is None else request
                    processed = template(
                        filename,
                        {
                            "request": request,
                            "lang": "fr",
                            "debug": False,
                            "extra_params": {},
                            "permalink_themes": "",
                            "fulltextsearch_groups": [],
                            "wfs_types": [],
                            "_": lambda x: x,
                        },
                        template_adapter=MyTemplate,
                    )
                    int_filename = os.path.join(os.path.dirname(filename), "_" + os.path.basename(filename))
                    with open(int_filename, "wb") as file_open:
                        file_open.write(processed.encode("utf-8"))
                except Exception:  # pylint: disable=broad-exception-caught
                    print(colorize(f"ERROR! Occurred during the '{filename}' template generation", Color.RED))
                    print(colorize(traceback.format_exc(), Color.RED))
                    if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                        # Continue with the original one
                        int_filename = filename
                    else:
                        raise
            except Exception:  # pylint: disable=broad-exception-caught
                print(traceback.format_exc())

        # Path in geomapfish-tools
        script_path = "/opt/c2cgeoportal/geoportal/extract-messages.js"
        message_str = subprocess.check_output(["node", script_path, int_filename]).decode("utf-8")
        if int_filename != filename:
            os.unlink(int_filename)
        try:
            messages = []
            for contexts, message in json.loads(message_str):
                assert message is not None
                message = cleaner(message)
                for context in contexts.split(", "):
                    messages.append(Message(None, message, None, [], "", "", context.split(":")))
            return messages
        except Exception:
            print(colorize("An error occurred", Color.RED))
            print(colorize(message_str, Color.RED))
            print("------")
            raise


def init_db(settings: dict[str, Any]) -> None:
    """
    Initialize the SQLAlchemy Session.

    First test the connection, on when environment it should be OK, with the command line we should get
    an exception ind initialize the connection.
    """

    try:
        from c2cgeoportal_commons.models import DBSession  # pylint: disable=import-outside-toplevel
        from c2cgeoportal_commons.models.main import Theme  # pylint: disable=import-outside-toplevel

        assert DBSession is not None

        session = DBSession()
        session.query(Theme).count()
    except:  # pylint: disable=bare-except
        # Init db sessions

        class R:
            settings: dict[str, Any] = {}

        class C:
            registry = R()

            def get_settings(self) -> dict[str, Any]:
                return self.registry.settings

            def add_tween(self, *args: Any, **kwargs: Any) -> None:
                pass

        config_ = C()
        config_.registry.settings = settings

        c2cgeoportal_geoportal.init_db_sessions(settings, config_)


class GeomapfishConfigExtractor(Extractor):  # type: ignore
    """GeoMapFish config extractor (raster layers, and print templates)."""

    extensions = [".yaml", ".tmpl"]

    def __call__(
        self,
        filename: str,
        options: dict[str, Any],
        fileobj: dict[str, Any] | None = None,
        lineno: int = 0,
    ) -> list[Message]:
        del options, fileobj, lineno

        print(f"Running {self.__class__.__name__} on {filename}")

        init_region({"backend": "dogpile.cache.memory"}, "std")
        init_region({"backend": "dogpile.cache.memory"}, "obj")

        with open(filename, encoding="utf8") as config_file:
            gmf_config = yaml.load(config_file, Loader=yaml.BaseLoader)  # nosec
            # For application config (config.yaml)
            if "vars" in gmf_config:
                return self._collect_app_config(filename)
            # For the print config
            if "templates" in gmf_config:
                return self._collect_print_config(gmf_config, filename)
            raise Exception("Not a known config file")  # pylint: disable=broad-exception-raised

    def _collect_app_config(self, filename: str) -> list[Message]:
        config.init(filename)
        settings = config.get_config()
        assert settings is not None
        assert not [
            raster_layer for raster_layer in list(settings.get("raster", {}).keys()) if raster_layer is None
        ]
        # Collect raster layers names
        raster = [
            Message(None, raster_layer, None, [], "", "", (filename, f"raster/{raster_layer}"))
            for raster_layer in list(settings.get("raster", {}).keys())
        ]

        init_db(settings)

        # Collect layers enum values (for filters)

        from c2cgeoportal_commons.models import (  # pylint: disable=import-outside-toplevel
            DBSession,
            DBSessions,
        )
        from c2cgeoportal_commons.models.main import Metadata  # pylint: disable=import-outside-toplevel

        assert DBSession is not None

        enums = []
        enum_layers = settings.get("layers", {}).get("enum", {})
        for layername in list(enum_layers.keys()):
            layerinfos = enum_layers.get(layername, {})
            attributes = layerinfos.get("attributes", {})
            for fieldname in list(attributes.keys()):
                values = self._enumerate_attributes_values(DBSessions, layerinfos, fieldname)
                for (value,) in values:
                    if isinstance(value, str) and value != "":
                        msgid = value
                        location = (
                            f"/layers/{layername}/values/{fieldname}/"
                            f"{value.encode('ascii', errors='replace').decode('ascii')}"
                        )
                        assert msgid is not None
                        enums.append(Message(None, msgid, None, [], "", "", (filename, location)))

        metadata_list = []
        defs = config["admin_interface"]["available_metadata"]  # pylint: disable=unsubscriptable-object
        names = [e["name"] for e in defs if e.get("translate", False)]

        if names:
            session = DBSession()

            query = session.query(Metadata).filter(Metadata.name.in_(names))
            for metadata in query.all():
                location = f"metadata/{metadata.name}/{metadata.id}"
                assert metadata.value is not None
                metadata_list.append(Message(None, metadata.value, None, [], "", "", (filename, location)))

        interfaces_messages = []
        for interface, interface_config in config["interfaces_config"].items():
            for ds_index, datasource in enumerate(
                interface_config.get("constants", {}).get("gmfSearchOptions", {}).get("datasources", [])
            ):
                for a_index, action in enumerate(datasource.get("groupActions", [])):
                    location = (
                        f"interfaces_config/{interface}/constants/gmfSearchOptions/datasources[{ds_index}]/"
                        f"groupActions[{a_index}]/title"
                    )
                    assert action["title"] is not None
                    interfaces_messages.append(
                        Message(None, action["title"], None, [], "", "", (filename, location))
                    )

            for merge_tab in (
                interface_config.get("constants", {})
                .get("gmfDisplayQueryGridOptions", {})
                .get("mergeTabs", {})
                .keys()
            ):
                location = (
                    f"interfaces_config/{interface}/constants/gmfDisplayQueryGridOptions/"
                    f"mergeTabs/{merge_tab}/"
                )
                assert merge_tab is not None
                interfaces_messages.append(Message(None, merge_tab, None, [], "", "", (filename, location)))

        return raster + enums + metadata_list + interfaces_messages

    @staticmethod
    def _enumerate_attributes_values(
        dbsessions: dict[str, sqlalchemy.orm.scoped_session[sqlalchemy.orm.Session]],
        layerinfos: dict[str, Any],
        fieldname: str,
    ) -> set[tuple[str, ...]]:
        dbname = layerinfos.get("dbsession", "dbsession")
        translate = cast(dict[str, Any], layerinfos["attributes"]).get(fieldname, {}).get("translate", True)
        if not translate:
            return set()
        try:
            dbsession = dbsessions.get(dbname)
            assert dbsession is not None
            return Layers.query_enumerate_attribute_values(dbsession, layerinfos, fieldname)
        except Exception as e:
            table = cast(dict[str, Any], layerinfos["attributes"]).get(fieldname, {}).get("table")
            print(
                colorize(
                    "ERROR! Unable to collect enumerate attributes for "
                    f"db: {dbname}, table: {table}, column: {fieldname} ({e!s})",
                    Color.RED,
                )
            )
            if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                return set()
            raise

    @staticmethod
    def _collect_print_config(print_config: dict[str, Any], filename: str) -> list[Message]:
        result = []
        for template_ in list(cast(dict[str, Any], print_config.get("templates")).keys()):
            assert template_ is not None
            result.append(Message(None, template_, None, [], "", "", (filename, f"template/{template_}")))
            assert not [
                attribute
                for attribute in list(print_config["templates"][template_]["attributes"].keys())
                if attribute is None
            ]
            result += [
                Message(
                    None,
                    attribute,
                    None,
                    [],
                    "",
                    "",
                    (filename, f"template/{template_}/{attribute}"),
                )
                for attribute in list(print_config["templates"][template_]["attributes"].keys())
            ]
        return result


class GeomapfishThemeExtractor(Extractor):  # type: ignore
    """GeoMapFish theme extractor."""

    # Run on the development.ini file
    extensions = [".ini"]
    featuretype_cache: dict[str, dict[str, Any] | None] = {}
    wms_capabilities_cache: dict[str, WebMapService] = {}

    def __init__(self) -> None:
        super().__init__()
        if os.path.exists("/etc/geomapfish/config.yaml"):
            config.init("/etc/geomapfish/config.yaml")
            conf = config.get_config()
            assert conf is not None
            self.config = conf
        else:
            self.config = {}
        self.env = None

    def __call__(
        self, filename: str, options: dict[str, Any], fileobj: str | None = None, lineno: int = 0
    ) -> list[Message]:
        del fileobj, lineno

        print(f"Running {self.__class__.__name__} on {filename}")

        messages: list[Message] = []

        try:
            init_db(self.config)
            from c2cgeoportal_commons.models import DBSession  # pylint: disable=import-outside-toplevel

            assert DBSession is not None

            db_session = DBSession()

            try:
                from c2cgeoportal_commons.models.main import (  # pylint: disable=import-outside-toplevel
                    FullTextSearch,
                    LayerGroup,
                    LayerWMS,
                    LayerWMTS,
                    Theme,
                )

                self._import(Theme, messages, name_regex=_get_config_str("THEME_REGEX", ".*"))
                self._import(
                    LayerGroup,
                    messages,
                    name_regex=_get_config_str("GROUP_REGEX", ".*"),
                    has_interfaces=False,
                )
                self._import(
                    LayerWMS,
                    messages,
                    self._import_layer_wms,
                    name_regex=_get_config_str("WMSLAYER_REGEX", ".*"),
                )
                self._import(
                    LayerWMTS,
                    messages,
                    self._import_layer_wmts,
                    name_regex=_get_config_str("WMTSLAYER_REGEX", ".*"),
                )

                for (layer_name,) in db_session.query(FullTextSearch.layer_name).distinct().all():
                    if layer_name is not None and layer_name != "":
                        assert layer_name is not None
                        messages.append(
                            Message(
                                None,
                                layer_name,
                                None,
                                [],
                                "",
                                "",
                                ("fts", layer_name.encode("ascii", errors="replace")),
                            )
                        )

                for (actions,) in db_session.query(FullTextSearch.actions).distinct().all():
                    if actions is not None and actions != "":
                        for action in actions:
                            assert action["data"] is not None
                            messages.append(
                                Message(
                                    None,
                                    action["data"],
                                    None,
                                    [],
                                    "",
                                    "",
                                    ("fts", action["data"].encode("ascii", errors="replace")),
                                )
                            )
            except ProgrammingError as e:
                print(
                    colorize(
                        "ERROR! The database is probably not up to date "
                        "(should be ignored when happen during the upgrade)",
                        Color.RED,
                    )
                )
                print(colorize(str(e), Color.RED))
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                    raise
        except NoSuchTableError as e:
            print(
                colorize(
                    "ERROR! The schema didn't seem to exists "
                    "(should be ignored when happen during the deploy)",
                    Color.RED,
                )
            )
            print(colorize(str(e), Color.RED))
            if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                raise
        except OperationalError as e:
            print(
                colorize(
                    "ERROR! The database didn't seem to exists "
                    "(should be ignored when happen during the deploy)",
                    Color.RED,
                )
            )
            print(colorize(str(e), Color.RED))
            if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                raise

        return messages

    @staticmethod
    def _import(
        object_type: type[Any],
        messages: list[str],
        callback: Callable[["main.Layer", list[str]], None] | None = None,
        has_interfaces: bool = True,
        name_regex: str = ".*",
    ) -> None:
        from c2cgeoportal_commons.models import DBSession  # pylint: disable=import-outside-toplevel
        from c2cgeoportal_commons.models.main import Interface  # pylint: disable=import-outside-toplevel

        assert DBSession is not None

        filter_re = re.compile(name_regex)

        query = DBSession.query(object_type)

        interfaces = _get_config("INTERFACES")
        if has_interfaces and interfaces is not None:
            query.join(object_type.interface).filter(Interface.name in interfaces.split("."))  # type: ignore[arg-type]

        for item in query.all():
            assert item.name is not None
            if filter_re.match(item.name):
                messages.append(
                    Message(
                        None,
                        item.name,
                        None,
                        [],
                        "",
                        "",
                        (item.item_type, item.name.encode("ascii", errors="replace")),
                    )
                )

                if callback is not None:
                    callback(item, messages)

    def _import_layer_wms(self, layer: "main.Layer", messages: list[str]) -> None:
        server = layer.ogc_server
        url = server.url_wfs or server.url
        if url is None:
            return
        if layer.ogc_server.wfs_support:
            for wms_layer in layer.layer.split(","):
                self._import_layer_attributes(url, wms_layer, layer.item_type, layer.name, messages)
        if layer.geo_table is not None and layer.geo_table != "":
            try:
                cls = get_layer_class(layer, with_last_update_columns=True)
                for column_property in class_mapper(cls).iterate_properties:
                    if isinstance(column_property, ColumnProperty) and len(column_property.columns) == 1:
                        column = column_property.columns[0]
                        if not column.primary_key and not isinstance(column.type, Geometry):
                            if column.foreign_keys:
                                if column.name == "type_id":
                                    name = "type_"
                                elif column.name.endswith("_id"):
                                    name = column.name[:-3]
                                else:
                                    name = column.name + "_"
                            else:
                                name = column_property.key
                            assert name is not None
                            messages.append(
                                Message(
                                    None,
                                    name,
                                    None,
                                    [],
                                    "",
                                    "",
                                    (".".join(["edit", layer.item_type, str(layer.id)]), layer.name),
                                )
                            )
            except NoSuchTableError:
                print(
                    colorize(
                        f"ERROR! No such table '{layer.geo_table}' for layer '{layer.name}'.",
                        Color.RED,
                    )
                )
                print(colorize(traceback.format_exc(), Color.RED))
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                    raise

    def _import_layer_wmts(self, layer: "main.Layer", messages: list[str]) -> None:
        from c2cgeoportal_commons.models import DBSession  # pylint: disable=import-outside-toplevel
        from c2cgeoportal_commons.models.main import OGCServer  # pylint: disable=import-outside-toplevel

        assert DBSession is not None

        layers = [d.value for d in layer.metadatas if d.name == "queryLayers"]
        if not layers:
            layers = [d.value for d in layer.metadatas if d.name == "wmsLayer"]
        server = [d.value for d in layer.metadatas if d.name == "ogcServer"]
        if server and layers:
            layers = [layer for ls in layers for layer in ls.split(",")]
            for wms_layer in layers:
                try:
                    db_server = DBSession.query(OGCServer).filter(OGCServer.name == server[0]).one()
                    if db_server.wfs_support:
                        self._import_layer_attributes(
                            db_server.url_wfs or db_server.url,
                            wms_layer,
                            layer.item_type,
                            layer.name,
                            messages,
                        )
                except NoResultFound:
                    print(
                        colorize(
                            f"ERROR! the OGC server '{server[0]}' from the "
                            f"WMTS layer '{layer.name}' does not exist.",
                            Color.RED,
                        )
                    )
                    if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                        raise

    def _import_layer_attributes(
        self, url: str, layer: "main.Layer", item_type: str, name: str, messages: list[str]
    ) -> None:
        attributes, layers = self._layer_attributes(url, layer)
        for sub_layer in layers:
            assert sub_layer is not None
            messages.append(
                Message(
                    None,
                    sub_layer,
                    None,
                    [],
                    "",
                    "",
                    (".".join([item_type, name]), sub_layer.encode("ascii", "replace")),
                )
            )
        for attribute in attributes:
            assert attribute is not None
            messages.append(
                Message(
                    None,
                    attribute,
                    None,
                    [],
                    "",
                    "",
                    (".".join([item_type, name]), layer.encode("ascii", "replace")),
                )
            )

    def _build_url(self, url: Url) -> tuple[Url, dict[str, str], dict[str, Any]]:
        hostname = url.hostname
        host_map = self.config.get("lingva_extractor", {}).get("host_map", {})
        if hostname in host_map:
            map_ = host_map[hostname]
            if "netloc" in map_:
                url.netloc = map_["netloc"]
            if "scheme" in map_:
                url.scheme = map_["scheme"]
            kwargs = {"verify": map_["verify"]} if "verify" in map_ else {}
            return url, map_.get("headers", {}), kwargs
        return url, {}, {}

    def _layer_attributes(self, url: str, layer: str) -> tuple[list[str], list[str]]:
        errors: set[str] = set()

        request = pyramid.threadlocal.get_current_request()
        if request is None:
            request = _Request()
            request.registry.settings = self.config

        # Static schema will not be supported
        url_obj_ = get_url2("Layer", url, request, errors)
        if errors:
            print("\n".join(errors))
            return [], []
        if not url_obj_:
            print(f"No URL for: {url}")
            return [], []
        url_obj: Url = url_obj_
        url_obj, headers, kwargs = self._build_url(url_obj)

        if url not in self.wms_capabilities_cache:
            print(f"Get WMS GetCapabilities for URL: {url_obj}")
            self.wms_capabilities_cache[url] = None

            wms_getcap_url = (
                url_obj.clone()
                .add_query(
                    {
                        "SERVICE": "WMS",
                        "VERSION": "1.1.1",
                        "REQUEST": "GetCapabilities",
                        "ROLE_IDS": "0",
                        "USER_ID": "0",
                    }
                )
                .url()
            )
            try:
                rendered_headers = " ".join(
                    [
                        f"{h}={v if h not in ('Authorization', 'Cookie') else '***'}"
                        for h, v in headers.items()
                    ]
                )
                print(f"Get WMS GetCapabilities for URL {wms_getcap_url},\nwith headers: {rendered_headers}")
                response = requests.get(wms_getcap_url, headers=headers, timeout=60, **kwargs)

                if response.ok:
                    try:
                        self.wms_capabilities_cache[url] = WebMapService(None, xml=response.content)
                    except Exception as e:  # pylint: disable=broad-exception-caught
                        print(
                            colorize(
                                "ERROR! an error occurred while trying to parse "
                                "the GetCapabilities document.",
                                Color.RED,
                            )
                        )
                        print(colorize(str(e), Color.RED))
                        print(f"URL: {wms_getcap_url}\nxml:\n{response.text}")
                        if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                            raise
                else:
                    print(
                        colorize(
                            f"ERROR! Unable to GetCapabilities from URL: {wms_getcap_url},\n"
                            f"with headers: {rendered_headers}",
                            Color.RED,
                        )
                    )
                    print(f"Response: {response.status_code} {response.reason}\n{response.text}")
                    if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                        raise LinguaExtractorException(response.reason)
            except Exception as e:  # pylint: disable=broad-exception-caught
                print(colorize(str(e), Color.RED))
                rendered_headers = " ".join(
                    [
                        f"{h}={v if h not in ('Authorization', 'Cookie') else '***'}"
                        for h, v in headers.items()
                    ]
                )
                print(
                    colorize(
                        f"ERROR! Unable to GetCapabilities from URL: {wms_getcap_url},\n"
                        f"with headers: {rendered_headers}",
                        Color.RED,
                    )
                )
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") != "TRUE":
                    raise

        wms_capabilities = self.wms_capabilities_cache[url]

        if url not in self.featuretype_cache:
            print(f"Get WFS DescribeFeatureType for URL: {url_obj}")
            self.featuretype_cache[url] = None

            wfs_describe_feature_url = (
                url_obj.clone()
                .add_query(
                    {
                        "SERVICE": "WFS",
                        "VERSION": "1.1.0",
                        "REQUEST": "DescribeFeatureType",
                        "ROLE_IDS": "0",
                        "USER_ID": "0",
                    }
                )
                .url()
            )
            try:
                response = requests.get(wfs_describe_feature_url, headers=headers, timeout=60, **kwargs)
            except Exception as e:
                print(colorize(str(e), Color.RED))
                print(
                    colorize(
                        f"ERROR! Unable to DescribeFeatureType from URL: {wfs_describe_feature_url}",
                        Color.RED,
                    )
                )
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                    return [], []
                raise

            if not response.ok:
                print(
                    colorize(
                        f"ERROR! DescribeFeatureType from URL {wfs_describe_feature_url} return the error: "
                        f"{response.status_code:d} {response.reason}",
                        Color.RED,
                    )
                )
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                    return [], []
                raise Exception("Aborted")  # pylint: disable=broad-exception-raised

            try:
                describe = parseString(response.text)
                featurestype: dict[str, Node] | None = {}
                self.featuretype_cache[url] = featurestype
                for type_element in describe.getElementsByTagNameNS(
                    "http://www.w3.org/2001/XMLSchema", "complexType"
                ):
                    cast(dict[str, Node], featurestype)[type_element.getAttribute("name")] = type_element
            except ExpatError as e:
                print(
                    colorize(
                        "ERROR! an error occurred while trying to parse the DescribeFeatureType document.",
                        Color.RED,
                    )
                )
                print(colorize(str(e), Color.RED))
                print(f"URL: {wfs_describe_feature_url}\nxml:\n{response.text}")
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                    return [], []
                raise
            except AttributeError:
                print(
                    colorize(
                        "ERROR! an error occurred while trying to "
                        "read the Mapfile and recover the themes.",
                        Color.RED,
                    )
                )
                print(f"URL: {wfs_describe_feature_url}\nxml:\n{response.text}")
                if _get_config_str("IGNORE_I18N_ERRORS", "FALSE") == "TRUE":
                    return [], []
                raise
        else:
            featurestype = self.featuretype_cache[url]

        if featurestype is None:
            return [], []

        layers: list[str] = [layer]
        if wms_capabilities is not None and layer in list(wms_capabilities.contents):
            layer_obj = wms_capabilities[layer]
            if layer_obj.layers:
                layers = [layer.name for layer in layer_obj.layers]

        attributes: list[str] = []
        for sub_layer in layers:
            # Should probably be adapted for other king of servers
            type_element = featurestype.get(f"{sub_layer}Type")
            if type_element is not None:
                for element in type_element.getElementsByTagNameNS(
                    "http://www.w3.org/2001/XMLSchema", "element"
                ):
                    if not element.getAttribute("type").startswith("gml:"):
                        attributes.append(element.getAttribute("name"))

        return attributes, layers