c2cgeoportal-geoportal 2.3.5.80__py3-none-any.whl → 2.9rc45__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- c2cgeoportal_geoportal/__init__.py +960 -0
- c2cgeoportal_geoportal/lib/__init__.py +256 -0
- c2cgeoportal_geoportal/lib/authentication.py +250 -0
- c2cgeoportal_geoportal/lib/bashcolor.py +46 -0
- c2cgeoportal_geoportal/lib/cacheversion.py +77 -0
- c2cgeoportal_geoportal/lib/caching.py +176 -0
- c2cgeoportal_geoportal/lib/check_collector.py +80 -0
- c2cgeoportal_geoportal/lib/checker.py +295 -0
- c2cgeoportal_geoportal/lib/common_headers.py +172 -0
- c2cgeoportal_geoportal/lib/dbreflection.py +266 -0
- c2cgeoportal_geoportal/lib/filter_capabilities.py +360 -0
- c2cgeoportal_geoportal/lib/fulltextsearch.py +50 -0
- c2cgeoportal_geoportal/lib/functionality.py +166 -0
- c2cgeoportal_geoportal/lib/headers.py +62 -0
- c2cgeoportal_geoportal/lib/i18n.py +38 -0
- c2cgeoportal_geoportal/lib/layers.py +132 -0
- c2cgeoportal_geoportal/lib/lingva_extractor.py +937 -0
- c2cgeoportal_geoportal/lib/loader.py +57 -0
- c2cgeoportal_geoportal/lib/metrics.py +117 -0
- c2cgeoportal_geoportal/lib/oauth2.py +1186 -0
- c2cgeoportal_geoportal/lib/oidc.py +304 -0
- c2cgeoportal_geoportal/lib/wmstparsing.py +353 -0
- c2cgeoportal_geoportal/lib/xsd.py +166 -0
- c2cgeoportal_geoportal/py.typed +0 -0
- c2cgeoportal_geoportal/resources.py +49 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/ci/config.yaml +26 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.dockerignore +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.eslintrc.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/.prospector.yaml +30 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/Makefile +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.ini +58 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/alembic.yaml +19 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/development.ini +121 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/gunicorn.conf.py +139 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/language_mapping +3 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-client.cfg +5 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/lingva-server.cfg +6 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/production.ini +38 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/setup.py +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.api.js +41 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.apps.js +64 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.commons.js +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/webpack.config.js +22 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/__init__.py +42 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/authentication.py +10 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/dev.py +14 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/models.py +8 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/multi_organization.py +7 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/resources.py +11 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/api/index.js +12 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static-ngeo/js/{{cookiecutter.package}}module.js +25 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/subscribers.py +39 -0
- c2cgeoportal_geoportal/scaffolds/advance_create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/views/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/advance_update/{{cookiecutter.project}}/geoportal/CONST_Makefile +121 -0
- c2cgeoportal_geoportal/scaffolds/create/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.dockerignore +14 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.editorconfig +17 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/main.yaml +73 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/rebuild.yaml +50 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.github/workflows/update_l10n.yaml +66 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.gitignore +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.pre-commit-config.yaml +35 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierignore +1 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/.prettierrc.yaml +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Dockerfile +75 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/Makefile +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/README.rst +29 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/build +179 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/config.yaml +22 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/docker-compose-check +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/ci/requirements.txt +2 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-db.yaml +24 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-lib.yaml +513 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose-qgis.yaml +21 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.override.sample.yaml +65 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/docker-compose.yaml +121 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.default +102 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/env.project +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/vars.yaml +430 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/locale/en/LC_MESSAGES/{{cookiecutter.package}}_geoportal-client.po +6 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/desktop.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/iframe_api.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/css/mobile.css +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_left.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/banner_right.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/blank.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-blue.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-gold.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker-green.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/images/markers/marker.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/geoportal/{{cookiecutter.package}}_geoportal/static/robot.txt.tmpl +3 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/Readme.txt +69 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/data/TM_EUROPE_BORDERS-0.3.sql +70 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/demo.map.tmpl +224 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arial.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbd.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Arialbi.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Ariali.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Bold.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-BoldItalic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Italic.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/NotoSans-Regular.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdana.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanab.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanai.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts/Verdanaz.ttf +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/fonts.conf +12 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.conf +16 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/mapserver.map.tmpl +87 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/mapserver/tinyows.xml.tmpl +36 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Landscape.jrxml +207 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A3_Portrait.jrxml +185 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Landscape.jrxml +200 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/A4_Portrait.jrxml +170 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/config.yaml.tmpl +175 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/legend.jrxml +109 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/localisation_fr.properties +4 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/logo.png +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/north.svg +93 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/print/print-apps/{{cookiecutter.package}}/results.jrxml +25 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/project.yaml +18 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/pyproject.toml +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/qgisserver/pg_service.conf.tmpl +15 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/run_alembic.sh +11 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-backup +126 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/scripts/db-restore +132 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/setup.cfg +7 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/spell-ignore-words.txt +5 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/__init__.py +0 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tests/test_app.py +78 -0
- c2cgeoportal_geoportal/scaffolds/create/{{cookiecutter.project}}/tilegeneration/config.yaml.tmpl +195 -0
- c2cgeoportal_geoportal/scaffolds/update/cookiecutter.json +18 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/.upgrade.yaml +67 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_CHANGELOG.txt +304 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/CONST_create_template/tests/test_testapp.py +48 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/.CONST_vars.yaml.swp +0 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_config-schema.yaml +927 -0
- c2cgeoportal_geoportal/scaffolds/update/{{cookiecutter.project}}/geoportal/CONST_vars.yaml +1503 -0
- c2cgeoportal_geoportal/scripts/__init__.py +64 -0
- c2cgeoportal_geoportal/scripts/c2cupgrade.py +879 -0
- c2cgeoportal_geoportal/scripts/create_demo_theme.py +83 -0
- c2cgeoportal_geoportal/scripts/manage_users.py +140 -0
- c2cgeoportal_geoportal/scripts/pcreate.py +296 -0
- c2cgeoportal_geoportal/scripts/theme2fts.py +347 -0
- c2cgeoportal_geoportal/scripts/urllogin.py +81 -0
- c2cgeoportal_geoportal/templates/login.html +90 -0
- c2cgeoportal_geoportal/templates/notlogin.html +62 -0
- c2cgeoportal_geoportal/templates/testi18n.html +12 -0
- c2cgeoportal_geoportal/views/__init__.py +59 -0
- c2cgeoportal_geoportal/views/dev.py +57 -0
- c2cgeoportal_geoportal/views/dynamic.py +209 -0
- c2cgeoportal_geoportal/views/entry.py +174 -0
- c2cgeoportal_geoportal/views/fulltextsearch.py +189 -0
- c2cgeoportal_geoportal/views/geometry_processing.py +75 -0
- c2cgeoportal_geoportal/views/i18n.py +129 -0
- c2cgeoportal_geoportal/views/layers.py +713 -0
- c2cgeoportal_geoportal/views/login.py +684 -0
- c2cgeoportal_geoportal/views/mapserverproxy.py +234 -0
- c2cgeoportal_geoportal/views/memory.py +90 -0
- c2cgeoportal_geoportal/views/ogcproxy.py +120 -0
- c2cgeoportal_geoportal/views/pdfreport.py +245 -0
- c2cgeoportal_geoportal/views/printproxy.py +143 -0
- c2cgeoportal_geoportal/views/profile.py +192 -0
- c2cgeoportal_geoportal/views/proxy.py +261 -0
- c2cgeoportal_geoportal/views/raster.py +233 -0
- c2cgeoportal_geoportal/views/resourceproxy.py +73 -0
- c2cgeoportal_geoportal/views/shortener.py +152 -0
- c2cgeoportal_geoportal/views/theme.py +1322 -0
- c2cgeoportal_geoportal/views/tinyowsproxy.py +189 -0
- c2cgeoportal_geoportal/views/vector_tiles.py +83 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc45.dist-info}/METADATA +21 -24
- c2cgeoportal_geoportal-2.9rc45.dist-info/RECORD +193 -0
- {c2cgeoportal_geoportal-2.3.5.80.dist-info → c2cgeoportal_geoportal-2.9rc45.dist-info}/WHEEL +1 -1
- c2cgeoportal_geoportal-2.9rc45.dist-info/entry_points.txt +28 -0
- c2cgeoportal_geoportal-2.9rc45.dist-info/top_level.txt +2 -0
- tests/__init__.py +100 -0
- tests/test_cachebuster.py +71 -0
- tests/test_caching.py +275 -0
- tests/test_checker.py +85 -0
- tests/test_decimaljson.py +47 -0
- tests/test_headerstween.py +64 -0
- tests/test_i18n.py +31 -0
- tests/test_init.py +193 -0
- tests/test_locale_negociator.py +69 -0
- tests/test_mapserverproxy_route_predicate.py +64 -0
- tests/test_raster.py +267 -0
- tests/test_wmstparsing.py +238 -0
- tests/xmlstr.py +103 -0
- c2cgeoportal_geoportal-2.3.5.80.dist-info/DESCRIPTION.rst +0 -8
- c2cgeoportal_geoportal-2.3.5.80.dist-info/RECORD +0 -7
- c2cgeoportal_geoportal-2.3.5.80.dist-info/entry_points.txt +0 -22
- c2cgeoportal_geoportal-2.3.5.80.dist-info/metadata.json +0 -1
- c2cgeoportal_geoportal-2.3.5.80.dist-info/top_level.txt +0 -1
@@ -0,0 +1,304 @@
|
|
1
|
+
# Copyright (c) 2024, Camptocamp SA
|
2
|
+
# All rights reserved.
|
3
|
+
|
4
|
+
# Redistribution and use in source and binary forms, with or without
|
5
|
+
# modification, are permitted provided that the following conditions are met:
|
6
|
+
|
7
|
+
# 1. Redistributions of source code must retain the above copyright notice, this
|
8
|
+
# list of conditions and the following disclaimer.
|
9
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
10
|
+
# this list of conditions and the following disclaimer in the documentation
|
11
|
+
# and/or other materials provided with the distribution.
|
12
|
+
|
13
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
14
|
+
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
15
|
+
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
16
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
17
|
+
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
18
|
+
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
19
|
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
20
|
+
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
21
|
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
22
|
+
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
23
|
+
|
24
|
+
# The views and conclusions contained in the software and documentation are those
|
25
|
+
# of the authors and should not be interpreted as representing official policies,
|
26
|
+
# either expressed or implied, of the FreeBSD Project.
|
27
|
+
|
28
|
+
import datetime
import json
import logging
from typing import TYPE_CHECKING, Any, NamedTuple, Optional, TypedDict, Union

import pyramid.config
import pyramid.request
import pyramid.response
import simple_openid_connect.client
import simple_openid_connect.data
from pyramid.httpexceptions import HTTPBadRequest, HTTPInternalServerError, HTTPUnauthorized
from pyramid.security import remember

from c2cgeoportal_geoportal.lib.caching import get_region
|
41
|
+
|
42
|
+
if TYPE_CHECKING:
|
43
|
+
from c2cgeoportal_commons.models import main, static
|
44
|
+
|
45
|
+
_LOG = logging.getLogger(__name__)
|
46
|
+
_CACHE_REGION_OBJ = get_region("obj")
|
47
|
+
|
48
|
+
|
49
|
+
# User create on demand
|
50
|
+
class DynamicUser(NamedTuple):
|
51
|
+
"""
|
52
|
+
User created dynamically.
|
53
|
+
"""
|
54
|
+
|
55
|
+
id: int
|
56
|
+
username: str
|
57
|
+
display_name: str
|
58
|
+
email: str
|
59
|
+
settings_role: Optional["main.Role"]
|
60
|
+
roles: list["main.Role"]
|
61
|
+
|
62
|
+
|
63
|
+
@_CACHE_REGION_OBJ.cache_on_arguments()
|
64
|
+
def get_oidc_client(request: pyramid.request.Request, host: str) -> simple_openid_connect.client.OpenidClient:
|
65
|
+
"""
|
66
|
+
Get the OpenID Connect client from the request settings.
|
67
|
+
"""
|
68
|
+
|
69
|
+
del host # used for cache key
|
70
|
+
|
71
|
+
authentication_settings = request.registry.settings.get("authentication", {})
|
72
|
+
openid_connect = authentication_settings.get("openid_connect", {})
|
73
|
+
if openid_connect.get("enabled", False) is not True:
|
74
|
+
raise HTTPBadRequest("OpenID Connect not enabled")
|
75
|
+
|
76
|
+
return simple_openid_connect.client.OpenidClient.from_issuer_url(
|
77
|
+
url=openid_connect["url"],
|
78
|
+
authentication_redirect_uri=request.route_url("oidc_callback"),
|
79
|
+
client_id=openid_connect["client_id"],
|
80
|
+
client_secret=openid_connect.get("client-secret"),
|
81
|
+
scope=" ".join(openid_connect.get("scopes", ["openid", "profile", "email"])),
|
82
|
+
)
|
83
|
+
|
84
|
+
|
85
|
+
class OidcRememberObject(TypedDict):
|
86
|
+
"""
|
87
|
+
The JSON object that is stored in a cookie to remember the user.
|
88
|
+
"""
|
89
|
+
|
90
|
+
access_token: str
|
91
|
+
access_token_expires: str
|
92
|
+
refresh_token: str | None
|
93
|
+
refresh_token_expires: str | None
|
94
|
+
username: str | None
|
95
|
+
display_name: str | None
|
96
|
+
email: str | None
|
97
|
+
settings_role: str | None
|
98
|
+
roles: list[str]
|
99
|
+
|
100
|
+
|
101
|
+
def get_remember_from_user_info(
|
102
|
+
request: pyramid.request.Request, user_info: dict[str, Any], remember_object: OidcRememberObject
|
103
|
+
) -> None:
|
104
|
+
"""
|
105
|
+
Fill the remember object from the user info.
|
106
|
+
|
107
|
+
The remember object will be stored in a cookie to remember the user.
|
108
|
+
|
109
|
+
:param user_info: The user info from the ID token or from the user info view according to the `query_user_info` configuration.
|
110
|
+
:param remember_object: The object to fill, by default with the `username`, `email`, `settings_role` and `roles`,
|
111
|
+
the corresponding field from `user_info` can be configured in `user_info_fields`.
|
112
|
+
:param settings: The OpenID Connect configuration.
|
113
|
+
"""
|
114
|
+
settings_fields = (
|
115
|
+
request.registry.settings.get("authentication", {})
|
116
|
+
.get("openid_connect", {})
|
117
|
+
.get("user_info_fields", {})
|
118
|
+
)
|
119
|
+
|
120
|
+
for field_, default_field in (
|
121
|
+
("username", "sub"),
|
122
|
+
("display_name", "name"),
|
123
|
+
("email", "email"),
|
124
|
+
("settings_role", None),
|
125
|
+
("roles", None),
|
126
|
+
):
|
127
|
+
user_info_field = settings_fields.get(field_, default_field)
|
128
|
+
if user_info_field is not None:
|
129
|
+
if user_info_field not in user_info:
|
130
|
+
_LOG.error(
|
131
|
+
"Field '%s' not found in user info, available: %s.",
|
132
|
+
user_info_field,
|
133
|
+
", ".join(user_info.keys()),
|
134
|
+
)
|
135
|
+
raise HTTPInternalServerError(f"Field '{user_info_field}' not found in user info.")
|
136
|
+
remember_object[field_] = user_info[user_info_field] # type: ignore[literal-required]
|
137
|
+
|
138
|
+
|
139
|
+
def get_user_from_remember(
|
140
|
+
request: pyramid.request.Request, remember_object: OidcRememberObject, update_create_user: bool = False
|
141
|
+
) -> Union["static.User", DynamicUser] | None:
|
142
|
+
"""
|
143
|
+
Create a user from the remember object filled from `get_remember_from_user_info`.
|
144
|
+
|
145
|
+
:param remember_object: The object to fill, by default with the `username`, `email`, `settings_role` and `roles`.
|
146
|
+
:param settings: The OpenID Connect configuration.
|
147
|
+
:param update_create_user: If the user should be updated or created if it does not exist.
|
148
|
+
"""
|
149
|
+
|
150
|
+
# Those imports are here to avoid initializing the models module before the database schema are
|
151
|
+
# correctly initialized.
|
152
|
+
from c2cgeoportal_commons import models # pylint: disable=import-outside-toplevel
|
153
|
+
from c2cgeoportal_commons.models import main, static # pylint: disable=import-outside-toplevel
|
154
|
+
|
155
|
+
assert models.DBSession is not None
|
156
|
+
|
157
|
+
user: static.User | DynamicUser | None
|
158
|
+
username = remember_object["username"]
|
159
|
+
assert username is not None
|
160
|
+
email = remember_object["email"]
|
161
|
+
assert email is not None
|
162
|
+
display_name = remember_object["display_name"] or email
|
163
|
+
|
164
|
+
openid_connect_configuration = request.registry.settings.get("authentication", {}).get(
|
165
|
+
"openid_connect", {}
|
166
|
+
)
|
167
|
+
provide_roles = openid_connect_configuration.get("provide_roles", False)
|
168
|
+
if provide_roles is False:
|
169
|
+
user_query = models.DBSession.query(static.User)
|
170
|
+
match_field = openid_connect_configuration.get("match_field", "username")
|
171
|
+
if match_field == "username":
|
172
|
+
user_query = user_query.filter_by(username=username)
|
173
|
+
elif match_field == "email":
|
174
|
+
user_query = user_query.filter_by(email=email)
|
175
|
+
else:
|
176
|
+
raise HTTPInternalServerError(
|
177
|
+
f"Unknown match_field: '{match_field}', allowed values are 'username' and 'email'"
|
178
|
+
)
|
179
|
+
user = user_query.one_or_none()
|
180
|
+
if update_create_user is True:
|
181
|
+
if user is not None:
|
182
|
+
for field in openid_connect_configuration.get("update_fields", []):
|
183
|
+
if field == "username":
|
184
|
+
user.username = username
|
185
|
+
elif field == "display_name":
|
186
|
+
user.display_name = display_name
|
187
|
+
elif field == "email":
|
188
|
+
user.email = email
|
189
|
+
else:
|
190
|
+
raise HTTPInternalServerError(
|
191
|
+
f"Unknown update_field: '{field}', allowed values are 'username', 'display_name' and 'email'"
|
192
|
+
)
|
193
|
+
elif openid_connect_configuration.get("create_user", False) is True:
|
194
|
+
user = static.User(username=username, email=email, display_name=display_name)
|
195
|
+
models.DBSession.add(user)
|
196
|
+
else:
|
197
|
+
user = DynamicUser(
|
198
|
+
id=-1,
|
199
|
+
username=username,
|
200
|
+
display_name=display_name,
|
201
|
+
email=email,
|
202
|
+
settings_role=(
|
203
|
+
models.DBSession.query(main.Role).filter_by(name=remember_object["settings_role"]).first()
|
204
|
+
if remember_object.get("settings_role") is not None
|
205
|
+
else None
|
206
|
+
),
|
207
|
+
roles=[
|
208
|
+
models.DBSession.query(main.Role).filter_by(name=role).one()
|
209
|
+
for role in remember_object.get("roles", [])
|
210
|
+
],
|
211
|
+
)
|
212
|
+
return user
|
213
|
+
|
214
|
+
|
215
|
+
class OidcRemember:
|
216
|
+
"""
|
217
|
+
Build the abject that we want to remember in the cookie.
|
218
|
+
"""
|
219
|
+
|
220
|
+
def __init__(self, request: pyramid.request.Request):
|
221
|
+
self.request = request
|
222
|
+
self.authentication_settings = request.registry.settings.get("authentication", {})
|
223
|
+
|
224
|
+
@_CACHE_REGION_OBJ.cache_on_arguments()
|
225
|
+
def remember(
|
226
|
+
self,
|
227
|
+
token_response: (
|
228
|
+
simple_openid_connect.data.TokenSuccessResponse | simple_openid_connect.data.TokenErrorResponse
|
229
|
+
),
|
230
|
+
host: str,
|
231
|
+
) -> OidcRememberObject:
|
232
|
+
"""
|
233
|
+
Remember the user in the cookie.
|
234
|
+
"""
|
235
|
+
|
236
|
+
del host # Used for cache key
|
237
|
+
|
238
|
+
if isinstance(token_response, simple_openid_connect.data.TokenErrorResponse):
|
239
|
+
_LOG.warning(
|
240
|
+
"OpenID connect connection error: %s [%s]",
|
241
|
+
token_response.error_description,
|
242
|
+
token_response.error_uri,
|
243
|
+
)
|
244
|
+
raise HTTPUnauthorized("See server logs for details")
|
245
|
+
|
246
|
+
if not isinstance(token_response, simple_openid_connect.data.TokenSuccessResponse):
|
247
|
+
_LOG.warning("OpenID connect connection error: %s", token_response)
|
248
|
+
raise HTTPUnauthorized("See server logs for details")
|
249
|
+
|
250
|
+
openid_connect = self.authentication_settings.get("openid_connect", {})
|
251
|
+
remember_object: OidcRememberObject = {
|
252
|
+
"access_token": token_response.access_token,
|
253
|
+
"access_token_expires": (
|
254
|
+
datetime.datetime.now() + datetime.timedelta(seconds=token_response.expires_in)
|
255
|
+
).isoformat(),
|
256
|
+
"refresh_token": token_response.refresh_token,
|
257
|
+
"refresh_token_expires": (
|
258
|
+
None
|
259
|
+
if token_response.refresh_expires_in is None
|
260
|
+
else (
|
261
|
+
datetime.datetime.now() + datetime.timedelta(seconds=token_response.refresh_expires_in)
|
262
|
+
).isoformat()
|
263
|
+
),
|
264
|
+
"username": None,
|
265
|
+
"display_name": None,
|
266
|
+
"email": None,
|
267
|
+
"settings_role": None,
|
268
|
+
"roles": [],
|
269
|
+
}
|
270
|
+
client = get_oidc_client(self.request, self.request.host)
|
271
|
+
|
272
|
+
if openid_connect.get("query_user_info", False) is True:
|
273
|
+
user_info = client.fetch_userinfo(token_response.access_token)
|
274
|
+
else:
|
275
|
+
un_validated_user_info = simple_openid_connect.data.IdToken.parse_jwt(
|
276
|
+
token_response.id_token, client.provider_keys
|
277
|
+
)
|
278
|
+
_LOG.info(
|
279
|
+
"Receive audiences: %s",
|
280
|
+
(
|
281
|
+
un_validated_user_info.aud
|
282
|
+
if isinstance(un_validated_user_info.aud, str)
|
283
|
+
else ", ".join(un_validated_user_info.aud)
|
284
|
+
),
|
285
|
+
)
|
286
|
+
user_info = client.decode_id_token(
|
287
|
+
token_response.id_token,
|
288
|
+
extra_trusted_audiences=openid_connect.get(
|
289
|
+
"trusted_audiences", [openid_connect.get("client_id")]
|
290
|
+
),
|
291
|
+
)
|
292
|
+
|
293
|
+
self.request.get_remember_from_user_info(user_info.dict(), remember_object)
|
294
|
+
self.request.response.headers.extend(remember(self.request, json.dumps(remember_object)))
|
295
|
+
|
296
|
+
return remember_object
|
297
|
+
|
298
|
+
|
299
|
+
def includeme(config: pyramid.config.Configurator) -> None:
|
300
|
+
"""
|
301
|
+
Pyramid includeme function.
|
302
|
+
"""
|
303
|
+
config.add_request_method(get_remember_from_user_info, name="get_remember_from_user_info")
|
304
|
+
config.add_request_method(get_user_from_remember, name="get_user_from_remember")
|
@@ -0,0 +1,353 @@
|
|
1
|
+
# Copyright (c) 2013-2024, Camptocamp SA
|
2
|
+
# All rights reserved.
|
3
|
+
|
4
|
+
# Redistribution and use in source and binary forms, with or without
|
5
|
+
# modification, are permitted provided that the following conditions are met:
|
6
|
+
|
7
|
+
# 1. Redistributions of source code must retain the above copyright notice, this
|
8
|
+
# list of conditions and the following disclaimer.
|
9
|
+
# 2. Redistributions in binary form must reproduce the above copyright notice,
|
10
|
+
# this list of conditions and the following disclaimer in the documentation
|
11
|
+
# and/or other materials provided with the distribution.
|
12
|
+
|
13
|
+
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS" AND
|
14
|
+
# ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
|
15
|
+
# WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
|
16
|
+
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT OWNER OR CONTRIBUTORS BE LIABLE FOR
|
17
|
+
# ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES
|
18
|
+
# (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES;
|
19
|
+
# LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND
|
20
|
+
# ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
|
21
|
+
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
|
22
|
+
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
|
23
|
+
|
24
|
+
# The views and conclusions contained in the software and documentation are those
|
25
|
+
# of the authors and should not be interpreted as representing official policies,
|
26
|
+
# either expressed or implied, of the FreeBSD Project.
|
27
|
+
|
28
|
+
|
29
|
+
import datetime
|
30
|
+
from typing import Any, Union
|
31
|
+
|
32
|
+
import isodate
|
33
|
+
|
34
|
+
TimeExtent = Union["TimeExtentValue", "TimeExtentInterval"]
|
35
|
+
|
36
|
+
|
37
|
+
def min_none(a: datetime.datetime | None, b: datetime.datetime | None) -> datetime.datetime | None:
|
38
|
+
"""Return the min value, support non in input."""
|
39
|
+
if a is None:
|
40
|
+
return b
|
41
|
+
if b is None:
|
42
|
+
return a
|
43
|
+
return min(a, b)
|
44
|
+
|
45
|
+
|
46
|
+
def max_none(a: datetime.datetime | None, b: datetime.datetime | None) -> datetime.datetime | None:
|
47
|
+
"""Return the max value, support non in input."""
|
48
|
+
if a is None:
|
49
|
+
return b
|
50
|
+
if b is None:
|
51
|
+
return a
|
52
|
+
return max(a, b)
|
53
|
+
|
54
|
+
|
55
|
+
class TimeInformation:
|
56
|
+
"""
|
57
|
+
Collect the WMS time information.
|
58
|
+
|
59
|
+
Arguments:
|
60
|
+
|
61
|
+
extent: A time extent instance (``TimeExtentValue`` or ``TimeExtentInterval``)
|
62
|
+
mode: The layer mode ("single", "range" or "disabled")
|
63
|
+
widget: The layer mode ("slider" (default) or "datepicker")
|
64
|
+
"""
|
65
|
+
|
66
|
+
def __init__(self) -> None:
|
67
|
+
self.extent: TimeExtent | None = None
|
68
|
+
self.mode: str | None = None
|
69
|
+
self.widget: str | None = None
|
70
|
+
self.layer: dict[str, Any] | None = None
|
71
|
+
|
72
|
+
def merge(self, layer: dict[str, Any], extent: TimeExtent, mode: str, widget: str) -> None:
|
73
|
+
layer_apply = self.layer == layer or (not self.has_time() and extent is not None)
|
74
|
+
|
75
|
+
self.merge_extent(extent)
|
76
|
+
self.merge_mode(mode)
|
77
|
+
self.merge_widget(widget)
|
78
|
+
|
79
|
+
if layer_apply:
|
80
|
+
layer["time"] = self.to_dict()
|
81
|
+
self.layer = layer
|
82
|
+
elif self.layer is not None:
|
83
|
+
del self.layer["time"]
|
84
|
+
self.layer = None
|
85
|
+
|
86
|
+
def merge_extent(self, extent: TimeExtent) -> None:
|
87
|
+
if self.extent is not None:
|
88
|
+
self.extent.merge(extent)
|
89
|
+
else:
|
90
|
+
self.extent = extent
|
91
|
+
|
92
|
+
def merge_mode(self, mode: str) -> None:
|
93
|
+
if mode != "disabled":
|
94
|
+
if self.mode is not None:
|
95
|
+
if self.mode != mode:
|
96
|
+
raise ValueError(f"Could not mix time mode '{mode!s}' and '{self.mode!s}'")
|
97
|
+
else:
|
98
|
+
self.mode = mode
|
99
|
+
|
100
|
+
def merge_widget(self, widget: str | None) -> None:
|
101
|
+
widget = "slider" if not widget else widget
|
102
|
+
assert widget is not None
|
103
|
+
|
104
|
+
if self.widget is not None:
|
105
|
+
if self.widget != widget:
|
106
|
+
raise ValueError(f"Could not mix time widget '{widget!s}' and '{self.widget!s}'")
|
107
|
+
else:
|
108
|
+
self.widget = widget
|
109
|
+
|
110
|
+
def has_time(self) -> bool:
|
111
|
+
return self.extent is not None
|
112
|
+
|
113
|
+
def to_dict(self) -> dict[str, Any] | None:
|
114
|
+
if self.has_time():
|
115
|
+
assert self.extent is not None
|
116
|
+
time = self.extent.to_dict()
|
117
|
+
time["mode"] = self.mode
|
118
|
+
time["widget"] = self.widget
|
119
|
+
return time
|
120
|
+
return None
|
121
|
+
|
122
|
+
|
123
|
+
class TimeExtentValue:
|
124
|
+
"""Represents time as a list of values."""
|
125
|
+
|
126
|
+
def __init__(
|
127
|
+
self,
|
128
|
+
values: set[datetime.datetime],
|
129
|
+
resolution: str,
|
130
|
+
min_def_value: datetime.datetime | None,
|
131
|
+
max_def_value: datetime.datetime | None,
|
132
|
+
):
|
133
|
+
"""
|
134
|
+
Initialize.
|
135
|
+
|
136
|
+
Arguments:
|
137
|
+
|
138
|
+
values: A set() of datetime
|
139
|
+
resolution: The resolution from the mapfile time definition
|
140
|
+
min_def_value: the minimum default value as a datetime
|
141
|
+
max_def_value: the maximum default value as a datetime
|
142
|
+
"""
|
143
|
+
self.values = values
|
144
|
+
self.resolution = resolution
|
145
|
+
self.min_def_value = min_def_value
|
146
|
+
self.max_def_value = max_def_value
|
147
|
+
|
148
|
+
def merge(self, extent: TimeExtent) -> None:
|
149
|
+
if not isinstance(extent, TimeExtentValue):
|
150
|
+
raise ValueError("Could not mix time defined as a list of values with other type of definition")
|
151
|
+
self.values.update(extent.values)
|
152
|
+
self.min_def_value = min_none(self.min_def_value, extent.min_def_value)
|
153
|
+
self.max_def_value = max_none(self.max_def_value, extent.max_def_value)
|
154
|
+
|
155
|
+
def to_dict(self) -> dict[str, Any]:
|
156
|
+
values = sorted(self.values)
|
157
|
+
min_def_value = _format_date(self.min_def_value) if self.min_def_value else None
|
158
|
+
max_def_value = _format_date(self.max_def_value) if self.max_def_value else None
|
159
|
+
|
160
|
+
return {
|
161
|
+
"minValue": _format_date(values[0]),
|
162
|
+
"maxValue": _format_date(values[-1]),
|
163
|
+
"values": list(map(_format_date, values)),
|
164
|
+
"resolution": self.resolution,
|
165
|
+
"minDefValue": min_def_value,
|
166
|
+
"maxDefValue": max_def_value,
|
167
|
+
}
|
168
|
+
|
169
|
+
|
170
|
+
class TimeExtentInterval:
|
171
|
+
"""Represents time with the help of a start, an end and an interval."""
|
172
|
+
|
173
|
+
def __init__(
|
174
|
+
self,
|
175
|
+
start: datetime.datetime,
|
176
|
+
end: datetime.datetime,
|
177
|
+
interval: tuple[int, int, int, int],
|
178
|
+
resolution: str,
|
179
|
+
min_def_value: datetime.datetime | None,
|
180
|
+
max_def_value: datetime.datetime | None,
|
181
|
+
):
|
182
|
+
"""
|
183
|
+
Initialize.
|
184
|
+
|
185
|
+
Arguments:
|
186
|
+
|
187
|
+
start: The start value as a datetime
|
188
|
+
end: The end value as a datetime
|
189
|
+
interval: The interval as a tuple (years, months, days, seconds)
|
190
|
+
resolution: The resolution from the mapfile time definition
|
191
|
+
min_def_value: the minimum default value as a datetime
|
192
|
+
max_def_value: the maximum default value as a datetime
|
193
|
+
"""
|
194
|
+
self.start = start
|
195
|
+
self.end = end
|
196
|
+
self.interval = interval
|
197
|
+
self.resolution = resolution
|
198
|
+
self.min_def_value = min_def_value
|
199
|
+
self.max_def_value = max_def_value
|
200
|
+
|
201
|
+
def merge(self, extent: TimeExtent) -> None:
|
202
|
+
if not isinstance(extent, TimeExtentInterval):
|
203
|
+
raise ValueError("Could not merge time defined as with an interval with other type of definition")
|
204
|
+
if self.interval != extent.interval:
|
205
|
+
raise ValueError("Could not merge times defined with a different interval")
|
206
|
+
start = min_none(self.start, extent.start)
|
207
|
+
assert start is not None
|
208
|
+
self.start = start
|
209
|
+
end = max_none(self.end, extent.end)
|
210
|
+
assert end is not None
|
211
|
+
self.end = end
|
212
|
+
self.min_def_value = (
|
213
|
+
self.min_def_value
|
214
|
+
if extent.min_def_value is None
|
215
|
+
else (
|
216
|
+
extent.min_def_value
|
217
|
+
if self.min_def_value is None
|
218
|
+
else min_none(self.min_def_value, extent.min_def_value)
|
219
|
+
)
|
220
|
+
)
|
221
|
+
self.max_def_value = (
|
222
|
+
self.max_def_value
|
223
|
+
if extent.max_def_value is None
|
224
|
+
else (
|
225
|
+
extent.max_def_value
|
226
|
+
if self.max_def_value is None
|
227
|
+
else max_none(self.max_def_value, extent.max_def_value)
|
228
|
+
)
|
229
|
+
)
|
230
|
+
|
231
|
+
def to_dict(self) -> dict[str, Any]:
|
232
|
+
min_def_value = _format_date(self.min_def_value) if self.min_def_value is not None else None
|
233
|
+
max_def_value = _format_date(self.max_def_value) if self.max_def_value is not None else None
|
234
|
+
|
235
|
+
return {
|
236
|
+
"minValue": _format_date(self.start),
|
237
|
+
"maxValue": _format_date(self.end),
|
238
|
+
"interval": self.interval,
|
239
|
+
"resolution": self.resolution,
|
240
|
+
"minDefValue": min_def_value,
|
241
|
+
"maxDefValue": max_def_value,
|
242
|
+
}
|
243
|
+
|
244
|
+
|
245
|
+
def parse_extent(extent: list[str], default_values: str) -> TimeExtent:
    """
    Parse a time extent from OWSLib into a ``TimeExtentValue`` or a ``TimeExtentInterval``.

    Two formats are supported:
    * ['start/end/interval']
    * ['date1', 'date2', ..., 'date N']

    default_values must be a slash separated String from OWSLib's
    defaulttimeposition
    """
    if not extent:
        raise ValueError(f"Invalid time extent format '{extent}'")

    min_def_value, max_def_value = _parse_default_values(default_values)

    if "/" in extent[0]:
        # "start/end/interval" format: exactly one entry with two slashes.
        if len(extent) != 1 or extent[0].count("/") != 2:
            raise ValueError(f"Unsupported time definition '{extent!s}'")
        start_str, end_str, interval_str = extent[0].split("/")
        resolution, start = _parse_date(start_str)
        _, end = _parse_date(end_str)
        interval = _parse_duration(interval_str)

        return TimeExtentInterval(start, end, interval, resolution, min_def_value, max_def_value)

    # "value1, value2, ..., valueN" format; resolution is taken from the first value.
    parsed = [_parse_date(value) for value in extent]
    return TimeExtentValue({date for _, date in parsed}, parsed[0][0], min_def_value, max_def_value)
|
275
|
+
|
276
|
+
|
277
|
+
def _parse_default_values(default_values: str) -> tuple[datetime.datetime, datetime.datetime | None]:
|
278
|
+
"""
|
279
|
+
Parse the 'default' value from OWSLib's defaulttimeposition and return a maximum of two dates.
|
280
|
+
|
281
|
+
default value must be a slash separated String. return None on the seconde value if it does not exist.
|
282
|
+
"""
|
283
|
+
if default_values is None:
|
284
|
+
return None, None
|
285
|
+
|
286
|
+
def_value = default_values.split("/")
|
287
|
+
|
288
|
+
_, min_def_value = _parse_date(def_value[0])
|
289
|
+
max_def_value = None
|
290
|
+
|
291
|
+
if len(def_value) > 1:
|
292
|
+
_, max_def_value = _parse_date(def_value[1])
|
293
|
+
|
294
|
+
return min_def_value, max_def_value
|
295
|
+
|
296
|
+
|
297
|
+
def _parse_date(date: str) -> tuple[str, datetime.datetime]:
    """
    Parse a date string.

    Return a tuple containing:

    * the resolution: "year", "month", "day" or "second"
    * the date as a datetime

    The returned datetime always has a timezone (default is UTC)

    Raises:
        ValueError: when the string matches none of the supported formats.
    """
    resolutions = {"year": "%Y", "month": "%Y-%m", "day": "%Y-%m-%d"}

    # Try the coarse patterns first; strptime only raises ValueError on a
    # mismatch, so catch exactly that instead of the original broad
    # `except Exception` (which could mask real bugs).
    for resolution, pattern in resolutions.items():
        try:
            parsed = datetime.datetime.strptime(date, pattern)
        except ValueError:
            continue
        return resolution, parsed.replace(tzinfo=isodate.UTC)

    # Fall back to full ISO 8601 parsing with second resolution.
    try:
        parsed = isodate.parse_datetime(date)
    except Exception as exception:
        raise ValueError(f"Invalid date format '{date}'") from exception
    if parsed.tzinfo is None:
        parsed = parsed.replace(tzinfo=isodate.UTC)
    return "second", parsed
|
324
|
+
|
325
|
+
|
326
|
+
def _format_date(date: datetime.datetime) -> str:
    """Serialize ``date`` to an ISO 8601 string, appending "Z" when it is naive."""
    formatted = isodate.datetime_isoformat(date)
    assert isinstance(formatted, str)
    return formatted if date.tzinfo is not None else formatted + "Z"
|
332
|
+
|
333
|
+
|
334
|
+
def _parse_duration(duration: str) -> tuple[int, int, int, int]:
    """
    Parse an ISO 8601 duration (i.e. "P2DT5S").

    Return a tuple containing:

    * years
    * months
    * days
    * seconds
    """
    parsed = isodate.parse_duration(duration)

    # isodate may return a plain timedelta (no years/months attributes) or a
    # Duration whose years/months can be floats; normalize both cases to int.
    years = int(getattr(parsed, "years", 0))
    months = int(getattr(parsed, "months", 0))
    return years, months, parsed.days, parsed.seconds
|