xplan-tools 1.11.1__tar.gz → 1.12.0__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/PKG-INFO +3 -2
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/pyproject.toml +2 -2
- xplan_tools-1.12.0/xplan_tools/interface/db.py +398 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/gml.py +1 -3
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/main.py +2 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/coretable.py +20 -14
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/gml.py +3 -5
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/base.py +17 -10
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/migrations/env.py +11 -3
- xplan_tools-1.12.0/xplan_tools/model/migrations/versions/3c3445a58565_base_schema.py +290 -0
- xplan_tools-1.12.0/xplan_tools/model/migrations/versions/f8b74c08ec07_add_refs_indexes_ensure_polygon_ccw.py +61 -0
- xplan_tools-1.12.0/xplan_tools/model/orm.py +361 -0
- xplan_tools-1.11.1/xplan_tools/interface/db.py +0 -459
- xplan_tools-1.11.1/xplan_tools/model/migrations/versions/3c3445a58565_base_schema.py +0 -25
- xplan_tools-1.11.1/xplan_tools/model/orm.py +0 -220
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/LICENSE.md +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/README.md +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/base.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/jsonfg.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/shape.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/jsonfg.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/definitions.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/inspire_base.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/inspire_base2.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/inspire_plu40.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/xplan41.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/xplan54.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/xplan60.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/xplan61.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/appschema/xtrasse20.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/migrations/script.py.mako +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/resources/styles.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/mappingtables/XPlanToINSPIRE-SupplementaryRegulation_2_6_2025-10-08.xlsx +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/mappingtables/XPlanToINSPIRE-ZoningElement_2_6_2025-10-08.xlsx +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/mappingtables/XPlanToINSPIREFeatures_2_6_2025-10-08.xlsx +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/migrate_41_54.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/migrate_54_60.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/migrate_60_61.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/migrate_6x_plu.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/transform/transformer.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/util/__init__.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/util/style.py +0 -0
- {xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/util/validate.py +0 -0
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/PKG-INFO
@@ -1,8 +1,9 @@
-Metadata-Version: 2.
+Metadata-Version: 2.4
 Name: xplan-tools
-Version: 1.11.1
+Version: 1.12.0
 Summary: Manage XPlanung data
 License: EUPL-1.2-or-later
+License-File: LICENSE.md
 Author: Tobias Kraft
 Author-email: tobias.kraft@gv.hamburg.de
 Requires-Python: >=3.10,<3.14
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/pyproject.toml
@@ -1,6 +1,6 @@
 [project]
 name = "xplan-tools"
-version = "1.11.1"
+version = "1.12.0"
 description = "Manage XPlanung data"
 authors = [
     { name = "Tobias Kraft", email = "tobias.kraft@gv.hamburg.de" },
@@ -84,7 +84,7 @@ markers = [
 ]
 
 [tool.coverage.run]
-omit = ["tests/*", "xplan_tools/interface/base.py"]
+omit = ["tests/*", "xplan_tools/interface/base.py", "xplan_tools/model/migrations/*"]
 
 [build-system]
 requires = ["poetry-core"]
xplan_tools-1.12.0/xplan_tools/interface/db.py (new file)
@@ -0,0 +1,398 @@
+"""Module containing the class for extracting plans from and writing to databases."""
+
+# import json
+import logging
+from pathlib import Path
+from typing import Iterable
+
+from alembic import command, config, script
+from geoalchemy2 import load_spatialite_gpkg
+from geoalchemy2.admin.dialects.sqlite import load_spatialite_driver
+from sqlalchemy import (
+    Column,
+    Engine,
+    MetaData,
+    Table,
+    create_engine,
+    delete,
+    insert,
+    inspect,
+    select,
+    text,
+)
+from sqlalchemy.engine import URL, make_url
+
+# from sqlalchemy.dialects.sqlite.base import SQLiteCompiler
+from sqlalchemy.event import listen, listens_for, remove
+
+# from sqlalchemy.ext.compiler import compiles
+from sqlalchemy.orm import sessionmaker
+
+# from sqlalchemy.sql.expression import BindParameter
+from xplan_tools.model import model_factory
+from xplan_tools.model.base import BaseCollection, BaseFeature
+from xplan_tools.model.orm import Base, Feature, Geometry, Refs
+from xplan_tools.util import check_schema_accessibility
+
+# from xplan_tools.util import linearize_geom
+from .base import BaseRepository
+
+logger = logging.getLogger(__name__)
+
+
+class DBRepository(BaseRepository):
+    """Repository class for loading from and writing to databases."""
+
+    def __init__(
+        self,
+        datasource: str = "",
+        schema: str | None = None,
+        srid: int = 25832,
+        with_views: bool = False,
+    ) -> None:
+        """Initializes the DB Repository.
+
+        During initialization, a connection is established and the existence of required tables is tested.
+        If an alembic revision is found, automatic migration is executed for PostgreSQL DBs.
+        For other DBs, an Exception is raised if the revision does not correspond to the current model.
+        If no revision and tables are found, they are automatically created.
+
+        Args:
+            datasource: A connection string which will be transformed to a URL instance.
+            schema: Schema name for DB repository. If not specified, the default schema is used. Only for PostgreSQL.
+            srid: the EPSG code for spatial data
+            with_views: whether to create geometrytype-specific views (postgres only)
+        """
+        self.datasource: URL = make_url(datasource)
+        self.content = None
+        self.schema = schema
+        self.dialect = self.datasource.get_dialect().name
+        self.Session = sessionmaker(bind=self._engine)
+        # self.session = self.Session()
+        self.srid = srid
+        self.with_views = with_views
+
+        self.alembic_cfg = config.Config()
+        self.alembic_cfg.set_main_option(
+            "script_location", "xplan_tools:model:migrations"
+        )
+        self.alembic_cfg.set_main_option("srid", str(srid))
+        if with_views:
+            self.alembic_cfg.set_main_option("with_views", "1")
+        self.alembic_cfg.set_main_option(
+            "sqlalchemy.url",
+            datasource.replace("gpkg:", "sqlite:").replace(
+                "postgresql:", "postgresql+psycopg:"
+            ),
+        )
+        if self.schema and self.dialect == "postgresql":
+            check_schema_accessibility(self._engine, self.schema)
+            self.alembic_cfg.set_main_option("custom_schema", self.schema)
+        current_version = script.ScriptDirectory.from_config(
+            self.alembic_cfg
+        ).get_heads()
+        # test for tables and revision
+        with self._engine.connect() as conn:
+            inspector = inspect(conn)
+            tables = inspector.get_table_names(schema=self.schema)
+            is_coretable = {"coretable", "refs"}.issubset(set(tables))
+            if "alembic_version" in tables:
+                alembic_table = Table(
+                    "alembic_version",
+                    MetaData(schema=self.schema),
+                    Column("version_num"),
+                )
+                stmt = select(alembic_table.c.version_num)
+                db_version = conn.execute(stmt).scalars().all()
+            else:
+                db_version = []
+        is_current_version = set(db_version) == set(current_version)
+        if is_current_version:
+            logger.info("Database is at current revision")
+            return
+        # handle schema upgrade or table creation
+        if is_coretable and not db_version:
+            e = RuntimeError("Coretable with no revision found in database")
+            e.add_note(
+                "it is likely that the database was set up with an older version of this library which didn't use revisions yet"
+            )
+            e.add_note(
+                "please set up a new database or add a revision corresponding to the current model manually"
+            )
+            raise e
+        # if postgresql, run alembic and return
+        elif self.dialect == "postgresql":
+            logger.info(
+                "Running database migrations"
+                if db_version
+                else "Creating new database schema"
+            )
+            command.upgrade(self.alembic_cfg, "head")
+            return
+        elif db_version:
+            e = NotImplementedError(
+                f"Incompatible database revision and automatic migration not implemented for {self.dialect}"
+            )
+            e.add_note(
+                "please set up a new database with the current version of this library"
+            )
+            raise e
+        else:
+            # create tables if it's a fresh file-based DB and set it to current revision
+            logger.info("Creating new database schema")
+            self.create_tables(self.srid)
+            command.stamp(self.alembic_cfg, "head")
+
+    @property
+    def _engine(self) -> Engine:
+        url = (
+            self.datasource.set(drivername="postgresql+psycopg")
+            if self.dialect == "postgresql"
+            else self.datasource
+        )
+        connect_args: dict[str, str] = {}
+        if self.schema and self.dialect == "postgresql":
+            connect_args["options"] = f"-csearch_path={self.schema},public"
+        engine = create_engine(url, connect_args=connect_args)
+        if self.dialect == "geopackage":
+            listen(engine, "connect", load_spatialite_gpkg)
+        elif self.dialect == "sqlite":
+            listen(
+                engine,
+                "connect",
+                load_spatialite_driver,
+            )
+        return engine
+
+    def get_plan_by_id(self, id: str) -> BaseCollection:
+        logger.debug(f"retrieving plan with id {id}")
+        with self.Session() as session:
+            plan_feature = session.get(Feature, id)
+            if not plan_feature:
+                raise ValueError(f"no feature found with id {id}")
+            elif "Plan" not in plan_feature.featuretype:
+                raise ValueError(f"{plan_feature.featuretype} is not a plan object")
+            else:
+                plan_model = model_factory(
+                    plan_feature.featuretype,
+                    plan_feature.version,
+                    plan_feature.appschema,
+                ).model_validate(plan_feature)
+                collection = {id: plan_model}
+                srid = plan_model.get_geom_srid()
+                # iterate related features with depth=2: plan -> section -> features
+                for feature in plan_feature.related_features(session, depth=2):
+                    collection[str(feature.id)] = model_factory(
+                        feature.featuretype, feature.version, feature.appschema
+                    ).model_validate(feature)
+                return BaseCollection(
+                    features=collection,
+                    srid=srid,
+                    version=plan_feature.version,
+                    appschema=plan_feature.appschema,
+                )
+
+    def get(self, id: str) -> BaseFeature:
+        logger.debug(f"retrieving feature with id {id}")
+        with self.Session() as session:
+            feature = session.get(Feature, id)
+            if not feature:
+                raise ValueError(f"no feature found with id {id}")
+            else:
+                return model_factory(
+                    feature.featuretype, feature.version, feature.appschema
+                ).model_validate(feature)
+
+    def save(self, feature: BaseFeature) -> None:
+        logger.debug(f"saving feature with id {id}")
+        with self.Session() as session:
+            feature = feature.model_dump_coretable()
+            if session.get(Feature, feature.id):
+                raise ValueError(f"feature with id {feature.id} already exists")
+            session.merge(feature)
+            session.commit()
+
+    def delete_plan_by_id(self, id: str) -> BaseFeature:
+        logger.debug(f"deleting plan with id {id}")
+        with self.Session() as session:
+            plan_feature = session.get(Feature, id)
+            if not plan_feature:
+                raise ValueError(f"no feature found with id {id}")
+            elif "Plan" not in plan_feature.featuretype:
+                raise ValueError(f"{plan_feature.featuretype} is not a plan object")
+            else:
+                plan_model = model_factory(
+                    plan_feature.featuretype,
+                    plan_feature.version,
+                    plan_feature.appschema,
+                ).model_validate(plan_feature)
+                ids = [plan_feature.id]
+                ids += [
+                    feature.id for feature in plan_feature.related_features(session)
+                ]
+                stmt = delete(Feature).where(Feature.id.in_(ids))
+                session.execute(stmt)
+                session.commit()
+                return plan_model
+
+    def delete(self, id: str) -> BaseFeature:
+        logger.debug(f"deleting feature with id {id}")
+        with self.Session() as session:
+            feature = session.get(Feature, id)
+            if not feature:
+                raise ValueError(f"no feature found with id {id}")
+            else:
+                session.delete(feature)
+                session.commit()
+                return model_factory(
+                    feature.featuretype, feature.version, feature.appschema
+                ).model_validate(feature)
+
+    def save_all(
+        self, features: BaseCollection | Iterable[BaseFeature], **kwargs
+    ) -> None:
+        logger.debug("saving collection")
+        with self.Session() as session:
+            feature_list = []
+            refs_list = []
+            for feature in (
+                features.get_features()
+                if isinstance(features, BaseCollection)
+                else features
+            ):
+                feature, refs = feature.model_dump_coretable_bulk()
+                feature_list.append(feature)
+                refs_list.extend([ref for ref in refs if ref not in refs_list])
+            if feature_list:
+                session.execute(insert(Feature), feature_list)
+            if refs_list:
+                session.execute(insert(Refs), refs_list)
+            session.commit()
+
+    def update_all(
+        self, features: BaseCollection | Iterable[BaseFeature], **kwargs
+    ) -> None:
+        logger.debug("updating collection")
+        with self.Session() as session:
+            for feature in (
+                features.get_features()
+                if isinstance(features, BaseCollection)
+                else features
+            ):
+                feature = feature.model_dump_coretable()
+                session.merge(feature)
+            session.commit()
+
+    def update(self, id: str, feature: BaseFeature) -> BaseFeature:
+        logger.debug(f"updating feature with id {id}")
+        with self.Session() as session:
+            db_feature = session.get(Feature, id)
+            if db_feature:
+                session.merge(feature.model_dump_coretable())
+                session.commit()
+                return feature
+            else:
+                raise ValueError(f"no feature found with id {id}")
+
+    def patch(self, id: str, partial_update: dict) -> BaseFeature:
+        logger.debug(f"patching feature with id {id}: {partial_update}")
+        with self.Session() as session:
+            db_feature = session.get(Feature, id)
+            if db_feature:
+                feature_dict = (
+                    model_factory(
+                        db_feature.featuretype, db_feature.version, db_feature.appschema
+                    )
+                    .model_validate(db_feature)
+                    .model_dump()
+                )
+                feature = model_factory(
+                    db_feature.featuretype, db_feature.version, db_feature.appschema
+                ).model_validate(feature_dict | partial_update)
+                session.merge(feature.model_dump_coretable())
+                session.commit()
+                return feature
+            else:
+                raise ValueError(f"no feature found with id {id}")
+
+    def create_tables(self, srid: int) -> None:
+        """Creates coretable and related/spatial tables in the database.
+
+        Args:
+            srid: the EPSG code for spatial data
+        """
+
+        @listens_for(Base.metadata, "before_create")
+        def pre_creation(_, conn, **kwargs):
+            if self.dialect == "sqlite":
+                conn.execute(text("SELECT InitSpatialMetaData('EMPTY')"))
+                conn.execute(text("SELECT InsertEpsgSrid(:srid)"), {"srid": srid})
+
+        @listens_for(Base.metadata, "after_create")
+        def post_creation(_, conn, **kwargs):
+            if self.dialect == "geopackage":
+                conn.execute(
+                    text(
+                        """
+                        INSERT INTO gpkg_extensions (table_name, extension_name, definition, scope)
+                        VALUES
+                        ('gpkg_data_columns', 'gpkg_schema', 'http://www.geopackage.org/spec/#extension_schema', 'read-write'),
+                        ('gpkg_data_column_constraints', 'gpkg_schema', 'http://www.geopackage.org/spec/#extension_schema', 'read-write'),
+                        ('gpkgext_relations', 'related_tables', 'http://www.opengis.net/doc/IS/gpkg-rte/1.0', 'read-write'),
+                        ('refs', 'related_tables', 'http://www.opengis.net/doc/IS/gpkg-rte/1.0', 'read-write')
+                        """
+                    )
+                )
+                conn.execute(
+                    text(
+                        """
+                        INSERT INTO gpkgext_relations (base_table_name, base_primary_column, related_table_name, related_primary_column, relation_name, mapping_table_name)
+                        VALUES
+                        ('coretable', 'id', 'coretable', 'id', 'features', 'refs')
+                        """
+                    )
+                )
+                conn.execute(
+                    text(
+                        """
+                        INSERT INTO gpkg_data_columns (table_name, column_name, mime_type)
+                        VALUES
+                        ('coretable', 'properties', 'application/json')
+                        """
+                    )
+                )
+
+        logger.debug(f"creating tables with srid {srid}")
+        tables = Base.metadata.sorted_tables
+        if not self.dialect == "geopackage":
+            tables.pop(1)
+        tables[0].append_column(
+            Column(
+                "geometry",
+                Geometry(
+                    srid=srid,
+                    spatial_index=True,
+                ),
+                nullable=True,
+            ),
+            replace_existing=True,
+        )
+
+        try:
+            Base.metadata.create_all(self._engine, tables)
+            remove(Base.metadata, "before_create", pre_creation)
+            remove(Base.metadata, "after_create", post_creation)
+
+        except Exception as e:
+            if self.dialect in ["sqlite", "geopackage"]:
+                file = self._engine.url.database
+                Path(file).unlink(missing_ok=True)
+            raise e
+
+    def delete_tables(self) -> None:
+        """Deletes coretable and related/spatial tables from the database."""
+        logger.debug("deleting tables")
+        if self.dialect == "postgresql":
+            command.downgrade(self.alembic_cfg, "base")
+        else:
+            Base.metadata.drop_all(self._engine)
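The new `db.py` replaces the 459-line 1.11.1 implementation (listed as removed in the file summary above) and delegates schema creation and upgrades to alembic. A minimal usage sketch inferred from the constructor and methods shown above; the connection string, SRID, plan id, and the collection's version/appschema values are placeholders, not taken from the package docs:

```python
# Usage sketch for the new DBRepository (illustrative only).
from xplan_tools.interface.db import DBRepository
from xplan_tools.model.base import BaseCollection

# On init, a fresh file-based database gets its tables created and stamped at
# the current alembic revision; a PostgreSQL database is upgraded to "head".
repo = DBRepository("sqlite:///plans.sqlite", srid=25832)

# Placeholder collection; in practice this would come from another repository
# (e.g. a GML import). Field values here are illustrative assumptions.
collection = BaseCollection(features={}, srid=25832, version="6.0", appschema="xplan")
repo.save_all(collection)  # bulk insert via model_dump_coretable_bulk()

# Fetch a plan plus related features (depth=2: plan -> section -> features);
# raises ValueError if the id is unknown or not a plan object.
plan = repo.get_plan_by_id("00000000-0000-0000-0000-000000000000")
```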
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/interface/gml.py
@@ -199,9 +199,7 @@ class GMLRepository(BaseRepository):
             if self.appschema == "xtrasse"
             else "{http://www.opengis.net/wfs/2.0}member",
         ).append(
-            feature.model_dump_gml(
-                self.appschema, feature_srs=kwargs.get("feature_srs", True)
-            )
+            feature.model_dump_gml(feature_srs=kwargs.get("feature_srs", True))
         )
         bbox = get_envelope(geoms)
         attrib = (
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/main.py
@@ -22,6 +22,8 @@ __version__ = metadata.version("xplan_tools")
 console = Console()
 error_console = Console(stderr=True, style="bold red")
 logger = logging.getLogger(__name__)
+# don't propagate alembic logs when using CLI
+logging.getLogger("alembic").propagate = False
 
 app = typer.Typer(help=f"XPlan-Tools {__version__}")
 db_app = typer.Typer()
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/coretable.py
@@ -8,7 +8,9 @@ from xplan_tools.model.orm import Feature, Refs
 class CoretableAdapter:
     """Class to add ORM model - i.e. coretable - transformation methods to XPlan pydantic model via inheritance."""
 
-    def _to_coretable(self) -> Feature:
+    def _to_coretable(
+        self, bulk_mode: bool = False
+    ) -> Feature | tuple[dict, list[dict]]:
         """Converts a BaseFeature to a Coretable Feature object."""
         properties = self.model_dump(mode="json", exclude_none=True)
         id = properties.pop("id")
@@ -95,19 +97,23 @@ class CoretableAdapter:
             gener_att[f"wert_{item.get_name()}"] = gener_att.pop("wert")
             gener_att["datatype"] = item.get_name()
             properties["hatGenerAttribut"].append(gener_att)
-        feature = Feature(
-            id=id,
-            featuretype=self.get_name(),
-            properties=properties,
-            geometry=geometry,
-            appschema=self.get_appschema(),
-            version=self.get_version(),
-        )
-        if refs:
-            feature.refs = [Refs(**ref) for ref in refs]
-        if refs_inv:
-            feature.refs_inv = [Refs(**ref) for ref in refs_inv]
-        return feature
+        feature = {
+            "id": id,
+            "featuretype": self.get_name(),
+            "properties": properties,
+            "geometry": geometry,
+            "appschema": self.get_appschema(),
+            "version": self.get_version(),
+        }
+        if bulk_mode:
+            return feature, [*refs, *refs_inv]
+        else:
+            orm_feature = Feature(**feature)
+            if refs:
+                orm_feature.refs = [Refs(**ref) for ref in refs]
+            if refs_inv:
+                orm_feature.refs_inv = [Refs(**ref) for ref in refs_inv]
+            return orm_feature
 
     @classmethod
     def _from_coretable(cls, feature: Feature) -> dict:
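The adapter now has two output modes: the default path still builds an ORM `Feature` with `refs`/`refs_inv` relationship objects, while `bulk_mode` returns plain dicts so that `DBRepository.save_all()` can hand them to a single executemany-style `insert()`. A short sketch of the two shapes, with `feature` standing for any `BaseFeature` instance:

```python
from xplan_tools.model.base import BaseFeature

def dump_both_ways(feature: BaseFeature):
    """Contrast the two dump paths from the adapter change above (sketch)."""
    orm_obj = feature.model_dump_coretable()         # ORM Feature, refs as relationships
    row, refs = feature.model_dump_coretable_bulk()  # plain dict + list of ref dicts
    # save_all() accumulates rows/refs across features and batches them:
    #     session.execute(insert(Feature), feature_list)
    #     session.execute(insert(Refs), refs_list)
    # avoiding per-object unit-of-work overhead when writing whole collections.
    return orm_obj, row, refs
```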
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/adapters/gml.py
@@ -1,7 +1,6 @@
 """Module containing the GMLAdapter for reading from and writing to gml."""
 
 import logging
-from typing import Literal
 from uuid import uuid4
 
 from lxml import etree
@@ -19,7 +18,6 @@ class GMLAdapter:
 
     def _to_etree(
         self,
-        appschema: Literal["xplan", "xtrasse", "plu"] = "xplan",
         **kwargs,
     ) -> etree._Element:
         """Converts XPlan and INSPIRE PLU object to lxml etree Element."""
@@ -62,7 +60,7 @@ class GMLAdapter:
             "FORMAT=GML32",
             f"GMLID=GML_{uuid4()}",
             "SRSNAME_FORMAT=OGC_URL"
-            if appschema == "plu"
+            if self.get_appschema() == "plu"
             else "GML3_LONGSRS=NO",
             "NAMESPACE_DECL=YES",
         ]
@@ -125,11 +123,11 @@ class GMLAdapter:
         if isinstance(model_value, list):
             value_item = model_value[index]
             etree.SubElement(feature, gml_name).append(
-                value_item._to_etree(appschema)
+                value_item._to_etree()
             )
         else:
             etree.SubElement(feature, gml_name).append(
-                model_value._to_etree(appschema)
+                model_value._to_etree()
             )
 
         ns = self.namespace_uri.replace("base/4.0", "base/3.3")
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/base.py
@@ -198,12 +198,14 @@ class BaseCollection(BaseModel):
                 {str(ref): self.features[str(ref)] for ref in refs}
             )
             yield (
-                plan.name,
-                BaseCollection(
-                    features=collection,
-                    srid=self.srid,
-                    version=self.version,
-                    appschema=self.appschema,
+                (
+                    plan.name,
+                    BaseCollection(
+                        features=collection,
+                        srid=self.srid,
+                        version=self.version,
+                        appschema=self.appschema,
+                    ),
                 )
                 if with_name
                 else BaseCollection(
@@ -211,7 +213,7 @@ class BaseCollection(BaseModel):
                     srid=self.srid,
                     version=self.version,
                     appschema=self.appschema,
-            )
+                )
             )
 
         # def __iter__(self):
@@ -464,16 +466,21 @@ class BaseFeature(BaseModel, GMLAdapter, CoretableAdapter, JsonFGAdapter):
 
     def model_dump_gml(
         self,
-        appschema: Literal["xplan", "xtrasse", "plu"] = "xplan",
         **kwargs,
     ) -> _Element:
         """Dumps the model data to a GML structure held in an etree.Element."""
-        return self._to_etree(appschema, **kwargs)
+        return self._to_etree(**kwargs)
 
-    def model_dump_coretable(self) -> Feature:
+    def model_dump_coretable(
+        self,
+    ) -> Feature:
         """Dumps the model data to a coretable Feature object to store in a database."""
         return self._to_coretable()
 
+    def model_dump_coretable_bulk(self) -> tuple[dict, list[dict]]:
+        """Dumps the model data to feature and refs dicts to bulk insert in a database."""
+        return self._to_coretable(bulk_mode=True)
+
     def model_dump_jsonfg(
         self,
         **kwargs,
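Net effect of the `model_dump_gml` change for callers: the application schema is no longer passed in but derived from the feature itself (`self.get_appschema()` in the GML adapter). Illustrative call shapes, with the 1.11.1 form reconstructed from the removed parameter:

```python
from lxml.etree import _Element
from xplan_tools.model.base import BaseFeature

def to_gml(feature: BaseFeature) -> _Element:
    # 1.11.1 (removed): feature.model_dump_gml("plu", feature_srs=True)
    # 1.12.0: the feature reports its own appschema, so only GML options remain.
    return feature.model_dump_gml(feature_srs=True)
```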
{xplan_tools-1.11.1 → xplan_tools-1.12.0}/xplan_tools/model/migrations/env.py
@@ -22,9 +22,10 @@ def run_migrations_offline() -> None:
     script output.
 
     """
-    url = config.get_main_option("sqlalchemy.url")
     context.configure(
-        url=url,
+        dialect_name=context.get_x_argument(as_dictionary=True).get(
+            "dialect", "postgresql"
+        ),
         target_metadata=target_metadata,
         literal_binds=True,
         dialect_opts={"paramstyle": "named"},
@@ -41,8 +42,15 @@ def run_migrations_online() -> None:
     and associate a connection with the context.
 
     """
+    cfg_section = config.get_section(config.config_ini_section, {}) or {}
+    x_args = context.get_x_argument(as_dictionary=True)
+    if "sqlalchemy.url" in x_args:
+        cfg_section["sqlalchemy.url"] = x_args["sqlalchemy.url"]
+    elif "url" in x_args:
+        cfg_section["sqlalchemy.url"] = x_args["url"]
+
     connectable = engine_from_config(
-        config.get_section(config.config_ini_section),
+        cfg_section,
         prefix="sqlalchemy.",
         poolclass=pool.NullPool,
     )
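The `env.py` changes wire alembic's generic `-x` options into both modes: offline mode reads the target dialect from `-x dialect=...`, and online mode lets `-x url=...` (or `-x sqlalchemy.url=...`) override the configured `sqlalchemy.url` that `DBRepository` otherwise sets on its `Config` object. A hypothetical sketch of direct invocations via alembic's console entry point — it assumes an `alembic.ini` whose `script_location` points at `xplan_tools:model:migrations`, which is not shipped configuration:

```python
# Hypothetical direct invocations mirroring the CLI, e.g.
#   alembic -x dialect=sqlite upgrade head --sql
from alembic.config import main as alembic_main

# Offline mode: emit SQL for the dialect given via -x instead of connecting.
alembic_main(argv=["-x", "dialect=sqlite", "upgrade", "head", "--sql"])

# Online mode: run migrations against an overriding database URL.
alembic_main(argv=[
    "-x", "url=postgresql+psycopg://user:pass@localhost/xplan",  # placeholder URL
    "upgrade", "head",
])
```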