pygiskit 0.1.0.dev0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- giskit/__init__.py +8 -0
- giskit/cli/__init__.py +0 -0
- giskit/cli/main.py +659 -0
- giskit/config/__init__.py +26 -0
- giskit/config/export/colors.yml +67 -0
- giskit/config/export/layer_mappings.yml +171 -0
- giskit/config/loader.py +459 -0
- giskit/config/quirks/formats.yml +18 -0
- giskit/config/quirks/providers.yml +8 -0
- giskit/config/quirks/services.yml +17 -0
- giskit/config/services/pdok-wcs.yml +50 -0
- giskit/config/services/pdok-wmts.yml +78 -0
- giskit/config/services/pdok.yml +540 -0
- giskit/config/services/test-wms.yml +17 -0
- giskit/config/services/testprovider.yml +26 -0
- giskit/core/__init__.py +0 -0
- giskit/core/geocoding.py +160 -0
- giskit/core/recipe.py +275 -0
- giskit/core/spatial.py +303 -0
- giskit/exporters/__init__.py +11 -0
- giskit/exporters/glb_exporter.py +229 -0
- giskit/exporters/ifc/__init__.py +8 -0
- giskit/exporters/ifc/exporter.py +297 -0
- giskit/exporters/ifc/geometry.py +243 -0
- giskit/exporters/ifc/layer_exporter.py +364 -0
- giskit/exporters/ifc/materials.py +192 -0
- giskit/exporters/ifc/schema_adapter.py +125 -0
- giskit/indexer/README.md +207 -0
- giskit/indexer/__init__.py +33 -0
- giskit/indexer/__main__.py +203 -0
- giskit/indexer/monitor.py +388 -0
- giskit/protocols/__init__.py +23 -0
- giskit/protocols/base.py +145 -0
- giskit/protocols/cityjson.py +260 -0
- giskit/protocols/ogc_features.py +301 -0
- giskit/protocols/quirks.py +328 -0
- giskit/protocols/quirks_monitor.py +232 -0
- giskit/protocols/wcs.py +357 -0
- giskit/protocols/wfs.py +167 -0
- giskit/protocols/wmts.py +373 -0
- giskit/providers/__init__.py +0 -0
- giskit/providers/base.py +219 -0
- giskit/providers/ogc_features.py +257 -0
- giskit/providers/pdok.py +719 -0
- giskit/providers/wcs.py +315 -0
- giskit/providers/wmts.py +287 -0
- pygiskit-0.1.0.dev0.dist-info/LICENSE +21 -0
- pygiskit-0.1.0.dev0.dist-info/METADATA +434 -0
- pygiskit-0.1.0.dev0.dist-info/RECORD +51 -0
- pygiskit-0.1.0.dev0.dist-info/WHEEL +4 -0
- pygiskit-0.1.0.dev0.dist-info/entry_points.txt +3 -0
giskit/__init__.py
ADDED
giskit/cli/__init__.py
ADDED
|
File without changes
|
giskit/cli/main.py
ADDED
|
@@ -0,0 +1,659 @@
|
|
|
1
|
+
"""GISKit CLI - Recipe-driven spatial data downloader.
|
|
2
|
+
|
|
3
|
+
Usage:
|
|
4
|
+
giskit run recipe.json
|
|
5
|
+
giskit validate recipe.json
|
|
6
|
+
giskit providers list
|
|
7
|
+
giskit providers info pdok
|
|
8
|
+
giskit quirks list
|
|
9
|
+
giskit quirks show pdok ogc-features
|
|
10
|
+
"""
|
|
11
|
+
|
|
12
|
+
import asyncio
|
|
13
|
+
import re
|
|
14
|
+
from pathlib import Path
|
|
15
|
+
from typing import Optional
|
|
16
|
+
|
|
17
|
+
import typer
|
|
18
|
+
from rich.console import Console
|
|
19
|
+
from rich.panel import Panel
|
|
20
|
+
from rich.table import Table
|
|
21
|
+
|
|
22
|
+
from giskit import __version__
|
|
23
|
+
from giskit.core.recipe import Recipe
|
|
24
|
+
from giskit.protocols.quirks import KNOWN_QUIRKS, get_quirks
|
|
25
|
+
from giskit.providers.base import get_provider, list_providers
|
|
26
|
+
|
|
27
|
+
# Root Typer application; the providers/quirks/export sub-apps attach to this.
app = typer.Typer(
    name="giskit",
    help="Recipe-driven spatial data downloader for any location, any provider, anywhere",
    add_completion=False,
)
# Shared Rich console used by every command for styled terminal output.
console = Console()
|
|
33
|
+
|
|
34
|
+
|
|
35
|
+
def _normalize_layer_name(name: str) -> str:
|
|
36
|
+
"""Normalize layer name to snake_case.
|
|
37
|
+
|
|
38
|
+
Converts PascalCase/camelCase to snake_case for consistency.
|
|
39
|
+
Examples:
|
|
40
|
+
Perceel -> perceel
|
|
41
|
+
BuildingPart -> building_part
|
|
42
|
+
pand -> pand (already lowercase)
|
|
43
|
+
"""
|
|
44
|
+
# Insert underscore before uppercase letters (but not at start)
|
|
45
|
+
s1 = re.sub("(.)([A-Z][a-z]+)", r"\1_\2", name)
|
|
46
|
+
# Insert underscore before uppercase letters that follow lowercase
|
|
47
|
+
s2 = re.sub("([a-z0-9])([A-Z])", r"\1_\2", s1)
|
|
48
|
+
# Convert to lowercase
|
|
49
|
+
return s2.lower()
|
|
50
|
+
|
|
51
|
+
|
|
52
|
+
async def _execute_recipe(recipe: Recipe, console: Console, verbose: bool):
    """Execute a recipe asynchronously.

    Args:
        recipe: Recipe to execute
        console: Rich console for output
        verbose: Verbose logging

    Returns:
        Dictionary mapping layer names to GeoDataFrames, or ``None`` when
        no dataset produced any features.
    """
    import geopandas as gpd

    # Import provider module to register providers (registration happens as
    # an import side effect of giskit.providers.pdok).
    import giskit.providers.pdok  # noqa: F401

    # Get bbox from location (WGS84 lon/lat order assumed — TODO confirm
    # against Recipe.get_bbox_wgs84).
    with console.status("[bold green]Calculating bounding box..."):
        bbox = await recipe.get_bbox_wgs84()

    if verbose:
        console.print(f" BBox (WGS84): {bbox}")

    # Download each dataset - each one contributes one or more named layers.
    layers = {}

    for i, dataset in enumerate(recipe.datasets, 1):
        console.print(f"\n[bold]Dataset {i}/{len(recipe.datasets)}:[/bold] {dataset.provider}")

        if dataset.service:
            console.print(f" Service: {dataset.service}")
        if dataset.layers:
            console.print(f" Layers: {', '.join(dataset.layers)}")

        try:
            # Resolve the registered provider implementation by name.
            provider = get_provider(dataset.provider)

            # Convert bbox to Location for compatibility with the provider API.
            from giskit.core.recipe import Location, LocationType

            bbox_location = Location(type=LocationType.BBOX, value=list(bbox), crs="EPSG:4326")

            # Download dataset
            with console.status(f"[bold green]Downloading from {dataset.provider}..."):
                gdf = await provider.download_dataset(
                    dataset=dataset,
                    location=bbox_location,
                    output_path=recipe.output.path,
                    output_crs=recipe.output.crs,
                )

            if not gdf.empty:
                console.print(f" [green]✓[/green] Downloaded {len(gdf)} features")

                # Layer key prefix: service name, falling back to provider name.
                service = dataset.service or dataset.provider

                # Multi-layer downloads tag rows with an internal column;
                # split the frame back into one layer per tag.
                if "_collection" in gdf.columns:
                    # Split by collection/layer
                    for collection_name in gdf["_collection"].unique():
                        layer_gdf = gdf[gdf["_collection"] == collection_name].copy()
                        # Normalize collection name to snake_case
                        normalized_name = _normalize_layer_name(collection_name)
                        full_layer_name = f"{service}_{normalized_name}"
                        layers[full_layer_name] = layer_gdf
                elif "_layer" in gdf.columns:
                    # Alternative layer column name used by some providers.
                    for layer_name in gdf["_layer"].unique():
                        layer_gdf = gdf[gdf["_layer"] == layer_name].copy()
                        # Normalize layer name to snake_case
                        normalized_name = _normalize_layer_name(layer_name)
                        full_layer_name = f"{service}_{normalized_name}"
                        layers[full_layer_name] = layer_gdf
                else:
                    # Single layer - use service name or first layer from request
                    if dataset.layers and len(dataset.layers) == 1:
                        layer_name = f"{service}_{dataset.layers[0]}"
                    else:
                        layer_name = service
                    layers[layer_name] = gdf
            else:
                console.print(" [yellow]No features found[/yellow]")

        except Exception as e:
            # One failing dataset must not abort the whole recipe.
            console.print(f" [red]✗[/red] Failed: {e}")
            if verbose:
                console.print_exception()
            # Continue with other datasets

    # Add a provenance/metadata layer, but only for GeoPackage output
    # (single-layer formats cannot carry an extra table).
    if layers and recipe.output.format.value == "gpkg":
        from datetime import datetime

        from shapely.geometry import Point

        from giskit.core.spatial import transform_bbox

        # Transform bbox to output CRS
        bbox_output_crs = transform_bbox(bbox, "EPSG:4326", recipe.output.crs)

        # Center point of the bbox, used as the metadata row's geometry.
        center_x = (bbox_output_crs[0] + bbox_output_crs[2]) / 2
        center_y = (bbox_output_crs[1] + bbox_output_crs[3]) / 2

        # Build metadata dict - exact column order matching Sitedb
        # (presumably a downstream consumer; verify before reordering).
        metadata_dict = {
            "address": [None],
            "x": [center_x],
            "y": [center_y],
            "radius": [None],
            "bbox_minx": [bbox_output_crs[0]],
            "bbox_miny": [bbox_output_crs[1]],
            "bbox_maxx": [bbox_output_crs[2]],
            "bbox_maxy": [bbox_output_crs[3]],
            "download_date": [datetime.now().isoformat()],
            "crs": [recipe.output.crs],
            "grid_size": [None],  # For raster data, optional
            "bgt_layers": [None],  # Which BGT layers were requested
            "bag3d_lods": [None],  # Which BAG3D LOD levels were requested
        }

        # Add location-specific fields
        if recipe.location.type.value == "address":
            metadata_dict["address"] = [recipe.location.value]
            if recipe.location.radius is not None:
                metadata_dict["radius"] = [recipe.location.radius]
        elif recipe.location.type.value == "point":
            if recipe.location.radius is not None:
                metadata_dict["radius"] = [recipe.location.radius]

        # Extract dataset-specific metadata for traceability
        bgt_layers_list = []
        bag3d_lods_list = []

        for dataset in recipe.datasets:
            service = dataset.service or dataset.provider

            # Track BGT layers
            if service == "bgt" and dataset.layers:
                bgt_layers_list.extend(dataset.layers)

            # Track BAG3D LOD levels
            elif service == "bag3d" and dataset.layers:
                # Extract LOD levels (lod12 -> 1.2, lod13 -> 1.3, lod22 -> 2.2)
                for layer in dataset.layers:
                    if layer.startswith("lod"):
                        # Convert lod12 -> 1.2
                        lod_num = layer[3:]  # "12", "13", "22"
                        if len(lod_num) == 2:
                            lod_formatted = f"{lod_num[0]}.{lod_num[1]}"
                            bag3d_lods_list.append(lod_formatted)

            # Track grid_size if resolution is specified (for raster data).
            # NOTE(review): a later dataset's resolution overwrites an
            # earlier one — confirm this is intended.
            if dataset.resolution is not None:
                metadata_dict["grid_size"] = [dataset.resolution]

        # Store BGT layers (or "all" if many layers)
        if bgt_layers_list:
            # Sitedb uses "all" if all BGT layers are included
            if len(bgt_layers_list) > 40:  # Heuristic: if many layers, assume "all"
                metadata_dict["bgt_layers"] = ["all"]
            else:
                metadata_dict["bgt_layers"] = [",".join(sorted(bgt_layers_list))]

        # Store BAG3D LOD levels
        if bag3d_lods_list:
            metadata_dict["bag3d_lods"] = [",".join(sorted(bag3d_lods_list))]

        # Create the single-row metadata GeoDataFrame anchored at the bbox center.
        metadata_gdf = gpd.GeoDataFrame(
            metadata_dict, geometry=[Point(center_x, center_y)], crs=recipe.output.crs
        )

        layers["_metadata"] = metadata_gdf

    return layers if layers else None
|
|
230
|
+
|
|
231
|
+
|
|
232
|
+
@app.callback()
def main() -> None:
    """GISKit - Recipe-driven spatial data downloader."""
|
|
236
|
+
|
|
237
|
+
|
|
238
|
+
@app.command()
def version() -> None:
    """Show GISKit version."""
    banner = f"GISKit version {__version__}"
    console.print(banner)
|
|
242
|
+
|
|
243
|
+
|
|
244
|
+
def _combine_layers(layers):
    """Merge all layer GeoDataFrames into one frame for single-layer formats.

    GeoJSON, Shapefile and FlatGeobuf cannot store multiple named layers in
    one file, so their export paths concatenate every layer first.
    """
    import geopandas as gpd
    import pandas as pd

    # Use pandas.concat directly: ``gpd.pd`` was never public geopandas API
    # and is removed in geopandas 1.0, so the old spelling breaks on upgrade.
    return gpd.GeoDataFrame(pd.concat(layers.values(), ignore_index=True))


@app.command()
def run(
    recipe_path: Path = typer.Argument(
        ..., help="Path to recipe JSON file", exists=True, dir_okay=False
    ),
    verbose: bool = typer.Option(False, "--verbose", "-v", help="Verbose output"),
    dry_run: bool = typer.Option(False, "--dry-run", help="Validate without downloading"),
) -> None:
    """Run a recipe to download spatial data.

    Examples:
        giskit run amsterdam.json
        giskit run --dry-run test.json
        giskit run --verbose utrecht.json
    """
    try:
        # Load recipe
        with console.status("[bold green]Loading recipe..."):
            recipe = Recipe.from_file(recipe_path)

        console.print(f"[bold green]✓[/bold green] Loaded recipe: {recipe.name or 'Unnamed'}")

        if recipe.description:
            console.print(f" {recipe.description}")

        # Display recipe summary
        console.print(f"\n[bold]Location:[/bold] {recipe.location.type.value}")
        if recipe.location.type.value == "address":
            console.print(f" Address: {recipe.location.value}")
            console.print(f" Radius: {recipe.location.radius}m")
        elif recipe.location.type.value == "bbox":
            console.print(f" BBox: {recipe.location.value}")

        console.print(f"\n[bold]Datasets:[/bold] {len(recipe.datasets)} datasets")
        for i, ds in enumerate(recipe.datasets, 1):
            console.print(f" {i}. {ds.provider}", end="")
            if ds.service:
                console.print(f" → {ds.service}", end="")
            if ds.layers:
                console.print(f" → {', '.join(ds.layers)}", end="")
            console.print()

        console.print(f"\n[bold]Output:[/bold] {recipe.output.path}")
        console.print(f" Format: {recipe.output.format.value}")
        console.print(f" CRS: {recipe.output.crs}")

        # Convert dry_run to bool if it's a string (Typer bug workaround)
        if isinstance(dry_run, str):
            dry_run = dry_run.lower() in ("true", "1", "yes")

        if dry_run:
            console.print("\n[yellow]Dry run - no data downloaded[/yellow]")
            return

        # Execute recipe
        console.print("\n[bold]Executing recipe...[/bold]")

        try:
            # Run async download - returns dict of layer_name -> GeoDataFrame
            layers = asyncio.run(_execute_recipe(recipe, console, verbose))

            # Save to output file
            if layers is not None and len(layers) > 0:
                output_path = recipe.output.path
                output_format = recipe.output.format.value

                with console.status(f"[bold green]Saving to {output_path}..."):
                    if output_format == "gpkg":
                        # GeoPackage supports named layers: save each separately
                        total_features = 0
                        for layer_name, gdf in layers.items():
                            # Remove internal bookkeeping columns before saving
                            save_gdf = gdf.copy()
                            for col in ["_provider", "_service", "_layer", "_collection"]:
                                if col in save_gdf.columns:
                                    save_gdf = save_gdf.drop(columns=[col])

                            save_gdf.to_file(output_path, driver="GPKG", layer=layer_name)
                            total_features += len(save_gdf)

                        console.print(
                            f"\n[bold green]✓[/bold green] Successfully saved {total_features} features in {len(layers)} layers to {output_path}"
                        )
                    elif output_format in ("geojson", "shp", "fgb"):
                        # Single-layer formats: combine everything, save once.
                        driver = {
                            "geojson": "GeoJSON",
                            "shp": "ESRI Shapefile",
                            "fgb": "FlatGeobuf",
                        }[output_format]
                        combined = _combine_layers(layers)
                        combined.to_file(output_path, driver=driver)
                        console.print(
                            f"\n[bold green]✓[/bold green] Successfully saved {len(combined)} features to {output_path}"
                        )
            else:
                console.print("\n[yellow]No features downloaded[/yellow]")

        except Exception as download_error:
            console.print(f"\n[bold red]Download failed:[/bold red] {download_error}")
            if verbose:
                console.print_exception()
            raise typer.Exit(1)

    except typer.Exit:
        # typer.Exit subclasses RuntimeError; without this re-raise the
        # generic handler below would catch it and print a second,
        # misleading "Error:" message for an already-reported failure.
        raise
    except Exception as e:
        console.print(f"[bold red]Error:[/bold red] {e}")
        if verbose:
            console.print_exception()
        raise typer.Exit(1)
|
|
374
|
+
|
|
375
|
+
|
|
376
|
+
@app.command()
def validate(
    recipe_path: Path = typer.Argument(
        ..., help="Path to recipe JSON file", exists=True, dir_okay=False
    ),
) -> None:
    """Validate a recipe file without running it.

    Examples:
        giskit validate recipe.json
    """
    try:
        with console.status("[bold green]Validating recipe..."):
            loaded = Recipe.from_file(recipe_path)

        # Parsing succeeded: summarize the key recipe fields.
        console.print("[bold green]✓[/bold green] Recipe is valid")
        console.print(f" Name: {loaded.name or 'Unnamed'}")
        console.print(f" Datasets: {len(loaded.datasets)}")
        console.print(f" Output: {loaded.output.path}")

    except Exception as err:
        console.print("[bold red]✗[/bold red] Recipe validation failed")
        console.print(f"[red]{err}[/red]")
        raise typer.Exit(1)
|
|
400
|
+
|
|
401
|
+
|
|
402
|
+
# Sub-command group wiring: `giskit providers list|info ...`
providers_app = typer.Typer(help="Manage and query data providers")
app.add_typer(providers_app, name="providers")
|
|
404
|
+
|
|
405
|
+
|
|
406
|
+
@providers_app.command("list")
def providers_list() -> None:
    """List all available data providers.

    Examples:
        giskit providers list
    """
    registered = list_providers()

    # Nothing registered yet: explain instead of printing an empty table.
    if not registered:
        console.print("[yellow]No providers registered[/yellow]")
        console.print("[dim]Providers will be registered in Phase 2[/dim]")
        return

    table = Table(title="Available Providers")
    table.add_column("Provider", style="cyan")
    table.add_column("Status", style="green")

    for name in sorted(registered):
        table.add_row(name, "✓")

    console.print(table)
|
|
428
|
+
|
|
429
|
+
|
|
430
|
+
@providers_app.command("info")
def providers_info(
    provider_name: str = typer.Argument(..., help="Provider name (e.g., 'pdok')"),
) -> None:
    """Show detailed information about a provider.

    Examples:
        giskit providers info pdok
        giskit providers info osm
    """
    # TODO: Implement provider metadata retrieval
    console.print(f"[bold]Provider:[/bold] {provider_name}")
    for notice in (
        "[yellow]Provider metadata not yet implemented[/yellow]",
        "[dim]Implementation coming in Phase 2[/dim]",
    ):
        console.print(notice)
|
|
444
|
+
|
|
445
|
+
|
|
446
|
+
# Sub-command group wiring: `giskit quirks list|show|monitor ...`
quirks_app = typer.Typer(help="View and manage API quirks")
app.add_typer(quirks_app, name="quirks")
|
|
449
|
+
|
|
450
|
+
|
|
451
|
+
@quirks_app.command("list")
def quirks_list() -> None:
    """List all known provider quirks.

    Examples:
        giskit quirks list
    """
    if not KNOWN_QUIRKS:
        console.print("[yellow]No quirks registered[/yellow]")
        return

    table = Table(title="Known API Quirks")
    table.add_column("Provider", style="cyan")
    table.add_column("Protocol", style="blue")
    table.add_column("Quirks", style="yellow")

    for provider_name, by_protocol in sorted(KNOWN_QUIRKS.items()):
        for protocol_name, quirks in sorted(by_protocol.items()):
            # Collect a short label for each quirk that is actually set.
            labels = []
            if quirks.requires_trailing_slash:
                labels.append("trailing_slash")
            if quirks.require_format_param:
                labels.append("format_param")
            if quirks.max_features_limit:
                labels.append(f"max_limit={quirks.max_features_limit}")
            if quirks.custom_timeout:
                labels.append(f"timeout={quirks.custom_timeout}s")
            if quirks.custom_headers:
                labels.append(f"headers({len(quirks.custom_headers)})")

            summary = ", ".join(labels) if labels else "[dim]none[/dim]"
            table.add_row(provider_name, protocol_name, summary)

    console.print(table)
|
|
486
|
+
|
|
487
|
+
|
|
488
|
+
@quirks_app.command("show")
def quirks_show(
    provider: str = typer.Argument(..., help="Provider name (e.g., 'pdok')"),
    protocol: str = typer.Argument(..., help="Protocol name (e.g., 'ogc-features')"),
) -> None:
    """Show detailed quirks for a specific provider/protocol.

    Examples:
        giskit quirks show pdok ogc-features
    """
    quirks = get_quirks(provider, protocol)

    # A pair without an explicit registry entry falls back to defaults.
    is_known = provider in KNOWN_QUIRKS and protocol in KNOWN_QUIRKS[provider]

    title = f"[bold]{provider}/{protocol}[/bold]"
    if not is_known:
        title += " [dim](using defaults)[/dim]"

    lines = []

    # URL quirks section
    lines.extend(
        [
            "[bold cyan]URL Quirks:[/bold cyan]",
            f" requires_trailing_slash: {quirks.requires_trailing_slash}",
        ]
    )

    # Parameter quirks section
    lines.append("\n[bold cyan]Parameter Quirks:[/bold cyan]")
    lines.append(f" require_format_param: {quirks.require_format_param}")
    if quirks.require_format_param:
        lines.append(f" param_name: {quirks.format_param_name}")
        lines.append(f" param_value: {quirks.format_param_value}")
    lines.append(f" max_features_limit: {quirks.max_features_limit or 'none'}")

    # Timeout quirks section
    lines.extend(
        [
            "\n[bold cyan]Timeout Quirks:[/bold cyan]",
            f" custom_timeout: {quirks.custom_timeout or 'none'}",
        ]
    )

    # Header quirks section
    lines.append("\n[bold cyan]Header Quirks:[/bold cyan]")
    if quirks.custom_headers:
        for header, value in quirks.custom_headers.items():
            lines.append(f" {header}: {value}")
    else:
        lines.append(" [dim]none[/dim]")

    # Metadata is only meaningful for explicitly registered quirks.
    if is_known:
        lines.append("\n[bold cyan]Metadata:[/bold cyan]")
        if quirks.description:
            lines.append(f" Description: {quirks.description}")
        if quirks.issue_url:
            lines.append(f" Issue URL: {quirks.issue_url}")
        if quirks.workaround_date:
            lines.append(f" Workaround Date: {quirks.workaround_date}")

    console.print(Panel("\n".join(lines), title=title, border_style="blue"))
|
|
546
|
+
|
|
547
|
+
|
|
548
|
+
@quirks_app.command("monitor")
def quirks_monitor() -> None:
    """Show quirks usage statistics.

    Examples:
        giskit quirks monitor
    """
    from giskit.protocols.quirks_monitor import get_monitor

    usage_monitor = get_monitor()

    # Nothing to report until at least one quirk has fired during a download.
    if not usage_monitor.get_statistics():
        console.print("[yellow]No quirks have been applied yet[/yellow]")
        console.print("[dim]Run some downloads first, then check monitor again[/dim]")
        return

    usage_monitor.print_report()
|
|
567
|
+
|
|
568
|
+
|
|
569
|
+
# Sub-command group wiring: `giskit export ifc ...`
export_app = typer.Typer(help="Export GeoPackage to various formats")
app.add_typer(export_app, name="export")
|
|
572
|
+
|
|
573
|
+
|
|
574
|
+
@export_app.command("ifc")
def export_ifc(
    input_path: Path = typer.Argument(
        ..., help="Path to input GeoPackage file", exists=True, dir_okay=False
    ),
    output_path: Path = typer.Argument(..., help="Path to output IFC file"),
    ifc_version: str = typer.Option(
        "IFC4X3_ADD2", "--version", "-v", help="IFC schema version (IFC4X3_ADD2, IFC4, IFC2X3)"
    ),
    site_name: str = typer.Option("Site", "--site-name", "-s", help="Name for the IFC site"),
    absolute: bool = typer.Option(
        False, "--absolute", help="Use absolute RD coordinates (default: relative)"
    ),
    absolute_z: bool = typer.Option(
        False, "--absolute-z", help="Keep absolute NAP elevations (default: normalize to ground)"
    ),
    ref_x: Optional[float] = typer.Option(
        None, "--ref-x", help="Reference point X coordinate (auto-detect if not specified)"
    ),
    ref_y: Optional[float] = typer.Option(
        None, "--ref-y", help="Reference point Y coordinate (auto-detect if not specified)"
    ),
) -> None:
    """Export GeoPackage to IFC format.

    Examples:
        giskit export ifc input.gpkg output.ifc
        giskit export ifc --version IFC4 input.gpkg output.ifc
        giskit export ifc --site-name "Amsterdam Dam" input.gpkg output.ifc
        giskit export ifc --absolute --absolute-z input.gpkg output.ifc
    """
    # ifcopenshell is an optional dependency: import lazily and fail with
    # install instructions instead of a raw ImportError traceback.
    try:
        from giskit.exporters.ifc import IFCExporter
    except ImportError:
        console.print("[bold red]Error:[/bold red] IfcOpenShell not installed")
        console.print("\nIFC export requires ifcopenshell.")
        console.print("\nInstall using pip:")
        console.print(" [bold]pip install giskit[ifc][/bold]")
        console.print("\nOr separately:")
        console.print(" [bold]pip install ifcopenshell[/bold]")
        raise typer.Exit(1)

    # Convert flags: the CLI exposes the "absolute" choices while the
    # exporter API expects the inverse booleans.
    relative = not absolute
    normalize_z = not absolute_z

    try:
        console.print(f"[bold green]Exporting to IFC:[/bold green] {output_path}")
        console.print(f" Input: {input_path}")
        console.print(f" IFC Version: {ifc_version}")
        console.print(f" Site Name: {site_name}")
        console.print(f" Coordinate Mode: {'relative' if relative else 'absolute'}")
        console.print(f" Z-Normalization: {'enabled' if normalize_z else 'disabled'}")
        console.print()

        # Create exporter
        exporter = IFCExporter(ifc_version=ifc_version, author="GISKit", organization="A190")

        # Export with progress
        with console.status("[bold green]Exporting layers..."):
            exporter.export(
                db_path=input_path,
                output_path=output_path,
                layers=None,  # Export all supported layers
                relative=relative,
                normalize_z=normalize_z,
                site_name=site_name,
                ref_x=ref_x,
                ref_y=ref_y,
            )

        console.print("\n[bold green]✓[/bold green] Export complete!")
        console.print(f" Output: {output_path}")

        # Show file size
        if output_path.exists():
            size_mb = output_path.stat().st_size / (1024 * 1024)
            console.print(f" Size: {size_mb:.1f} MB")

    except Exception as e:
        console.print(f"[bold red]✗[/bold red] Export failed: {e}")
        raise typer.Exit(code=1)
|
|
656
|
+
|
|
657
|
+
|
|
658
|
+
# Allow invoking the CLI directly (e.g. `python giskit/cli/main.py`);
# normal installs use the entry point declared in entry_points.txt.
if __name__ == "__main__":
    app()
|
|
@@ -0,0 +1,26 @@
|
|
|
1
|
+
"""GISKit configuration package.
|
|
2
|
+
|
|
3
|
+
Configuration loaders for services and quirks.
|
|
4
|
+
"""
|
|
5
|
+
|
|
6
|
+
from giskit.config.loader import (
|
|
7
|
+
QuirkDefinition,
|
|
8
|
+
QuirksConfig,
|
|
9
|
+
ServiceDefinition,
|
|
10
|
+
ServicesConfig,
|
|
11
|
+
load_quirks,
|
|
12
|
+
load_services,
|
|
13
|
+
save_quirks,
|
|
14
|
+
save_services,
|
|
15
|
+
)
|
|
16
|
+
|
|
17
|
+
__all__ = [
|
|
18
|
+
"load_services",
|
|
19
|
+
"load_quirks",
|
|
20
|
+
"save_services",
|
|
21
|
+
"save_quirks",
|
|
22
|
+
"ServiceDefinition",
|
|
23
|
+
"ServicesConfig",
|
|
24
|
+
"QuirkDefinition",
|
|
25
|
+
"QuirksConfig",
|
|
26
|
+
]
|