ssb-sgis 0.3.7__py3-none-any.whl → 0.3.9__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sgis/__init__.py +6 -3
- sgis/geopandas_tools/buffer_dissolve_explode.py +13 -9
- sgis/geopandas_tools/centerlines.py +110 -47
- sgis/geopandas_tools/cleaning.py +331 -0
- sgis/geopandas_tools/conversion.py +17 -7
- sgis/geopandas_tools/duplicates.py +67 -49
- sgis/geopandas_tools/general.py +15 -1
- sgis/geopandas_tools/neighbors.py +12 -0
- sgis/geopandas_tools/overlay.py +26 -17
- sgis/geopandas_tools/polygon_operations.py +281 -100
- sgis/geopandas_tools/polygons_as_rings.py +72 -10
- sgis/geopandas_tools/sfilter.py +8 -8
- sgis/helpers.py +20 -3
- sgis/io/{dapla.py → dapla_functions.py} +28 -6
- sgis/io/write_municipality_data.py +13 -7
- sgis/maps/examine.py +10 -7
- sgis/maps/explore.py +102 -25
- sgis/maps/map.py +32 -6
- sgis/maps/maps.py +40 -58
- sgis/maps/tilesources.py +61 -0
- sgis/networkanalysis/closing_network_holes.py +89 -62
- sgis/networkanalysis/cutting_lines.py +11 -5
- sgis/networkanalysis/finding_isolated_networks.py +1 -1
- sgis/networkanalysis/nodes.py +1 -1
- sgis/networkanalysis/traveling_salesman.py +8 -4
- sgis/parallel/parallel.py +66 -12
- sgis/raster/raster.py +29 -27
- {ssb_sgis-0.3.7.dist-info → ssb_sgis-0.3.9.dist-info}/METADATA +6 -3
- ssb_sgis-0.3.9.dist-info/RECORD +59 -0
- {ssb_sgis-0.3.7.dist-info → ssb_sgis-0.3.9.dist-info}/WHEEL +1 -1
- sgis/geopandas_tools/snap_polygons.py +0 -0
- ssb_sgis-0.3.7.dist-info/RECORD +0 -58
- {ssb_sgis-0.3.7.dist-info → ssb_sgis-0.3.9.dist-info}/LICENSE +0 -0
sgis/geopandas_tools/polygons_as_rings.py
CHANGED

@@ -1,6 +1,4 @@
-import
-import itertools
-from typing import Callable, Iterable
+from typing import Any, Callable, Iterable
 
 import geopandas as gpd
 import igraph
@@ -56,7 +54,7 @@ from shapely.geometry import (
     Polygon,
 )
 
-from .conversion import to_gdf
+from .conversion import to_gdf, to_geoseries
 
 
 class PolygonsAsRings:
@@ -71,6 +69,7 @@ class PolygonsAsRings:
         if not isinstance(polys, pd.DataFrame):
             polys = to_gdf(polys, crs)
 
+        self._index_mapper = dict(enumerate(polys.index))
         self.gdf = polys.reset_index(drop=True)
 
         if crs is not None:
@@ -161,7 +160,11 @@ class PolygonsAsRings:
 
         self.rings.loc[:] = np.array(
             func(
-                GeoSeries(
+                GeoSeries(
+                    self.rings.values,
+                    crs=self.crs,
+                    index=self.rings.index.get_level_values(1).map(self._index_mapper),
+                ),
                 *args,
                 **kwargs,
             )
@@ -179,7 +182,7 @@ class PolygonsAsRings:
         gdf = GeoDataFrame(
             {"geometry": self.rings.values},
             crs=self.crs,
-            index=self.rings.index.get_level_values(1),
+            index=self.rings.index.get_level_values(1).map(self._index_mapper),
         ).join(self.gdf.drop(columns="geometry"))
 
         assert len(gdf) == len(self.rings)
@@ -246,18 +249,26 @@ class PolygonsAsRings:
         self.gdf.geometry = self.to_numpy()
         return self.gdf
 
+    def to_geoseries(self) -> GeoDataFrame:
+        """Return the GeoDataFrame with polygons."""
+        self.gdf.geometry = self.to_numpy()
+        return self.gdf.geometry
+
     def to_numpy(self) -> NDArray[Polygon]:
         """Return a numpy array of polygons."""
         if not len(self.rings):
             return np.array([])
 
-        exterior = self.rings.loc[self.is_exterior].sort_index()
+        exterior = self.rings.loc[self.is_exterior].sort_index()
         assert exterior.shape == (len(self.gdf),)
 
         nonempty_interiors = self.rings.loc[self.is_interior]
 
         if not len(nonempty_interiors):
-
+            try:
+                return make_valid(polygons(exterior.values))
+            except Exception:
+                return _geoms_to_linearrings_fallback(exterior)
 
         empty_interiors = pd.Series(
             [None for _ in range(len(self.gdf) * self.max_rings)],
@@ -270,8 +281,59 @@ class PolygonsAsRings:
             # make each ring level a column with same length and order as gdf
             .unstack(level=2)
             .sort_index()
-            .values
         )
         assert interiors.shape == (len(self.gdf), self.max_rings), interiors.shape
 
-
+        try:
+            return make_valid(polygons(exterior.values, interiors.values))
+        except Exception:
+            return _geoms_to_linearrings_fallback(exterior, interiors)
+
+
+def get_linearring_series(geoms: Any) -> pd.Series:
+    geoms = to_geoseries(geoms).explode(index_parts=False)
+    coords, indices = get_coordinates(geoms, return_index=True)
+    return pd.Series(linearrings(coords, indices=indices), index=geoms.index)
+
+
+def _geoms_to_linearrings_fallback(
+    exterior: pd.Series, interiors: pd.Series | None = None
+) -> pd.Series:
+    exterior.index = exterior.index.get_level_values(1)
+
+    exterior = get_linearring_series(exterior)
+
+    if interiors is None:
+        return (
+            pd.Series(
+                make_valid(polygons(exterior.values)),
+                index=exterior.index,
+            )
+            .groupby(level=0)
+            .agg(unary_union)
+        )
+
+    interiors.index = interiors.index.get_level_values(1)
+    new_interiors = []
+    for col in interiors:
+        new_interiors.append(get_linearring_series(interiors[col]))
+
+    all_none = [[None] * len(new_interiors)] * len(exterior)
+    cols = list(interiors.columns)
+    out_interiors = pd.DataFrame(
+        all_none,
+        columns=cols,
+        index=exterior.index,
+    )
+    out_interiors[cols] = pd.concat(new_interiors, axis=1)
+    for col in out_interiors:
+        out_interiors.loc[out_interiors[col].isna(), col] = None
+
+    return (
+        pd.Series(
+            make_valid(polygons(exterior.values, out_interiors.values)),
+            index=exterior.index,
+        )
+        .groupby(level=0)
+        .agg(unary_union)
+    )
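
Example (not part of the diff): the new _geoms_to_linearrings_fallback path rebuilds polygons from their coordinate rings. A minimal standalone sketch of that idea with shapely 2 and geopandas primitives, using made-up geometries:

    import geopandas as gpd
    from shapely import get_coordinates, linearrings, make_valid, polygons
    from shapely.geometry import Polygon

    # Two exterior-only polygons standing in for the 'exterior' ring series.
    exteriors = gpd.GeoSeries(
        [Polygon([(0, 0), (0, 3), (3, 3), (3, 0)]), Polygon([(5, 5), (5, 7), (7, 7)])]
    )

    # Explode to single parts and pull out a flat coordinate array plus group indices.
    coords, indices = get_coordinates(
        exteriors.explode(index_parts=False), return_index=True
    )

    # linearrings groups the coordinates back into one ring per geometry,
    # and polygons/make_valid turn the rings into valid polygons again.
    rebuilt = make_valid(polygons(linearrings(coords, indices=indices)))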
sgis/geopandas_tools/sfilter.py
CHANGED

@@ -65,11 +65,11 @@ def _get_sfilter_indices(
 
 
 def sfilter(
-    gdf: GeoDataFrame,
+    gdf: GeoDataFrame | GeoSeries,
     other: GeoDataFrame | GeoSeries | Geometry,
     predicate: str = "intersects",
 ) -> GeoDataFrame:
-    """Filter a GeoDataFrame by spatial predicate.
+    """Filter a GeoDataFrame or GeoSeries by spatial predicate.
 
     Does an sjoin and returns the rows of 'gdf' that were returned
     without getting duplicates or columns from 'other'.
@@ -135,11 +135,11 @@ def sfilter(
 
 
 def sfilter_split(
-    gdf: GeoDataFrame,
+    gdf: GeoDataFrame | GeoSeries,
     other: GeoDataFrame | GeoSeries | Geometry,
     predicate: str = "intersects",
 ) -> tuple[GeoDataFrame, GeoDataFrame]:
-    """Split a GeoDataFrame by spatial predicate.
+    """Split a GeoDataFrame or GeoSeries by spatial predicate.
 
     Like sfilter, but returns both the rows that do and do not match
     the spatial predicate as separate GeoDataFrames.
@@ -207,16 +207,16 @@ def sfilter_split(
 
 
 def sfilter_inverse(
-    gdf: GeoDataFrame,
+    gdf: GeoDataFrame | GeoSeries,
     other: GeoDataFrame | GeoSeries | Geometry,
     predicate: str = "intersects",
-) ->
-    """Filter a GeoDataFrame by inverse spatial predicate.
+) -> GeoDataFrame | GeoSeries:
+    """Filter a GeoDataFrame or GeoSeries by inverse spatial predicate.
 
     Returns the rows that do not match the spatial predicate.
 
     Args:
-        gdf: The GeoDataFrame.
+        gdf: The GeoDataFrame or GeoSeries.
         other: The geometry object to filter 'gdf' by.
         predicate: Spatial predicate to use. Defaults to 'intersects'.
 
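
Example (not part of the diff): a usage sketch of the widened signatures, passing a plain GeoSeries; the module path follows the file list above and the geometries are made up:

    import geopandas as gpd
    from shapely.geometry import Point, box

    from sgis.geopandas_tools.sfilter import sfilter, sfilter_inverse, sfilter_split

    points = gpd.GeoSeries([Point(0, 0), Point(5, 5), Point(10, 10)])
    area = box(-1, -1, 6, 6)

    inside = sfilter(points, area)                  # rows intersecting 'area'
    inside2, outside = sfilter_split(points, area)  # both partitions
    outside2 = sfilter_inverse(points, area)        # rows not intersecting 'area'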
sgis/helpers.py
CHANGED

@@ -153,23 +153,38 @@ def unit_is_degrees(gdf: GeoDataFrame) -> bool:
     return False
 
 
-def get_object_name(
+def get_object_name(
+    var: object, start: int = 2, stop: int = 7, ignore_self: bool = True
+) -> str | None:
     """Searches through the local variables down one level at a time."""
-    frame = inspect.currentframe()
+    frame = inspect.currentframe()
 
-    for _ in range(
+    for _ in range(start):
+        frame = frame.f_back
+
+    for _ in np.arange(start, stop):
         names = [
             var_name for var_name, var_val in frame.f_locals.items() if var_val is var
         ]
         if names and len(names) == 1:
+            if ignore_self and names[0] == "self":
+                frame = frame.f_back
+                continue
             return names[0]
 
         names = [name for name in names if not name.startswith("_")]
 
         if names and len(names) == 1:
+            if ignore_self and names[0] == "self":
+                frame = frame.f_back
+                continue
+
             return names[0]
 
         if names and len(names) > 1:
+            if ignore_self and names[0] == "self":
+                frame = frame.f_back
+                continue
             warnings.warn(
                 "More than one local variable matches the object. Name might be wrong."
             )
@@ -193,6 +208,8 @@ def make_namedict(gdfs: tuple[GeoDataFrame]) -> dict[int, str]:
 
 
 def sort_nans_last(df, ignore_index: bool = False):
+    if not len(df):
+        return df
     df["n_nan"] = df.isna().sum(axis=1).values
 
     df["_idx"] = range(len(df))
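
Example (not part of the diff): a stripped-down illustration of the frame walking that the new start/stop/ignore_self parameters control, built only on the standard inspect module; the helper and variable names are hypothetical:

    import inspect


    def caller_variable_name(var: object, depth: int = 1) -> str | None:
        """Return the name the surrounding scope uses for 'var'."""
        frame = inspect.currentframe()
        # Walk past this helper and 'depth' more frames, like get_object_name's 'start'.
        for _ in range(depth + 1):
            frame = frame.f_back
        names = [name for name, value in frame.f_locals.items() if value is var]
        return names[0] if names else None


    def describe(gdf) -> str:
        # The caller named this object 'roads', so depth=1 recovers "roads".
        return f"got {caller_variable_name(gdf, depth=1)}"


    roads = ["hypothetical data"]
    print(describe(roads))  # -> got roads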
sgis/io/{dapla.py → dapla_functions.py}
RENAMED

@@ -1,7 +1,7 @@
 """Functions for reading and writing GeoDataFrames in Statistics Norway's GCS Dapla.
 """
-import os
 from pathlib import Path
+from typing import Optional
 
 import dapla as dp
 import geopandas as gpd
@@ -12,21 +12,31 @@ from pandas import DataFrame
 from pyarrow import parquet
 
 
-def read_geopandas(
+def read_geopandas(
+    gcs_path: str | Path,
+    pandas_fallback: bool = False,
+    fs: Optional[dp.gcs.GCSFileSystem] = None,
+    **kwargs,
+) -> GeoDataFrame | DataFrame:
     """Reads geoparquet or other geodata from a file on GCS.
 
+    If the file has 0 rows, the contents will be returned as a pandas.DataFrame,
+    since geopandas does not read and write empty tables.
+
     Note:
         Does not currently read shapefiles or filegeodatabases.
 
     Args:
         gcs_path: path to a file on Google Cloud Storage.
+        pandas_fallback: If False (default), an exception is raised if the file can
+            not be read with geopandas and the number of rows is more than 0. If True,
+            the file will be read as
         **kwargs: Additional keyword arguments passed to geopandas' read_parquet
            or read_file, depending on the file type.
 
    Returns:
        A GeoDataFrame if it has rows. If zero rows, a pandas DataFrame is returned.
    """
-    fs = dp.FileClient.get_gcs_file_system()
 
    if not isinstance(gcs_path, str):
        try:
@@ -34,13 +44,17 @@ def read_geopandas(gcs_path: str | Path, **kwargs) -> GeoDataFrame | DataFrame:
        except TypeError:
            raise TypeError(f"Unexpected type {type(gcs_path)}.")
 
+    if fs is None:
+        fs = dp.FileClient.get_gcs_file_system()
+
    if "parquet" in gcs_path or "prqt" in gcs_path:
        with fs.open(gcs_path, mode="rb") as file:
            try:
                return gpd.read_parquet(file, **kwargs)
            except ValueError as e:
                df = dp.read_pandas(gcs_path, **kwargs)
-
+
+                if pandas_fallback or not len(df):
                    return df
                else:
                    raise e
@@ -50,14 +64,19 @@ def read_geopandas(gcs_path: str | Path, **kwargs) -> GeoDataFrame | DataFrame:
                return gpd.read_file(file, **kwargs)
            except ValueError as e:
                df = dp.read_pandas(gcs_path, **kwargs)
-
+
+                if pandas_fallback or not len(df):
                    return df
                else:
                    raise e
 
 
 def write_geopandas(
-    df: gpd.GeoDataFrame,
+    df: gpd.GeoDataFrame,
+    gcs_path: str | Path,
+    overwrite: bool = True,
+    fs: Optional[dp.gcs.GCSFileSystem] = None,
+    **kwargs,
 ) -> None:
     """Writes a GeoDataFrame to the speficied format.
 
@@ -81,6 +100,9 @@ def write_geopandas(
     if not overwrite and exists(gcs_path):
         raise ValueError("File already exists.")
 
+    if fs is None:
+        fs = dp.FileClient.get_gcs_file_system()
+
     pd.io.parquet.BaseImpl.validate_dataframe(df)
 
     if not len(df):
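
Example (not part of the diff): a usage sketch of the new pandas_fallback, overwrite and fs parameters, reusing one GCS filesystem object across calls; the bucket path is hypothetical:

    import dapla as dp

    from sgis.io.dapla_functions import read_geopandas, write_geopandas

    fs = dp.FileClient.get_gcs_file_system()

    # Fall back to a pandas.DataFrame instead of re-raising if geopandas cannot read it.
    df = read_geopandas(
        "gs://some-bucket/data/roads.parquet", pandas_fallback=True, fs=fs
    )

    # Reuse the same filesystem and refuse to overwrite an existing file.
    write_geopandas(
        df, "gs://some-bucket/data/roads_copy.parquet", overwrite=False, fs=fs
    )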
sgis/io/write_municipality_data.py
CHANGED

@@ -1,14 +1,14 @@
-from pathlib import Path
 from collections.abc import Callable
+from pathlib import Path
 
 import pandas as pd
-from dapla import write_pandas
+from dapla import read_pandas, write_pandas
 from geopandas import GeoDataFrame
 from pandas import DataFrame
 
 from ..geopandas_tools.general import clean_clip, clean_geoms
 from ..geopandas_tools.neighbors import get_neighbor_indices
-from .
+from .dapla_functions import read_geopandas, write_geopandas
 
 
 def write_municipality_data(
@@ -66,8 +66,14 @@ def _write_municipality_data(
     data = _validate_data(data)
 
     if isinstance(data, (str, Path)):
-
-
+        try:
+            gdf = read_geopandas(str(data))
+        except ValueError as e:
+            try:
+                gdf = read_pandas(str(data))
+            except ValueError:
+                raise e.__class__(e, data)
+    elif isinstance(data, DataFrame):
         gdf = data
     else:
         raise TypeError(type(data))
@@ -84,14 +90,14 @@ def _write_municipality_data(
 
         if not len(gdf_muni):
             if write_empty:
-                gdf_muni = gdf_muni.drop(columns="geometry")
+                gdf_muni = gdf_muni.drop(columns="geometry", errors="ignore")
                gdf_muni["geometry"] = None
                write_pandas(gdf_muni, out)
            continue
 
        if not len(gdf_muni):
            if write_empty:
-                gdf_muni = gdf_muni.drop(columns="geometry")
+                gdf_muni = gdf_muni.drop(columns="geometry", errors="ignore")
                gdf_muni["geometry"] = None
                write_pandas(gdf_muni, out)
            continue
sgis/maps/examine.py
CHANGED

@@ -78,7 +78,7 @@ class Examine:
         only_show_mask: bool = False,
         **kwargs,
     ):
-        if not all(isinstance(gdf, gpd.GeoDataFrame) for gdf in gdfs):
+        if not all(isinstance(gdf, gpd.GeoDataFrame) or not len(gdf) for gdf in gdfs):
             raise ValueError("gdfs must be of type GeoDataFrame.")
 
         self._gdfs = gdfs
@@ -87,6 +87,15 @@ class Examine:
         else:
             self.mask_gdf = mask_gdf
 
+        self.indices = list(range(len(self.mask_gdf)))
+        self.i = 0
+        self.column = column
+        self.size = size
+        self.kwargs = kwargs
+
+        if not len(self.mask_gdf):
+            return
+
         if unit_is_degrees(self.mask_gdf) and size > 360:
             raise ValueError(
                 "CRS unit is degrees. Use geopandas' "
@@ -103,12 +112,6 @@ class Examine:
         self.mask_gdf["area"] = self.mask_gdf.area
         self.mask_gdf = self.mask_gdf.sort_values(sort_values)
 
-        self.indices = list(range(len(gdfs[0])))
-        self.i = 0
-        self.column = column
-        self.size = size
-        self.kwargs = kwargs
-
         if only_show_mask:
             self.kwargs["show"] = [True] + [False] * len(self._gdfs[1:])
         elif not kwargs.get("show"):
sgis/maps/explore.py
CHANGED

@@ -14,19 +14,21 @@ import folium
 import matplotlib
 import numpy as np
 import pandas as pd
+import xyzservices
 from folium import plugins
 from geopandas import GeoDataFrame
 from IPython.display import display
 from jinja2 import Template
+from pandas.api.types import is_datetime64_any_dtype
 from shapely import Geometry
 from shapely.geometry import LineString
 
 from ..geopandas_tools.conversion import to_gdf
 from ..geopandas_tools.general import clean_geoms, make_all_singlepart
 from ..geopandas_tools.geometry_types import get_geom_type, to_single_geom_type
-from ..helpers import unit_is_degrees
 from .httpserver import run_html_server
 from .map import Map
+from .tilesources import kartverket, xyz
 
 
 # the geopandas._explore raises a deprication warning. Ignoring for now.
@@ -91,7 +93,52 @@ class MeasureControlFix(plugins.MeasureControl):
     )
 
 
+def to_tile(tile: str | xyzservices.TileProvider, max_zoom: int) -> folium.TileLayer:
+    common_bgmaps = {
+        "openstreetmap": folium.TileLayer(
+            "OpenStreetMap", min_zoom=0, max_zoom=max_zoom
+        ),
+        "grunnkart": kartverket.norges_grunnkart,
+        "gråtone": kartverket.norges_grunnkart_gråtone,
+        "norge_i_bilder": kartverket.norge_i_bilder,
+        "dark": xyz.CartoDB.DarkMatter,
+        "voyager": xyz.CartoDB.Voyager,
+    }
+    try:
+        name = tile["name"]
+    except TypeError:
+        name = tile
+
+    if not isinstance(tile, str):
+        try:
+            return folium.TileLayer(tile, name=name, max_zoom=max_zoom)
+        except TypeError:
+            return folium.TileLayer(tile, max_zoom=max_zoom)
+
+    try:
+        provider = common_bgmaps[tile.lower()]
+    except KeyError:
+        provider = xyzservices.providers.query_name(tile)
+
+    if isinstance(provider, folium.TileLayer):
+        return provider
+
+    if isinstance(provider, xyzservices.TileProvider):
+        attr = provider.html_attribution
+        provider = provider.build_url(scale_factor="{r}")
+    else:
+        try:
+            attr = provider["attr"]
+        except (AttributeError, TypeError):
+            attr = None
+
+    return folium.TileLayer(provider, name=name, attr=attr, max_zoom=max_zoom)
+
+
 class Explore(Map):
+    # class attribute that can be overridden locally
+    tiles = ("OpenStreetMap", "dark", "norge_i_bilder", "grunnkart")
+
     def __init__(
         self,
         *gdfs,
@@ -104,7 +151,7 @@ class Explore(Map):
         measure_control: bool = True,
         geocoder: bool = True,
         save=None,
-        show: bool | Iterable[bool] =
+        show: bool | Iterable[bool] | None = None,
         **kwargs,
     ):
         self.popup = popup
@@ -121,6 +168,12 @@ class Explore(Map):
         if not self.browser and "in_browser" in kwargs:
             self.browser = kwargs.pop("in_browser")
 
+        if show is None:
+            show_was_none = True
+            show = True
+        else:
+            show_was_none = False
+
         super().__init__(*gdfs, column=column, show=show, **kwargs)
 
         if self.gdfs is None:
@@ -129,7 +182,18 @@ class Explore(Map):
         # stringify or remove columns not renerable by leaflet (list, geometry etc.)
         new_gdfs, show_new = [], []
         for gdf, show in zip(self.gdfs, self.show, strict=True):
+            try:
+                gdf = gdf.reset_index()
+            except Exception:
+                pass
             for col in gdf.columns:
+                if is_datetime64_any_dtype(gdf[col]):
+                    try:
+                        gdf[col] = [str(x) for x in gdf[col].dt.round("d")]
+                    except Exception:
+                        gdf = gdf.drop(col, axis=1)
+                    continue
+
                 if not len(gdf.loc[gdf[col].notna()]):
                     continue
                 if not isinstance(
@@ -139,7 +203,7 @@ class Explore(Map):
                     and isinstance(gdf.loc[gdf[col].notna(), col].iloc[0], (Geometry))
                 ):
                     try:
-                        gdf[col] = gdf[col].astype(str)
+                        gdf[col] = gdf[col].astype(str).fillna(pd.NA)
                     except Exception:
                         gdf = gdf.drop(col, axis=1)
 
@@ -150,8 +214,12 @@ class Explore(Map):
             new_gdfs.append(gdf)
             show_new.append(show)
         self._gdfs = new_gdfs
+        self._gdf = pd.concat(new_gdfs, ignore_index=True)
         self.show = show_new
 
+        if show_was_none and len(self._gdfs) > 6:
+            self.show = [False] * len(self._gdfs)
+
         if self._is_categorical:
             if len(self.gdfs) == 1:
                 self._split_categories()
@@ -167,6 +235,9 @@ class Explore(Map):
     def explore(
         self, column: str | None = None, center=None, size=None, **kwargs
     ) -> None:
+        if not any(len(gdf) for gdf in self._gdfs):
+            warnings.warn("None of the GeoDataFrames have rows.")
+            return
         if column:
             self._column = column
             self._update_column()
@@ -342,6 +413,7 @@ class Explore(Map):
 
         gdf = self._to_single_geom_type(gdf)
         gdf = self._prepare_gdf_for_map(gdf)
+
         gjs = self._make_geojson(
             gdf,
             show=show,
@@ -351,10 +423,9 @@ class Explore(Map):
             **{
                 key: value
                 for key, value in self.kwargs.items()
-                if key not in ["title"]
+                if key not in ["title", "tiles"]
             },
         )
-
         gjs.layer_name = label
 
         gjs.add_to(f)
@@ -366,10 +437,15 @@ class Explore(Map):
             self._categories_colors_dict.keys(),
             self._categories_colors_dict.values(),
         )
-
-        folium.TileLayer("cartodbdark_matter", max_zoom=self.max_zoom).add_to(self.map)
+
         self.map.add_child(folium.LayerControl())
 
+    def _add_tiles(
+        self, mapobj: folium.Map, tiles: list[str, xyzservices.TileProvider]
+    ):
+        for tile in tiles:
+            to_tile(tile, max_zoom=self.max_zoom).add_to(mapobj)
+
     def _create_continous_map(self):
         self._prepare_continous_map()
         if self.scheme:
@@ -428,8 +504,6 @@ class Explore(Map):
         self.map.add_child(f)
 
         self.map.add_child(colorbar)
-        folium.TileLayer("stamentoner").add_to(self.map)
-        folium.TileLayer("cartodbdark_matter").add_to(self.map)
         self.map.add_child(folium.LayerControl())
 
     def _tooltip_cols(self, gdf: GeoDataFrame) -> list:
@@ -456,18 +530,19 @@ class Explore(Map):
         self,
         bounds,
         attr=None,
-        tiles=
+        tiles=None,
         width="100%",
         height="100%",
         control_scale=True,
         map_kwds=None,
         **kwargs,
     ):
-        import xyzservices
-
         if not map_kwds:
             map_kwds = {}
 
+        if tiles is None:
+            tiles = self.tiles
+
         # create folium.Map object
         # Get bounds to specify location and map extent
         location = kwargs.pop("location", None)
@@ -493,30 +568,32 @@ class Explore(Map):
             **map_kwds,
             **{i: kwargs[i] for i in kwargs.keys() if i in _MAP_KWARGS},
         }
+        map_kwds["min_zoom"] = 0
+        map_kwds["max_zoom"] = kwargs.get("max_zoom", self.max_zoom)
 
-
-
-
-
-        except ValueError:
-            pass
+        if isinstance(tiles, (list, tuple)):
+            default_tile, *more_tiles = tiles
+        else:
+            default_tile, more_tiles = tiles, []
 
-
-
-
-
-
+        default_tile = to_tile(default_tile, max_zoom=self.max_zoom)
+
+        if isinstance(default_tile, xyzservices.TileProvider):
+            attr = attr if attr else default_tile.html_attribution
+            default_tile = default_tile.build_url(scale_factor="{r}")
 
         m = folium.Map(
             location=location,
             control_scale=control_scale,
-            tiles=
+            tiles=default_tile,
             attr=attr,
             width=width,
             height=height,
             **map_kwds,
         )
 
+        self._add_tiles(m, more_tiles)
+
         if self.measure_control:
             MeasureControlFix(
                 primary_length_unit="meters",
@@ -539,7 +616,7 @@ class Explore(Map):
                 separator=", ",
                 empty_string="NaN",
                 lng_first=True,
-                num_digits=
+                num_digits=8,
             ).add_to(m)
 
         if self.geocoder: