ssb-sgis 0.3.8__tar.gz → 0.3.9__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/PKG-INFO +4 -1
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/README.md +2 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/pyproject.toml +1 -1
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/__init__.py +5 -2
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/buffer_dissolve_explode.py +13 -9
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/centerlines.py +110 -47
- ssb_sgis-0.3.9/src/sgis/geopandas_tools/cleaning.py +331 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/conversion.py +9 -3
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/duplicates.py +67 -49
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/general.py +15 -1
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/neighbors.py +12 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/overlay.py +26 -17
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/polygon_operations.py +281 -100
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/polygons_as_rings.py +72 -10
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/sfilter.py +8 -8
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/helpers.py +20 -3
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/io/dapla_functions.py +28 -6
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/io/write_municipality_data.py +11 -5
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/examine.py +10 -7
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/explore.py +102 -25
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/map.py +32 -6
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/maps.py +40 -58
- ssb_sgis-0.3.9/src/sgis/maps/tilesources.py +61 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/closing_network_holes.py +89 -62
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/cutting_lines.py +1 -1
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/nodes.py +1 -1
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/traveling_salesman.py +8 -4
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/parallel/parallel.py +63 -10
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/raster.py +29 -27
- ssb_sgis-0.3.8/src/sgis/geopandas_tools/snap_polygons.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/LICENSE +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/exceptions.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/__init__.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/bounds.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/geocoding.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/geometry_types.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/point_operations.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/io/_is_dapla.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/io/opener.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/io/read_parquet.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/__init__.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/httpserver.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/legend.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/maps/thematicmap.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/__init__.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/_get_route.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/_od_cost_matrix.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/_points.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/_service_area.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/directednetwork.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/finding_isolated_networks.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/network.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/networkanalysis.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/networkanalysis/networkanalysisrules.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/py.typed +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/__init__.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/base.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/elevationraster.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/sentinel.py +0 -0
- {ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/raster/zonal.py +0 -0
{ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/PKG-INFO

@@ -1,6 +1,6 @@
 Metadata-Version: 2.1
 Name: ssb-sgis
-Version: 0.3.8
+Version: 0.3.9
 Summary: GIS functions used at Statistics Norway.
 Home-page: https://github.com/statisticsnorway/ssb-sgis
 License: MIT
@@ -12,6 +12,7 @@ Classifier: License :: OSI Approved :: MIT License
 Classifier: Programming Language :: Python :: 3
 Classifier: Programming Language :: Python :: 3.10
 Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
 Classifier: Topic :: Scientific/Engineering :: GIS
 Requires-Dist: branca (>=0.6.0)
 Requires-Dist: folium (>=0.14.0)
@@ -44,6 +45,8 @@ Description-Content-Type: text/markdown

 GIS Python tools used in [Statistics Norway](https://www.ssb.no/en).

+See documentation [here](https://statisticsnorway.github.io/ssb-sgis/reference/index.html).
+
 [][pypi_]
 [][status]
 [][python version]

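The version bump and the new Python 3.12 classifier are the only metadata changes above. As a quick sanity check after upgrading (a minimal sketch; it only assumes the package is installed from PyPI under the name ssb-sgis):

```python
# Minimal sketch: confirm which ssb-sgis version is installed locally.
from importlib.metadata import version

print(version("ssb-sgis"))  # prints "0.3.9" once the new release is installed
```
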
{ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/README.md

@@ -2,6 +2,8 @@

 GIS Python tools used in [Statistics Norway](https://www.ssb.no/en).

+See documentation [here](https://statisticsnorway.github.io/ssb-sgis/reference/index.html).
+
 [][pypi_]
 [][status]
 [][python version]

{ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/__init__.py

@@ -1,6 +1,7 @@
 from .geopandas_tools.bounds import (
     bounds_to_points,
     bounds_to_polygon,
+    get_total_bounds,
     gridloop,
     make_grid,
     make_grid_from_bbox,
@@ -17,6 +18,7 @@ from .geopandas_tools.buffer_dissolve_explode import (
     dissexp_by_cluster,
 )
 from .geopandas_tools.centerlines import get_rough_centerlines
+from .geopandas_tools.cleaning import coverage_clean, remove_spikes
 from .geopandas_tools.conversion import (
     coordinate_array,
     get_lonlat,
@@ -51,6 +53,7 @@ from .geopandas_tools.geometry_types import (
 from .geopandas_tools.neighbors import (
     get_all_distances,
     get_k_nearest_neighbors,
+    get_neighbor_dfs,
     get_neighbor_indices,
     k_nearest_neighbors,
 )
@@ -69,8 +72,6 @@ from .geopandas_tools.polygon_operations import (
 )
 from .geopandas_tools.polygons_as_rings import PolygonsAsRings
 from .geopandas_tools.sfilter import sfilter, sfilter_inverse, sfilter_split
-
-# from .geopandas_tools.snap_polygons import coverage_clean, snap_polygons
 from .helpers import get_object_name, sort_nans_last
 from .io.opener import opener
 from .io.read_parquet import read_parquet_url
@@ -80,6 +81,8 @@ from .maps.httpserver import run_html_server
 from .maps.legend import Legend
 from .maps.maps import clipmap, explore, explore_locals, qtm, samplemap
 from .maps.thematicmap import ThematicMap
+from .maps.tilesources import kartverket as kartverket_tiles
+from .maps.tilesources import xyz as xyztiles
 from .networkanalysis.closing_network_holes import (
     close_network_holes,
     close_network_holes_to_deadends,

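Taken together, the import changes above mean 0.3.9 exposes several new names at the package root: get_total_bounds, coverage_clean and remove_spikes (from the new cleaning module), get_neighbor_dfs, and the tile sources kartverket_tiles and xyztiles. A minimal sketch that only checks the names are reachable, since their signatures are not part of this diff:

```python
# Minimal sketch: the names added to src/sgis/__init__.py in 0.3.9 should be
# importable from the package root. Signatures are not shown in this diff,
# so nothing is called here.
import sgis

new_names = [
    "get_total_bounds",   # geopandas_tools.bounds
    "coverage_clean",     # new geopandas_tools.cleaning module
    "remove_spikes",      # new geopandas_tools.cleaning module
    "get_neighbor_dfs",   # geopandas_tools.neighbors
    "kartverket_tiles",   # new maps.tilesources module
    "xyztiles",           # new maps.tilesources module
]

for name in new_names:
    assert hasattr(sgis, name), f"missing: {name}"
```
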
{ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/buffer_dissolve_explode.py

@@ -18,7 +18,11 @@ from geopandas import GeoDataFrame, GeoSeries

 from .general import _push_geom_col
 from .geometry_types import make_all_singlepart
-from .polygon_operations import
+from .polygon_operations import (
+    get_cluster_mapper,
+    get_grouped_centroids,
+    get_polygon_clusters,
+)


 def _decide_ignore_index(kwargs: dict) -> tuple[dict, bool]:
@@ -237,21 +241,21 @@ def dissexp_by_cluster(gdf: GeoDataFrame, **dissolve_kwargs) -> GeoDataFrame:

     def get_group_clusters(group: GeoDataFrame):
         """Adds cluster column. Applied to each group because much faster."""
-
-
-
-
-        )
+        group = group.reset_index(drop=True)
+        group["_cluster"] = get_cluster_mapper(group)  # component_mapper
+        group["_cluster"] = get_grouped_centroids(group, groupby="_cluster")
+        return group

     if by:
         dissolved = (
-            gdf
+            make_all_singlepart(gdf)
+            .groupby(by, group_keys=True, dropna=False, as_index=False)
             .apply(get_group_clusters)
             .pipe(dissexp, by=["_cluster"] + by, **dissolve_kwargs)
         )
     else:
-        dissolved = get_group_clusters(gdf).pipe(
-            dissexp, by=
+        dissolved = get_group_clusters(make_all_singlepart(gdf)).pipe(
+            dissexp, by="_cluster", **dissolve_kwargs
         )

     if not by:

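The second hunk shows that dissexp_by_cluster now runs make_all_singlepart on the input before clustering and derives the temporary _cluster column from get_cluster_mapper and get_grouped_centroids. A minimal usage sketch follows; the example data and CRS are made up, and only the signature dissexp_by_cluster(gdf, **dissolve_kwargs) is taken from the hunk header:

```python
# Minimal usage sketch for dissexp_by_cluster (example data is made up).
# Overlapping polygons fall in the same spatial cluster and are dissolved
# together; the isolated polygon is kept as its own row after the explode.
import geopandas as gpd
import sgis
from shapely.geometry import box

gdf = gpd.GeoDataFrame(
    geometry=[box(0, 0, 2, 2), box(1, 1, 3, 3), box(10, 10, 11, 11)],
    crs=25833,
)

dissolved = sgis.dissexp_by_cluster(gdf)
print(len(dissolved))  # expected: 2 rows
```
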
{ssb_sgis-0.3.8 → ssb_sgis-0.3.9}/src/sgis/geopandas_tools/centerlines.py

@@ -1,12 +1,17 @@
+import functools
 import warnings

+import numpy as np
 import pandas as pd
+import shapely
 from geopandas import GeoDataFrame, GeoSeries
+from geopandas.array import GeometryArray
 from numpy.typing import NDArray
 from shapely import (
     STRtree,
     distance,
     extract_unique_points,
+    get_parts,
     get_rings,
     line_merge,
     make_valid,
@@ -18,18 +23,55 @@ from shapely.errors import GEOSException
 from shapely.geometry import LineString
 from shapely.ops import nearest_points

+from ..maps.maps import explore, explore_locals
 from ..networkanalysis.traveling_salesman import traveling_salesman_problem
-from .conversion import to_geoseries
-from .general import clean_geoms, make_lines_between_points
-from .
+from .conversion import to_gdf, to_geoseries
+from .general import clean_geoms, make_lines_between_points, sort_long_first
+from .geometry_types import make_all_singlepart
+from .sfilter import sfilter_inverse, sfilter_split


 warnings.simplefilter(action="ignore", category=FutureWarning)


+def get_traveling_salesman_lines(df, return_to_start=False):
+    path = traveling_salesman_problem(df, return_to_start=return_to_start)
+
+    try:
+        return [LineString([p1, p2]) for p1, p2 in zip(path[:-1], path[1:])]
+    except IndexError as e:
+        if len(path) == 1:
+            return path
+        raise e
+
+
+def remove_longest_if_not_intersecting(centerlines, geoms):
+    centerlines = sort_long_first(make_all_singlepart(centerlines))
+
+    has_only_one_line = centerlines.groupby(level=0).size() == 1
+    only_one_line = centerlines[has_only_one_line]
+    centerlines = centerlines[~has_only_one_line]
+
+    longest = centerlines.loc[lambda x: ~x.index.duplicated()]
+    not_longest = centerlines.loc[lambda x: x.index.duplicated()]
+
+    longest_endpoints = longest.boundary.explode(index_parts=False).sort_index()
+
+    nearest = longest_endpoints.groupby(level=0).apply(
+        lambda x: nearest_points(
+            x, not_longest[not_longest.index.isin(x.index)].unary_union
+        )[1]
+    )
+    longest_endpoints.loc[:] = make_lines_between_points(
+        longest_endpoints.values, nearest.values
+    )
+
+    return pd.concat([only_one_line, not_longest, longest_endpoints])
+
+
 def get_rough_centerlines(
     gdf: GeoDataFrame,
-    max_segment_length: int
+    max_segment_length: int,
 ) -> GeoDataFrame:
     """Get a cheaply calculated centerline of a polygon.

@@ -42,7 +84,7 @@ def get_rough_centerlines(

     """

-
+    PRECISION = 0.01

     if not len(gdf):
         return gdf
@@ -54,12 +96,12 @@ def get_rough_centerlines(

     segmentized: GeoSeries = segmentize(geoms, max_segment_length=max_segment_length)

-    points: GeoSeries = get_points_in_polygons(segmentized,
+    points: GeoSeries = get_points_in_polygons(segmentized, PRECISION)

     has_no_points = geoms.loc[(~geoms.index.isin(points.index))]

     more_points: GeoSeries = get_points_in_polygons(
-        has_no_points.buffer(
+        has_no_points.buffer(PRECISION), PRECISION
     )

     # Geometries that have no lines inside, might be perfect circles.
@@ -106,7 +148,7 @@ def get_rough_centerlines(
     # keep lines 90 percent intersecting the polygon
     length_now = end_to_end.length
     end_to_end = (
-        end_to_end.intersection(geoms.buffer(
+        end_to_end.intersection(geoms.buffer(PRECISION))
         .dropna()
         .loc[lambda x: x.length > length_now * 0.9]
     )
@@ -114,8 +156,8 @@ def get_rough_centerlines(
     # straight end buffer to remove all in between ends
     to_be_erased = points.index.isin(end_to_end.index)

-
-        points.iloc[to_be_erased], end_to_end.buffer(
+    dont_intersect = sfilter_inverse(
+        points.iloc[to_be_erased], end_to_end.buffer(PRECISION, cap_style=2)
     )

     points = (
@@ -140,18 +182,15 @@ def get_rough_centerlines(
         ]
     )

-
-
-
-        return [LineString([p1, p2]) for p1, p2 in zip(path[:-1], path[1:])]
-    except IndexError as e:
-        if len(path) == 1:
-            return path
-        raise e
+    explore(points=to_gdf(points, 25833), gdf=gdf)
+
+    remove_longest = functools.partial(remove_longest_if_not_intersecting, geoms=geoms)

     centerlines = GeoSeries(
         points.groupby(level=0).apply(get_traveling_salesman_lines).explode()
-    )
+    ).pipe(remove_longest)
+
+    # centerlines = sort_long_first(centerlines).loc[lambda x: x.index.duplicated()]

     # fix sharp turns by using the centroids of the centerline
     centerlines2 = GeoSeries(
@@ -165,7 +204,7 @@ def get_rough_centerlines(
            .groupby(level=0)
            .apply(get_traveling_salesman_lines)
        ).explode()
-    )
+    ).pipe(remove_longest)

     centerlines3 = GeoSeries(
         (
@@ -178,10 +217,11 @@ def get_rough_centerlines(
            .groupby(level=0)
            .apply(get_traveling_salesman_lines)
        ).explode()
-    )
+    ).pipe(remove_longest)

     centerlines = centerlines3.groupby(level=0).agg(
         lambda x: line_merge(unary_union(x))
+        # lambda x: unary_union(x)
     )

     if isinstance(gdf, GeoSeries):
@@ -210,7 +250,7 @@ def get_points_in_polygons(geometries: GeoSeries, precision: float) -> GeoSeries
     )

     crossing_lines = (
-        geometries.buffer(precision)
+        geometries.buffer(precision, resolution=10)
         .intersection(voronoi_lines)
         .explode(index_parts=False)
     )
@@ -230,7 +270,11 @@ def get_points_in_polygons(geometries: GeoSeries, precision: float) -> GeoSeries
 def get_approximate_polygon_endpoints(geoms: GeoSeries) -> GeoSeries:
     out_geoms = []

-
+    are_thin = geoms.buffer(-1e-2).is_empty
+    not_thin = geoms.loc[~are_thin]
+    thin = geoms.loc[are_thin].buffer(1e-2)
+
+    rectangles = pd.concat([not_thin, thin]).minimum_rotated_rectangle()

     # get_rings returns array with integer index that must be mapped to pandas index
     rings, indices = get_rings(rectangles, return_index=True)
@@ -240,7 +284,6 @@ def get_approximate_polygon_endpoints(geoms: GeoSeries) -> GeoSeries:
     rectangles.loc[:] = (
         pd.Series(rings, index=indices).groupby(level=0).agg(unary_union)
     )
-
     corner_points = (
         GeoSeries(
             extract_unique_points(rectangles)
@@ -328,39 +371,59 @@ def get_approximate_polygon_endpoints(geoms: GeoSeries) -> GeoSeries:


 def multipoints_to_line_segments(
-    multipoints: GeoSeries | GeoDataFrame, to_next: bool = True
+    multipoints: GeoSeries | GeoDataFrame, to_next: bool = True, cycle: bool = True
 ) -> GeoSeries | GeoDataFrame:
     if not len(multipoints):
         return multipoints

-
+    multipoints = to_geoseries(multipoints)
+
+    if isinstance(multipoints.index, pd.MultiIndex):
+        index = [
+            multipoints.index.get_level_values(i)
+            for i in range(multipoints.index.nlevels)
+        ]
+        multipoints.index = pd.MultiIndex.from_arrays(
+            [list(range(len(multipoints)))] + index,
+            names=["range_idx"] + multipoints.index.names,
+        )
+    else:
+        multipoints.index = pd.MultiIndex.from_arrays(
+            [np.arange(0, len(multipoints)), multipoints.index],
+            names=["range_idx"] + [multipoints.index.name],
+        )

     try:
         crs = multipoints.crs
     except AttributeError:
         crs = None

-    point_df =
+    point_df = multipoints.explode(index_parts=False).to_frame("geometry")

-    point_df
     if to_next:
         shift = -1
-        filt = lambda x: ~x.index.duplicated(keep="first")
+        filt = lambda x: ~x.index.get_level_values(0).duplicated(keep="first")
     else:
         shift = 1
-        filt = lambda x: ~x.index.duplicated(keep="last")
+        filt = lambda x: ~x.index.get_level_values(0).duplicated(keep="last")

     point_df["next"] = point_df.groupby(level=0)["geometry"].shift(shift)

-
-
+    if cycle:
+        first_points = point_df.loc[filt, "geometry"]
+        is_last_point = point_df["next"].isna()

-
-
+        point_df.loc[is_last_point, "next"] = first_points
+        assert point_df["next"].notna().all()
+    else:
+        point_df = point_df[point_df["next"].notna()]

     point_df["geometry"] = [
         LineString([x1, x2]) for x1, x2 in zip(point_df["geometry"], point_df["next"])
     ]
+    if isinstance(multipoints.index, pd.MultiIndex):
+        point_df.index = point_df.index.droplevel(0)
+
     if isinstance(multipoints, GeoDataFrame):
         return GeoDataFrame(
             point_df.drop(columns=["next"]), geometry="geometry", crs=crs
@@ -368,18 +431,18 @@ def multipoints_to_line_segments(
         return GeoSeries(point_df["geometry"], crs=crs)


-def get_line_segments(lines) -> GeoDataFrame:
-
-
-
-
-            lines._geometry_column_name: extract_unique_points(
-                lines.geometry.values
-            )
-        }
-    )
-    return multipoints_to_line_segments(multipoints.geometry)
+def get_line_segments(lines, extract_unique: bool = False, cycle=False) -> GeoDataFrame:
+    try:
+        assert lines.index.is_unique
+    except AttributeError:
+        pass

-
+    lines = to_geoseries(lines)
+
+    if extract_unique:
+        points = extract_unique_points(lines.values)
+    else:
+        coords, indices = shapely.get_coordinates(lines, return_index=True)
+        points = GeoSeries(shapely.points(coords), index=indices)

-    return multipoints_to_line_segments(
+    return multipoints_to_line_segments(points, cycle=cycle)