ssb-sgis 1.0.1__py3-none-any.whl → 1.0.3__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- sgis/__init__.py +107 -121
- sgis/exceptions.py +5 -3
- sgis/geopandas_tools/__init__.py +1 -0
- sgis/geopandas_tools/bounds.py +86 -47
- sgis/geopandas_tools/buffer_dissolve_explode.py +62 -39
- sgis/geopandas_tools/centerlines.py +53 -44
- sgis/geopandas_tools/cleaning.py +87 -104
- sgis/geopandas_tools/conversion.py +164 -107
- sgis/geopandas_tools/duplicates.py +33 -19
- sgis/geopandas_tools/general.py +84 -52
- sgis/geopandas_tools/geometry_types.py +24 -10
- sgis/geopandas_tools/neighbors.py +23 -11
- sgis/geopandas_tools/overlay.py +136 -53
- sgis/geopandas_tools/point_operations.py +11 -10
- sgis/geopandas_tools/polygon_operations.py +53 -61
- sgis/geopandas_tools/polygons_as_rings.py +121 -78
- sgis/geopandas_tools/sfilter.py +17 -17
- sgis/helpers.py +116 -58
- sgis/io/dapla_functions.py +32 -23
- sgis/io/opener.py +13 -6
- sgis/io/read_parquet.py +2 -2
- sgis/maps/examine.py +55 -28
- sgis/maps/explore.py +471 -112
- sgis/maps/httpserver.py +12 -12
- sgis/maps/legend.py +285 -134
- sgis/maps/map.py +248 -129
- sgis/maps/maps.py +123 -119
- sgis/maps/thematicmap.py +260 -94
- sgis/maps/tilesources.py +3 -8
- sgis/networkanalysis/_get_route.py +5 -4
- sgis/networkanalysis/_od_cost_matrix.py +44 -1
- sgis/networkanalysis/_points.py +10 -4
- sgis/networkanalysis/_service_area.py +5 -2
- sgis/networkanalysis/closing_network_holes.py +22 -64
- sgis/networkanalysis/cutting_lines.py +58 -46
- sgis/networkanalysis/directednetwork.py +16 -8
- sgis/networkanalysis/finding_isolated_networks.py +6 -5
- sgis/networkanalysis/network.py +15 -13
- sgis/networkanalysis/networkanalysis.py +79 -61
- sgis/networkanalysis/networkanalysisrules.py +21 -17
- sgis/networkanalysis/nodes.py +2 -3
- sgis/networkanalysis/traveling_salesman.py +6 -3
- sgis/parallel/parallel.py +372 -142
- sgis/raster/base.py +9 -3
- sgis/raster/cube.py +331 -213
- sgis/raster/cubebase.py +15 -29
- sgis/raster/image_collection.py +2560 -0
- sgis/raster/indices.py +17 -12
- sgis/raster/raster.py +356 -275
- sgis/raster/sentinel_config.py +104 -0
- sgis/raster/zonal.py +38 -14
- {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/LICENSE +1 -1
- {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/METADATA +87 -16
- ssb_sgis-1.0.3.dist-info/RECORD +61 -0
- {ssb_sgis-1.0.1.dist-info → ssb_sgis-1.0.3.dist-info}/WHEEL +1 -1
- sgis/raster/bands.py +0 -48
- sgis/raster/gradient.py +0 -78
- sgis/raster/methods_as_functions.py +0 -124
- sgis/raster/torchgeo.py +0 -150
- ssb_sgis-1.0.1.dist-info/RECORD +0 -63
sgis/geopandas_tools/overlay.py
CHANGED
@@ -9,35 +9,30 @@ version of the solution from GH 2792.
 """
 
 import functools
+from collections.abc import Callable
 
-import dask
 import dask.array as da
 import geopandas as gpd
 import joblib
 import numpy as np
 import pandas as pd
-from geopandas import GeoDataFrame
+from geopandas import GeoDataFrame
+from geopandas import GeoSeries
 from pandas import DataFrame
-from shapely import
-    make_valid,
-    unary_union,
-)
+from shapely import Geometry
+from shapely import STRtree
+from shapely import box
+from shapely import difference
+from shapely import intersection
+from shapely import make_valid
+from shapely import unary_union
 from shapely.errors import GEOSException
 
-from .general import
-)
-from .geometry_types import get_geom_type, make_all_singlepart, to_single_geom_type
+from .general import _determine_geom_type_args
+from .general import clean_geoms
+from .geometry_types import get_geom_type
+from .geometry_types import make_all_singlepart
+from .geometry_types import to_single_geom_type
 
 DEFAULT_GRID_SIZE = None
 DEFAULT_LSUFFIX = "_1"
@@ -75,6 +70,10 @@ def clean_overlay(
             "point".
         grid_size: Precision grid size to round the geometries. Will use the highest
             precision of the inputs by default.
+        n_jobs: number of threads.
+        predicate: Spatial predicate in the spatial tree.
+        lsuffix: Suffix of columns in df1 that are also in df2.
+        rsuffix: Suffix of columns in df2 that are also in df1.
 
     Returns:
         GeoDataFrame with overlayed and fixed geometries and columns from both
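The arguments documented above belong to the public `sg.clean_overlay` function. A minimal usage sketch; the input layers, the suffix values and `n_jobs=4` below are illustrative and not taken from the release:

```python
import sgis as sg

# two overlapping buffered point layers (made up for the illustration)
df1 = sg.to_gdf([(0, 0), (1, 0)], crs=25833)
df2 = sg.to_gdf([(0.5, 0), (2, 0)], crs=25833)
df1["geometry"] = df1.buffer(1)
df2["geometry"] = df2.buffer(1)

res = sg.clean_overlay(
    df1,
    df2,
    how="intersection",
    grid_size=None,          # highest precision of the inputs by default
    n_jobs=4,                # number of threads
    predicate="intersects",  # spatial predicate in the spatial tree
    lsuffix="_1",            # suffix for df1 columns also present in df2
    rsuffix="_2",            # suffix for df2 columns also present in df1
)
```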
@@ -190,9 +189,10 @@ def _join_and_get_no_rows(df1, df2, lsuffix, rsuffix):
     )
 
 
-def _no_intersections_return(
+def _no_intersections_return(
+    df1: GeoDataFrame, df2: GeoDataFrame, how: str, lsuffix, rsuffix: str
+) -> GeoDataFrame:
+    """Return with no overlay if no intersecting bounding box."""
     if how == "intersection":
         return _join_and_get_no_rows(df1, df2, lsuffix, rsuffix)
 
@@ -226,9 +226,9 @@ def _shapely_pd_overlay(
     how: str,
     grid_size: float = DEFAULT_GRID_SIZE,
     predicate: str = "intersects",
-    lsuffix=DEFAULT_LSUFFIX,
-    rsuffix=DEFAULT_RSUFFIX,
-    geom_type=None,
+    lsuffix: str = DEFAULT_LSUFFIX,
+    rsuffix: str = DEFAULT_RSUFFIX,
+    geom_type: str | None = None,
     n_jobs: int = 1,
 ) -> DataFrame:
     if not grid_size and not len(df1) or not len(df2):
@@ -316,7 +316,15 @@ def _shapely_pd_overlay(
     return overlayed
 
 
-def _update(pairs, df1, df2, left, grid_size, geom_type, n_jobs) -> GeoDataFrame:
+def _update(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    df2: pd.DataFrame,
+    left: np.ndarray,
+    grid_size: float | None | int,
+    geom_type: str | None,
+    n_jobs: int,
+) -> GeoDataFrame:
     overlayed = _difference(
         pairs, df1, left, grid_size=grid_size, geom_type=geom_type, n_jobs=n_jobs
     )
@@ -324,24 +332,36 @@ def _update(pairs, df1, df2, left, grid_size, geom_type, n_jobs) -> GeoDataFrame
     return overlayed + [df2]
 
 
-def _run_overlay_dask(
+def _run_overlay_dask(
+    arr1: np.ndarray,
+    arr2: np.ndarray,
+    func: Callable,
+    n_jobs: int,
+    grid_size: float | int | None,
+) -> np.ndarray:
     if len(arr1) // n_jobs <= 1:
         try:
             return func(arr1, arr2, grid_size=grid_size)
         except TypeError as e:
-            raise TypeError(e, {type(x) for x in arr1}, {type(x) for x in arr2})
+            raise TypeError(e, {type(x) for x in arr1}, {type(x) for x in arr2}) from e
     arr1 = da.from_array(arr1, chunks=len(arr1) // n_jobs)
     arr2 = da.from_array(arr2, chunks=len(arr2) // n_jobs)
     res = arr1.map_blocks(func, arr2, grid_size=grid_size, dtype=float)
     return res.compute(scheduler="threads", optimize_graph=False, num_workers=n_jobs)
 
 
-def _run_overlay_joblib_threading(arr1, arr2, func, n_jobs, grid_size):
+def _run_overlay_joblib_threading(
+    arr1: np.ndarray,
+    arr2: np.ndarray,
+    func: Callable,
+    n_jobs: int,
+    grid_size: int | float | None,
+) -> list[Geometry]:
     if len(arr1) // n_jobs <= 1:
         try:
             return func(arr1, arr2, grid_size=grid_size)
         except TypeError as e:
-            raise TypeError(e, {type(x) for x in arr1}, {type(x) for x in arr2})
+            raise TypeError(e, {type(x) for x in arr1}, {type(x) for x in arr2}) from e
     with joblib.Parallel(n_jobs=n_jobs, backend="threading") as parallel:
         return parallel(
             joblib.delayed(func)(g1, g2, grid_size=grid_size)
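The two helpers above show the release's threading strategies for pairwise shapely operations: chunked dask arrays processed with map_blocks, or one joblib-delayed call per geometry pair. Plain threads help here because shapely 2.x releases the GIL in its vectorized functions. A self-contained sketch of both patterns; the array size, buffer distance and n_jobs are made up:

```python
import dask.array as da
import joblib
import numpy as np
import shapely

n_jobs = 4
rng = np.random.default_rng(0)
points = shapely.points(rng.random((1_000, 2)) * 100)
arr1 = shapely.buffer(points, 1)
arr2 = shapely.buffer(points[::-1], 1)

# dask: split both arrays into n_jobs chunks and run the vectorized call per chunk
darr1 = da.from_array(arr1, chunks=len(arr1) // n_jobs)
darr2 = da.from_array(arr2, chunks=len(arr2) // n_jobs)
res = darr1.map_blocks(shapely.intersection, darr2, dtype=object)
intersections = res.compute(scheduler="threads", num_workers=n_jobs)

# joblib: same work expressed as one delayed call per geometry pair
with joblib.Parallel(n_jobs=n_jobs, backend="threading") as parallel:
    intersections_joblib = parallel(
        joblib.delayed(shapely.intersection)(g1, g2) for g1, g2 in zip(arr1, arr2)
    )
```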
@@ -349,7 +369,12 @@ def _run_overlay_joblib_threading(arr1, arr2, func, n_jobs, grid_size):
         )
 
 
-def _intersection(pairs, grid_size, geom_type, n_jobs=1) -> GeoDataFrame:
+def _intersection(
+    pairs: pd.DataFrame,
+    grid_size: None | float | int,
+    geom_type: str | None,
+    n_jobs: int = 1,
+) -> GeoDataFrame:
     if not len(pairs):
         return pairs.drop(columns="geom_right")
 
@@ -410,7 +435,17 @@ def _intersection(pairs, grid_size, geom_type, n_jobs=1) -> GeoDataFrame:
     return intersections.drop(columns="geom_right")
 
 
-def _union(pairs, df1, df2, left, right, grid_size, rsuffix, geom_type, n_jobs=1):
+def _union(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    df2: pd.DataFrame,
+    left: np.ndarray,
+    right: np.ndarray,
+    grid_size: int | float | None,
+    rsuffix: str,
+    geom_type: str | None,
+    n_jobs: int = 1,
+) -> list[GeoDataFrame]:
     merged = []
     if len(left):
         intersections = _intersection(
@@ -432,7 +467,14 @@ def _union(pairs, df1, df2, left, right, grid_size, rsuffix, geom_type, n_jobs=1
     return merged
 
 
-def _identity(pairs, df1, left, grid_size, geom_type, n_jobs=1):
+def _identity(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    left: np.ndarray,
+    grid_size: int | float | None,
+    geom_type: str | None,
+    n_jobs: int = 1,
+) -> list[GeoDataFrame]:
     merged = []
     if len(left):
         intersections = _intersection(
@@ -445,8 +487,16 @@ def _identity(pairs, df1, left, grid_size, geom_type, n_jobs=1):
 
 
 def _symmetric_difference(
-    pairs
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    df2: pd.DataFrame,
+    left: np.ndarray,
+    right: np.ndarray,
+    grid_size: int | float | None,
+    rsuffix: str,
+    geom_type: str | None,
+    n_jobs: int = 1,
+) -> list[GeoDataFrame]:
     merged = []
 
     difference_left = _difference(
@@ -472,7 +522,14 @@ def _symmetric_difference(
     return merged
 
 
-def _difference(
+def _difference(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    left: np.ndarray,
+    grid_size: int | float | None = None,
+    geom_type: str | None = None,
+    n_jobs: int = 1,
+) -> list[GeoDataFrame]:
     merged = []
     if len(left):
         clip_left = _shapely_diffclip_left(
@@ -493,7 +550,7 @@ def _get_intersects_pairs(
     df2: GeoDataFrame,
     left: np.ndarray,
     right: np.ndarray,
-    rsuffix,
+    rsuffix: str,
 ) -> DataFrame:
     return pd.concat(
         [
@@ -512,7 +569,9 @@ def _get_intersects_pairs(
     )
 
 
-def _add_suffix_left(overlayed, df1, df2, lsuffix):
+def _add_suffix_left(
+    overlayed: pd.DataFrame, df1: pd.DataFrame, df2: pd.DataFrame, lsuffix: str
+):
     """Separating this from _add_indices_from_left, since this suffix is not needed in difference."""
     return overlayed.rename(
         columns={
@@ -526,12 +585,12 @@ def _add_suffix_left(overlayed, df1, df2, lsuffix):
     )
 
 
-def _add_indices_from_left(df1, left):
+def _add_indices_from_left(df1: pd.DataFrame, left: np.ndarray) -> pd.DataFrame:
     return df1.take(np.setdiff1d(np.arange(len(df1)), left))
 
 
 def _add_from_right(
-    df1: GeoDataFrame, df2: GeoDataFrame, right: np.ndarray, rsuffix
+    df1: GeoDataFrame, df2: GeoDataFrame, right: np.ndarray, rsuffix: str
 ) -> GeoDataFrame:
     return df2.take(np.setdiff1d(np.arange(len(df2)), right)).rename(
         columns={
@@ -541,13 +600,17 @@ def _add_from_right(
     )
 
 
-def _shapely_diffclip_left(pairs, df1, grid_size, geom_type, n_jobs):
+def _shapely_diffclip_left(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    grid_size: int | float | None,
+    geom_type: str | None,
+    n_jobs: int,
+) -> pd.DataFrame:
+    """Aggregate areas in right by unique values from left, then erases those from left."""
     keep_cols = list(df1.columns.difference({"_overlay_index_right"})) + ["geom_right"]
 
-    agg_geoms_partial = functools.partial(
+    agg_geoms_partial = functools.partial(_agg_geoms, grid_size=grid_size)
 
     try:
         only_one = pairs.groupby(level=0).transform("size") == 1
@@ -592,7 +655,9 @@ def _shapely_diffclip_left(pairs, df1, grid_size, geom_type, n_jobs):
                 {
                     i: g
                     for i, g in zip(
-                        many_hits["_overlay_index_right"],
+                        many_hits["_overlay_index_right"],
+                        many_hits["geom_right"],
+                        strict=False,
                     )
                 }
             )
@@ -624,8 +689,16 @@ def _shapely_diffclip_left(pairs, df1, grid_size, geom_type, n_jobs):
     return clip_left.drop(columns="geom_right")
 
 
-def _shapely_diffclip_right(pairs, df1, df2, grid_size, rsuffix, geom_type, n_jobs):
+def _shapely_diffclip_right(
+    pairs: pd.DataFrame,
+    df1: pd.DataFrame,
+    df2: pd.DataFrame,
+    grid_size: int | float | None,
+    rsuffix: str,
+    geom_type: str | None,
+    n_jobs: int,
+) -> pd.DataFrame:
+    agg_geoms_partial = functools.partial(_agg_geoms, grid_size=grid_size)
 
     pairs = pairs.rename(columns={"geometry": "geom_left", "geom_right": "geometry"})
 
@@ -675,7 +748,13 @@ def _shapely_diffclip_right(pairs, df1, df2, grid_size, rsuffix, geom_type, n_jo
     return clip_right.drop(columns="geom_left")
 
 
-def _try_difference(left, right, grid_size, geom_type, n_jobs=1):
+def _try_difference(
+    left: np.ndarray,
+    right: np.ndarray,
+    grid_size: int | float | None,
+    geom_type: str | None,
+    n_jobs: int = 1,
+) -> np.ndarray:
     """Try difference overlay, then make_valid and retry."""
     if n_jobs > 1 and len(left) / n_jobs > 10:
         try:
@@ -722,17 +801,21 @@ def _try_difference(left, right, grid_size, geom_type, n_jobs=1):
                 grid_size=grid_size,
             )
         except GEOSException as e:
-            raise e.__class__(e, f"{grid_size=}", f"{left=}", f"{right=}")
+            raise e.__class__(e, f"{grid_size=}", f"{left=}", f"{right=}") from e
 
 
 def make_valid_and_keep_geom_type(
-    geoms: np.ndarray, geom_type: str, n_jobs
+    geoms: np.ndarray, geom_type: str, n_jobs: int
 ) -> GeoSeries:
     """Make GeometryCollections into (Multi)Polygons, (Multi)LineStrings or (Multi)Points.
 
     Because GeometryCollections might appear after dissolving (unary_union).
     And this makes shapely difference/intersection fail.
+
+    Args:
+        geoms: Array of geometries.
+        geom_type: geometry type to be kept.
+        n_jobs: Number of treads.
     """
     geoms = GeoSeries(geoms)
     geoms.index = range(len(geoms))
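make_valid_and_keep_geom_type addresses the situation its docstring describes: after unary_union or make_valid, a GeometryCollection can appear, and later difference/intersection calls then fail. A standalone, hedged illustration of the underlying idea (this is not the function's actual implementation):

```python
import shapely
from shapely import GeometryCollection, Polygon

# a self-intersecting "bowtie"; make_valid may return a MultiPolygon or a
# GeometryCollection that also contains lines or points
bowtie = Polygon([(0, 0), (2, 2), (2, 0), (0, 2)])
fixed = shapely.make_valid(bowtie)


def keep_geom_type(geom, geom_type: str = "Polygon"):
    """Keep only the parts of a GeometryCollection matching the wanted type."""
    if isinstance(geom, GeometryCollection):
        parts = [part for part in geom.geoms if geom_type in part.geom_type]
        return shapely.unary_union(parts)
    return geom


print(keep_geom_type(fixed).geom_type)  # e.g. 'MultiPolygon'
```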
@@ -744,5 +827,5 @@ def make_valid_and_keep_geom_type(
     return pd.concat([one_hit, many_hits]).sort_index()
 
 
-def
+def _agg_geoms(g: np.ndarray, grid_size: int | float | None = None) -> Geometry:
     return make_valid(unary_union(g, grid_size=grid_size))

sgis/geopandas_tools/point_operations.py
CHANGED

@@ -2,12 +2,14 @@
 
 import numpy as np
 import pandas as pd
-from geopandas import GeoDataFrame
-from
+from geopandas import GeoDataFrame
+from geopandas import GeoSeries
+from shapely import distance
+from shapely import unary_union
 from shapely.ops import nearest_points
 
-from ..geopandas_tools.
-from ..geopandas_tools.geometry_types import
+from ..geopandas_tools.geometry_types import get_geom_type
+from ..geopandas_tools.geometry_types import to_single_geom_type
 from ..geopandas_tools.polygon_operations import PolygonsAsRings
 
 
@@ -45,8 +47,8 @@ def snap_within_distance(
         'to' geometries to multipoint before snapping if the snap points should be
         vertices.
 
-    Examples
+    Examples:
+    ---------
     Create som points.
 
     >>> from sgis import snap_within_distance, to_gdf
@@ -76,7 +78,6 @@ def snap_within_distance(
     0  POINT (2.00000 2.00000)  2.828427
     1  POINT (2.00000 2.00000)  1.414214
     """
-
     to = _polygons_to_rings(to)
 
     if not distance_col and not isinstance(points, GeoDataFrame):
@@ -134,8 +135,8 @@ def snap_all(
         'to' geometries to multipoint before snapping if the snap points should be
         vertices.
 
-    Examples
+    Examples:
+    ---------
     Create som points.
 
     >>> from sgis import snap_all, to_gdf
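Both snapping functions are part of the public API. A minimal usage sketch based on the docstrings above; the coordinates, the 10-unit search distance and the positional argument order are illustrative assumptions:

```python
import sgis as sg

points = sg.to_gdf([(0, 0), (1, 1)])
to = sg.to_gdf([(2, 2), (3, 3)])

# snap every point to the nearest 'to' geometry
snapped = sg.snap_all(points, to)

# snap only points within 10 units, and record the snap distance in a column
snapped_within = sg.snap_within_distance(points, to, 10, distance_col="snap_distance")
```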
@@ -183,7 +184,7 @@ def snap_all(
     return copied
 
 
-def _polygons_to_rings(gdf):
+def _polygons_to_rings(gdf: GeoDataFrame) -> GeoDataFrame:
     if get_geom_type(gdf) == "polygon":
         return PolygonsAsRings(gdf).get_rings()
     if get_geom_type(gdf) != "mixed":

sgis/geopandas_tools/polygon_operations.py
CHANGED

@@ -3,40 +3,38 @@
 import networkx as nx
 import numpy as np
 import pandas as pd
-import
-from geopandas import
-from
-from shapely import
-    polygons,
-    unary_union,
-)
+from geopandas import GeoDataFrame
+from geopandas import GeoSeries
+from shapely import STRtree
+from shapely import area
+from shapely import box
+from shapely import buffer
+from shapely import difference
+from shapely import get_exterior_ring
+from shapely import get_interior_ring
+from shapely import get_num_interior_rings
+from shapely import get_parts
+from shapely import is_empty
+from shapely import make_valid
+from shapely import polygons
+from shapely import unary_union
 from shapely.errors import GEOSException
 
-from .
-from .general import
-from .geometry_types import get_geom_type, make_all_singlepart, to_single_geom_type
+from .general import _parallel_unary_union
+from .general import _parallel_unary_union_geoseries
+from .general import _push_geom_col
+from .general import clean_geoms
+from .general import get_grouped_centroids
+from .general import to_lines
+from .geometry_types import get_geom_type
+from .geometry_types import make_all_singlepart
+from .geometry_types import to_single_geom_type
 from .neighbors import get_neighbor_indices
-from .overlay import _try_difference
+from .overlay import _try_difference
+from .overlay import clean_overlay
 from .polygons_as_rings import PolygonsAsRings
-from .sfilter import sfilter
+from .sfilter import sfilter
+from .sfilter import sfilter_inverse
 
 
 def get_polygon_clusters(
@@ -63,6 +61,7 @@ def get_polygon_clusters(
         cluster_col: Name of the resulting cluster column.
         allow_multipart: Whether to allow mutipart geometries in the gdfs.
             Defaults to False to avoid confusing results.
+        predicate: Spatial predicate. Defaults to "intersects".
         as_string: Whether to return the cluster column values as a string with x and y
             coordinates. Convinient to always get unique ids.
             Defaults to False because of speed.
@@ -70,9 +69,8 @@ def get_polygon_clusters(
     Returns:
         One or more GeoDataFrames (same amount as was given) with a new cluster column.
 
-    Examples
+    Examples:
+    ---------
     Create geometries with three clusters of overlapping polygons.
 
     >>> import sgis as sg
@@ -186,7 +184,9 @@ def get_polygon_clusters(
     return unconcated
 
 
-def get_cluster_mapper(
+def get_cluster_mapper(
+    gdf: GeoDataFrame | GeoSeries, predicate: str = "intersects"
+) -> dict[int, int]:
     if not gdf.index.is_unique:
         raise ValueError("Index must be unique")
     neighbors = get_neighbor_indices(gdf, gdf, predicate=predicate)
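get_cluster_mapper and get_polygon_clusters group geometries that touch each other directly or through a chain of neighbors. A rough, standalone sketch of that idea using a spatial index and networkx connected components; it illustrates the concept only and is not the sgis implementation:

```python
import geopandas as gpd
import networkx as nx
from shapely.geometry import box

gdf = gpd.GeoDataFrame(
    geometry=[box(0, 0, 1, 1), box(0.5, 0, 1.5, 1), box(5, 5, 6, 6)]
)

# index pairs of intersecting geometries (includes the trivial self-pairs)
left, right = gdf.sindex.query(gdf.geometry, predicate="intersects")

graph = nx.Graph()
graph.add_nodes_from(range(len(gdf)))
graph.add_edges_from(zip(left, right))

# one integer per connected component -> the "cluster" of each polygon
mapper = {
    i: cluster_id
    for cluster_id, component in enumerate(nx.connected_components(graph))
    for i in component
}
gdf["cluster"] = [mapper[i] for i in range(len(gdf))]
print(gdf["cluster"].tolist())  # e.g. [0, 0, 1]
```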
@@ -241,15 +241,16 @@ def eliminate_by_longest(
             'gdf' are sorted first, but if 'gdf' has missing values, the resulting
             polygons might get values from the polygons to be eliminated
             (if aggfunc="first").
+        grid_size: Rounding of the coordinates. Defaults to None.
+        n_jobs: Number of threads to use. Defaults to 1.
+        **kwargs: Keyword arguments passed to the dissolve method.
 
     Returns:
         The GeoDataFrame (gdf) with the geometries of 'to_eliminate' dissolved in.
         If multiple GeoDataFrame are passed as 'gdf', they are returned as a tuple.
 
-    Examples
+    Examples:
+    ---------
     Create two polygons with a sliver in between:
 
     >>> sliver = sg.to_gdf(Polygon([(0, 0), (0.1, 1), (0, 2), (-0.1, 1)]))
@@ -398,6 +399,8 @@ def eliminate_by_largest(
     Args:
         gdf: GeoDataFrame with polygon geometries, or a list of GeoDataFrames.
         to_eliminate: The geometries to be eliminated by 'gdf'.
+        max_distance: Max distance to search for neighbors. Defaults to None, meaning
+            0.
         remove_isolated: If False (default), polygons in 'to_eliminate' that share
             no border with any polygon in 'gdf' will be kept. If True, the isolated
             polygons will be removed.
@@ -414,15 +417,16 @@ def eliminate_by_largest(
             polygons might get values from the polygons to be eliminated
             (if aggfunc="first").
         predicate: Binary predicate passed to sjoin. Defaults to "intersects".
+        grid_size: Rounding of the coordinates. Defaults to None.
+        n_jobs: Number of threads to use. Defaults to 1.
+        **kwargs: Keyword arguments passed to the dissolve method.
 
     Returns:
         The GeoDataFrame (gdf) with the geometries of 'to_eliminate' dissolved in.
         If multiple GeoDataFrame are passed as 'gdf', they are returned as a tuple.
 
-    Examples
+    Examples:
+    ---------
     Create two polygons with a sliver in between:
 
     >>> sliver = sg.to_gdf(Polygon([(0, 0), (0.1, 1), (0, 2), (-0.1, 1)]))
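Both eliminate functions dissolve small "sliver" polygons into their neighbors: by_longest picks the neighbor sharing the longest border, by_largest the neighbor with the largest area. A hedged usage sketch continuing the sliver idea from the docstrings; the two neighbor polygons below are made up, and only arguments documented above are used:

```python
import pandas as pd
import sgis as sg
from shapely.geometry import Polygon

sliver = sg.to_gdf(Polygon([(0, 0), (0.1, 1), (0, 2), (-0.1, 1)]))
left_poly = sg.to_gdf(Polygon([(0, 0), (-2, 0), (-2, 2), (0, 2)]))
right_poly = sg.to_gdf(Polygon([(0, 0), (1, 0), (1, 2), (0, 2)]))

polys = pd.concat([left_poly, right_poly], ignore_index=True)
polys["name"] = ["left", "right"]

# dissolve the sliver into the neighbor sharing the longest border ...
by_longest = sg.eliminate_by_longest(polys, sliver, n_jobs=1)

# ... or into the neighbor with the largest area
by_largest = sg.eliminate_by_largest(polys, sliver, n_jobs=1)
```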
@@ -700,7 +704,7 @@ def _eliminate(
 
     if n_jobs > 1:
         eliminated["geometry"] = GeoSeries(
-
+            _parallel_unary_union_geoseries(
                 pd.concat([eliminators, soon_erased, missing]),
                 level=0,
                 grid_size=grid_size,
@@ -721,7 +725,7 @@ def _eliminate(
 
     else:
         if n_jobs > 1:
-            eliminated["geometry"] =
+            eliminated["geometry"] = _parallel_unary_union(
                 many_hits, by="_dissolve_idx", grid_size=grid_size, n_jobs=n_jobs
             )
         else:
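The `_parallel_unary_union*` helpers used above are internal to sgis and their bodies are not shown in this diff; the pattern they point to is a group-wise dissolve where each group's unary_union runs in its own thread. A rough, helper-free sketch of that pattern using only pandas, shapely and joblib (the groups and geometries are made up):

```python
import joblib
import pandas as pd
import shapely
from shapely.geometry import box

df = pd.DataFrame(
    {
        "group": [0, 0, 1, 1],
        "geometry": [
            box(0, 0, 1, 1),
            box(0.5, 0, 1.5, 1),
            box(5, 5, 6, 6),
            box(5.5, 5, 6.5, 6),
        ],
    }
)

grouped = df.groupby("group")["geometry"]

# one unary_union per group, run in threads (shapely releases the GIL)
with joblib.Parallel(n_jobs=2, backend="threading") as parallel:
    dissolved = parallel(
        joblib.delayed(shapely.unary_union)(list(geoms), grid_size=None)
        for _, geoms in grouped
    )

dissolved = pd.Series(dissolved, index=[name for name, _ in grouped])
print(dissolved.map(lambda g: g.geom_type).tolist())  # ['Polygon', 'Polygon']
```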
@@ -773,16 +777,6 @@ def close_thin_holes(gdf: GeoDataFrame, tolerance: int | float) -> GeoDataFrame:
     return PolygonsAsRings(gdf).apply_numpy_func_to_interiors(to_none_if_thin).to_gdf()
 
 
-def return_correct_geometry_object(in_obj, out_obj):
-    if isinstance(in_obj, GeoDataFrame):
-        in_obj.geometry = out_obj
-        return in_obj
-    elif isinstance(in_obj, GeoSeries):
-        return GeoSeries(out_obj, crs=in_obj.crs)
-    else:
-        return out_obj
-
-
 def close_all_holes(
     gdf: GeoDataFrame | GeoSeries,
     *,
@@ -808,8 +802,8 @@ def close_all_holes(
         A GeoDataFrame or GeoSeries of polygons with closed holes in the geometry
         column.
 
-    Examples
+    Examples:
+    ---------
     Let's create a circle with a hole in it.
 
     >>> point = sg.to_gdf([260000, 6650000], crs=25833)
@@ -897,9 +891,8 @@ def close_small_holes(
             meter units.
         ValueError: If both 'max_m2' and 'max_km2' is given.
 
-    Examples
+    Examples:
+    ---------
     Let's create a circle with a hole in it.
 
     >>> point = sg.to_gdf([260000, 6650000], crs=25833)
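Following the docstring example above (a buffered point turned into a ring), a short hedged sketch of closing holes; the 200/100 metre buffers and the max_m2 threshold are illustrative values:

```python
import sgis as sg

point = sg.to_gdf([260000, 6650000], crs=25833)
# a doughnut: outer 200 m buffer minus inner 100 m buffer leaves one hole
ring = sg.to_gdf(point.buffer(200).difference(point.buffer(100)), crs=25833)

all_closed = sg.close_all_holes(ring)
# close only holes smaller than 40 000 m2 (the inner hole is ~31 400 m2)
small_closed = sg.close_small_holes(ring, max_m2=40_000)
```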
@@ -984,7 +977,6 @@ def close_small_holes(
 
 def _close_small_holes_no_islands(poly, max_area, all_geoms):
     """Closes small holes within one shapely geometry of polygons."""
-
     # start with a list containing the polygon,
     # then append all holes smaller than 'max_km2' to the list.
     holes_closed = [poly]
@@ -1010,7 +1002,6 @@ def _close_small_holes_no_islands(poly, max_area, all_geoms):
 
 def _close_all_holes_no_islands(poly, all_geoms):
     """Closes all holes within one shapely geometry of polygons."""
-
     # start with a list containing the polygon,
     # then append all holes smaller than 'max_km2' to the list.
     holes_closed = [poly]
@@ -1041,6 +1032,7 @@ def get_gaps(
         gdf: GeoDataFrame of polygons.
         include_interiors: If False (default), the holes inside individual polygons
             will not be included as gaps.
+        grid_size: Rounding of the coordinates.
 
     Note:
See get_holes to find holes inside singlepart polygons.
|