ssb-sgis 1.0.3__py3-none-any.whl → 1.0.5__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (39)
  1. sgis/__init__.py +10 -3
  2. sgis/debug_config.py +24 -0
  3. sgis/geopandas_tools/bounds.py +16 -21
  4. sgis/geopandas_tools/buffer_dissolve_explode.py +112 -30
  5. sgis/geopandas_tools/centerlines.py +4 -91
  6. sgis/geopandas_tools/cleaning.py +1576 -583
  7. sgis/geopandas_tools/conversion.py +24 -14
  8. sgis/geopandas_tools/duplicates.py +27 -6
  9. sgis/geopandas_tools/general.py +259 -100
  10. sgis/geopandas_tools/geometry_types.py +1 -1
  11. sgis/geopandas_tools/neighbors.py +16 -12
  12. sgis/geopandas_tools/overlay.py +7 -3
  13. sgis/geopandas_tools/point_operations.py +3 -3
  14. sgis/geopandas_tools/polygon_operations.py +505 -100
  15. sgis/geopandas_tools/polygons_as_rings.py +40 -8
  16. sgis/geopandas_tools/sfilter.py +26 -9
  17. sgis/io/dapla_functions.py +238 -19
  18. sgis/maps/examine.py +11 -10
  19. sgis/maps/explore.py +227 -155
  20. sgis/maps/legend.py +13 -4
  21. sgis/maps/map.py +22 -13
  22. sgis/maps/maps.py +100 -29
  23. sgis/maps/thematicmap.py +25 -18
  24. sgis/networkanalysis/_service_area.py +6 -1
  25. sgis/networkanalysis/cutting_lines.py +12 -5
  26. sgis/networkanalysis/finding_isolated_networks.py +13 -6
  27. sgis/networkanalysis/networkanalysis.py +10 -12
  28. sgis/parallel/parallel.py +27 -10
  29. sgis/raster/base.py +208 -0
  30. sgis/raster/cube.py +3 -3
  31. sgis/raster/image_collection.py +1421 -724
  32. sgis/raster/indices.py +10 -7
  33. sgis/raster/raster.py +7 -7
  34. sgis/raster/sentinel_config.py +33 -17
  35. {ssb_sgis-1.0.3.dist-info → ssb_sgis-1.0.5.dist-info}/METADATA +6 -7
  36. ssb_sgis-1.0.5.dist-info/RECORD +62 -0
  37. ssb_sgis-1.0.3.dist-info/RECORD +0 -61
  38. {ssb_sgis-1.0.3.dist-info → ssb_sgis-1.0.5.dist-info}/LICENSE +0 -0
  39. {ssb_sgis-1.0.3.dist-info → ssb_sgis-1.0.5.dist-info}/WHEEL +0 -0
sgis/__init__.py CHANGED
@@ -2,6 +2,7 @@ config = {
2
2
  "n_jobs": 1,
3
3
  }
4
4
 
5
+
5
6
  import sgis.raster.indices as indices
6
7
  from sgis.raster.raster import Raster
7
8
  from sgis.raster.raster import get_shape_from_bounds
@@ -15,7 +16,6 @@ from .geopandas_tools.bounds import gridloop
15
16
  from .geopandas_tools.bounds import make_grid
16
17
  from .geopandas_tools.bounds import make_grid_from_bbox
17
18
  from .geopandas_tools.bounds import make_ssb_grid
18
- from .geopandas_tools.bounds import points_in_bounds
19
19
  from .geopandas_tools.buffer_dissolve_explode import buff
20
20
  from .geopandas_tools.buffer_dissolve_explode import buffdiss
21
21
  from .geopandas_tools.buffer_dissolve_explode import buffdissexp
@@ -26,9 +26,9 @@ from .geopandas_tools.buffer_dissolve_explode import dissexp
26
26
  from .geopandas_tools.buffer_dissolve_explode import dissexp_by_cluster
27
27
  from .geopandas_tools.centerlines import get_rough_centerlines
28
28
  from .geopandas_tools.cleaning import coverage_clean
29
- from .geopandas_tools.cleaning import remove_spikes
30
29
  from .geopandas_tools.cleaning import split_and_eliminate_by_longest
31
- from .geopandas_tools.cleaning import split_by_neighbors
30
+
31
+ # from .geopandas_tools.cleaning import split_by_neighbors
32
32
  from .geopandas_tools.conversion import coordinate_array
33
33
  from .geopandas_tools.conversion import from_4326
34
34
  from .geopandas_tools.conversion import to_4326
@@ -44,6 +44,8 @@ from .geopandas_tools.general import clean_geoms
44
44
  from .geopandas_tools.general import drop_inactive_geometry_columns
45
45
  from .geopandas_tools.general import get_common_crs
46
46
  from .geopandas_tools.general import get_grouped_centroids
47
+ from .geopandas_tools.general import get_line_segments
48
+ from .geopandas_tools.general import points_in_bounds
47
49
  from .geopandas_tools.general import random_points
48
50
  from .geopandas_tools.general import random_points_in_polygons
49
51
  from .geopandas_tools.general import sort_large_first
@@ -66,6 +68,7 @@ from .geopandas_tools.neighbors import sjoin_within_distance
66
68
  from .geopandas_tools.overlay import clean_overlay
67
69
  from .geopandas_tools.point_operations import snap_all
68
70
  from .geopandas_tools.point_operations import snap_within_distance
71
+ from .geopandas_tools.polygon_operations import clean_dissexp
69
72
  from .geopandas_tools.polygon_operations import close_all_holes
70
73
  from .geopandas_tools.polygon_operations import close_small_holes
71
74
  from .geopandas_tools.polygon_operations import close_thin_holes
@@ -76,6 +79,7 @@ from .geopandas_tools.polygon_operations import get_cluster_mapper
76
79
  from .geopandas_tools.polygon_operations import get_gaps
77
80
  from .geopandas_tools.polygon_operations import get_holes
78
81
  from .geopandas_tools.polygon_operations import get_polygon_clusters
82
+ from .geopandas_tools.polygon_operations import split_polygons_by_lines
79
83
  from .geopandas_tools.polygons_as_rings import PolygonsAsRings
80
84
  from .geopandas_tools.sfilter import sfilter
81
85
  from .geopandas_tools.sfilter import sfilter_inverse
@@ -119,7 +123,9 @@ from .raster.cube import concat_cubes
119
123
  from .raster.image_collection import Band
120
124
  from .raster.image_collection import Image
121
125
  from .raster.image_collection import ImageCollection
126
+ from .raster.image_collection import NDVIBand
122
127
  from .raster.image_collection import Sentinel2Band
128
+ from .raster.image_collection import Sentinel2CloudlessBand
123
129
  from .raster.image_collection import Sentinel2CloudlessCollection
124
130
  from .raster.image_collection import Sentinel2CloudlessImage
125
131
  from .raster.image_collection import Sentinel2Collection
@@ -128,6 +134,7 @@ from .raster.image_collection import concat_image_collections
128
134
 
129
135
  try:
130
136
  from .io.dapla_functions import check_files
137
+ from .io.dapla_functions import get_bounds_series
131
138
  from .io.dapla_functions import read_geopandas
132
139
  from .io.dapla_functions import write_geopandas
133
140
  except ImportError:
sgis/debug_config.py ADDED
@@ -0,0 +1,24 @@
1
+ from typing import Any
2
+
3
+
4
+ class _NoExplore:
5
+ """Simply so signal that explore functions should be immediately exited."""
6
+
7
+
8
+ _DEBUG_CONFIG = {
9
+ # "center": (5.3719398, 59.00999914, 0.01),
10
+ # "center": (5.27306727, 59.44232754, 200),
11
+ # "center": (5.85575588, 62.33991158, 200),
12
+ # "center": (26.02870514, 70.68108478, 200),
13
+ "center": _NoExplore(),
14
+ "print": False,
15
+ }
16
+
17
+
18
+ def _try_debug_print(*args: Any) -> None:
19
+ if not _DEBUG_CONFIG["print"]:
20
+ return
21
+ try:
22
+ print(*args)
23
+ except Exception:
24
+ pass
@@ -18,7 +18,7 @@ from ..parallel.parallel import Parallel
18
18
  from .conversion import to_bbox
19
19
  from .conversion import to_gdf
20
20
  from .general import clean_clip
21
- from .general import is_bbox_like
21
+ from .general import get_common_crs
22
22
 
23
23
 
24
24
  @dataclass
@@ -133,11 +133,7 @@ class Gridlooper:
133
133
  )
134
134
  results = self.parallelizer.map(func_with_clip, buffered_grid)
135
135
  if not self.gridbuffer or not self.clip:
136
- return (
137
- results
138
- if not self.concat
139
- else pd.concat(results, ignore_index=True)
140
- )
136
+ return self._return(results, args, kwargs)
141
137
  out = []
142
138
  for cell_res, unbuffered in zip(results, grid, strict=True):
143
139
  out.append(
@@ -145,7 +141,7 @@ class Gridlooper:
145
141
  cell_res, unbuffered, self.keep_geom_type
146
142
  )
147
143
  )
148
- return out if not self.concat else pd.concat(out, ignore_index=True)
144
+ return self._return(out, args, kwargs)
149
145
 
150
146
  results = []
151
147
  for i, (unbuffered, buffered) in enumerate(
@@ -175,7 +171,18 @@ class Gridlooper:
175
171
  if self.verbose:
176
172
  print(f"Done with {i+1} of {n} grid cells", end="\r")
177
173
 
178
- return results if not self.concat else pd.concat(results, ignore_index=True)
174
+ return self._return(results, args, kwargs)
175
+
176
+ def _return(
177
+ self, results: list[Any], args: tuple[Any], kwargs: dict[str, Any]
178
+ ) -> list[Any] | GeoDataFrame:
179
+ if self.concat and len(results):
180
+ return pd.concat(results, ignore_index=True)
181
+ elif self.concat:
182
+ crs = get_common_crs(list(args) + list(kwargs.values()))
183
+ return GeoDataFrame({"geometry": []}, crs=crs)
184
+ else:
185
+ return results
179
186
 
180
187
 
181
188
  def gridloop(
@@ -650,6 +657,7 @@ def bounds_to_points(
650
657
  Examples:
651
658
  ---------
652
659
  >>> import sgis as sg
660
+ >>> from shapely.geometry import MultiPoint, Point
653
661
  >>> gdf = sg.to_gdf([MultiPoint([(0, 0), (1, 1)]), Point(0, 0)])
654
662
  >>> gdf
655
663
  geometry
@@ -690,16 +698,3 @@ def get_total_bounds(
690
698
  else:
691
699
  continue
692
700
  return min(xs), min(ys), max(xs), max(ys)
693
-
694
-
695
- def points_in_bounds(gdf: GeoDataFrame | GeoSeries, n2: int) -> GeoDataFrame:
696
- """Get a GeoDataFrame of points within the bounds of the GeoDataFrame."""
697
- if not isinstance(gdf, (GeoDataFrame | GeoSeries)) and is_bbox_like(gdf):
698
- minx, miny, maxx, maxy = gdf
699
- else:
700
- minx, miny, maxx, maxy = gdf.total_bounds
701
- xs = np.linspace(minx, maxx, num=n2)
702
- ys = np.linspace(miny, maxy, num=n2)
703
- x_coords, y_coords = np.meshgrid(xs, ys, indexing="ij")
704
- coords = np.concatenate((x_coords.reshape(-1, 1), y_coords.reshape(-1, 1)), axis=1)
705
- return to_gdf(coords, crs=gdf.crs)
@@ -21,9 +21,12 @@ import numpy as np
21
21
  import pandas as pd
22
22
  from geopandas import GeoDataFrame
23
23
  from geopandas import GeoSeries
24
+ from shapely import get_num_geometries
24
25
 
25
- from .general import _merge_geometries
26
+ from ..parallel.parallel import Parallel
27
+ from .general import _grouped_unary_union
26
28
  from .general import _parallel_unary_union
29
+ from .general import _unary_union_for_notna
27
30
  from .geometry_types import make_all_singlepart
28
31
  from .polygon_operations import get_cluster_mapper
29
32
  from .polygon_operations import get_grouped_centroids
@@ -186,6 +189,7 @@ def _dissolve(
186
189
  aggfunc: str = "first",
187
190
  grid_size: None | float = None,
188
191
  n_jobs: int = 1,
192
+ as_index: bool = True,
189
193
  **dissolve_kwargs,
190
194
  ) -> GeoDataFrame:
191
195
 
@@ -194,6 +198,13 @@ def _dissolve(
194
198
 
195
199
  geom_col = gdf._geometry_column_name
196
200
 
201
+ gdf[geom_col] = gdf[geom_col].make_valid()
202
+
203
+ more_than_one = get_num_geometries(gdf.geometry.values) > 1
204
+ gdf.loc[more_than_one, geom_col] = gdf.loc[more_than_one, geom_col].apply(
205
+ _unary_union_for_notna
206
+ )
207
+
197
208
  by = dissolve_kwargs.pop("by", None)
198
209
 
199
210
  by_was_none = not bool(by)
@@ -207,7 +218,9 @@ def _dissolve(
207
218
  other_cols = list(gdf.columns.difference({geom_col} | set(by or {})))
208
219
 
209
220
  try:
210
- is_one_hit = gdf.groupby(by, **dissolve_kwargs).transform("size") == 1
221
+ is_one_hit = (
222
+ gdf.groupby(by, as_index=True, **dissolve_kwargs).transform("size") == 1
223
+ )
211
224
  except IndexError:
212
225
  # if no rows when dropna=True
213
226
  original_by = [x for x in by]
@@ -216,16 +229,17 @@ def _dissolve(
216
229
  query &= gdf[col].notna()
217
230
  gdf = gdf.loc[query]
218
231
  assert not len(gdf), gdf
219
- if not by_was_none and dissolve_kwargs.get("as_index", True):
232
+ if not by_was_none and as_index:
220
233
  try:
221
234
  gdf = gdf.set_index(original_by)
222
235
  except Exception as e:
223
236
  print(gdf)
224
237
  print(original_by)
225
238
  raise e
239
+
226
240
  return gdf
227
241
 
228
- if not by_was_none and dissolve_kwargs.get("as_index", True):
242
+ if not by_was_none and as_index:
229
243
  one_hit = gdf[is_one_hit].set_index(by)
230
244
  else:
231
245
  one_hit = gdf[is_one_hit]
@@ -234,14 +248,21 @@ def _dissolve(
234
248
  if not len(many_hits):
235
249
  return GeoDataFrame(one_hit, geometry=geom_col, crs=gdf.crs)
236
250
 
237
- dissolved = many_hits.groupby(by, **dissolve_kwargs)[other_cols].agg(aggfunc)
251
+ dissolved = many_hits.groupby(by, as_index=True, **dissolve_kwargs)[other_cols].agg(
252
+ aggfunc
253
+ )
238
254
 
239
255
  # dissolved = gdf.groupby(by, **dissolve_kwargs)[other_cols].agg(aggfunc)
240
256
 
241
257
  if n_jobs > 1:
242
258
  try:
243
259
  agged = _parallel_unary_union(
244
- many_hits, n_jobs=n_jobs, by=by, grid_size=grid_size, **dissolve_kwargs
260
+ many_hits,
261
+ n_jobs=n_jobs,
262
+ by=by,
263
+ grid_size=grid_size,
264
+ as_index=True,
265
+ **dissolve_kwargs,
245
266
  )
246
267
  dissolved[geom_col] = agged
247
268
  return GeoDataFrame(dissolved, geometry=geom_col, crs=gdf.crs)
@@ -249,21 +270,55 @@ def _dissolve(
249
270
  print(e, dissolved, agged, many_hits)
250
271
  raise e
251
272
 
252
- geoms_agged = many_hits.groupby(by, **dissolve_kwargs)[geom_col].agg(
253
- lambda x: _merge_geometries(x, grid_size=grid_size)
254
- )
273
+ # geoms_agged = many_hits.groupby(by, **dissolve_kwargs)[geom_col].agg(
274
+ # lambda x: _unary_union_for_notna(x, grid_size=grid_size)
275
+ # )
276
+ # print("\n\n\ngeomsagged\n", geoms_agged, geoms_agged.shape)
277
+ geoms_agged = _grouped_unary_union(many_hits, by, as_index=True, **dissolve_kwargs)
278
+ # print(geoms_agged, geoms_agged.shape)
255
279
 
256
- if not dissolve_kwargs.get("as_index"):
257
- try:
258
- geoms_agged = geoms_agged[geom_col]
259
- except KeyError:
260
- pass
280
+ # if not as_index:
281
+ # try:
282
+ # geoms_agged = geoms_agged[geom_col]
283
+ # except KeyError:
284
+ # pass
261
285
 
262
286
  dissolved[geom_col] = geoms_agged
263
287
 
264
- return GeoDataFrame(
265
- pd.concat([dissolved, one_hit]).sort_index(), geometry=geom_col, crs=gdf.crs
266
- )
288
+ if not as_index:
289
+ dissolved = dissolved.reset_index()
290
+ # else:
291
+ # one_hit = one_hit.set
292
+ # dissolved = dissolved.reset_index()
293
+
294
+ # from ..maps.maps import explore, explore_locals
295
+ # from .conversion import to_gdf
296
+
297
+ # try:
298
+ # explore(
299
+ # dissolved=to_gdf(dissolved, 25833),
300
+ # geoms_agged=to_gdf(geoms_agged, 25833),
301
+ # gdf=gdf,
302
+ # column="ARTYPE",
303
+ # )
304
+ # except Exception:
305
+ # explore(
306
+ # dissolved=to_gdf(dissolved, 25833),
307
+ # geoms_agged=to_gdf(geoms_agged, 25833),
308
+ # gdf=gdf,
309
+ # )
310
+
311
+ # from ..maps.maps import explore_locals
312
+ # from .conversion import to_gdf
313
+
314
+ # explore_locals()
315
+
316
+ try:
317
+ return GeoDataFrame(
318
+ pd.concat([dissolved, one_hit]).sort_index(), geometry=geom_col, crs=gdf.crs
319
+ )
320
+ except TypeError as e:
321
+ raise e.__class__(e, dissolved.index, one_hit.index) from e
267
322
 
268
323
 
269
324
  def diss(
@@ -358,7 +413,10 @@ def dissexp(
358
413
 
359
414
 
360
415
  def dissexp_by_cluster(
361
- gdf: GeoDataFrame, predicate: str | None = None, n_jobs: int = 1, **dissolve_kwargs
416
+ gdf: GeoDataFrame,
417
+ predicate: str | None = "intersects",
418
+ n_jobs: int = 1,
419
+ **dissolve_kwargs,
362
420
  ) -> GeoDataFrame:
363
421
  """Dissolves overlapping geometries through clustering with sjoin and networkx.
364
422
 
@@ -414,12 +472,14 @@ def diss_by_cluster(
414
472
  def _run_func_by_cluster(
415
473
  func: Callable,
416
474
  gdf: GeoDataFrame,
417
- predicate: str | None = None,
475
+ predicate: str | None = "intersects",
418
476
  n_jobs: int = 1,
419
477
  **dissolve_kwargs,
420
478
  ) -> GeoDataFrame:
421
479
  is_geoseries = isinstance(gdf, GeoSeries)
422
480
 
481
+ processes = dissolve_kwargs.pop("processes", 1)
482
+
423
483
  by = dissolve_kwargs.pop("by", [])
424
484
  if isinstance(by, str):
425
485
  by = [by]
@@ -432,22 +492,44 @@ def _run_func_by_cluster(
432
492
  def get_group_clusters(group: GeoDataFrame):
433
493
  """Adds cluster column. Applied to each group because much faster."""
434
494
  group = group.reset_index(drop=True)
435
- group["_cluster"] = get_cluster_mapper(
436
- group, predicate=predicate
437
- ) # component_mapper
495
+ group["_cluster"] = get_cluster_mapper(group, predicate=predicate)
438
496
  group["_cluster"] = get_grouped_centroids(group, groupby="_cluster")
439
497
  return group
440
498
 
499
+ gdf = make_all_singlepart(gdf)
500
+
441
501
  if by:
442
- dissolved = (
443
- make_all_singlepart(gdf)
444
- .groupby(by, group_keys=True, dropna=False, as_index=False)
445
- .apply(get_group_clusters)
446
- .pipe(func, by=["_cluster"] + by, n_jobs=n_jobs, **dissolve_kwargs)
447
- )
502
+ if processes == 1:
503
+ gdf = gdf.groupby(by, group_keys=False, dropna=False, as_index=False).apply(
504
+ get_group_clusters
505
+ )
506
+ else:
507
+ gdf = pd.concat(
508
+ Parallel(processes, backend="loky").map(
509
+ get_group_clusters,
510
+ [
511
+ gdf[lambda x: x[by].values == values]
512
+ for values in np.unique(gdf[by].values)
513
+ ],
514
+ ),
515
+ )
516
+ _by = ["_cluster"] + by
517
+ else:
518
+ gdf = get_group_clusters(gdf)
519
+ _by = ["_cluster"]
520
+
521
+ if processes == 1:
522
+ dissolved = func(gdf, by=_by, n_jobs=n_jobs, **dissolve_kwargs)
448
523
  else:
449
- dissolved = get_group_clusters(make_all_singlepart(gdf)).pipe(
450
- func, by="_cluster", n_jobs=n_jobs, **dissolve_kwargs
524
+ dissolved = pd.concat(
525
+ Parallel(processes, backend="loky").map(
526
+ func,
527
+ [
528
+ gdf[gdf["_cluster"] == cluster]
529
+ for cluster in gdf["_cluster"].unique()
530
+ ],
531
+ kwargs=dissolve_kwargs | {"n_jobs": n_jobs, "by": _by},
532
+ ),
451
533
  )
452
534
 
453
535
  if not by:
@@ -2,9 +2,7 @@ import functools
2
2
  import itertools
3
3
  import warnings
4
4
 
5
- import numpy as np
6
5
  import pandas as pd
7
- import shapely
8
6
  from geopandas import GeoDataFrame
9
7
  from geopandas import GeoSeries
10
8
  from numpy.typing import NDArray
@@ -16,17 +14,17 @@ from shapely import line_merge
16
14
  from shapely import make_valid
17
15
  from shapely import segmentize
18
16
  from shapely import unary_union
17
+ from shapely import union_all
19
18
  from shapely import voronoi_polygons
20
19
  from shapely.errors import GEOSException
21
20
  from shapely.geometry import LineString
22
21
  from shapely.ops import nearest_points
23
22
 
24
- from ..maps.maps import explore
25
23
  from ..networkanalysis.traveling_salesman import traveling_salesman_problem
26
- from .conversion import to_gdf
27
24
  from .conversion import to_geoseries
28
25
  from .general import clean_geoms
29
26
  from .general import make_lines_between_points
27
+ from .general import multipoints_to_line_segments
30
28
  from .general import sort_long_first
31
29
  from .geometry_types import make_all_singlepart
32
30
  from .sfilter import sfilter_inverse
@@ -64,7 +62,7 @@ def _remove_longest_if_not_intersecting(
64
62
 
65
63
  nearest = longest_endpoints.groupby(level=0).apply(
66
64
  lambda x: nearest_points(
67
- x, not_longest[not_longest.index.isin(x.index)].unary_union
65
+ x, union_all(not_longest[not_longest.index.isin(x.index)].geometry.values)
68
66
  )[1]
69
67
  )
70
68
  longest_endpoints.loc[:] = make_lines_between_points(
@@ -186,8 +184,6 @@ def get_rough_centerlines(
186
184
  ]
187
185
  )
188
186
 
189
- explore(points=to_gdf(points, 25833), gdf=gdf)
190
-
191
187
  remove_longest = functools.partial(_remove_longest_if_not_intersecting, geoms=geoms)
192
188
 
193
189
  centerlines = GeoSeries(
@@ -336,7 +332,7 @@ def _get_approximate_polygon_endpoints(geoms: GeoSeries) -> GeoSeries:
336
332
 
337
333
  out_geoms.append(nearest_geom_points)
338
334
 
339
- lines_around_geometries = _multipoints_to_line_segments(
335
+ lines_around_geometries = multipoints_to_line_segments(
340
336
  extract_unique_points(rectangles)
341
337
  )
342
338
 
@@ -372,86 +368,3 @@ def _get_approximate_polygon_endpoints(geoms: GeoSeries) -> GeoSeries:
372
368
  out_geoms.append(points_moved)
373
369
 
374
370
  return pd.concat(out_geoms)
375
-
376
-
377
- def _multipoints_to_line_segments(
378
- multipoints: GeoSeries | GeoDataFrame, to_next: bool = True, cycle: bool = True
379
- ) -> GeoSeries | GeoDataFrame:
380
- if not len(multipoints):
381
- return multipoints
382
-
383
- multipoints = to_geoseries(multipoints)
384
-
385
- if isinstance(multipoints.index, pd.MultiIndex):
386
- index = [
387
- multipoints.index.get_level_values(i)
388
- for i in range(multipoints.index.nlevels)
389
- ]
390
- multipoints.index = pd.MultiIndex.from_arrays(
391
- [list(range(len(multipoints))), *index],
392
- names=["range_idx", *multipoints.index.names],
393
- )
394
- else:
395
- multipoints.index = pd.MultiIndex.from_arrays(
396
- [np.arange(0, len(multipoints)), multipoints.index],
397
- names=["range_idx", multipoints.index.name],
398
- )
399
-
400
- try:
401
- crs = multipoints.crs
402
- except AttributeError:
403
- crs = None
404
-
405
- point_df = multipoints.explode(index_parts=False).to_frame("geometry")
406
-
407
- if to_next:
408
- shift = -1
409
- keep = "first"
410
- else:
411
- shift = 1
412
- keep = "last"
413
-
414
- point_df["next"] = point_df.groupby(level=0)["geometry"].shift(shift)
415
-
416
- if cycle:
417
- first_points: GeoSeries = point_df.loc[
418
- lambda x: ~x.index.get_level_values(0).duplicated(keep=keep), "geometry"
419
- ]
420
- is_last_point = point_df["next"].isna()
421
-
422
- point_df.loc[is_last_point, "next"] = first_points
423
- assert point_df["next"].notna().all()
424
- else:
425
- point_df = point_df[point_df["next"].notna()]
426
-
427
- point_df["geometry"] = [
428
- LineString([x1, x2])
429
- for x1, x2 in zip(point_df["geometry"], point_df["next"], strict=False)
430
- ]
431
- if isinstance(multipoints.index, pd.MultiIndex):
432
- point_df.index = point_df.index.droplevel(0)
433
-
434
- if isinstance(multipoints, GeoDataFrame):
435
- return GeoDataFrame(
436
- point_df.drop(columns=["next"]), geometry="geometry", crs=crs
437
- )
438
- return GeoSeries(point_df["geometry"], crs=crs)
439
-
440
-
441
- def get_line_segments(
442
- lines: GeoDataFrame | GeoSeries, extract_unique: bool = False, cycle=False
443
- ) -> GeoDataFrame:
444
- try:
445
- assert lines.index.is_unique
446
- except AttributeError:
447
- pass
448
-
449
- lines = to_geoseries(lines)
450
-
451
- if extract_unique:
452
- points = extract_unique_points(lines.values)
453
- else:
454
- coords, indices = shapely.get_coordinates(lines, return_index=True)
455
- points = GeoSeries(shapely.points(coords), index=indices)
456
-
457
- return _multipoints_to_line_segments(points, cycle=cycle)