ssb-sgis 1.0.2__py3-none-any.whl → 1.0.4__py3-none-any.whl
This diff compares the contents of two package versions as published to a supported public registry. It is provided for informational purposes only and reflects the packages exactly as they appear in that registry.
- sgis/__init__.py +20 -9
- sgis/debug_config.py +24 -0
- sgis/exceptions.py +2 -2
- sgis/geopandas_tools/bounds.py +33 -36
- sgis/geopandas_tools/buffer_dissolve_explode.py +136 -35
- sgis/geopandas_tools/centerlines.py +4 -91
- sgis/geopandas_tools/cleaning.py +1576 -583
- sgis/geopandas_tools/conversion.py +38 -19
- sgis/geopandas_tools/duplicates.py +29 -8
- sgis/geopandas_tools/general.py +263 -100
- sgis/geopandas_tools/geometry_types.py +4 -4
- sgis/geopandas_tools/neighbors.py +19 -15
- sgis/geopandas_tools/overlay.py +2 -2
- sgis/geopandas_tools/point_operations.py +5 -5
- sgis/geopandas_tools/polygon_operations.py +510 -105
- sgis/geopandas_tools/polygons_as_rings.py +40 -8
- sgis/geopandas_tools/sfilter.py +29 -12
- sgis/helpers.py +3 -3
- sgis/io/dapla_functions.py +238 -19
- sgis/io/read_parquet.py +1 -1
- sgis/maps/examine.py +27 -12
- sgis/maps/explore.py +450 -65
- sgis/maps/legend.py +177 -76
- sgis/maps/map.py +206 -103
- sgis/maps/maps.py +178 -105
- sgis/maps/thematicmap.py +243 -83
- sgis/networkanalysis/_service_area.py +6 -1
- sgis/networkanalysis/closing_network_holes.py +2 -2
- sgis/networkanalysis/cutting_lines.py +15 -8
- sgis/networkanalysis/directednetwork.py +1 -1
- sgis/networkanalysis/finding_isolated_networks.py +15 -8
- sgis/networkanalysis/networkanalysis.py +17 -19
- sgis/networkanalysis/networkanalysisrules.py +1 -1
- sgis/networkanalysis/traveling_salesman.py +1 -1
- sgis/parallel/parallel.py +64 -27
- sgis/raster/__init__.py +0 -6
- sgis/raster/base.py +208 -0
- sgis/raster/cube.py +54 -8
- sgis/raster/image_collection.py +3257 -0
- sgis/raster/indices.py +17 -5
- sgis/raster/raster.py +138 -243
- sgis/raster/sentinel_config.py +120 -0
- sgis/raster/zonal.py +0 -1
- {ssb_sgis-1.0.2.dist-info → ssb_sgis-1.0.4.dist-info}/METADATA +6 -7
- ssb_sgis-1.0.4.dist-info/RECORD +62 -0
- sgis/raster/methods_as_functions.py +0 -0
- sgis/raster/torchgeo.py +0 -171
- ssb_sgis-1.0.2.dist-info/RECORD +0 -61
- {ssb_sgis-1.0.2.dist-info → ssb_sgis-1.0.4.dist-info}/LICENSE +0 -0
- {ssb_sgis-1.0.2.dist-info → ssb_sgis-1.0.4.dist-info}/WHEEL +0 -0
sgis/raster/cube.py
CHANGED
@@ -2,6 +2,7 @@ import functools
 import itertools
 import multiprocessing
 import re
+import warnings
 from collections.abc import Callable
 from collections.abc import Iterable
 from collections.abc import Iterator
@@ -140,6 +141,10 @@ class DataCube:
             copy: If True, makes deep copies of Rasters provided.
             parallelizer: sgis.Parallel instance to handle concurrent operations.
         """
+        warnings.warn(
+            "This class is deprecated in favor of ImageCollection", stacklevel=1
+        )
+
         self._arrays = None
         self._res = res
         self.parallelizer = parallelizer
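The constructor now warns that `DataCube` is deprecated in favor of `ImageCollection`. A minimal sketch, using only the standard library, of silencing that message while code is being migrated; the message text is copied from the hunk above, and the filter itself is not part of sgis:

```python
import warnings

# Suppress the DataCube deprecation message during migration to ImageCollection.
# The message string is taken from the warnings.warn call added in the diff above.
warnings.filterwarnings(
    "ignore",
    message="This class is deprecated in favor of ImageCollection",
)
```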
@@ -207,6 +212,7 @@ class DataCube:
         check_for_df: bool = True,
         contains: str | None = None,
         endswith: str = ".tif",
+        bands: str | list[str] | None = None,
         filename_regex: str | None = None,
         parallelizer: Parallel | None = None,
         file_system=None,
@@ -221,6 +227,7 @@ class DataCube:
                 that holds metadata for the files in the directory.
             contains: Filter files containing specific substrings.
             endswith: Filter files that end with specific substrings.
+            bands: One or more band ids to keep.
             filename_regex: Regular expression to match file names
                 and attributes (date, band, tile, resolution).
             parallelizer: sgis.Parallel instance for concurrent file processing.
@@ -233,6 +240,7 @@ class DataCube:
         kwargs["res"] = res
         kwargs["filename_regex"] = filename_regex
         kwargs["contains"] = contains
+        kwargs["bands"] = bands
         kwargs["endswith"] = endswith

         if is_dapla():
@@ -283,6 +291,7 @@ class DataCube:
         parallelizer: Parallel | None = None,
         file_system=None,
         contains: str | None = None,
+        bands: str | list[str] | None = None,
         endswith: str = ".tif",
         filename_regex: str | None = None,
         **kwargs,
@@ -296,6 +305,7 @@ class DataCube:
             file_system: File system to use for file operations, used in Dapla environment.
             contains: Filter files containing specific substrings.
             endswith: Filter files that end with specific substrings.
+            bands: One or more band ids to keep.
             filename_regex: Regular expression to match file names.
             **kwargs: Additional keyword arguments to pass to the raster loading function.

@@ -311,6 +321,10 @@ class DataCube:
         if filename_regex:
             compiled = re.compile(filename_regex, re.VERBOSE)
             paths = [path for path in paths if re.search(compiled, Path(path).name)]
+        if bands:
+            if isinstance(bands, str):
+                bands = [bands]
+            paths = [path for path in paths if any(band in str(path) for band in bands)]

         if not paths:
             return cls(crs=crs, parallelizer=parallelizer, res=res)
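The new `bands` argument filters file paths on a plain substring match, as shown in the hunk above. A minimal standalone sketch of that filtering step; the paths and band ids below are made up for illustration:

```python
# Hypothetical file paths; band ids appear as substrings of the file names.
paths = [
    "data/S2_20230601_B02.tif",
    "data/S2_20230601_B04.tif",
    "data/S2_20230601_B08.tif",
]

bands = "B04"  # a single band id or a list of ids
if isinstance(bands, str):
    bands = [bands]

# Keep only paths containing one of the band ids,
# mirroring the substring check added in the diff.
paths = [path for path in paths if any(band in str(path) for band in bands)]
print(paths)  # ['data/S2_20230601_B04.tif']
```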
@@ -369,7 +383,7 @@ class DataCube:

         if grid is None:
             crs = get_common_crs(gdf)
-            total_bounds = shapely.
+            total_bounds = shapely.union_all(
                 [shapely.box(*frame.total_bounds) for frame in gdf]
             )
             grid = make_grid(total_bounds, gridsize=tile_size, crs=crs)
@@ -544,6 +558,19 @@ class DataCube:
         self.data = data
         return self

+    def sample(self, n: int, copy: bool = True, **kwargs) -> Self:
+        """Take n samples of the cube."""
+        if self.crs is None:
+            self._crs = get_common_crs(self.data)
+
+        cube = self.copy() if copy else self
+
+        cube.data = list(pd.Series(cube.data).sample(n))
+
+        cube.data = cube.run_raster_method("load", **kwargs)
+
+        return cube
+
     def load(self, copy: bool = True, **kwargs) -> Self:
         """Load all images as arrays into a DataCube copy."""
         if self.crs is None:
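The new `sample` method draws `n` rasters via `pandas.Series.sample` and then loads them. A minimal sketch of just the sampling step, with plain strings standing in for `Raster` objects:

```python
import pandas as pd

# Stand-ins for the Raster objects held in cube.data.
data = ["raster_a", "raster_b", "raster_c", "raster_d"]

# Draw n elements at random, as the new sample method does before loading arrays.
n = 2
sampled = list(pd.Series(data).sample(n))
print(sampled)  # e.g. ['raster_c', 'raster_a']
```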
@@ -568,7 +595,7 @@ class DataCube:
         """Spatially filter images by bounding box or geometry object."""
         other = to_shapely(other)
         cube = self.copy() if copy else self
-        cube.data = [raster for raster in self if raster.
+        cube.data = [raster for raster in self if raster.union_all().intersects(other)]
         return cube

     def clip(
@@ -620,7 +647,7 @@ class DataCube:
             ).items()
             if key in ALLOWED_KEYS and key not in ["array", "indexes"]
         }
-        if raster.
+        if raster.values is None:
             return [
                 raster.__class__.from_dict({"indexes": i} | all_meta)
                 for i in raster.indexes_as_tuple()
@@ -830,7 +857,7 @@ class DataCube:
     @property
     def arrays(self) -> list[np.ndarray]:
         """The arrays of the images as a list."""
-        return [raster.
+        return [raster.values for raster in self]

     @arrays.setter
     def arrays(self, new_arrays: list[np.ndarray]):
@@ -944,7 +971,7 @@ class DataCube:
     @property
     def unary_union(self) -> Geometry:
         """Box polygon of the combined bounds of each image."""
-        return shapely.
+        return shapely.union_all([shapely.box(*r.bounds) for r in self])

     @property
     def centroid(self) -> GeoSeries:
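The `unary_union` property now builds the combined extent with `shapely.union_all`, the vectorized union function in Shapely 2.x. A minimal sketch with made-up bounds:

```python
import shapely

# Made-up (minx, miny, maxx, maxy) bounds for two rasters.
bounds_list = [(0, 0, 10, 10), (5, 5, 20, 20)]

# Union of the bounding boxes, as the property above now computes it.
combined = shapely.union_all([shapely.box(*bounds) for bounds in bounds_list])
print(combined.bounds)  # (0.0, 0.0, 20.0, 20.0)
```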
@@ -995,12 +1022,22 @@ class DataCube:

     def _check_for_array(self, text: str = "") -> None:
         mess = "Arrays are not loaded. " + text
-        if all(raster.
+        if all(raster.values is None for raster in self):
             raise ValueError(mess)

     def __getitem__(
         self,
-        item:
+        item: (
+            str
+            | slice
+            | int
+            | Series
+            | list
+            | tuple
+            | Callable
+            | Geometry
+            | BoundingBox
+        ),
     ) -> Self | Raster | TORCHGEO_RETURN_TYPE:
         """Select one or more of the Rasters based on indexing or spatial or boolean predicates.

@@ -1026,6 +1063,14 @@ class DataCube:

         """
         copy = self.copy()
+        if isinstance(item, str) and copy.path is not None:
+            copy.data = [raster for raster in copy if item in raster.path]
+            if len(copy) == 1:
+                return copy[0]
+            elif not len(copy):
+                return Raster()
+            return copy
+
         if isinstance(item, slice):
             copy.data = copy.data[item]
             return copy
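The new `str` branch of `__getitem__` filters on a path substring and returns a single raster when exactly one path matches, an empty `Raster` when none match, and the filtered cube otherwise. A minimal sketch of that selection logic; the helper function and paths are illustrative only, with strings standing in for rasters:

```python
# Illustrative re-implementation of the selection logic in the new str branch;
# strings stand in for Raster objects and None for the empty Raster().
def select(paths: list[str], item: str) -> str | list[str] | None:
    matched = [path for path in paths if item in path]
    if len(matched) == 1:
        return matched[0]  # single hit: return the raster itself
    elif not matched:
        return None        # no hit: the real code returns an empty Raster()
    return matched         # several hits: the real code returns the filtered cube


paths = ["tile_a/B02.tif", "tile_a/B04.tif", "tile_b/B04.tif"]
print(select(paths, "B02"))  # 'tile_a/B02.tif'
print(select(paths, "B04"))  # ['tile_a/B04.tif', 'tile_b/B04.tif']
```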
@@ -1127,7 +1172,7 @@ def _merge(
     bounds: Any | None = None,
     **kwargs,
 ) -> DataCube:
-    if not all(r.
+    if not all(r.values is None for r in cube):
         raise ValueError("Arrays can't be loaded when calling merge.")

     bounds = to_bbox(bounds) if bounds is not None else bounds
@@ -1185,6 +1230,7 @@ def _merge_by_bounds(


     def _merge(cube: DataCube, **kwargs) -> DataCube:
+        by = kwargs.pop("by")
         if cube.crs is None:
             cube._crs = get_common_crs(cube.data)
