ssb-sgis 1.1.5.tar.gz → 1.1.7.tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/PKG-INFO +1 -1
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/pyproject.toml +2 -1
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/cleaning.py +18 -14
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/duplicates.py +3 -1
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/general.py +5 -2
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/dapla_functions.py +9 -4
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/explore.py +2 -3
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/httpserver.py +12 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/map.py +37 -12
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/parallel/parallel.py +0 -1
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/image_collection.py +17 -3
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/LICENSE +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/README.md +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/conf.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/debug_config.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/exceptions.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/bounds.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/buffer_dissolve_explode.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/centerlines.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/conversion.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/geocoding.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/geometry_types.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/neighbors.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/overlay.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/point_operations.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/polygon_operations.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/polygons_as_rings.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/sfilter.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/helpers.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/_is_dapla.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/opener.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/read_parquet.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/examine.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/legend.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/maps.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/norge_i_bilder.json +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/thematicmap.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/tilesources.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/wms.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/_get_route.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/_od_cost_matrix.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/_points.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/_service_area.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/closing_network_holes.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/cutting_lines.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/directednetwork.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/finding_isolated_networks.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/network.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/networkanalysis.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/networkanalysisrules.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/nodes.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/networkanalysis/traveling_salesman.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/parallel/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/py.typed +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/__init__.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/base.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/indices.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/regex.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/sentinel_config.py +0 -0
- {ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/zonal.py +0 -0
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/pyproject.toml

```diff
@@ -1,6 +1,6 @@
 [tool.poetry]
 name = "ssb-sgis"
-version = "1.1.5"
+version = "1.1.7"
 description = "GIS functions used at Statistics Norway."
 authors = ["Morten Letnes <morten.letnes@ssb.no>"]
 license = "MIT"
```
```diff
@@ -56,6 +56,7 @@ bucket = ["dapla-toolbelt", "gcsfs"]
 torch = ["torch", "torchgeo"]
 xarray = ["xarray", "rioxarray"]
 test = ["xarray", "rioxarray", "torch", "torchgeo", "dask"]
+
 [tool.poetry.group.dev.dependencies]
 pygments = ">=2.10.0"
 black = { extras = ["jupyter"], version = ">=23.1.0" }
```
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/cleaning.py

```diff
@@ -101,18 +101,18 @@ def coverage_clean(
 
     _cleaning_checks(gdf, tolerance, duplicate_action)
 
-    if not gdf.index.is_unique:
-        gdf = gdf.reset_index(drop=True)
+    # if not gdf.index.is_unique:
+    #     gdf = gdf.reset_index(drop=True)
 
-    gdf = make_all_singlepart(gdf).loc[
-        lambda x: x.geom_type.isin(["Polygon", "MultiPolygon"])
-    ]
+    # gdf = make_all_singlepart(gdf).loc[
+    #     lambda x: x.geom_type.isin(["Polygon", "MultiPolygon"])
+    # ]
 
-    gdf = safe_simplify(gdf, PRECISION)
+    # gdf = safe_simplify(gdf, PRECISION)
 
     gdf = (
         clean_geoms(gdf)
-        .pipe(make_all_singlepart)
+        .pipe(make_all_singlepart, ignore_index=True)
         .loc[lambda x: x.geom_type.isin(["Polygon", "MultiPolygon"])]
     )
 
```
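The net effect of this hunk is that the old pre-processing steps are commented out and the remaining explode now passes `ignore_index=True`, so the singlepart frame gets a fresh `RangeIndex` instead of repeated index labels. A minimal sketch of why that flag matters, using plain `geopandas.GeoDataFrame.explode` (analogous to, though not necessarily identical to, `make_all_singlepart`):

```python
# Sketch: duplicated index labels after exploding multipart geometries,
# and how ignore_index=True avoids them. Assumes a recent geopandas.
import geopandas as gpd
from shapely.geometry import MultiPolygon, Polygon

part_a = Polygon([(0, 0), (1, 0), (1, 1)])
part_b = Polygon([(2, 0), (3, 0), (3, 1)])
gdf = gpd.GeoDataFrame(geometry=[MultiPolygon([part_a, part_b])])

# Keeping the original index duplicates the label 0 for both parts:
print(gdf.explode(index_parts=False).index.tolist())  # [0, 0]

# ignore_index=True hands back a clean RangeIndex instead:
print(gdf.explode(ignore_index=True).index.tolist())  # [0, 1]
```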
```diff
@@ -475,14 +475,14 @@ def _dissolve_thick_double_and_update(gdf, double, thin_double):
     large = (
         double.loc[~double["_double_idx"].isin(thin_double["_double_idx"])]
         .drop(columns="_double_idx")
-
-        .sort_values("_poly_idx")
+        .pipe(sort_small_first)
+        # .sort_values("_poly_idx")
         .pipe(update_geometries, geom_type="polygon")
     )
     return (
-        clean_overlay(gdf, large, how="update")
-        # .
-        .
+        clean_overlay(gdf, large, how="update").pipe(sort_small_first)
+        # .sort_values("_poly_idx")
+        .pipe(update_geometries, geom_type="polygon")
     )
 
 
```
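Swapping `.sort_values("_poly_idx")` for `sort_small_first` only changes results if `update_geometries` resolves overlaps in row order, with earlier rows keeping contested area. A toy first-wins resolver (an illustrative stand-in, not sgis's actual implementation) shows why placing small polygons first preserves them:

```python
# Toy first-wins overlap resolution: earlier geometries keep contested area.
# Illustrative stand-in for sgis.update_geometries, not its real code.
from shapely.geometry import box

def first_wins(geoms):
    out, claimed = [], None
    for geom in geoms:
        out.append(geom if claimed is None else geom.difference(claimed))
        claimed = geom if claimed is None else claimed.union(geom)
    return out

small, large = box(0, 0, 1, 1), box(0, 0, 5, 5)

# Small first: the small square survives, the large one loses that corner.
print([g.area for g in first_wins([small, large])])  # [1.0, 24.0]

# Large first: the small square is swallowed completely.
print([g.area for g in first_wins([large, small])])  # [25.0, 0.0]
```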
```diff
@@ -534,7 +534,8 @@ def split_and_eliminate_by_longest(
     **kwargs,
 ) -> GeoDataFrame | tuple[GeoDataFrame]:
     if not len(to_eliminate):
-
+        gdf = (gdf,) if isinstance(gdf, GeoDataFrame) else gdf
+        return (*gdf, to_eliminate)
 
     if not isinstance(gdf, (GeoDataFrame, GeoSeries)):
         as_gdf = pd.concat(gdf, ignore_index=True)
```
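The early return now normalizes a lone GeoDataFrame to a 1-tuple before unpacking, so the function returns the same flat-tuple shape whether `gdf` came in as a single frame or a sequence of frames. The pattern in isolation, with strings standing in for GeoDataFrames:

```python
# Normalize a lone value to a 1-tuple so the star-unpacking below produces
# a flat tuple for both call styles. Strings stand in for GeoDataFrames.
def early_return(gdf, to_eliminate):
    gdf = (gdf,) if isinstance(gdf, str) else gdf
    return (*gdf, to_eliminate)

print(early_return("frame", "leftover"))     # ('frame', 'leftover')
print(early_return(("a", "b"), "leftover"))  # ('a', 'b', 'leftover')
```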
```diff
@@ -596,7 +597,10 @@ def split_by_neighbors(df, split_by, tolerance, grid_size=None) -> GeoDataFrame:
 
     intersecting_lines = (
         clean_overlay(
-            to_lines(split_by)
+            to_lines(split_by.explode(ignore_index=True)[lambda x: x.length > 0]),
+            buff(df, tolerance),
+            how="identity",
+            grid_size=grid_size,
         )
         .pipe(get_line_segments)
         .reset_index(drop=True)
```
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/duplicates.py

```diff
@@ -164,6 +164,8 @@ def update_geometries(
 
     copied.index = copied.index.map(index_mapper)
 
+    copied = make_all_singlepart(copied)
+
     # TODO check why polygons dissappear in rare cases. For now, just add back the missing
     dissapeared = sfilter_inverse(gdf, copied.buffer(-PRECISION))
     copied = pd.concat([copied, dissapeared])
```
```diff
@@ -307,7 +309,7 @@ def _get_intersecting_geometries(
     right["idx_right"] = right.index
 
     left = (
-        gdf
+        gdf.copy()
         if not any("index_" in str(col) for col in gdf)
        else gdf.loc[:, lambda x: x.columns.difference({"index_right", "index_left"})]
    )
```
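Returning `gdf.copy()` instead of the frame itself matters because the function goes on to assign helper columns; without the copy those writes would leak into the caller's DataFrame. The hazard in miniature:

```python
import pandas as pd

df = pd.DataFrame({"a": [1, 2]})

def add_helper_column(frame: pd.DataFrame) -> pd.DataFrame:
    frame = frame.copy()  # drop this line and the caller's df mutates
    frame["idx_right"] = frame.index
    return frame

result = add_helper_column(df)
print(list(df.columns))      # ['a'] - caller untouched thanks to .copy()
print(list(result.columns))  # ['a', 'idx_right']
```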
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/geopandas_tools/general.py

```diff
@@ -594,8 +594,9 @@ def to_lines(
     """
     gdf = (
         pd.concat(df.assign(**{"_df_idx": i}) for i, df in enumerate(gdfs))
-        .pipe(make_all_singlepart
+        .pipe(make_all_singlepart)
         .pipe(clean_geoms)
+        .pipe(make_all_singlepart, ignore_index=True)
     )
     geom_col = gdf.geometry.name
 
```
```diff
@@ -607,7 +608,9 @@ def to_lines(
     if (geoms.geom_type == "Polygon").all():
         geoms = polygons_to_lines(geoms, copy=copy)
     elif (geoms.geom_type != "LineString").any():
-        raise ValueError(
+        raise ValueError(
+            f"Point geometries not allowed in 'to_lines'. {geoms.geom_type.value_counts()}"
+        )
 
     gdf.geometry.loc[:] = geoms
 
```
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/io/dapla_functions.py

```diff
@@ -86,6 +86,9 @@ def read_geopandas(
     """
     file_system = _get_file_system(file_system, kwargs)
 
+    if isinstance(gcs_path, (Path | os.PathLike)):
+        gcs_path = str(gcs_path)
+
     if not isinstance(gcs_path, (str | Path | os.PathLike)):
         return _read_geopandas_from_iterable(
             gcs_path,
```
```diff
@@ -96,12 +99,14 @@ def read_geopandas(
         **kwargs,
     )
 
-    if (
+    single_eq_filter = (
         isinstance(filters, Iterable)
         and len(filters) == 1
         and ("=" in next(iter(filters)) or "==" in next(iter(filters)))
-    ):
-
+    )
+    # try to read files in subfolder path / "column=value"
+    # because glob is slow without GCSFileSystem from the root partition
+    if single_eq_filter:
         try:
             expression = "".join(next(iter(filters))).replace("==", "=")
             glob_func = _get_glob(file_system)
```
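The extracted condition is now named `single_eq_filter`, and the new comments spell out the intent: with exactly one equality filter, try reading the hive-style partition directory `path/column=value` directly rather than globbing from the dataset root. A hedged sketch of that shortcut; the helper below and the directory layout are illustrative assumptions, not sgis's actual code:

```python
# Sketch of the single-equality-filter shortcut, assuming a hive-style
# layout <root>/<column>=<value>/*.parquet. Helper names are illustrative.
from pathlib import Path

import pandas as pd

def read_with_filters(root: str, filters: list[tuple]) -> pd.DataFrame:
    single_eq_filter = len(filters) == 1 and filters[0][1] in ("=", "==")
    if single_eq_filter:
        column, _, value = filters[0]
        partition = Path(root) / f"{column}={value}"
        if partition.is_dir():
            # Fast path: read only the matching partition directory.
            return pd.read_parquet(partition)
    # Fallback: let the parquet engine evaluate filters over the whole tree.
    return pd.read_parquet(root, filters=filters)
```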
```diff
@@ -664,7 +669,7 @@ def _read_geopandas(
        return read_func(file, **kwargs)
    except ValueError as e:
        if "Missing geo metadata" not in str(e) and "geometry" not in str(e):
-            raise e
+            raise e.__class__(f"{e.__class__.__name__}: {e} for {file}. ") from e
        df = getattr(pd, f"read_{file_format}")(file, **kwargs)
        if not len(df):
            return GeoDataFrame(df)
```
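The enriched re-raise keeps the exception class, appends the offending file path to the message, and chains the original via `from e`, so a failure while reading many files names the file that broke. The pattern on its own:

```python
# Re-raise with the failing file named in the message; `from e` keeps the
# original traceback chained as __cause__.
def read_one(file: str):
    raise ValueError("Missing geo metadata")

def read_many(files: list[str]):
    for file in files:
        try:
            read_one(file)
        except ValueError as e:
            raise e.__class__(f"{e.__class__.__name__}: {e} for {file}. ") from e

read_many(["gs://bucket/a.parquet"])
# ValueError: ValueError: Missing geo metadata for gs://bucket/a.parquet.
```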
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/explore.py

```diff
@@ -441,11 +441,10 @@ class Explore(Map):
         self.cmap_start = self.kwargs.pop("cmap_start", 0)
         self.cmap_stop = self.kwargs.pop("cmap_stop", 256)
 
-        # if self._gdf.crs is None:
-        #     self.kwargs["crs"] = "Simple"
-
         self.original_crs = self.gdf.crs
 
+        self._to_categorical()
+
     def __repr__(self) -> str:
         """Representation."""
         return f"{self.__class__.__name__}({len(self)})"
```
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/httpserver.py

```diff
@@ -1,4 +1,5 @@
 import os
+import socket
 import webbrowser
 from http.server import BaseHTTPRequestHandler
 from http.server import HTTPServer
```
```diff
@@ -7,8 +8,19 @@ from IPython.display import HTML
 from IPython.display import display
 
 
+def find_available_port(start_port: int, max_attempts: int = 10) -> int:
+    """Find an available port starting from `start_port`."""
+    for port in range(start_port, start_port + max_attempts):
+        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
+            if sock.connect_ex(("127.0.0.1", port)) != 0:
+                return port  # Port is available
+    raise RuntimeError("No available ports found in range.")
+
+
 def run_html_server(contents: str | None = None, port: int = 3000) -> None:
     """Run a simple, temporary http web server for serving static HTML content."""
+    port = find_available_port(port)
+
     if "JUPYTERHUB_SERVICE_PREFIX" in os.environ:
         # Create a link using the https://github.com/jupyterhub/jupyter-server-proxy
         display_address = os.environ["JUPYTERHUB_SERVICE_PREFIX"] + f"proxy/{port}/"
```
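`connect_ex` returns 0 only when something accepts the connection, so a nonzero result is read as "port free". One caveat worth knowing: probing by connecting is inherently racy, since the port can be claimed between the check and the server's later `bind`. An alternative sketch that probes by binding narrows that gap:

```python
# Alternative probe (sketch): try to bind rather than connect. Binding both
# tests and momentarily holds the port, narrowing the check-then-use race.
import socket

def find_available_port_by_bind(start_port: int, max_attempts: int = 10) -> int:
    for port in range(start_port, start_port + max_attempts):
        with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as sock:
            try:
                sock.bind(("127.0.0.1", port))
            except OSError:
                continue  # in use; try the next port
            return port
    raise RuntimeError("No available ports found in range.")

print(find_available_port_by_bind(3000))
```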
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/maps/map.py

```diff
@@ -110,6 +110,7 @@ class Map:
         nan_color="#c2c2c2",
         scheme: str = DEFAULT_SCHEME,
         cmap: str | None = None,
+        categorical: bool | None = None,
         **kwargs,
     ) -> None:
         """Initialiser.
```
```diff
@@ -124,8 +125,11 @@ class Map:
             scheme: Classification scheme to be used.
             cmap (str): Colormap of the plot. See:
                 https://matplotlib.org/stable/tutorials/colors/colormaps.html
+            categorical: Set to True to convert 'column' to string values.
             **kwargs: Arbitrary keyword arguments.
         """
+        self.kwargs = {}
+
         gdfs, column, kwargs = self._separate_args(gdfs, column, kwargs)
 
         self._column = column
```
```diff
@@ -173,7 +177,6 @@ class Map:
         self.labels = new_labels
 
         # pop all geometry-like items from kwargs into self._gdfs
-        self.kwargs = {}
         i = 0
         for key, value in kwargs.items():
             try:
```
```diff
@@ -198,9 +201,13 @@ class Map:
                 f"length as gdfs ({len(gdfs)}). Got len {len(show)}"
             )
 
+        if categorical is not None:
+            self._is_categorical = categorical
+
         if not self._gdfs or not any(len(gdf) for gdf in self._gdfs):
             self._gdfs = []
-
+            if categorical is None:
+                self._is_categorical = True
             self._unique_values = []
             self._nan_idx = []
             return
```
```diff
@@ -209,7 +216,8 @@ class Map:
         self._set_labels()
 
         self._gdfs = self._to_common_crs_and_one_geom_col(self._gdfs)
-        self._is_categorical = self._check_if_categorical()
+        if categorical is None:
+            self._is_categorical = self._check_if_categorical()
 
         if self._column:
             self._fillna_if_col_is_missing()
```
```diff
@@ -230,11 +238,34 @@ class Map:
         self._gdf = pd.concat(self._gdfs, ignore_index=True)
 
         self._nan_idx = self._gdf[self._column].isna()
+        self._to_categorical()
         self._get_unique_values()
 
-    def __getattr__(self, attr: str) -> Any:
-        """Search for attribute in kwargs."""
-        return self.kwargs.get(attr, super().__getattribute__(attr))
+    def _to_categorical(self):
+        if not (self._is_categorical and self.column is not None):
+            return
+
+        def to_string_via_int(series):
+            if not pd.api.types.is_numeric_dtype(series):
+                return series.astype("string")
+            try:
+                series = series.astype(float)
+            except ValueError:
+                return series
+            no_decimals: bool = (series.dropna() % 1 == 0).all()
+            if no_decimals:
+                return series.astype("Int64").astype("string")
+            else:
+                return series.astype("string")
+
+        for i, gdf in enumerate(self._gdfs):
+            if self.column in gdf:
+                self._gdfs[i][self.column] = to_string_via_int(gdf[self.column])
+        self._gdf[self.column] = to_string_via_int(self._gdf[self.column])
+
+    # def __getattr__(self, attr: str) -> Any:
+    #     """Search for attribute in kwargs."""
+    #     return self.kwargs.get(attr, super().__getattribute__(attr))
 
     def __bool__(self) -> bool:
         """True of any gdfs with more than 0 rows."""
```
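The detour through nullable `Int64` in `to_string_via_int` exists so integer-valued float columns (a common result of NaN padding) label as `"1"` rather than `"1.0"`, while genuinely fractional columns keep their decimals and missing values survive as `<NA>`. The behavior in isolation:

```python
import pandas as pd

codes = pd.Series([1.0, 2.0, None])   # integer-like floats (NaN forces float dtype)
ratios = pd.Series([0.5, 2.0, None])  # genuinely fractional values

# Integer-like: round-trip through nullable Int64 drops the ".0".
print(codes.astype("Int64").astype("string").tolist())  # ['1', '2', <NA>]

# Fractional: a direct string cast keeps the decimals.
print(ratios.astype("string").tolist())  # ['0.5', '2.0', <NA>]
```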
```diff
@@ -524,12 +555,6 @@ class Map:
         )
         n = n + maybe_area + maybe_length
 
-        if n == 0:
-            raise ValueError(
-                f"The column {self._column!r} is not present in any "
-                "of the passed GeoDataFrames."
-            )
-
     def _check_if_categorical(self) -> bool:
         """Quite messy this..."""
         if not self._column or not self._gdfs:
```
{ssb_sgis-1.1.5 → ssb_sgis-1.1.7}/src/sgis/raster/image_collection.py

```diff
@@ -8,6 +8,7 @@ import re
 import time
 from abc import abstractmethod
 from collections.abc import Callable
+from collections.abc import Generator
 from collections.abc import Iterable
 from collections.abc import Iterator
 from collections.abc import Sequence
```
```diff
@@ -86,6 +87,14 @@ except ImportError:
     raise ImportError("xarray")
 
 
+try:
+    from gcsfs.core import GCSFile
+except ImportError:
+
+    class GCSFile:
+        """Placeholder."""
+
+
 from ..geopandas_tools.bounds import get_total_bounds
 from ..geopandas_tools.conversion import to_bbox
 from ..geopandas_tools.conversion import to_gdf
```
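The try/except gives the module a dummy `GCSFile` when gcsfs is absent, presumably so `isinstance` checks elsewhere keep working (and simply never match) instead of raising `NameError`. The general optional-dependency pattern:

```python
# Optional-dependency placeholder: isinstance checks against GCSFile stay
# valid (always False) in environments without gcsfs installed.
try:
    from gcsfs.core import GCSFile
except ImportError:

    class GCSFile:  # type: ignore[no-redef]
        """Placeholder."""

def is_gcs_file(obj: object) -> bool:
    return isinstance(obj, GCSFile)

print(is_gcs_file("not a remote file"))  # False either way
```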
```diff
@@ -2645,9 +2654,10 @@ class ImageCollection(_ImageBase):
 
         other = to_shapely(other)
 
-
-        [
-
+        with ThreadPoolExecutor() as executor:
+            bounds_iterable: Generator[Polygon] = executor.map(_union_all, self)
+
+        intersects_list: pd.Series = GeoSeries(list(bounds_iterable)).intersects(other)
 
         self.images = [
             image
```
```diff
@@ -3479,6 +3489,10 @@ def _open_raster(path: str | Path) -> rasterio.io.DatasetReader:
         return rasterio.open(file)
 
 
+def _union_all(obj: _ImageBase) -> Polygon:
+    return obj.union_all()
+
+
 def _read_mask_array(self: Band | Image, **kwargs) -> np.ndarray:
     mask_band_id = self.masking["band_id"]
     mask_paths = [path for path in self._all_file_paths if mask_band_id in path]
```
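Taken together, the last two hunks move the per-image footprint computation into a thread pool (`_union_all` is defined at module level so it can be mapped over the collection) and then run a single vectorized `intersects` over one GeoSeries instead of per-image predicate calls. A self-contained sketch of that shape; `footprint` stands in for the real `Image.union_all`, which presumably does I/O-bound metadata work:

```python
# Self-contained sketch: compute footprints in a thread pool, then filter
# with one vectorized intersects call.
from concurrent.futures import ThreadPoolExecutor

from geopandas import GeoSeries
from shapely.geometry import box

images = [box(0, 0, 1, 1), box(5, 5, 6, 6), box(0.5, 0.5, 2, 2)]
other = box(0, 0, 1.5, 1.5)

def footprint(image):
    return image  # the real version unions the image's band bounds

with ThreadPoolExecutor() as executor:
    bounds = list(executor.map(footprint, images))

keep = GeoSeries(bounds).intersects(other)
print([i for i, hit in enumerate(keep) if hit])  # [0, 2]
```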