rasterix 0.1a1__py3-none-any.whl → 0.1a2__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- rasterix/__init__.py +2 -2
- rasterix/_version.py +21 -1
- rasterix/raster_index.py +44 -11
- rasterix/rasterize/__init__.py +0 -0
- rasterix/rasterize/exact.py +197 -0
- rasterix/rasterize/rasterio.py +335 -0
- rasterix/rasterize/utils.py +68 -0
- rasterix/rioxarray_compat.py +224 -0
- rasterix-0.1a2.dist-info/METADATA +81 -0
- rasterix-0.1a2.dist-info/RECORD +12 -0
- {rasterix-0.1a1.dist-info → rasterix-0.1a2.dist-info}/WHEEL +1 -2
- rasterix-0.1a1.dist-info/METADATA +0 -26
- rasterix-0.1a1.dist-info/RECORD +0 -8
- rasterix-0.1a1.dist-info/top_level.txt +0 -1
- {rasterix-0.1a1.dist-info → rasterix-0.1a2.dist-info}/licenses/LICENSE +0 -0
rasterix/__init__.py
CHANGED
@@ -1,4 +1,4 @@
-from .raster_index import RasterIndex
+from .raster_index import RasterIndex, assign_index


 def _get_version():
@@ -12,4 +12,4 @@ def _get_version():

 __version__ = _get_version()

-__all__ = ["RasterIndex"]
+__all__ = ["RasterIndex", "assign_index"]
rasterix/_version.py
CHANGED
@@ -1 +1,21 @@
-
+# file generated by setuptools-scm
+# don't change, don't track in version control
+
+__all__ = ["__version__", "__version_tuple__", "version", "version_tuple"]
+
+TYPE_CHECKING = False
+if TYPE_CHECKING:
+    from typing import Tuple
+    from typing import Union
+
+    VERSION_TUPLE = Tuple[Union[int, str], ...]
+else:
+    VERSION_TUPLE = object
+
+version: str
+__version__: str
+__version_tuple__: VERSION_TUPLE
+version_tuple: VERSION_TUPLE
+
+__version__ = version = '0.1a2'
+__version_tuple__ = version_tuple = (0, 1, 'a2')
rasterix/raster_index.py
CHANGED
@@ -1,17 +1,36 @@
+from __future__ import annotations
+
 import textwrap
 from collections.abc import Hashable, Mapping
-from typing import Any
+from typing import Any, TypeVar

 import numpy as np
 import pandas as pd
 from affine import Affine
-from xarray import DataArray, Index, Variable
+from xarray import Coordinates, DataArray, Dataset, Index, Variable
 from xarray.core.coordinate_transform import CoordinateTransform

 # TODO: import from public API once it is available
 from xarray.core.indexes import CoordinateTransformIndex, PandasIndex
 from xarray.core.indexing import IndexSelResult, merge_sel_results

+from rasterix.rioxarray_compat import guess_dims
+
+T_Xarray = TypeVar("T_Xarray", "DataArray", "Dataset")
+
+
+def assign_index(obj: T_Xarray, *, x_dim: str | None = None, y_dim: str | None = None) -> T_Xarray:
+    if x_dim is None or y_dim is None:
+        guessed_x, guessed_y = guess_dims(obj)
+        x_dim = x_dim or guessed_x
+        y_dim = y_dim or guessed_y
+
+    index = RasterIndex.from_transform(
+        obj.rio.transform(), obj.sizes[x_dim], obj.sizes[y_dim], x_dim=x_dim, y_dim=y_dim
+    )
+    coords = Coordinates.from_xindex(index)
+    return obj.assign_coords(coords)
+

 class AffineTransform(CoordinateTransform):
     """Affine 2D transform wrapper."""
@@ -126,7 +145,7 @@ class AxisAffineTransform(CoordinateTransform):
         assert dims is None or dims == self.dims
         return self.forward({self.dim: np.arange(self.size)})

-    def slice(self, slice: slice) ->
+    def slice(self, slice: slice) -> AxisAffineTransform:
         start = max(slice.start or 0, 0)
         stop = min(slice.stop or self.size, self.size)
         step = slice.step or 1
@@ -183,7 +202,7 @@ class AxisAffineTransformIndex(CoordinateTransformIndex):

     def isel(  # type: ignore[override]
         self, indexers: Mapping[Any, int | slice | np.ndarray | Variable]
-    ) ->
+    ) -> AxisAffineTransformIndex | PandasIndex | None:
         idxer = indexers[self.dim]

         # generate a new index with updated transform if a slice is given
@@ -209,10 +228,13 @@ class AxisAffineTransformIndex(CoordinateTransformIndex):
         label = labels[coord_name]

         if isinstance(label, slice):
+            if label.start is None:
+                label = slice(0, label.stop, label.step)
             if label.step is None:
                 # continuous interval slice indexing (preserves the index)
                 pos = self.transform.reverse({coord_name: np.array([label.start, label.stop])})
-
+                # np.round rounds to even, this way we round upwards
+                pos = np.floor(pos[self.dim] + 0.5).astype("int")
                 new_start = max(pos[0], 0)
                 new_stop = min(pos[1], self.axis_transform.size)
                 return IndexSelResult({self.dim: slice(new_start, new_stop)})
@@ -240,7 +262,7 @@ class AxisAffineTransformIndex(CoordinateTransformIndex):

         return result

-    def to_pandas_index(self) ->
+    def to_pandas_index(self) -> pd.Index:
         import pandas as pd

         values = self.transform.generate_coords()
@@ -303,7 +325,7 @@ class RasterIndex(Index):
     @classmethod
     def from_transform(
         cls, affine: Affine, width: int, height: int, x_dim: str = "x", y_dim: str = "y"
-    ) ->
+    ) -> RasterIndex:
         indexes: dict[WrappedIndexCoords, AxisAffineTransformIndex | CoordinateTransformIndex]

         # pixel centered coordinates
@@ -328,7 +350,7 @@ class RasterIndex(Index):
         variables: Mapping[Any, Variable],
         *,
         options: Mapping[str, Any],
-    ) ->
+    ) -> RasterIndex:
         # TODO: compute bounds, resolution and affine transform from explicit coordinates.
         raise NotImplementedError("Creating a RasterIndex from existing coordinates is not yet supported.")

@@ -340,7 +362,7 @@ class RasterIndex(Index):

         return new_variables

-    def isel(self, indexers: Mapping[Any, int | slice | np.ndarray | Variable]) ->
+    def isel(self, indexers: Mapping[Any, int | slice | np.ndarray | Variable]) -> RasterIndex | None:
         new_indexes: dict[WrappedIndexCoords, WrappedIndex] = {}

         for coord_names, index in self._wrapped_indexes.items():
@@ -368,7 +390,7 @@ class RasterIndex(Index):
         for coord_names, index in self._wrapped_indexes.items():
             if not isinstance(coord_names, tuple):
                 coord_names = (coord_names,)
-            index_labels = {k: v for k, v in labels if k in coord_names}
+            index_labels = {k: v for k, v in labels.items() if k in coord_names}
             if index_labels:
                 results.append(index.sel(index_labels, method=method, tolerance=tolerance))

@@ -385,7 +407,7 @@ class RasterIndex(Index):
             for k, index in self._wrapped_indexes.items()
         )

-    def to_pandas_index(self) ->
+    def to_pandas_index(self) -> pd.Index:
         # conversion is possible only if this raster index encapsulates
         # exactly one AxisAffineTransformIndex or a PandasIndex associated
         # to either the x or y axis (1-dimensional) coordinate.
@@ -403,3 +425,14 @@ class RasterIndex(Index):
             items += [repr(coord_names) + ":", textwrap.indent(repr(index), " ")]

         return "RasterIndex\n" + "\n".join(items)
+
+    def transform(self) -> Affine:
+        """Returns Affine transform for top-left corners."""
+        if len(self._wrapped_indexes) > 1:
+            x = self._wrapped_indexes["x"].axis_transform.affine
+            y = self._wrapped_indexes["y"].axis_transform.affine
+            aff = Affine(x.a, x.b, x.c, y.d, y.e, y.f)
+        else:
+            index = next(iter(self._wrapped_indexes.values()))
+            aff = index.affine
+        return aff * Affine.translation(-0.5, -0.5)
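A minimal usage sketch of the new `assign_index` helper added above (the GeoTIFF name is hypothetical; `rioxarray` must be installed since `assign_index` calls the `.rio` accessor):

```python
import rioxarray  # provides open_rasterio and registers the .rio accessor used by assign_index
from rasterix import assign_index

# any rioxarray-readable single-band raster works here
da = rioxarray.open_rasterio("example.tif").squeeze("band", drop=True)
da = assign_index(da)  # x/y coordinates are now backed by a RasterIndex built from da.rio.transform()
print(da.xindexes["x"])
```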
rasterix/rasterize/__init__.py
File without changes
rasterix/rasterize/exact.py
ADDED
@@ -0,0 +1,197 @@
+# exactexact wrappers
+from __future__ import annotations
+
+from typing import TYPE_CHECKING, Any, Literal
+
+import geopandas as gpd
+import numpy as np
+import xarray as xr
+from exactextract import exact_extract
+from exactextract.raster import NumPyRasterSource
+
+from .utils import geometries_as_dask_array, is_in_memory
+
+if TYPE_CHECKING:
+    import dask.array
+    import dask_geopandas
+
+MIN_CHUNK_SIZE = 2  # exactextract cannot handle arrays of size 1.
+
+__all__ = [
+    "coverage",
+]
+
+
+def get_dtype(coverage_weight, geometries):
+    if coverage_weight.lower() == "fraction":
+        dtype = "float64"
+    elif coverage_weight.lower() == "none":
+        dtype = np.min_scalar_type(len(geometries))
+    else:
+        raise NotImplementedError
+    return dtype
+
+
+def np_coverage(
+    x: np.ndarray,
+    y: np.ndarray,
+    *,
+    geometries: gpd.GeoDataFrame,
+    coverage_weight: Literal["fraction", "none"] = "fraction",
+) -> np.ndarray[Any, Any]:
+    """
+    Parameters
+    ----------
+
+    """
+    assert x.ndim == 1
+    assert y.ndim == 1
+
+    dtype = get_dtype(coverage_weight, geometries)
+
+    xsize = x.size
+    ysize = y.size
+
+    # we need the top left corner, and the bottom right corner
+    dx0 = (x[1] - x[0]) / 2
+    dx1 = (x[-1] - x[-2]) / 2
+    dy0 = np.abs(y[1] - y[0]) / 2
+    dy1 = np.abs(y[-1] - y[-2]) / 2
+    if y[0] > y[-1]:
+        dy0, dy1 = dy1, dy0
+
+    shape = (ysize, xsize)
+    raster = NumPyRasterSource(
+        np.broadcast_to([1], shape),
+        xmin=x.min() - dx0,
+        xmax=x.max() + dx1,
+        ymin=y.min() - dy0,
+        ymax=y.max() + dy1,
+        srs_wkt=geometries.crs.to_wkt(),
+    )
+    result = exact_extract(
+        rast=raster,
+        vec=geometries,
+        ops=["cell_id", f"coverage(coverage_weight={coverage_weight})"],
+        output="pandas",
+        # max_cells_in_memory=2*x.size * y.size
+    )
+    out = np.zeros((len(geometries), *shape), dtype=dtype)
+    # TODO: vectorized assignment?
+    for i in range(len(geometries)):
+        res = result.loc[i]
+        # indices = np.unravel_index(res.cell_id, shape=shape)
+        # out[(i, *indices)] = offset + i + 1  # 0 is the fill value
+        out[i, ...].flat[res.cell_id] = res.coverage
+    return out
+
+
+def coverage_np_dask_wrapper(
+    x: np.ndarray, y: np.ndarray, geom_array: np.ndarray, coverage_weight, crs
+) -> np.ndarray:
+    return np_coverage(
+        x=x,
+        y=y,
+        geometries=gpd.GeoDataFrame(geometry=geom_array, crs=crs),
+        coverage_weight=coverage_weight,
+    )
+
+
+def dask_coverage(
+    x: dask.array.Array,
+    y: dask.array.Array,
+    *,
+    geom_array: dask.array.Array,
+    coverage_weight: Literal["fraction", "none"] = "fraction",
+    crs: Any,
+) -> dask.array.Array:
+    import dask.array
+
+    if any(c == 1 for c in x.chunks) or any(c == 1 for c in y.chunks):
+        raise ValueError("exactextract does not support a chunksize of 1. Please rechunk to avoid this")
+
+    return dask.array.blockwise(
+        coverage_np_dask_wrapper,
+        "gji",
+        x,
+        "i",
+        y,
+        "j",
+        geom_array,
+        "g",
+        crs=crs,
+        coverage_weight=coverage_weight,
+        dtype=get_dtype(coverage_weight, geom_array),
+    )
+
+
+def coverage(
+    obj: xr.Dataset | xr.DataArray,
+    geometries: gpd.GeoDataFrame | dask_geopandas.GeoDataFrame,
+    *,
+    xdim="x",
+    ydim="y",
+    coverage_weight="fraction",
+) -> xr.DataArray:
+    """
+    Returns "coverage" fractions for each pixel for each geometry calculated using exactextract.
+
+    Parameters
+    ----------
+    obj : xr.DataArray | xr.Dataset
+        Xarray object used to extract the grid
+    geometries: GeoDataFrame | DaskGeoDataFrame
+        Geometries used for to calculate coverage
+    xdim: str
+        Name of the "x" dimension on ``obj``.
+    ydim: str
+        Name of the "y" dimension on ``obj``.
+    coverage_weight: {"fraction", "none", "area_cartesian", "area_spherical_m2", "area_spherical_km2"}
+        Weights to estimate, passed directly to exactextract.
+
+    Returns
+    -------
+    DataArray
+        3D dataarray with coverage fraction. The additional dimension is "geometry".
+    """
+    if "spatial_ref" not in obj.coords:
+        raise ValueError("Xarray object must contain the `spatial_ref` variable.")
+    # FIXME: assert obj.crs == geometries.crs
+    if is_in_memory(obj=obj, geometries=geometries):
+        out = np_coverage(
+            x=obj[xdim].data,
+            y=obj[ydim].data,
+            geometries=geometries,
+            coverage_weight=coverage_weight,
+        )
+        geom_array = geometries.to_numpy().squeeze(axis=1)
+    else:
+        from dask.array import from_array
+
+        geom_dask_array = geometries_as_dask_array(geometries)
+        out = dask_coverage(
+            x=from_array(obj[xdim].data, chunks=obj.chunksizes.get(xdim, -1)),
+            y=from_array(obj[ydim].data, chunks=obj.chunksizes.get(ydim, -1)),
+            geom_array=geom_dask_array,
+            crs=geometries.crs,
+            coverage_weight=coverage_weight,
+        )
+        if isinstance(geometries, gpd.GeoDataFrame):
+            geom_array = geometries.to_numpy().squeeze(axis=1)
+        else:
+            geom_array = geom_dask_array
+
+    coverage = xr.DataArray(
+        dims=("geometry", ydim, xdim),
+        data=out,
+        coords=xr.Coordinates(
+            coords={
+                xdim: obj.coords[xdim],
+                ydim: obj.coords[ydim],
+                "spatial_ref": obj.spatial_ref,
+                "geometry": geom_array,
+            },
+            indexes={xdim: obj.xindexes[xdim], ydim: obj.xindexes[ydim]},
+        ),
+    )
+    return coverage
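A sketch of how the new `coverage` wrapper might be called (file names are hypothetical; `exactextract`, `geopandas`, and `rioxarray` are assumed to be installed):

```python
import geopandas as gpd
import rioxarray  # open_rasterio attaches the spatial_ref coordinate that coverage() requires
from rasterix.rasterize.exact import coverage

da = rioxarray.open_rasterio("example.tif").squeeze("band", drop=True)
polygons = gpd.read_file("fields.gpkg").to_crs(da.rio.crs)

# one coverage-fraction layer per polygon, with dims ("geometry", "y", "x")
frac = coverage(da, polygons, coverage_weight="fraction")
# for example, an exact area-weighted mean per polygon
weighted_mean = (da * frac).sum(("x", "y")) / frac.sum(("x", "y"))
```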
rasterix/rasterize/rasterio.py
ADDED
@@ -0,0 +1,335 @@
+# rasterio wrappers
+from __future__ import annotations
+
+from collections.abc import Sequence
+from functools import partial
+from typing import TYPE_CHECKING, Any
+
+import geopandas as gpd
+import numpy as np
+import odc.geo.xr  # noqa
+import rasterio as rio
+import xarray as xr
+from rasterio.features import MergeAlg, geometry_mask
+from rasterio.features import rasterize as rasterize_rio
+
+from .utils import is_in_memory, prepare_for_dask
+
+if TYPE_CHECKING:
+    import dask_geopandas
+
+
+def dask_rasterize_wrapper(
+    geom_array: np.ndarray,
+    tile_array: np.ndarray,
+    offset_array: np.ndarray,
+    *,
+    fill: Any,
+    all_touched: bool,
+    merge_alg: MergeAlg,
+    dtype_: np.dtype,
+    env: rio.Env | None = None,
+) -> np.ndarray:
+    tile = tile_array.item()
+    offset = offset_array.item()
+
+    return rasterize_geometries(
+        geom_array[:, 0, 0].tolist(),
+        tile=tile,
+        offset=offset,
+        all_touched=all_touched,
+        merge_alg=merge_alg,
+        fill=fill,
+        dtype=dtype_,
+        env=env,
+    )[np.newaxis, :, :]
+
+
+def rasterize_geometries(
+    geometries: Sequence[Any],
+    *,
+    dtype: np.dtype,
+    tile,
+    offset,
+    env: rio.Env | None = None,
+    clear_cache: bool = False,
+    **kwargs,
+):
+    # From https://rasterio.readthedocs.io/en/latest/api/rasterio.features.html#rasterio.features.rasterize
+    # The out array will be copied and additional temporary raster memory equal to 2x the smaller of out data
+    # or GDAL’s max cache size (controlled by GDAL_CACHEMAX, default is 5% of the computer’s physical memory) is required.
+    # If GDAL max cache size is smaller than the output data, the array of shapes will be iterated multiple times.
+    # Performance is thus a linear function of buffer size. For maximum speed, ensure that GDAL_CACHEMAX
+    # is larger than the size of out or out_shape.
+    if env is None:
+        # out_size = dtype.itemsize * math.prod(tile.shape)
+        # env = rio.Env(GDAL_CACHEMAX=1.2 * out_size)
+        # FIXME: figure out a good default
+        env = rio.Env()
+    with env:
+        res = rasterize_rio(
+            zip(geometries, range(offset, offset + len(geometries)), strict=True),
+            out_shape=tile.shape,
+            transform=tile.affine,
+            **kwargs,
+        )
+        if clear_cache:
+            with rio.Env(GDAL_CACHEMAX=0):
+                try:
+                    from osgeo import gdal
+
+                    # attempt to force-clear the GDAL cache
+                    assert gdal.GetCacheMax() == 0
+                except ImportError:
+                    pass
+    assert res.shape == tile.shape
+    return res
+
+
+def rasterize(
+    obj: xr.Dataset | xr.DataArray,
+    geometries: gpd.GeoDataFrame | dask_geopandas.GeoDataFrame,
+    *,
+    xdim="x",
+    ydim="y",
+    all_touched: bool = False,
+    merge_alg: MergeAlg = MergeAlg.replace,
+    geoms_rechunk_size: int | None = None,
+    env: rio.Env | None = None,
+) -> xr.DataArray:
+    """
+    Dask-aware wrapper around ``rasterio.features.rasterize``.
+
+    Returns a 2D DataArray with integer codes for cells that are within the provided geometries.
+
+    Parameters
+    ----------
+    obj: xr.Dataset or xr.DataArray
+        Xarray object, whose grid to rasterize
+    geometries: GeoDataFrame
+        Either a geopandas or dask_geopandas GeoDataFrame
+    xdim: str
+        Name of the "x" dimension on ``obj``.
+    ydim: str
+        Name of the "y" dimension on ``obj``.
+    all_touched: bool = False
+        Passed to ``rasterio.features.rasterize``
+    merge_alg: rasterio.MergeAlg
+        Passed to ``rasterio.features.rasterize``.
+    geoms_rechunk_size: int | None = None
+        Size to rechunk the geometry array to *after* conversion from dataframe.
+    env: rasterio.Env
+        Rasterio Environment configuration. For example, use set ``GDAL_CACHEMAX`
+        by passing ``env = rio.Env(GDAL_CACHEMAX=100 * 1e6)``.
+
+    Returns
+    -------
+    DataArray
+        2D DataArray with geometries "burned in"
+    """
+    if xdim not in obj.dims or ydim not in obj.dims:
+        raise ValueError(f"Received {xdim=!r}, {ydim=!r} but obj.dims={tuple(obj.dims)}")
+    box = obj.odc.geobox
+    rasterize_kwargs = dict(all_touched=all_touched, merge_alg=merge_alg)
+    # FIXME: box.crs == geometries.crs
+    if is_in_memory(obj=obj, geometries=geometries):
+        geom_array = geometries.to_numpy().squeeze(axis=1)
+        rasterized = rasterize_geometries(
+            geom_array.tolist(),
+            tile=box,
+            offset=0,
+            dtype=np.min_scalar_type(len(geometries)),
+            fill=len(geometries),
+            env=env,
+            **rasterize_kwargs,
+        )
+    else:
+        from dask.array import from_array, map_blocks
+
+        chunks, tiles_array, geom_array = prepare_for_dask(
+            obj, geometries, xdim=xdim, ydim=ydim, geoms_rechunk_size=geoms_rechunk_size
+        )
+        # DaskGeoDataFrame.len() computes!
+        num_geoms = geom_array.size
+        # with dask, we use 0 as a fill value and replace it later
+        dtype = np.min_scalar_type(num_geoms)
+        # add 1 to the offset, to account for 0 as fill value
+        npoffsets = np.cumsum(np.array([0, *geom_array.chunks[0][:-1]])) + 1
+        offsets = from_array(npoffsets, chunks=1)
+
+        rasterized = map_blocks(
+            dask_rasterize_wrapper,
+            geom_array[:, np.newaxis, np.newaxis],
+            tiles_array[np.newaxis, :, :],
+            offsets[:, np.newaxis, np.newaxis],
+            chunks=((1,) * geom_array.numblocks[0], chunks[0], chunks[1]),
+            meta=np.array([], dtype=dtype),
+            fill=0,  # good identity value for both sum & replace.
+            **rasterize_kwargs,
+            dtype_=dtype,
+        )
+        if merge_alg is MergeAlg.replace:
+            rasterized = rasterized.max(axis=0)
+        elif merge_alg is MergeAlg.add:
+            rasterized = rasterized.sum(axis=0)
+
+        # and reduce every other value by 1
+        rasterized = rasterized.map_blocks(partial(replace_values, to=num_geoms))
+
+    return xr.DataArray(
+        dims=(ydim, xdim),
+        data=rasterized,
+        coords=xr.Coordinates(
+            coords={
+                xdim: obj.coords[xdim],
+                ydim: obj.coords[ydim],
+                "spatial_ref": obj.spatial_ref,
+                # TODO: figure out how to propagate geometry array
+                # "geometry": geom_array,
+            },
+            indexes={xdim: obj.xindexes[xdim], ydim: obj.xindexes[ydim]},
+        ),
+        name="rasterized",
+    )
+
+
+def replace_values(array: np.ndarray, to, *, from_=0) -> np.ndarray:
+    mask = array == from_
+    array[~mask] -= 1
+    array[mask] = to
+    return array
+
+
+# ===========> geometry_mask
+
+
+def dask_mask_wrapper(
+    geom_array: np.ndarray,
+    tile_array: np.ndarray,
+    *,
+    all_touched: bool,
+    invert: bool,
+    env: rio.Env | None = None,
+) -> np.ndarray[Any, np.dtype[np.bool_]]:
+    tile = tile_array.item()
+
+    return np_geometry_mask(
+        geom_array[:, 0, 0].tolist(),
+        tile=tile,
+        all_touched=all_touched,
+        invert=invert,
+        env=env,
+    )[np.newaxis, :, :]
+
+
+def np_geometry_mask(
+    geometries: Sequence[Any],
+    *,
+    tile,
+    env: rio.Env | None = None,
+    clear_cache: bool = False,
+    **kwargs,
+) -> np.ndarray[Any, np.dtype[np.bool_]]:
+    # From https://rasterio.readthedocs.io/en/latest/api/rasterio.features.html#rasterio.features.rasterize
+    # The out array will be copied and additional temporary raster memory equal to 2x the smaller of out data
+    # or GDAL’s max cache size (controlled by GDAL_CACHEMAX, default is 5% of the computer’s physical memory) is required.
+    # If GDAL max cache size is smaller than the output data, the array of shapes will be iterated multiple times.
+    # Performance is thus a linear function of buffer size. For maximum speed, ensure that GDAL_CACHEMAX
+    # is larger than the size of out or out_shape.
+    if env is None:
+        # out_size = np.bool_.itemsize * math.prod(tile.shape)
+        # env = rio.Env(GDAL_CACHEMAX=1.2 * out_size)
+        # FIXME: figure out a good default
+        env = rio.Env()
+    with env:
+        res = geometry_mask(geometries, out_shape=tile.shape, transform=tile.affine, **kwargs)
+        if clear_cache:
+            with rio.Env(GDAL_CACHEMAX=0):
+                try:
+                    from osgeo import gdal
+
+                    # attempt to force-clear the GDAL cache
+                    assert gdal.GetCacheMax() == 0
+                except ImportError:
+                    pass
+    assert res.shape == tile.shape
+    return res
+
+
+def geometry_clip(
+    obj: xr.Dataset | xr.DataArray,
+    geometries: gpd.GeoDataFrame | dask_geopandas.GeoDataFrame,
+    *,
+    xdim="x",
+    ydim="y",
+    all_touched: bool = False,
+    invert: bool = False,
+    geoms_rechunk_size: int | None = None,
+    env: rio.Env | None = None,
+) -> xr.DataArray:
+    """
+    Dask-ified version of rioxarray.clip
+
+    Parameters
+    ----------
+    obj : xr.DataArray | xr.Dataset
+        Xarray object used to extract the grid
+    geometries: GeoDataFrame | DaskGeoDataFrame
+        Geometries used for clipping
+    xdim: str
+        Name of the "x" dimension on ``obj``.
+    ydim: str
+        Name of the "y" dimension on ``obj``
+    all_touched: bool
+        Passed to rasterio
+    invert: bool
+        Whether to preserve values inside the geometry.
+    geoms_rechunk_size: int | None = None,
+        Chunksize for geometry dimension of the output.
+    env: rasterio.Env
+        Rasterio Environment configuration. For example, use set ``GDAL_CACHEMAX`
+        by passing ``env = rio.Env(GDAL_CACHEMAX=100 * 1e6)``.
+
+    Returns
+    -------
+    DataArray
+        3D dataarray with coverage fraction. The additional dimension is "geometry".
+    """
+    invert = not invert  # rioxarray clip convention -> rasterio geometry_mask convention
+    if xdim not in obj.dims or ydim not in obj.dims:
+        raise ValueError(f"Received {xdim=!r}, {ydim=!r} but obj.dims={tuple(obj.dims)}")
+    box = obj.odc.geobox
+    geometry_mask_kwargs = dict(all_touched=all_touched, invert=invert)
+
+    if is_in_memory(obj=obj, geometries=geometries):
+        geom_array = geometries.to_numpy().squeeze(axis=1)
+        mask = np_geometry_mask(geom_array.tolist(), tile=box, env=env, **geometry_mask_kwargs)
+    else:
+        from dask.array import map_blocks
+
+        chunks, tiles_array, geom_array = prepare_for_dask(
+            obj, geometries, xdim=xdim, ydim=ydim, geoms_rechunk_size=geoms_rechunk_size
+        )
+        mask = map_blocks(
+            dask_mask_wrapper,
+            geom_array[:, np.newaxis, np.newaxis],
+            tiles_array[np.newaxis, :, :],
+            chunks=((1,) * geom_array.numblocks[0], chunks[0], chunks[1]),
+            meta=np.array([], dtype=bool),
+            **geometry_mask_kwargs,
+        )
+        mask = mask.any(axis=0)
+
+    mask_da = xr.DataArray(
+        dims=(ydim, xdim),
+        data=mask,
+        coords=xr.Coordinates(
+            coords={
+                xdim: obj.coords[xdim],
+                ydim: obj.coords[ydim],
+                "spatial_ref": obj.spatial_ref,
+            },
+            indexes={xdim: obj.xindexes[xdim], ydim: obj.xindexes[ydim]},
+        ),
+    )
+    return obj.where(mask_da)
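Similarly, a sketch of the `rasterize` and `geometry_clip` entry points defined above (hypothetical inputs; `odc-geo`, `rasterio`, and `rioxarray` are assumed to be installed):

```python
import geopandas as gpd
import rioxarray  # open_rasterio attaches spatial_ref; odc-geo supplies the .odc.geobox accessor used above
from rasterix.rasterize.rasterio import geometry_clip, rasterize

da = rioxarray.open_rasterio("example.tif").squeeze("band", drop=True)
polygons = gpd.read_file("fields.gpkg").to_crs(da.rio.crs)

labels = rasterize(da, polygons)       # integer code per pixel; len(polygons) marks uncovered cells
clipped = geometry_clip(da, polygons)  # values kept inside the polygons, NaN elsewhere
```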
rasterix/rasterize/utils.py
ADDED
@@ -0,0 +1,68 @@
+from __future__ import annotations
+
+from typing import TYPE_CHECKING
+
+import geopandas as gpd
+import numpy as np
+import xarray as xr
+from odc.geo.geobox import GeoboxTiles
+
+if TYPE_CHECKING:
+    import dask.array
+    import dask_geopandas
+
+
+def tiles_to_array(tiles: GeoboxTiles) -> np.ndarray:
+    shape = tiles.shape
+    array = np.empty(shape=(shape.y, shape.x), dtype=object)
+    for i in range(shape.x):
+        for j in range(shape.y):
+            array[j, i] = tiles[j, i]
+
+    assert array.shape == tiles.shape
+    return array
+
+
+def is_in_memory(*, obj, geometries) -> bool:
+    return not obj.chunks and isinstance(geometries, gpd.GeoDataFrame)
+
+
+def geometries_as_dask_array(
+    geometries: gpd.GeoDataFrame | dask_geopandas.GeoDataFrame,
+) -> dask.array.Array:
+    from dask.array import from_array
+
+    if isinstance(geometries, gpd.GeoDataFrame):
+        return from_array(geometries.geometry.to_numpy(), chunks=-1)
+    else:
+        divisions = geometries.divisions
+        if any(d is None for d in divisions):
+            print("computing current divisions, this may be expensive.")
+            divisions = geometries.compute_current_divisions()
+        chunks = np.diff(divisions).tolist()
+        chunks[-1] += 1
+        return geometries.to_dask_array(lengths=chunks).squeeze(axis=1)
+
+
+def prepare_for_dask(
+    obj: xr.Dataset | xr.DataArray,
+    geometries,
+    *,
+    xdim: str,
+    ydim: str,
+    geoms_rechunk_size: int | None,
+):
+    from dask.array import from_array
+
+    box = obj.odc.geobox
+
+    chunks = (
+        obj.chunksizes.get(ydim, obj.sizes[ydim]),
+        obj.chunksizes.get(xdim, obj.sizes[ydim]),
+    )
+    tiles = GeoboxTiles(box, tile_shape=chunks)
+    tiles_array = from_array(tiles_to_array(tiles), chunks=(1, 1))
+    geom_array = geometries_as_dask_array(geometries)
+    if geoms_rechunk_size is not None:
+        geom_array = geom_array.rechunk({0: geoms_rechunk_size})
+    return chunks, tiles_array, geom_array
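A quick sketch of `geometries_as_dask_array`, which the `exact` and `rasterio` modules use to turn a geometry column into a 1-D dask array (the two boxes are purely illustrative):

```python
import geopandas as gpd
from shapely.geometry import box
from rasterix.rasterize.utils import geometries_as_dask_array

gdf = gpd.GeoDataFrame(geometry=[box(0, 0, 1, 1), box(1, 1, 2, 2)], crs="EPSG:4326")
geoms = geometries_as_dask_array(gdf)  # an in-memory GeoDataFrame becomes a single-chunk dask array
print(geoms.shape, geoms.chunks)       # (2,) ((2,),)
```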
rasterix/rioxarray_compat.py
ADDED
@@ -0,0 +1,224 @@
+# Code adapted from `rioxarray` under the terms of the Apache-2.0 license reproduced below
+# Copyright (c) 2019-2023, Corteva Agriscience™
+# All rights reserved.
+#
+# Licensed under the Apache License, Version 2.0 (the "License");
+# you may not use this file except in compliance with the License.
+# You may obtain a copy of the License at
+#
+# http://www.apache.org/licenses/LICENSE-2.0
+#
+# Unless required by applicable law or agreed to in writing, software
+# distributed under the License is distributed on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
+# See the License for the specific language governing permissions and
+# limitations under the License.
+#
+#
+# Apache License
+# Version 2.0, January 2004
+# http://www.apache.org/licenses/
+#
+# TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION
+#
+# 1. Definitions.
+#
+# "License" shall mean the terms and conditions for use, reproduction,
+# and distribution as defined by Sections 1 through 9 of this document.
+#
+# "Licensor" shall mean the copyright owner or entity authorized by
+# the copyright owner that is granting the License.
+#
+# "Legal Entity" shall mean the union of the acting entity and all
+# other entities that control, are controlled by, or are under common
+# control with that entity. For the purposes of this definition,
+# "control" means (i) the power, direct or indirect, to cause the
+# direction or management of such entity, whether by contract or
+# otherwise, or (ii) ownership of fifty percent (50%) or more of the
+# outstanding shares, or (iii) beneficial ownership of such entity.
+#
+# "You" (or "Your") shall mean an individual or Legal Entity
+# exercising permissions granted by this License.
+#
+# "Source" form shall mean the preferred form for making modifications,
+# including but not limited to software source code, documentation
+# source, and configuration files.
+#
+# "Object" form shall mean any form resulting from mechanical
+# transformation or translation of a Source form, including but
+# not limited to compiled object code, generated documentation,
+# and conversions to other media types.
+#
+# "Work" shall mean the work of authorship, whether in Source or
+# Object form, made available under the License, as indicated by a
+# copyright notice that is included in or attached to the work
+# (an example is provided in the Appendix below).
+#
+# "Derivative Works" shall mean any work, whether in Source or Object
+# form, that is based on (or derived from) the Work and for which the
+# editorial revisions, annotations, elaborations, or other modifications
+# represent, as a whole, an original work of authorship. For the purposes
+# of this License, Derivative Works shall not include works that remain
+# separable from, or merely link (or bind by name) to the interfaces of,
+# the Work and Derivative Works thereof.
+#
+# "Contribution" shall mean any work of authorship, including
+# the original version of the Work and any modifications or additions
+# to that Work or Derivative Works thereof, that is intentionally
+# submitted to Licensor for inclusion in the Work by the copyright owner
+# or by an individual or Legal Entity authorized to submit on behalf of
+# the copyright owner. For the purposes of this definition, "submitted"
+# means any form of electronic, verbal, or written communication sent
+# to the Licensor or its representatives, including but not limited to
+# communication on electronic mailing lists, source code control systems,
+# and issue tracking systems that are managed by, or on behalf of, the
+# Licensor for the purpose of discussing and improving the Work, but
+# excluding communication that is conspicuously marked or otherwise
+# designated in writing by the copyright owner as "Not a Contribution."
+#
+# "Contributor" shall mean Licensor and any individual or Legal Entity
+# on behalf of whom a Contribution has been received by Licensor and
+# subsequently incorporated within the Work.
+#
+# 2. Grant of Copyright License. Subject to the terms and conditions of
+# this License, each Contributor hereby grants to You a perpetual,
+# worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+# copyright license to reproduce, prepare Derivative Works of,
+# publicly display, publicly perform, sublicense, and distribute the
+# Work and such Derivative Works in Source or Object form.
+#
+# 3. Grant of Patent License. Subject to the terms and conditions of
+# this License, each Contributor hereby grants to You a perpetual,
+# worldwide, non-exclusive, no-charge, royalty-free, irrevocable
+# (except as stated in this section) patent license to make, have made,
+# use, offer to sell, sell, import, and otherwise transfer the Work,
+# where such license applies only to those patent claims licensable
+# by such Contributor that are necessarily infringed by their
+# Contribution(s) alone or by combination of their Contribution(s)
+# with the Work to which such Contribution(s) was submitted. If You
+# institute patent litigation against any entity (including a
+# cross-claim or counterclaim in a lawsuit) alleging that the Work
+# or a Contribution incorporated within the Work constitutes direct
+# or contributory patent infringement, then any patent licenses
+# granted to You under this License for that Work shall terminate
+# as of the date such litigation is filed.
+#
+# 4. Redistribution. You may reproduce and distribute copies of the
+# Work or Derivative Works thereof in any medium, with or without
+# modifications, and in Source or Object form, provided that You
+# meet the following conditions:
+#
+# (a) You must give any other recipients of the Work or
+# Derivative Works a copy of this License; and
+#
+# (b) You must cause any modified files to carry prominent notices
+# stating that You changed the files; and
+#
+# (c) You must retain, in the Source form of any Derivative Works
+# that You distribute, all copyright, patent, trademark, and
+# attribution notices from the Source form of the Work,
+# excluding those notices that do not pertain to any part of
+# the Derivative Works; and
+#
+# (d) If the Work includes a "NOTICE" text file as part of its
+# distribution, then any Derivative Works that You distribute must
+# include a readable copy of the attribution notices contained
+# within such NOTICE file, excluding those notices that do not
+# pertain to any part of the Derivative Works, in at least one
+# of the following places: within a NOTICE text file distributed
+# as part of the Derivative Works; within the Source form or
+# documentation, if provided along with the Derivative Works; or,
+# within a display generated by the Derivative Works, if and
+# wherever such third-party notices normally appear. The contents
+# of the NOTICE file are for informational purposes only and
+# do not modify the License. You may add Your own attribution
+# notices within Derivative Works that You distribute, alongside
+# or as an addendum to the NOTICE text from the Work, provided
+# that such additional attribution notices cannot be construed
+# as modifying the License.
+#
+# You may add Your own copyright statement to Your modifications and
+# may provide additional or different license terms and conditions
+# for use, reproduction, or distribution of Your modifications, or
+# for any such Derivative Works as a whole, provided Your use,
+# reproduction, and distribution of the Work otherwise complies with
+# the conditions stated in this License.
+#
+# 5. Submission of Contributions. Unless You explicitly state otherwise,
+# any Contribution intentionally submitted for inclusion in the Work
+# by You to the Licensor shall be under the terms and conditions of
+# this License, without any additional terms or conditions.
+# Notwithstanding the above, nothing herein shall supersede or modify
+# the terms of any separate license agreement you may have executed
+# with Licensor regarding such Contributions.
+#
+# 6. Trademarks. This License does not grant permission to use the trade
+# names, trademarks, service marks, or product names of the Licensor,
+# except as required for reasonable and customary use in describing the
+# origin of the Work and reproducing the content of the NOTICE file.
+#
+# 7. Disclaimer of Warranty. Unless required by applicable law or
+# agreed to in writing, Licensor provides the Work (and each
+# Contributor provides its Contributions) on an "AS IS" BASIS,
+# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
+# implied, including, without limitation, any warranties or conditions
+# of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A
+# PARTICULAR PURPOSE. You are solely responsible for determining the
+# appropriateness of using or redistributing the Work and assume any
+# risks associated with Your exercise of permissions under this License.
+#
+# 8. Limitation of Liability. In no event and under no legal theory,
+# whether in tort (including negligence), contract, or otherwise,
+# unless required by applicable law (such as deliberate and grossly
+# negligent acts) or agreed to in writing, shall any Contributor be
+# liable to You for damages, including any direct, indirect, special,
+# incidental, or consequential damages of any character arising as a
+# result of this License or out of the use or inability to use the
+# Work (including but not limited to damages for loss of goodwill,
+# work stoppage, computer failure or malfunction, or any and all
+# other commercial damages or losses), even if such Contributor
+# has been advised of the possibility of such damages.
+#
+# 9. Accepting Warranty or Additional Liability. While redistributing
+# the Work or Derivative Works thereof, You may choose to offer,
+# and charge a fee for, acceptance of support, warranty, indemnity,
+# or other liability obligations and/or rights consistent with this
+# License. However, in accepting such obligations, You may act only
+# on Your own behalf and on Your sole responsibility, not on behalf
+# of any other Contributor, and only if You agree to indemnify,
+# defend, and hold each Contributor harmless for any liability
+# incurred by, or claims asserted against, such Contributor by reason
+# of your accepting any such warranty or additional liability.
+#
+# END OF TERMS AND CONDITIONS
+
+from xarray.core.types import T_Xarray
+
+
+def guess_dims(obj: T_Xarray) -> tuple[str, str]:
+    # Determine the spatial dimensions of the `xarray.DataArray`
+    if "x" in obj.dims and "y" in obj.dims:
+        x_dim = "x"
+        y_dim = "y"
+    elif "longitude" in obj.dims and "latitude" in obj.dims:
+        x_dim = "longitude"
+        y_dim = "latitude"
+    else:
+        # look for coordinates with CF attributes
+        for coord in obj.coords:
+            # make sure to only look in 1D coordinates
+            # that has the same dimension name as the coordinate
+            if obj.coords[coord].dims != (coord,):
+                continue
+            if (obj.coords[coord].attrs.get("axis", "").upper() == "X") or (
+                obj.coords[coord].attrs.get("standard_name", "").lower()
+                in ("longitude", "projection_x_coordinate")
+            ):
+                x_dim = coord
+            elif (obj.coords[coord].attrs.get("axis", "").upper() == "Y") or (
+                obj.coords[coord].attrs.get("standard_name", "").lower()
+                in ("latitude", "projection_y_coordinate")
+            ):
+                y_dim = coord
+
+    return x_dim, y_dim
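The `guess_dims` helper above mirrors rioxarray's dimension detection: dims named x/y win, then longitude/latitude, then 1-D coordinates with CF `axis` or `standard_name` attributes. A tiny sketch:

```python
import numpy as np
import xarray as xr
from rasterix.rioxarray_compat import guess_dims

ds = xr.Dataset(coords={"longitude": np.arange(10.0), "latitude": np.arange(5.0)})
print(guess_dims(ds))  # ('longitude', 'latitude')
```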
rasterix-0.1a2.dist-info/METADATA
ADDED
@@ -0,0 +1,81 @@
+Metadata-Version: 2.4
+Name: rasterix
+Version: 0.1a2
+Summary: Raster extensions for Xarray
+License-Expression: Apache-2.0
+License-File: LICENSE
+Keywords: xarray
+Classifier: Development Status :: 4 - Beta
+Classifier: Natural Language :: English
+Classifier: Operating System :: OS Independent
+Classifier: Programming Language :: Python
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Programming Language :: Python :: 3.13
+Requires-Python: >=3.10
+Requires-Dist: affine
+Requires-Dist: numpy>=2
+Requires-Dist: pandas>=2
+Requires-Dist: xarray>=2025
+Provides-Extra: dask
+Requires-Dist: dask-geopandas; extra == 'dask'
+Provides-Extra: exactextract
+Requires-Dist: exactextract; extra == 'exactextract'
+Provides-Extra: rasterize
+Requires-Dist: odc-geo; extra == 'rasterize'
+Requires-Dist: rasterio; extra == 'rasterize'
+Requires-Dist: rioxarray; extra == 'rasterize'
+Provides-Extra: test
+Requires-Dist: dask-geopandas; extra == 'test'
+Requires-Dist: exactextract; extra == 'test'
+Requires-Dist: geodatasets; extra == 'test'
+Requires-Dist: netcdf4; extra == 'test'
+Requires-Dist: odc-geo; extra == 'test'
+Requires-Dist: rasterio; extra == 'test'
+Requires-Dist: rioxarray; extra == 'test'
+Description-Content-Type: text/markdown
+
+# rasterix: Raster tricks for Xarray
+
+<img src="rasterix.png" width="300">
+
+This WIP project contains tools to make it easier to analyze raster data with Xarray.
+
+The intent is to provide reusable building blocks for the many sub-ecosystems around: e.g. rioxarray, odc-geo, etc.
+
+It currently has two pieces.
+
+## 1. RasterIndex
+
+See `src/rasterix/raster_index.py` and `notebooks/raster_index.ipynb` for a brief demo.
+
+## 2. Dask-aware rasterization wrappers
+
+See `src/rasterix/rasterize.py` for dask-aware wrappers around [`exactextract`](https://github.com/dcherian/rasterix/blob/ec3f51e60e25aa312e6f48c4b22f91bec70413ed/rasterize.py#L165), [`rasterio.features.rasterize`](https://github.com/dcherian/rasterix/blob/ec3f51e60e25aa312e6f48c4b22f91bec70413ed/rasterize.py#L307), and [`rasterio.features.geometry_mask`](https://github.com/dcherian/rasterix/blob/ec3f51e60e25aa312e6f48c4b22f91bec70413ed/rasterize.py#L472).
+
+This code is likely to move elsewhere!
+
+## Trying it out
+
+### PyPI
+
+`rasterix` alpha releases are available on pypi
+
+```
+pip install rasterix
+```
+
+### Installing and testing from source
+
+1. Clone the repo
+```
+git remote add upstream git@github.com:dcherian/rasterix.git
+cd rasterix
+```
+1. [Install hatch](https://hatch.pypa.io/1.12/install/)
+1. Run the tests
+```
+hatch env run --env test.py3.13 run-pytest  # Run the tests without coverage reports
+hatch env run --env test.py3.13 run-coverage-html  # Run the tests with an html coverage report
+```
rasterix-0.1a2.dist-info/RECORD
ADDED
@@ -0,0 +1,12 @@
+rasterix/__init__.py,sha256=iSC54A4b-7zgxtKV0CUok0O2-ApGtzVA-6hWG3a5AAA,283
+rasterix/_version.py,sha256=GXl_zMcYDzgVFuUwKX58RZpTAyqqGinrPO2magm0o-c,514
+rasterix/raster_index.py,sha256=5tOG6Sf43Z1DvE_AASxOC4jzkxFCBoIMc97a9qqMqgI,16264
+rasterix/rioxarray_compat.py,sha256=o32UBkWBpNhmC35ar0Ozn2QszpWdPvdDDXif3Mq2ZKg,12428
+rasterix/rasterize/__init__.py,sha256=47DEQpj8HBSa-_TImW-5JCeuQeRkm5NMpJWZG3hSuFU,0
+rasterix/rasterize/exact.py,sha256=Lbl9dfbXInhMqCP_pA91QATt9UywvQstk7sRI7pUqZQ,5623
+rasterix/rasterize/rasterio.py,sha256=TTaS8fJz9mQJ-o_gGs3Cf8NUfTKMTQRbCg8WpSRkWig,11275
+rasterix/rasterize/utils.py,sha256=lzTSraJDBEvfzn7S0jh4o73i6r9VTdQb1dA4hhNceUA,1961
+rasterix-0.1a2.dist-info/METADATA,sha256=9GC9tTVl-4bMnzsctAauJcrXyzHr3GdtcrUkR92ivAc,2830
+rasterix-0.1a2.dist-info/WHEEL,sha256=qtCwoSJWgHk21S1Kb4ihdzI2rlJ1ZKaIurTj_ngOhyQ,87
+rasterix-0.1a2.dist-info/licenses/LICENSE,sha256=QFnsASMx8_yBNbrS7GVOhJ5CglGsLMj83Rn61uWyMs8,10265
+rasterix-0.1a2.dist-info/RECORD,,
rasterix-0.1a1.dist-info/METADATA
DELETED
@@ -1,26 +0,0 @@
-Metadata-Version: 2.4
-Name: rasterix
-Version: 0.1a1
-Summary: Raster extensions for Xarray
-License-Expression: Apache-2.0
-Keywords: xarray
-Classifier: Development Status :: 4 - Beta
-Classifier: Natural Language :: English
-Classifier: Operating System :: OS Independent
-Classifier: Programming Language :: Python
-Classifier: Programming Language :: Python :: 3.10
-Classifier: Programming Language :: Python :: 3.11
-Classifier: Programming Language :: Python :: 3.12
-Classifier: Programming Language :: Python :: 3.13
-Requires-Python: >=3.10
-Description-Content-Type: text/markdown
-License-File: LICENSE
-Requires-Dist: affine
-Requires-Dist: pandas>=2
-Requires-Dist: numpy>=2
-Requires-Dist: xarray>=2025
-Dynamic: license-file
-
-# rasterix: Raster tricks for Xarray
-
-<img src="rasterix.png" width="400">
rasterix-0.1a1.dist-info/RECORD
DELETED
@@ -1,8 +0,0 @@
-rasterix/__init__.py,sha256=3-ipA2RgFW3FZ_uOr0_CagEw8p8JTMWepIIg3tZCSjY,253
-rasterix/_version.py,sha256=oixvE9DVoe4z2vLWfAweh5smrXr9lPfc9uA5ZFJvxvo,21
-rasterix/raster_index.py,sha256=Lq-X2F5Zrz5dR2odKvUuG-zYk0nwZ6bBnT-pkRVxJVQ,14976
-rasterix-0.1a1.dist-info/licenses/LICENSE,sha256=QFnsASMx8_yBNbrS7GVOhJ5CglGsLMj83Rn61uWyMs8,10265
-rasterix-0.1a1.dist-info/METADATA,sha256=0HsuR9lB_sx5wDg7nMWcGcc-ryDajKLGsnAQKQrl8FU,798
-rasterix-0.1a1.dist-info/WHEEL,sha256=CmyFI0kx5cdEMTLiONQRbGQwjIoR1aIYB7eCAQ4KPJ0,91
-rasterix-0.1a1.dist-info/top_level.txt,sha256=xrxwvEojFP_KT72cB7W-Bax_gPoV4e7Jw_4c41uiaj0,9
-rasterix-0.1a1.dist-info/RECORD,,
rasterix-0.1a1.dist-info/top_level.txt
DELETED
@@ -1 +0,0 @@
-rasterix
{rasterix-0.1a1.dist-info → rasterix-0.1a2.dist-info}/licenses/LICENSE
File without changes