yirgacheffe 1.9.2__tar.gz → 1.9.3__tar.gz
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Potentially problematic release: this version of yirgacheffe might be problematic.
- yirgacheffe-1.9.3/PKG-INFO +118 -0
- yirgacheffe-1.9.3/README.md +71 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/pyproject.toml +1 -1
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_datatypes.py +1 -1
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_intersection.py +5 -5
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_openers.py +2 -2
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_operators.py +13 -13
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_parallel_operators.py +3 -3
- yirgacheffe-1.9.3/tests/test_reduce.py +23 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_rescaling.py +2 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_union.py +7 -7
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/__init__.py +4 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_operators.py +1 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/rounding.py +2 -1
- yirgacheffe-1.9.3/yirgacheffe.egg-info/PKG-INFO +118 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe.egg-info/SOURCES.txt +1 -0
- yirgacheffe-1.9.2/PKG-INFO +0 -600
- yirgacheffe-1.9.2/README.md +0 -553
- yirgacheffe-1.9.2/yirgacheffe.egg-info/PKG-INFO +0 -600
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/LICENSE +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/MANIFEST.in +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/setup.cfg +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_area.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_auto_windowing.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_constants.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_group.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_h3layer.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_multiband.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_nodata.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_optimisation.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_pickle.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_pixel_coord.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_projection.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_raster.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_rounding.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_save_with_window.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_sum_with_window.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_uniform_area_layer.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_vectors.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/tests/test_window.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_backends/__init__.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_backends/enumeration.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_backends/mlx.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_backends/numpy.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/_core.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/constants.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/__init__.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/area.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/base.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/constant.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/group.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/h3layer.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/rasters.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/rescaled.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/layers/vectors.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/operators.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/py.typed +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe/window.py +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe.egg-info/dependency_links.txt +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe.egg-info/entry_points.txt +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe.egg-info/requires.txt +0 -0
- {yirgacheffe-1.9.2 → yirgacheffe-1.9.3}/yirgacheffe.egg-info/top_level.txt +0 -0
@@ -0,0 +1,118 @@
+Metadata-Version: 2.4
+Name: yirgacheffe
+Version: 1.9.3
+Summary: Abstraction of gdal datasets for doing basic math operations
+Author-email: Michael Dales <mwd24@cam.ac.uk>
+License-Expression: ISC
+Project-URL: Homepage, https://yirgacheffe.org/
+Project-URL: Repository, https://github.com/quantifyearth/yirgacheffe.git
+Project-URL: Issues, https://github.com/quantifyearth/yirgacheffe/issues
+Project-URL: Changelog, https://yirgacheffe.org/latest/changelog/
+Keywords: gdal,gis,geospatial,declarative
+Classifier: Development Status :: 5 - Production/Stable
+Classifier: Intended Audience :: Science/Research
+Classifier: Programming Language :: Python :: 3
+Classifier: Programming Language :: Python :: 3.10
+Classifier: Programming Language :: Python :: 3.11
+Classifier: Programming Language :: Python :: 3.12
+Classifier: Topic :: Scientific/Engineering :: GIS
+Requires-Python: >=3.10
+Description-Content-Type: text/markdown
+License-File: LICENSE
+Requires-Dist: numpy<3.0,>=1.24
+Requires-Dist: gdal[numpy]<4.0,>=3.8
+Requires-Dist: scikit-image<1.0,>=0.20
+Requires-Dist: torch
+Requires-Dist: dill
+Requires-Dist: deprecation
+Requires-Dist: tomli
+Requires-Dist: h3
+Requires-Dist: pyproj
+Provides-Extra: mlx
+Requires-Dist: mlx; extra == "mlx"
+Provides-Extra: matplotlib
+Requires-Dist: matplotlib; extra == "matplotlib"
+Provides-Extra: dev
+Requires-Dist: mypy; extra == "dev"
+Requires-Dist: pylint; extra == "dev"
+Requires-Dist: pytest; extra == "dev"
+Requires-Dist: pytest-cov; extra == "dev"
+Requires-Dist: build; extra == "dev"
+Requires-Dist: twine; extra == "dev"
+Requires-Dist: mkdocs-material; extra == "dev"
+Requires-Dist: mkdocstrings-python; extra == "dev"
+Requires-Dist: mike; extra == "dev"
+Requires-Dist: mkdocs-gen-files; extra == "dev"
+Dynamic: license-file
+
+# Yirgacheffe: a declarative geospatial library for Python to make data-science with maps easier
+
+[](https://github.com/quantifyearth/yirgacheffe/actions)
+[](https://yirgacheffe.org)
+[](https://pypi.org/project/yirgacheffe/)
+
+
+## Overview
+
+Yirgacheffe is a declarative geospatial library, allowing you to operate on both raster and polygon geospatial datasets without having to do all the tedious book keeping around layer alignment or dealing with hardware concerns around memory or parallelism. you can load into memory safely.
+
+Example common use-cases:
+
+* Do the datasets overlap? Yirgacheffe will let you define either the intersection or the union of a set of different datasets, scaling up or down the area as required.
+* Rasterisation of vector layers: if you have a vector dataset then you can add that to your computation and yirgaceffe will rasterize it on demand, so you never need to store more data in memory than necessary.
+* Do the raster layers get big and take up large amounts of memory? Yirgacheffe will let you do simple numerical operations with layers directly and then worry about the memory management behind the scenes for you.
+* Parallelisation of operations over many CPU cores.
+* Built in support for optionally using GPUs via [MLX](https://ml-explore.github.io/mlx/build/html/index.html) support.
+
+## Installation
+
+Yirgacheffe is available via pypi, so can be installed with pip for example:
+
+```SystemShell
+$ pip install yirgacheffe
+```
+
+## Documentation
+
+The documentation can be found on [yirgacheffe.org](https://yirgacheffe.org/)
+
+## Simple examples:
+
+Here is how to do cloud removal from [Sentinel-2 data](https://browser.dataspace.copernicus.eu/?zoom=14&lat=6.15468&lng=38.20581&themeId=DEFAULT-THEME&visualizationUrl=U2FsdGVkX1944lrmeTJcaSsnoxNMp4oucN1AjklGUANHd2cRZWyXnepHvzpaOWzMhH8SrWQo%2BqrOvOnu6f9FeCMrS%2FDZmvjzID%2FoE1tbOCEHK8ohPXjFqYojeR9%2B82ri&datasetId=S2_L2A_CDAS&fromTime=2025-09-09T00%3A00%3A00.000Z&toTime=2025-09-09T23%3A59%3A59.999Z&layerId=1_TRUE_COLOR&demSource3D=%22MAPZEN%22&cloudCoverage=30&dateMode=SINGLE), using the [Scene Classification Layer](https://custom-scripts.sentinel-hub.com/custom-scripts/sentinel-2/scene-classification/) data:
+
+```python
+import yirgaceffe as yg
+
+with (
+    yg.read_raster("T37NCG_20250909T073609_B06_20m.jp2") as vre2,
+    yg.read_raster("T37NCG_20250909T073609_SCL_20m.jp2") as scl,
+):
+    is_cloud = (scl == 8) | (scl == 9) | (scl == 10) # various cloud types
+    is_shadow = (scl == 3)
+    is_bad = is_cloud | is_shadow
+
+    masked_vre2 = yg.where(is_bad, float("nan"), vre2)
+    masked_vre2.to_geotiff("vre2_cleaned.tif")
+```
+
+or a species' [Area of Habitat](https://www.sciencedirect.com/science/article/pii/S0169534719301892) calculation:
+
+```python
+import yirgaceffe as yg
+
+with (
+    yg.read_raster("habitats.tif") as habitat_map,
+    yg.read_raster('elevation.tif') as elevation_map,
+    yg.read_shape('species123.geojson') as range_map,
+):
+    refined_habitat = habitat_map.isin([...species habitat codes...])
+    refined_elevation = (elevation_map >= species_min) && (elevation_map <= species_max)
+    aoh = refined_habitat * refined_elevation * range_polygon * area_per_pixel_map
+    print(f'Area of habitat: {aoh.sum()}')
+```
+
+## Thanks
+
+Thanks to discussion and feedback from my colleagues, particularly Alison Eyres, Patrick Ferris, Amelia Holcomb, and Anil Madhavapeddy.
+
+Inspired by the work of Daniele Baisero in his AoH library.
@@ -0,0 +1,71 @@
(This hunk adds the new yirgacheffe-1.9.3/README.md; its 71 lines are identical to the README body embedded in the new PKG-INFO shown above.)
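The Area of Habitat example in the README above is closer to pseudocode than runnable Python: the module name is misspelt in the import, `&&` is not a Python operator, and `range_polygon`, `area_per_pixel_map`, `species_min`, and `species_max` are never defined. Below is a corrected sketch of the same calculation, using only calls that appear in this diff (`read_raster`, `read_shape`, `isin`, layer comparisons, `sum`), assuming that `&` is supported on layers alongside the `|` used in the cloud-removal example; the habitat codes, elevation limits, and the area-per-pixel raster are hypothetical inputs.

```python
import yirgacheffe as yg

# Hypothetical elevation limits for the species being assessed.
species_min, species_max = 100.0, 2500.0

with (
    yg.read_raster("habitats.tif") as habitat_map,
    yg.read_raster("elevation.tif") as elevation_map,
    yg.read_raster("area_per_pixel.tif") as area_per_pixel_map,  # hypothetical input raster
    yg.read_shape("species123.geojson") as range_map,
):
    refined_habitat = habitat_map.isin([401, 402])  # hypothetical habitat codes
    refined_elevation = (elevation_map >= species_min) & (elevation_map <= species_max)
    aoh = refined_habitat * refined_elevation * range_map * area_per_pixel_map
    print(f"Area of habitat: {aoh.sum()}")
```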
@@ -6,7 +6,7 @@ build-backend = "setuptools.build_meta"
 
 [project]
 name = "yirgacheffe"
-version = "1.9.2"
+version = "1.9.3"
 description = "Abstraction of gdal datasets for doing basic math operations"
 readme = "README.md"
 authors = [{ name = "Michael Dales", email = "mwd24@cam.ac.uk" }]
@@ -62,5 +62,5 @@ def test_float_to_int() -> None:
 comp.save(result)
 
 expected = backend.promote(np.array([[1, 2, 3, 4], [5, 6, 7, 8]]))
-actual =
+actual = result.read_array(0, 0, 4, 2)
 assert (expected == actual).all()
@@ -67,7 +67,7 @@ def test_find_intersection_with_vector_unbound() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(left=58, top=74, right=180, bottom=42)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 raster = RasterLayer(gdal_dataset_of_region(Area(left=-180.05, top=90.09, right=180.05, bottom=-90.09), 0.13))
 vector = VectorLayer.layer_from_file(path, None, None, None)
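Several hunks in this release replace `assert path.exists` with `assert path.exists()`. The unparenthesised form asserts on the bound method object rather than calling it, and a bound method is always truthy, so the old assertions could never fail. A minimal illustration (not from the package):

```python
from pathlib import Path

missing = Path("/no/such/file")
assert missing.exists        # passes: the bound method object is truthy even though the file is absent
assert not missing.exists()  # calling it correctly reports that the file is missing
```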
@@ -86,10 +86,10 @@ def test_find_intersection_with_vector_bound() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(left=58, top=74, right=180, bottom=42)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 raster = RasterLayer(gdal_dataset_of_region(Area(left=-180.05, top=90.09, right=180.05, bottom=-90.09), 0.13))
-vector = VectorLayer.
+vector = VectorLayer.layer_from_file_like(path, raster)
 assert vector.area != area
 
 layers = [raster, vector]
@@ -104,10 +104,10 @@ def test_find_intersection_with_vector_awkward_rounding() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(left=-90, top=45, right=90, bottom=-45)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 raster = RasterLayer(gdal_dataset_of_region(Area(left=-180, top=90, right=180, bottom=-90), 18.0))
-vector = VectorLayer.
+vector = VectorLayer.layer_from_file_like(path, raster)
 
 rounded_area = Area(left=-90, top=54, right=90, bottom=-54)
 assert vector.area == rounded_area
@@ -38,7 +38,7 @@ def test_open_raster_file_as_path() -> None:
 area = Area(-10, 10, 10, -10)
 dataset = gdal_dataset_of_region(area, 0.02, filename=path)
 dataset.Close()
-assert path.exists
+assert path.exists()
 
 with yg.read_raster(path) as layer:
 assert layer.area == area
@@ -156,7 +156,7 @@ def test_open_shape_like() -> None:
 assert os.path.exists(path)
 
 with yg.read_raster(path) as raster_layer:
-path =
+path = Path(tempdir) / "test.gpkg"
 area = Area(-10.0, 10.0, 10.0, 0.0)
 make_vectors_with_id(42, {area}, path)
 
@@ -78,18 +78,18 @@ def test_add_byte_layers_with_callback(skip, expected_steps) -> None:
 assert layer1.datatype == DataType.Byte
 assert layer2.datatype == DataType.Byte
 
-
+callback_positions: list[float] = []
 
 comp = layer1 + layer2
 comp.ystep = skip
-comp.save(result, callback=
+comp.save(result, callback=callback_positions.append)
 
 expected = data1 + data2
 actual = result.read_array(0, 0, 4, 2)
 
 assert (expected == actual).all()
 
-assert
+assert callback_positions == expected_steps
 
 def test_sub_byte_layers() -> None:
 data1 = np.array([[10, 20, 30, 40], [50, 60, 70, 80]])
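The callback hunks here and in tests/test_parallel_operators.py below exercise the progress-callback argument to `save()` and `parallel_save()`, which appears to be invoked with float progress values (the tests collect them into a `list[float]` and compare against `expected_steps`). A minimal usage sketch assembled only from calls that occur in this diff; the input file names are hypothetical:

```python
from yirgacheffe.layers import RasterLayer

layer1 = RasterLayer.layer_from_file("a.tif")   # hypothetical inputs
layer2 = RasterLayer.layer_from_file("b.tif")
result = RasterLayer.empty_raster_layer_like(layer1)

progress: list[float] = []
comp = layer1 + layer2
comp.save(result, callback=progress.append)     # progress values accumulate as the result is written
print(progress)
```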
@@ -150,7 +150,7 @@ def test_mult_float_layers() -> None:
 comp = layer1 * layer2
 comp.save(result)
 
-expected = backend.promote(data1) * backend.promote(data2)
+expected = backend.demote_array(backend.promote(data1) * backend.promote(data2))
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -168,7 +168,7 @@ def test_div_float_layers() -> None:
 comp = layer1 / layer2
 comp.save(result)
 
-expected = backend.promote(data1) / backend.promote(data2)
+expected = backend.demote_array(backend.promote(data1) / backend.promote(data2))
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -186,7 +186,7 @@ def test_floor_div_float_layers() -> None:
 comp = layer1 // layer2
 comp.save(result)
 
-expected = backend.promote(data1) // backend.promote(data2)
+expected = backend.demote_array(backend.promote(data1) // backend.promote(data2))
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -204,7 +204,7 @@ def test_remainder_float_layers() -> None:
 comp = layer1 % layer2
 comp.save(result)
 
-expected = backend.promote(data1) % backend.promote(data2)
+expected = backend.demote_array(backend.promote(data1) % backend.promote(data2))
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -264,7 +264,7 @@ def test_div_float_layer_by_const() -> None:
 comp = layer1 / 2.5
 comp.save(result)
 
-expected = backend.promote(data1) / 2.5
+expected = backend.demote_array(backend.promote(data1) / 2.5)
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -279,7 +279,7 @@ def test_floordiv_float_layer_by_const() -> None:
 comp = layer1 // 2.5
 comp.save(result)
 
-expected = backend.promote(data1) // 2.5
+expected = backend.demote_array(backend.promote(data1) // 2.5)
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -294,7 +294,7 @@ def test_remainder_float_layer_by_const() -> None:
 comp = layer1 % 2.5
 comp.save(result)
 
-expected = backend.promote(data1) % 2.5
+expected = backend.demote_array(backend.promote(data1) % 2.5)
 backend.eval_op(expected)
 
 actual = backend.demote_array(result.read_array(0, 0, 4, 2))
@@ -1546,7 +1546,7 @@ def test_raster_and_vector() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(-5.0, 5.0, 5.0, -5.0)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 vector = VectorLayer.layer_from_file(path, None, PixelScale(1.0, -1.0), yirgacheffe.WGS_84_PROJECTION)
 
@@ -1562,7 +1562,7 @@ def test_raster_and_vector_mixed_projection() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(-5.0, 5.0, 5.0, -5.0)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 vector = VectorLayer.layer_from_file(path, None, PixelScale(1.0, -1.0), yirgacheffe.WGS_84_PROJECTION)
 
@@ -1577,7 +1577,7 @@ def test_raster_and_vector_no_scale_on_vector() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(-5.0, 5.0, 5.0, -5.0)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 vector = VectorLayer.layer_from_file(path, None, None, None)
 
@@ -182,18 +182,18 @@ def test_parallel_with_different_skip(monkeypatch, skip, expected_steps) -> None
 layer2 = RasterLayer.layer_from_file(path2)
 result = RasterLayer.empty_raster_layer_like(layer1)
 
-
+callback_positions: list[float] = []
 
 comp = layer1 + layer2
 comp.ystep = skip
-comp.parallel_save(result, callback=
+comp.parallel_save(result, callback=callback_positions.append)
 
 expected = data1 + data2
 actual = result.read_array(0, 0, 4, 4)
 
 assert (expected == actual).all()
 
-assert
+assert callback_positions == expected_steps
 
 @pytest.mark.skipif(yirgacheffe._backends.BACKEND != "NUMPY", reason="Only applies for numpy")
 def test_parallel_equality(monkeypatch) -> None:
@@ -0,0 +1,23 @@
+from functools import reduce
+import operator
+
+import numpy as np
+
+from yirgacheffe.layers import RasterLayer
+from tests.helpers import gdal_dataset_with_data
+
+def test_add_similar_layers() -> None:
+    data = [
+        np.array([[1, 2, 3, 4], [5, 6, 7, 8]]),
+        np.array([[10, 20, 30, 40], [50, 60, 70, 80]]),
+        np.array([[100, 200, 300, 400], [500, 600, 700, 800]]),
+    ]
+
+    layers = [RasterLayer(gdal_dataset_with_data((0,0), 1.0, x)) for x in data]
+
+    summed_layers = reduce(operator.add, layers)
+    actual = summed_layers.read_array(0, 0, 4, 2)
+
+    expected = reduce(operator.add, data)
+
+    assert (expected == actual).all()
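The new tests/test_reduce.py above checks that layer expressions compose with Python's `functools.reduce`. The same pattern at user level might look like the sketch below; the file names are hypothetical, while `layer_from_file` and `sum()` are the calls used elsewhere in this diff and in the README:

```python
from functools import reduce
import operator

from yirgacheffe.layers import RasterLayer

# Hypothetical inputs: rasters that share a pixel scale and projection.
paths = ["density_2020.tif", "density_2021.tif", "density_2022.tif"]
layers = [RasterLayer.layer_from_file(p) for p in paths]

total = reduce(operator.add, layers)  # folds all layers into one expression
print(total.sum())
```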
@@ -170,6 +170,7 @@ def test_rescaled_up_in_operation() -> None:
 data1[4:8,0:4] = 1
 dataset1 = gdal_dataset_with_data((0, 0), 1.0, data1)
 raster1 = RasterLayer(dataset1)
+assert raster1.map_projection
 
 data2 = np.zeros((4, 4))
 data2[0:2,0:2] = 1
@@ -198,6 +199,7 @@ def test_rescaled_down_in_operation() -> None:
 data2[2:4,2:4] = 1
 dataset2 = gdal_dataset_with_data((0, 0), 2.0, data2)
 raster2 = RasterLayer(dataset2)
+assert raster2.map_projection
 
 rescaled = RescaledRasterLayer(raster1, raster2.map_projection)
 
@@ -73,7 +73,7 @@ def test_find_union_with_vector_unbound() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(left=58, top=74, right=180, bottom=42)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 raster = RasterLayer(gdal_dataset_of_region(Area(left=59.93, top=70.07, right=170.04, bottom=44.98), 0.13))
 vector = VectorLayer.layer_from_file(path, None, None, None)
@@ -93,10 +93,10 @@ def test_find_union_with_vector_bound() -> None:
 path = Path(tempdir) / "test.gpkg"
 area = Area(left=58, top=74, right=180, bottom=42)
 make_vectors_with_id(42, {area}, path)
-assert path.exists
+assert path.exists()
 
 raster = RasterLayer(gdal_dataset_of_region(Area(left=59.93, top=70.07, right=170.04, bottom=44.98), 0.13))
-vector = VectorLayer.
+vector = VectorLayer.layer_from_file_like(path, raster)
 assert vector.area != area
 
 layers = [raster, vector]
@@ -158,10 +158,10 @@ def test_set_union_superset(left_padding: int, right_padding: int, top_padding:
 superset = Area(-1 - left_padding, 1 + top_padding, 1 + right_padding, -1 - bottom_padding)
 layer.set_window_for_union(superset)
 assert layer.window == Window(
-(0 - left_padding) / pixel_density,
-(0 - top_padding) / pixel_density,
-(2 + left_padding + right_padding) / pixel_density,
-(2 + top_padding + bottom_padding) / pixel_density,
+round((0 - left_padding) / pixel_density),
+round((0 - top_padding) / pixel_density),
+round((2 + left_padding + right_padding) / pixel_density),
+round((2 + top_padding + bottom_padding) / pixel_density),
 )
 
 origin_after_pixel = layer.read_array(
@@ -17,4 +17,8 @@ from .constants import WGS_84_PROJECTION
 from .window import Area, MapProjection, Window
 from ._backends.enumeration import dtype as DataType
 
+from ._operators import where, minumum, maximum, clip, log, log2, log10, exp, exp2, nan_to_num, isin, \
+    floor, ceil  # pylint: disable=W0611
+from ._operators import abs, round  # pylint: disable=W0611,W0622
+
 gdal.UseExceptions()
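This change re-exports the helper functions from `_operators` at the package top level (note that `minumum` is spelt that way in the source). A minimal sketch of how they combine with the layer expressions shown in the README; the input file name is hypothetical, and the assumption that `clip` follows the NumPy argument order is mine, not taken from the package's documentation:

```python
import yirgacheffe as yg

with yg.read_raster("band.tif") as band:    # hypothetical input raster
    masked = yg.where(band < 0, 0, band)    # yg.where is used this way in the README
    capped = yg.clip(masked, 0, 10000)      # assumed NumPy-style (value, min, max) order
    capped.to_geotiff("band_clean.tif")
```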
@@ -1,4 +1,5 @@
 from __future__ import annotations
+from typing import Sequence
 
 import math
 import sys
@@ -40,7 +41,7 @@ def round_down_pixels(value: float, pixelscale: float) -> int:
 else:
 return math.floor(value)
 
-def are_pixel_scales_equal_enough(pixel_scales:
+def are_pixel_scales_equal_enough(pixel_scales: Sequence[PixelScale | None]) -> bool:
 # some layers (e.g., constant layers) have no scale, and always work, so filter
 # them out first
 cleaned_pixel_scales: list[PixelScale] = [x for x in pixel_scales if x is not None]
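The widened parameter type spells out that callers may pass a mix of concrete scales and `None` (constant layers have no scale, per the comment in the hunk). A minimal sketch of a call the new annotation describes; the import path for `PixelScale` is an assumption, though the tests in this diff construct it as `PixelScale(1.0, -1.0)`:

```python
from yirgacheffe.rounding import are_pixel_scales_equal_enough
from yirgacheffe.window import PixelScale  # assumed location, alongside Area and Window

scales = [PixelScale(1.0, -1.0), None, PixelScale(1.0, -1.0)]  # None stands in for a constant layer
assert are_pixel_scales_equal_enough(scales)
```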
@@ -0,0 +1,118 @@
(This hunk adds the new yirgacheffe-1.9.3/yirgacheffe.egg-info/PKG-INFO; its 118 lines are identical to the new top-level PKG-INFO shown at the start of this diff.)