mapchete-eo 2026.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. mapchete_eo/__init__.py +1 -0
  2. mapchete_eo/array/__init__.py +0 -0
  3. mapchete_eo/array/buffer.py +16 -0
  4. mapchete_eo/array/color.py +29 -0
  5. mapchete_eo/array/convert.py +163 -0
  6. mapchete_eo/base.py +653 -0
  7. mapchete_eo/blacklist.txt +175 -0
  8. mapchete_eo/cli/__init__.py +30 -0
  9. mapchete_eo/cli/bounds.py +22 -0
  10. mapchete_eo/cli/options_arguments.py +227 -0
  11. mapchete_eo/cli/s2_brdf.py +77 -0
  12. mapchete_eo/cli/s2_cat_results.py +130 -0
  13. mapchete_eo/cli/s2_find_broken_products.py +77 -0
  14. mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
  15. mapchete_eo/cli/s2_mask.py +71 -0
  16. mapchete_eo/cli/s2_mgrs.py +45 -0
  17. mapchete_eo/cli/s2_rgb.py +114 -0
  18. mapchete_eo/cli/s2_verify.py +129 -0
  19. mapchete_eo/cli/static_catalog.py +82 -0
  20. mapchete_eo/eostac.py +30 -0
  21. mapchete_eo/exceptions.py +87 -0
  22. mapchete_eo/image_operations/__init__.py +12 -0
  23. mapchete_eo/image_operations/blend_functions.py +579 -0
  24. mapchete_eo/image_operations/color_correction.py +136 -0
  25. mapchete_eo/image_operations/compositing.py +266 -0
  26. mapchete_eo/image_operations/dtype_scale.py +43 -0
  27. mapchete_eo/image_operations/fillnodata.py +130 -0
  28. mapchete_eo/image_operations/filters.py +319 -0
  29. mapchete_eo/image_operations/linear_normalization.py +81 -0
  30. mapchete_eo/image_operations/sigmoidal.py +114 -0
  31. mapchete_eo/io/__init__.py +37 -0
  32. mapchete_eo/io/assets.py +496 -0
  33. mapchete_eo/io/items.py +162 -0
  34. mapchete_eo/io/levelled_cubes.py +259 -0
  35. mapchete_eo/io/path.py +155 -0
  36. mapchete_eo/io/products.py +423 -0
  37. mapchete_eo/io/profiles.py +45 -0
  38. mapchete_eo/platforms/sentinel2/__init__.py +17 -0
  39. mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
  40. mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
  41. mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
  42. mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
  43. mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
  44. mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
  45. mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
  46. mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
  47. mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
  48. mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
  49. mapchete_eo/platforms/sentinel2/config.py +241 -0
  50. mapchete_eo/platforms/sentinel2/driver.py +43 -0
  51. mapchete_eo/platforms/sentinel2/masks.py +329 -0
  52. mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
  53. mapchete_eo/platforms/sentinel2/metadata_parser/base.py +56 -0
  54. mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py +135 -0
  55. mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
  56. mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py +639 -0
  57. mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
  58. mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
  59. mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
  60. mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
  61. mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +50 -0
  62. mapchete_eo/platforms/sentinel2/processing_baseline.py +163 -0
  63. mapchete_eo/platforms/sentinel2/product.py +747 -0
  64. mapchete_eo/platforms/sentinel2/source.py +114 -0
  65. mapchete_eo/platforms/sentinel2/types.py +114 -0
  66. mapchete_eo/processes/__init__.py +0 -0
  67. mapchete_eo/processes/config.py +51 -0
  68. mapchete_eo/processes/dtype_scale.py +112 -0
  69. mapchete_eo/processes/eo_to_xarray.py +19 -0
  70. mapchete_eo/processes/merge_rasters.py +239 -0
  71. mapchete_eo/product.py +323 -0
  72. mapchete_eo/protocols.py +61 -0
  73. mapchete_eo/search/__init__.py +14 -0
  74. mapchete_eo/search/base.py +285 -0
  75. mapchete_eo/search/config.py +113 -0
  76. mapchete_eo/search/s2_mgrs.py +313 -0
  77. mapchete_eo/search/stac_search.py +278 -0
  78. mapchete_eo/search/stac_static.py +197 -0
  79. mapchete_eo/search/utm_search.py +251 -0
  80. mapchete_eo/settings.py +25 -0
  81. mapchete_eo/sort.py +60 -0
  82. mapchete_eo/source.py +109 -0
  83. mapchete_eo/time.py +62 -0
  84. mapchete_eo/types.py +76 -0
  85. mapchete_eo-2026.2.0.dist-info/METADATA +91 -0
  86. mapchete_eo-2026.2.0.dist-info/RECORD +89 -0
  87. mapchete_eo-2026.2.0.dist-info/WHEEL +4 -0
  88. mapchete_eo-2026.2.0.dist-info/entry_points.txt +11 -0
  89. mapchete_eo-2026.2.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,114 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Optional, List, Callable, Dict, Any, Union
4
+ import warnings
5
+
6
+ from pydantic import model_validator
7
+ from pystac import Item
8
+
9
+ from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
10
+ from mapchete_eo.source import Source
11
+ from mapchete_eo.platforms.sentinel2.preconfigured_sources import (
12
+ DEPRECATED_ARCHIVES,
13
+ KNOWN_SOURCES,
14
+ )
15
+ from mapchete_eo.platforms.sentinel2.types import (
16
+ DataArchive,
17
+ MetadataArchive,
18
+ )
19
+ from mapchete_eo.platforms.sentinel2._mapper_registry import MAPPER_REGISTRIES
20
+
21
+
22
def known_collection_to_url(collection: str) -> str:
    """Resolve a known collection short name to its full collection URL.

    Unknown collection names are passed through unchanged.
    """
    source_config = KNOWN_SOURCES.get(collection)
    return collection if source_config is None else source_config["collection"]
26
+
27
+
28
class Sentinel2Source(Source):
    """All information required to consume Sentinel-2 products."""

    # extends base model with those properties
    # data_archive: optional backend holding the raster assets (see DataArchive Literal)
    data_archive: Optional[DataArchive] = None
    # metadata_archive: backend holding the product metadata XML; defaults to "roda"
    metadata_archive: MetadataArchive = "roda"

    @property
    def item_modifier_funcs(self) -> List[Callable]:
        # ID mapper (if any) runs before the STAC metadata mappers; None entries
        # (static catalogs have no ID mapper) are filtered out.
        return [
            func
            for func in (self.get_id_mapper(), *self.get_stac_metadata_mappers())
            if func is not None
        ]

    @model_validator(mode="before")
    def determine_data_source(cls, values: Dict[str, Any]) -> Dict[str, Any]:
        """Handles short names of sources."""
        # a bare string is shorthand for {"collection": <string>}
        if isinstance(values, str):
            values = dict(collection=values)
        collection = values.get("collection", None)
        if collection in KNOWN_SOURCES:
            # expand preconfigured settings (collection URL, archives, ...) into
            # the model fields
            values.update(KNOWN_SOURCES[collection])
        elif collection in DEPRECATED_ARCHIVES:
            warnings.warn(
                f"deprecated archive '{collection}' found",
                category=DeprecationWarning,
                stacklevel=2,
            )
            values.update(DEPRECATED_ARCHIVES[collection])
        return values

    @model_validator(mode="after")
    def verify_mappers(self) -> Sentinel2Source:
        # make sure all required mappers are registered
        # (each getter raises ValueError when no registered mapper matches)
        self.get_id_mapper()
        self.get_stac_metadata_mappers()
        self.get_s2metadata_mapper()
        return self

    def get_id_mapper(self) -> Union[Callable[[Item], Item], None]:
        """Return the item ID mapper registered for this collection.

        Returns None for static catalogs; raises ValueError when no mapper
        is registered for the collection.
        """
        # NOTE(review): catalog_type is presumably defined on the base Source
        # model — confirm against mapchete_eo.source.Source
        if self.catalog_type == "static":
            return None
        for key in MAPPER_REGISTRIES["ID"].keys():
            if self.collection == known_collection_to_url(key):
                return MAPPER_REGISTRIES["ID"][key]
        else:
            # for/else: only reached when no registry key matched
            raise ValueError(f"no ID mapper for {self.collection} found")

    def get_stac_metadata_mappers(self) -> List[Callable[[Item], Item]]:
        """Find mapper function.

        A mapper function must be provided if a custom data_archive was configured.
        """
        mappers: List[Callable] = []
        # static catalogs need no STAC metadata mapping
        if self.catalog_type == "static":
            return mappers
        for key in MAPPER_REGISTRIES["STAC metadata"]:
            if isinstance(key, tuple):
                # tuple keys target a (collection, data_archive) combination
                collection, data_archive = key
                if (
                    self.collection == known_collection_to_url(collection)
                    and data_archive == self.data_archive
                ):
                    mappers.append(MAPPER_REGISTRIES["STAC metadata"][key])
            elif self.collection == known_collection_to_url(key):
                # plain keys match the collection regardless of data_archive
                mappers.append(MAPPER_REGISTRIES["STAC metadata"][key])
        # no mapper is acceptable only when no custom data_archive is configured
        if mappers or self.data_archive is None:
            return mappers
        raise ValueError(
            f"no STAC metadata mapper from {self.collection} to {self.data_archive} found"
        )

    def get_s2metadata_mapper(self) -> Union[Callable[[Item], S2Metadata], None]:
        """Return the S2Metadata mapper for this collection/metadata archive.

        Returns None for static catalogs or when no metadata archive is set;
        raises ValueError when no matching mapper is registered.
        """
        if self.catalog_type == "static" or self.metadata_archive is None:
            return None
        for key in MAPPER_REGISTRIES["S2Metadata"]:
            # registry keys are (collection, metadata_archive) tuples
            collection, metadata_archive = key
            if (
                self.collection == known_collection_to_url(collection)
                and metadata_archive == self.metadata_archive
            ):
                return MAPPER_REGISTRIES["S2Metadata"][key]
        else:
            # for/else: only reached when no registry key matched
            raise ValueError(
                f"no S2Metadata mapper from {self.collection} to {self.metadata_archive} found"
            )
@@ -0,0 +1,114 @@
1
+ from enum import Enum
2
+ from typing import Literal
3
+
4
# Output resolutions in meters; "original" keeps the product's native resolution.
Resolution = Enum(
    "Resolution",
    [
        ("original", None),
        ("10m", 10),
        ("20m", 20),
        ("60m", 60),
        ("120m", 120),
    ],
)
14
+
15
+
16
# Resolutions (in meters) at which product quality-indicator masks are provided.
ProductQIMaskResolution = Enum(
    "ProductQIMaskResolution",
    [
        ("20m", 20),
        ("60m", 60),
    ],
)
23
+
24
+
25
# str-mixin enum built via the functional API; member values equal their names.
CloudType = Enum(
    "CloudType",
    [(name, name) for name in ("opaque", "cirrus", "all")],
    type=str,
)
CloudType.__doc__ = """Available cloud types in masks."""
31
+
32
+
33
# int-mixin enum built via the functional API.
# snow_ice is only available since processing baseline 04.00.
ClassificationBandIndex = Enum(
    "ClassificationBandIndex",
    [("opaque", 1), ("cirrus", 2), ("snow_ice", 3)],
    type=int,
)
ClassificationBandIndex.__doc__ = """Band index used for classification masks."""
40
+
41
+
42
# Band identifiers in metadata order; indexes are assigned sequentially from 0.
L2ABand = Enum(
    "L2ABand",
    [
        (band, index)
        for index, band in enumerate(
            "B01 B02 B03 B04 B05 B06 B07 B08 B8A B09 B10 B11 B12".split()
        )
    ],
    type=int,
)
L2ABand.__doc__ = """Mapping between band identifier and metadata internal band index."""
58
+
59
+
60
# Plain enum built via the functional API.
ProcessingLevel = Enum(
    "ProcessingLevel",
    [("level1c", "L1C"), ("level2a", "L2A")],
)
ProcessingLevel.__doc__ = """Available processing levels of Sentinel-2."""
65
+
66
+
67
# str-mixin enum; member values equal their names.
ProductQI = Enum(
    "ProductQI",
    [
        (name, name)
        for name in ("classification", "cloud_probability", "snow_probability")
    ],
    type=str,
)
ProductQI.__doc__ = """Product specific quality indicators."""
73
+
74
+
75
# str-mixin enum; member values equal their names.
BandQI = Enum(
    "BandQI",
    [(name, name) for name in ("detector_footprints", "technical_quality")],
    type=str,
)
BandQI.__doc__ = """Band specific quality indicators."""
# The following band masks are deprecated and intentionally not members:
# nodata, defect, saturated
84
+
85
+
86
# Sun angle grid names as used in the product metadata XML (capitalized).
SunAngle = Enum(
    "SunAngle",
    [("zenith", "Zenith"), ("azimuth", "Azimuth")],
    type=str,
)
89
+
90
+
91
# Viewing angle grid names as used in the product metadata XML (capitalized).
ViewAngle = Enum(
    "ViewAngle",
    [("zenith", "Zenith"), ("azimuth", "Azimuth")],
    type=str,
)
94
+
95
+
96
# int-mixin enum built via the functional API; values are SCL pixel codes.
SceneClassification = Enum(
    "SceneClassification",
    [
        ("nodata", 0),
        ("saturated_or_defected", 1),
        ("dark_area_pixels", 2),
        ("cloud_shadows", 3),
        ("vegetation", 4),
        ("not_vegetated", 5),
        ("water", 6),
        ("unclassified", 7),
        ("cloud_medium_probability", 8),
        ("cloud_high_probability", 9),
        ("thin_cirrus", 10),
        ("snow", 11),
    ],
    type=int,
)
SceneClassification.__doc__ = """Mapping of pixel values to class in SCL bands."""
111
+
112
+
113
# Type aliases constraining the supported archive backends:
# DataArchive: where raster assets are read from (AWS COG or AWS JP2 archive).
DataArchive = Literal["AWSCOG", "AWSJP2"]
# MetadataArchive: where product metadata XML is read from.
MetadataArchive = Literal["roda", "CDSE"]
File without changes
@@ -0,0 +1,51 @@
1
+ from __future__ import annotations
2
+
3
+ from typing import Dict, Optional, Tuple, Union
4
+
5
+ from pydantic import BaseModel
6
+
7
+
8
class SmoothConfig(BaseModel):
    """Configuration of the smoothing filter."""

    # filter radius in pixels
    radius: Optional[int] = 1
    # apply a stronger smoothing variant
    smooth_more: Optional[bool] = False

    @staticmethod
    def parse(inp: Union[SmoothConfig, dict]) -> SmoothConfig:
        """Coerce a mapping into a SmoothConfig; pass instances through."""
        if isinstance(inp, SmoothConfig):
            return inp
        if isinstance(inp, dict):
            return SmoothConfig(**inp)
        raise TypeError(f"cannot parse SmoothConfig from {inp}")
20
+
21
+
22
class RGBCompositeConfig(BaseModel):
    """Configuration for rendering an RGB composite.

    Band tuples are (min, max) stretch ranges of the source values.
    """

    red: Tuple[int, int] = (0, 2300)
    green: Tuple[int, int] = (0, 2300)
    blue: Tuple[int, int] = (0, 2300)
    gamma: float = 1.15
    saturation: float = 1.3
    # CLAHE (contrast limited adaptive histogram equalization) settings
    clahe_flag: bool = True
    clahe_clip_limit: float = 3.2
    clahe_tile_grid_size: Tuple[int, int] = (32, 32)
    # sigmoidal contrast settings
    sigmoidal_flag: bool = False
    sigmoidal_contrast: int = 0
    sigmoidal_bias: float = 0.0
    fuzzy_radius: Optional[int] = 0
    sharpen: Optional[bool] = False
    smooth: Optional[bool] = False
    smooth_config: SmoothConfig = SmoothConfig()
    # water smoothing uses a stronger filter and an NDWI threshold
    smooth_water: Optional[bool] = False
    smooth_water_config: SmoothConfig = SmoothConfig(radius=6, smooth_more=True)
    smooth_water_ndwi_threshold: float = 0.2
    calculations_dtype: str = "float16"

    @staticmethod
    def parse(inp: Union[RGBCompositeConfig, Dict]) -> RGBCompositeConfig:
        """Coerce a mapping into an RGBCompositeConfig; pass instances through.

        Raises TypeError for any other input type.
        """
        if isinstance(inp, RGBCompositeConfig):
            return inp
        if isinstance(inp, dict):
            # bug fix: copy before popping so the caller's dict is not mutated
            params = dict(inp)
            smooth_config = SmoothConfig.parse(params.pop("smooth_config", {}))
            return RGBCompositeConfig(smooth_config=smooth_config, **params)
        raise TypeError(f"cannot parse RGBCompositeConfig from {inp}")
@@ -0,0 +1,112 @@
1
+ import logging
2
+ from typing import Optional
3
+
4
+ from mapchete import MapcheteProcess, RasterInput
5
+ import numpy as np
6
+ import numpy.ma as ma
7
+ from mapchete.errors import MapcheteNodataTile
8
+ from mapchete.types import NodataVal
9
+
10
+ from mapchete_eo.exceptions import EmptyStackException
11
+ from mapchete_eo.image_operations import dtype_scale
12
+
13
+ logger = logging.getLogger(__name__)
14
+
15
+
16
def execute(
    mp: MapcheteProcess,
    inp: RasterInput,
    bands: Optional[list] = None,
    resampling: str = "nearest",
    matching_method: Optional[str] = "gdal",
    matching_max_zoom: int = 13,
    matching_precision: int = 8,
    fallback_to_higher_zoom: bool = False,
    out_dtype: Optional[str] = "uint8",
    out_nodata: NodataVal = None,
    max_source_value: float = 10000.0,
    max_output_value: Optional[float] = None,
) -> ma.MaskedArray:
    """
    Scale input to different value range.

    Inputs:
    -------
    inp
        raster input to be scaled

    Parameters:
    -----------
    bands : list
        List of band indexes (default: [1, 2, 3, 4]).
    resampling : str (default: 'nearest')
        Resampling used when reading from mosaic.
    matching_method : str ('gdal' or 'min') (default: 'gdal')
        gdal: Uses GDAL's standard method. Here, the target resolution is
            calculated by averaging the extent's pixel sizes over both x and y
            axes. This approach returns a zoom level which may not have the
            best quality but will speed up reading significantly.
        min: Returns the zoom level which matches the minimum resolution of the
            extents four corner pixels. This approach returns the zoom level
            with the best possible quality but with low performance. If the
            tile extent is outside of the destination pyramid, a
            TopologicalError will be raised.
    matching_max_zoom : int (default: 13)
        If set, it will prevent reading from zoom levels above the maximum.
    matching_precision : int (default: 8)
        Round resolutions to n digits before comparing.
    fallback_to_higher_zoom : bool (default: False)
        In case no data is found at zoom level, try to read data from higher
        zoom levels. Enabling this setting can lead to many IO requests in
        areas with no data.
    out_dtype: string
        Output dtype for the target values, should fit the designated scaling from source,
        if the output scaled values do not fit, they will be clipped to the output dtype.
    out_nodata: float, int
        Output Nodata, per default read from output nodata of the mapchete config.
    max_source_value : float
        Upper limit for clipping and scaling (e.g. 10000 for Sentinel-2).
    max_output_value : float, None
        Output value range (e.g. 255 for 8 bit). If None it will be determined by the out_dtype

    Output:
    -------
    ma.ndarray
        stretched input bands

    Raises:
    -------
    MapcheteNodataTile
        If the input mosaic is empty or entirely masked.
    """
    # fix: avoid a mutable default argument; None means "first four bands"
    bands = [1, 2, 3, 4] if bands is None else bands

    logger.debug("read input mosaic")
    if inp.is_empty():
        logger.debug("mosaic empty")
        raise MapcheteNodataTile
    try:
        mosaic = inp.read(
            indexes=bands,
            resampling=resampling,
            matching_method=matching_method,
            matching_max_zoom=matching_max_zoom,
            matching_precision=matching_precision,
            fallback_to_higher_zoom=fallback_to_higher_zoom,
        ).astype(np.int16, copy=False)
    except EmptyStackException:
        logger.debug("mosaic empty: EmptyStackException")
        raise MapcheteNodataTile
    if mosaic[0].mask.all():
        logger.debug("mosaic empty: all masked")
        raise MapcheteNodataTile

    if out_nodata is None:
        # fix: compare against None instead of relying on truthiness, so a
        # configured output nodata of 0 is honored rather than silently skipped
        config_nodata = (mp.output_params or {}).get("nodata")
        if config_nodata is not None:
            out_nodata = config_nodata
        else:
            logger.debug("out_nodata is None, defaulting to 0")
            out_nodata = 0

    logger.debug(
        "scale input raster values to %s (max source value: %s, max output value: %s)",
        out_dtype,
        max_source_value,
        max_output_value,
    )
    return dtype_scale(
        bands=mosaic,
        nodata=out_nodata,
        out_dtype=out_dtype,
        max_source_value=max_source_value,
        max_output_value=max_output_value,
    )
@@ -0,0 +1,19 @@
1
+ from typing import List, Optional
2
+
3
+ from rasterio.enums import Resampling
4
+ from xarray import Dataset
5
+
6
+ from mapchete_eo.base import EODataCube
7
+ from mapchete_eo.types import MergeMethod
8
+
9
+
10
def execute(
    inp: EODataCube,
    assets: Optional[List[str]] = None,
    resampling: Resampling = Resampling.nearest,
    merge_method: MergeMethod = MergeMethod.average,
) -> Dataset:
    """
    Convert EO Data Cube into xarray.

    Simply forwards the read parameters to the data cube's read() method.
    """
    read_kwargs = dict(
        assets=assets,
        resampling=resampling,
        merge_method=merge_method,
    )
    return inp.read(**read_kwargs)
@@ -0,0 +1,239 @@
1
+ import logging
2
+ from enum import Enum
3
+ from typing import List, Optional
4
+
5
+ import numpy as np
6
+ import numpy.ma as ma
7
+ from mapchete import RasterInputGroup, VectorInputGroup, Timer
8
+ from mapchete.errors import MapcheteNodataTile
9
+ from mapchete.io.vector import to_shape
10
+ from mapchete.processing.mp import MapcheteProcess
11
+ from mapchete.tile import BufferedTile
12
+ from rasterio.features import geometry_mask
13
+ from shapely import unary_union
14
+ from shapely.geometry import mapping, shape
15
+ from shapely.geometry.base import BaseGeometry
16
+
17
+ from mapchete_eo.image_operations import filters
18
+
19
+ logger = logging.getLogger(__name__)
20
+
21
+
22
# str-mixin enum of the available merge strategies; values equal member names.
MergeMethod = Enum(
    "MergeMethod",
    [(name, name) for name in ("fill", "footprint_gradient")],
    type=str,
)
25
+
26
+
27
def execute(
    mp: MapcheteProcess,
    rasters: RasterInputGroup,
    vectors: VectorInputGroup,
    gradient_buffer: int = 10,
    merge_method: MergeMethod = MergeMethod.footprint_gradient,
) -> ma.MaskedArray:
    """
    Merge multiple rasters into one.

    Raster and vector inputs are paired by position; each pair must carry the
    same name. Vector footprints are used for gradient blending.

    Raises
    ------
    MapcheteNodataTile
        If no raster holds any unmasked data.
    ValueError
        If a paired raster and vector input have different names.
    """
    raster_arrays = []
    region_footprints = []

    with Timer() as tt:
        for raster_region, vector_region in zip(rasters, vectors):
            region_name, region = raster_region

            # Vector part
            footprint = None
            if vector_region is not None:
                region_name_vector, region_vector = vector_region

                # bug fix: this check previously ran even when vector_region
                # was None, in which case region_name_vector was unbound and
                # raised a NameError instead of the intended ValueError
                if region_name != region_name_vector:
                    raise ValueError(
                        "Raster and Vector names should be the same to make sure they match itself, before area property of RasterInput works!"
                    )

                region_geoms = region_vector.read()
                if not region_geoms:
                    logger.debug("%s vector is empty", region_name_vector)
                    continue

                # When there are multiple overlaps of aois/clipping creates
                # multiple geoms, make an union of all shapes, so that the
                # rasters, vectors lists have the same number of elements
                region_geoms_shapes = [
                    shape(region_geom["geometry"]) for region_geom in region_geoms
                ]
                if len(region_geoms_shapes) > 1:
                    footprint = unary_union(region_geoms_shapes)
                else:
                    footprint = region_geoms_shapes[0]

            # Raster part
            raster = region.read()
            if raster.mask.all():
                logger.debug("%s raster is empty", region_name)
                continue

            raster_arrays.append(raster)
            # bug fix: only record the footprint once the raster is confirmed
            # non-empty, so rasters and footprints stay index-aligned
            if footprint is not None:
                region_footprints.append(footprint)

            # NOTE: reading the footprint from the raster input itself
            # (region.area) will not work until the area property of
            # RasterInput is implemented.

        logger.debug("%s rasters created in %s", len(raster_arrays), tt)

    if len(raster_arrays) == 0:
        raise MapcheteNodataTile("no input rasters found")

    with Timer() as tt:
        merged = merge_rasters(
            raster_arrays,
            mp.tile,
            footprints=region_footprints,
            method=merge_method,
            gradient_buffer=gradient_buffer,
        )
    logger.debug("%s mosaics merged in %s", len(raster_arrays), tt)
    return merged
96
+
97
+
98
def merge_rasters(
    rasters: List[ma.MaskedArray],
    tile: BufferedTile,
    method: MergeMethod = MergeMethod.fill,
    footprints: Optional[List[BaseGeometry]] = None,
    gradient_buffer: int = 10,
) -> ma.MaskedArray:
    """
    Merge a list of masked arrays into a single one.

    Parameters
    ----------
    rasters : list of ma.MaskedArray
        Arrays to merge; must not be empty.
    tile : BufferedTile
        Process tile; only used by the footprint gradient method.
    method : MergeMethod
        Either fill masked values sequentially or blend along footprints.
    footprints : list of BaseGeometry, optional
        Required (non-empty) for the footprint gradient method.
    gradient_buffer : int
        Blend buffer in pixels for the footprint gradient method.

    Raises
    ------
    ValueError
        If no rasters are given or the method is unknown.
    TypeError
        If the footprint gradient method is used without footprints.
    """
    footprints = footprints or []
    if len(rasters) == 0:
        raise ValueError("no rasters provided")
    elif len(rasters) == 1:
        # nothing to merge
        return rasters[0]

    if method == MergeMethod.fill:
        return fillnodata_merge(rasters)

    elif method == MergeMethod.footprint_gradient:
        # bug fix: the previous `footprints is None` check was unreachable
        # because footprints had already been defaulted to [] above
        if not footprints:
            raise TypeError(
                "for gradient_merge, a list of footprints has to be provided"
            )
        return gradient_merge(
            rasters=rasters,
            footprints=footprints,
            tile=tile,
            gradient_buffer=gradient_buffer,
        )
    else:  # pragma: no cover
        # fix: typo "unkonw" in error message
        raise ValueError(f"unknown merge method '{method}'")
127
+
128
+
129
def fillnodata_merge(
    rasters: List[ma.MaskedArray],
) -> ma.MaskedArray:
    """
    Read rasters sequentially and update masked pixels with values of next raster.

    The first raster has priority: subsequent rasters only contribute values
    where all previous rasters were masked. A pixel stays masked only if it is
    masked in every input.
    """
    # bug fix: start from a copy of the first raster instead of
    # ma.empty_like(), whose result can alias the prototype's mask and thus
    # mutate the input in place
    out = rasters[0].copy()
    for raster in rasters[1:]:
        # if output is already full, don't add any further raster data
        if not out.mask.any():
            break
        # bug fix: only fill pixels that are still masked; the previous
        # implementation wrote every unmasked pixel of each raster, so later
        # rasters overwrote already-valid data (last-wins instead of fill)
        fill = out.mask & ~raster.mask
        out[fill] = raster[fill]
    return out
143
+
144
+
145
def gradient_merge(
    rasters: List[ma.MaskedArray],
    footprints: List[BaseGeometry],
    tile: BufferedTile,
    gradient_buffer: int = 10,
) -> ma.MaskedArray:
    """Use footprint geometries to merge rasters using a gradient buffer.

    Rasters are blended in order: within ``gradient_buffer`` pixels of a
    footprint edge, the new raster is weighted against the accumulated output
    so seams fade instead of cutting hard.

    NOTE(review): this function assigns ``raster.mask = clip_mask`` below and
    therefore mutates the input arrays in place — confirm callers do not reuse
    them afterwards.
    """
    if len(footprints) != len(rasters):  # pragma: no cover
        raise ValueError(
            f"footprints ({len(footprints)}) do not match rasters ({len(rasters)}) count"
        )

    # accumulators: float16 data plus a "still empty" mask (True = no data yet)
    out_data = np.zeros(rasters[0].shape, dtype=np.float16)
    out_mask = np.ones(rasters[0].shape, dtype=bool)

    for raster, footprint in zip(rasters, footprints):
        # create gradient mask from footprint
        footprint_geom = to_shape(footprint)
        if footprint_geom.is_empty:
            # empty footprint: mask everything (no coverage)
            footprint_mask = np.ones(shape=raster.mask[0].shape, dtype=bool)
        else:
            # rasterize footprint onto the tile grid (True outside geometry)
            footprint_mask = geometry_mask(
                [mapping(footprint_geom)],
                raster.mask[0].shape,
                tile.transform,
                all_touched=False,
                invert=False,
            )

        # TODO: the gaussian_blur function demands a 3-band array, so we have to
        # hack around that. This could be improved.
        gradient_1band = filters.gaussian_blur(
            (~np.stack([footprint_mask for _ in range(3)]) * 255).astype("uint8"),
            radius=gradient_buffer,
        )[0]
        # gradient_1band now has values from 1 (no footprint coverage) to 255 (full
        # footprint coverage)
        # set 1 to 0:
        gradient_1band[gradient_1band == 1] = 0
        logger.debug(
            f"gradient_1band; min: {np.min(gradient_1band)}, max: {np.max(gradient_1band)}"
        )

        # extrude array to match number of raster bands
        gradient_8bit = np.stack([gradient_1band for _ in range(raster.shape[0])])
        logger.debug(
            f"gradient_8bit; min: {np.min(gradient_8bit)}, max: {np.max(gradient_8bit)}"
        )

        # scale gradient from 0 to 1
        gradient = gradient_8bit / 255
        logger.debug(f"gradient; min: {np.min(gradient)} , max: {np.max(gradient)}")

        # now only apply the gradient where out and raster have values
        # otherwise pick the remaining existing value or keep a masked
        # pixel if both are masked

        # clip raster with end of gradient:
        # (pixels outside the blurred footprint are treated as nodata)
        clip_mask = raster.mask + (gradient_8bit == 0)
        # NOTE(review): in-place mutation of the input raster's mask
        raster.mask = clip_mask

        # the weight array is going to be used to merge the existing output array with
        # current raster
        weight = np.zeros(gradient.shape, dtype=np.float16)

        # set weight values according to the following rules:
        # both values available: use gradient (1 for full raster and 0 for full out)
        weight[~out_mask & ~clip_mask] = gradient[~out_mask & ~clip_mask]
        # only raster data available: 1
        weight[out_mask & ~clip_mask] = 1.0
        # only out data available: 0
        weight[~out_mask & clip_mask] = 0.0
        # none of them available: 0
        weight[out_mask & clip_mask] = 0.0

        # update out mask
        weight_mask = np.zeros(weight.shape, dtype=bool)
        # both values available: False
        # only raster: False
        # only out: False
        # none: True
        weight_mask[out_mask & clip_mask] = True

        # sum of weighted existing data with new data
        out_data[~clip_mask] = (
            # weight existing data
            (out_data[~clip_mask] * (1.0 - weight[~clip_mask]))
            # weight new data
            + (raster[~clip_mask].astype(np.float16) * weight[~clip_mask])
        )
        out_mask[~clip_mask] = weight_mask[~clip_mask]

    # cast back to the input dtype; pixels never touched stay masked
    return ma.MaskedArray(
        data=out_data.astype(rasters[0].dtype, copy=False), mask=out_mask
    )