mapchete-eo 2025.7.0__py2.py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (87)
  1. mapchete_eo/__init__.py +1 -0
  2. mapchete_eo/archives/__init__.py +0 -0
  3. mapchete_eo/archives/base.py +65 -0
  4. mapchete_eo/array/__init__.py +0 -0
  5. mapchete_eo/array/buffer.py +16 -0
  6. mapchete_eo/array/color.py +29 -0
  7. mapchete_eo/array/convert.py +157 -0
  8. mapchete_eo/base.py +528 -0
  9. mapchete_eo/blacklist.txt +175 -0
  10. mapchete_eo/cli/__init__.py +30 -0
  11. mapchete_eo/cli/bounds.py +22 -0
  12. mapchete_eo/cli/options_arguments.py +243 -0
  13. mapchete_eo/cli/s2_brdf.py +77 -0
  14. mapchete_eo/cli/s2_cat_results.py +146 -0
  15. mapchete_eo/cli/s2_find_broken_products.py +93 -0
  16. mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
  17. mapchete_eo/cli/s2_mask.py +71 -0
  18. mapchete_eo/cli/s2_mgrs.py +45 -0
  19. mapchete_eo/cli/s2_rgb.py +114 -0
  20. mapchete_eo/cli/s2_verify.py +129 -0
  21. mapchete_eo/cli/static_catalog.py +123 -0
  22. mapchete_eo/eostac.py +30 -0
  23. mapchete_eo/exceptions.py +87 -0
  24. mapchete_eo/geometry.py +271 -0
  25. mapchete_eo/image_operations/__init__.py +12 -0
  26. mapchete_eo/image_operations/color_correction.py +136 -0
  27. mapchete_eo/image_operations/compositing.py +247 -0
  28. mapchete_eo/image_operations/dtype_scale.py +43 -0
  29. mapchete_eo/image_operations/fillnodata.py +130 -0
  30. mapchete_eo/image_operations/filters.py +319 -0
  31. mapchete_eo/image_operations/linear_normalization.py +81 -0
  32. mapchete_eo/image_operations/sigmoidal.py +114 -0
  33. mapchete_eo/io/__init__.py +37 -0
  34. mapchete_eo/io/assets.py +492 -0
  35. mapchete_eo/io/items.py +147 -0
  36. mapchete_eo/io/levelled_cubes.py +228 -0
  37. mapchete_eo/io/path.py +144 -0
  38. mapchete_eo/io/products.py +413 -0
  39. mapchete_eo/io/profiles.py +45 -0
  40. mapchete_eo/known_catalogs.py +42 -0
  41. mapchete_eo/platforms/sentinel2/__init__.py +17 -0
  42. mapchete_eo/platforms/sentinel2/archives.py +190 -0
  43. mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
  44. mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
  45. mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
  46. mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
  47. mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
  48. mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
  49. mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
  50. mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
  51. mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
  52. mapchete_eo/platforms/sentinel2/config.py +181 -0
  53. mapchete_eo/platforms/sentinel2/driver.py +78 -0
  54. mapchete_eo/platforms/sentinel2/masks.py +325 -0
  55. mapchete_eo/platforms/sentinel2/metadata_parser.py +734 -0
  56. mapchete_eo/platforms/sentinel2/path_mappers/__init__.py +29 -0
  57. mapchete_eo/platforms/sentinel2/path_mappers/base.py +56 -0
  58. mapchete_eo/platforms/sentinel2/path_mappers/earthsearch.py +34 -0
  59. mapchete_eo/platforms/sentinel2/path_mappers/metadata_xml.py +135 -0
  60. mapchete_eo/platforms/sentinel2/path_mappers/sinergise.py +105 -0
  61. mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +26 -0
  62. mapchete_eo/platforms/sentinel2/processing_baseline.py +160 -0
  63. mapchete_eo/platforms/sentinel2/product.py +669 -0
  64. mapchete_eo/platforms/sentinel2/types.py +109 -0
  65. mapchete_eo/processes/__init__.py +0 -0
  66. mapchete_eo/processes/config.py +51 -0
  67. mapchete_eo/processes/dtype_scale.py +112 -0
  68. mapchete_eo/processes/eo_to_xarray.py +19 -0
  69. mapchete_eo/processes/merge_rasters.py +235 -0
  70. mapchete_eo/product.py +278 -0
  71. mapchete_eo/protocols.py +56 -0
  72. mapchete_eo/search/__init__.py +14 -0
  73. mapchete_eo/search/base.py +222 -0
  74. mapchete_eo/search/config.py +42 -0
  75. mapchete_eo/search/s2_mgrs.py +314 -0
  76. mapchete_eo/search/stac_search.py +251 -0
  77. mapchete_eo/search/stac_static.py +236 -0
  78. mapchete_eo/search/utm_search.py +251 -0
  79. mapchete_eo/settings.py +24 -0
  80. mapchete_eo/sort.py +48 -0
  81. mapchete_eo/time.py +53 -0
  82. mapchete_eo/types.py +73 -0
  83. mapchete_eo-2025.7.0.dist-info/METADATA +38 -0
  84. mapchete_eo-2025.7.0.dist-info/RECORD +87 -0
  85. mapchete_eo-2025.7.0.dist-info/WHEEL +5 -0
  86. mapchete_eo-2025.7.0.dist-info/entry_points.txt +11 -0
  87. mapchete_eo-2025.7.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,734 @@
1
+ """
2
+ A metadata parser helps to read additional Sentinel-2 metadata such as
3
+ sun angles, quality masks, etc.
4
+ """
5
+
6
+ from __future__ import annotations
7
+
8
+ import logging
9
+ import warnings
10
+ from functools import cached_property
11
+ from typing import Any, Callable, Dict, List, Optional, Union
12
+ from xml.etree.ElementTree import Element, ParseError
13
+
14
+ import numpy as np
15
+ import numpy.ma as ma
16
+ from pydantic import BaseModel
17
+ import pystac
18
+ from affine import Affine
19
+ from fiona.transform import transform_geom
20
+ from mapchete import Timer
21
+ from mapchete.io.raster import ReferencedRaster, resample_from_array
22
+ from mapchete.path import MPath
23
+ from mapchete.protocols import GridProtocol
24
+ from mapchete.types import Bounds, Grid
25
+ from rasterio.crs import CRS
26
+ from rasterio.enums import Resampling
27
+ from rasterio.fill import fillnodata
28
+ from rasterio.transform import from_bounds
29
+ from shapely import MultiPolygon, Polygon
30
+ from shapely.geometry import mapping, shape
31
+ from shapely.geometry.base import BaseGeometry
32
+ from tilematrix import Shape
33
+
34
+ from mapchete_eo.exceptions import AssetEmpty, AssetMissing, CorruptedProductMetadata
35
+ from mapchete_eo.io import open_xml, read_mask_as_raster
36
+ from mapchete_eo.platforms.sentinel2.path_mappers import default_path_mapper_guesser
37
+ from mapchete_eo.platforms.sentinel2.path_mappers.base import S2PathMapper
38
+ from mapchete_eo.platforms.sentinel2.path_mappers.metadata_xml import XMLMapper
39
+ from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
40
+ from mapchete_eo.platforms.sentinel2.types import (
41
+ BandQI,
42
+ ClassificationBandIndex,
43
+ CloudType,
44
+ L2ABand,
45
+ ProductQI,
46
+ ProductQIMaskResolution,
47
+ Resolution,
48
+ SunAngle,
49
+ ViewAngle,
50
+ )
51
+
52
+ logger = logging.getLogger(__name__)
53
+
54
+
55
def open_granule_metadata_xml(metadata_xml: MPath) -> Element:
    """Open and parse a granule metadata XML file.

    Parameters
    ----------
    metadata_xml : MPath
        Path to the granule metadata XML.

    Raises
    ------
    CorruptedProductMetadata
        If the XML cannot be parsed.
    """
    try:
        return open_xml(metadata_xml)
    except ParseError as exc:
        # chain the original ParseError so the parse location survives in tracebacks
        raise CorruptedProductMetadata(exc) from exc
60
+
61
+
62
def s2metadata_from_stac_item(
    item: pystac.Item,
    metadata_assets: Optional[List[str]] = None,
    boa_offset_fields: Optional[List[str]] = None,
    processing_baseline_fields: Optional[List[str]] = None,
    **kwargs,
) -> S2Metadata:
    """Custom code to initialize S2Metadata from a STAC item.

    Depending on from which catalog the STAC item comes, this function should correctly
    set all custom flags such as BOA offsets or pass on the correct path to the metadata XML
    using the proper asset name.

    Raises
    ------
    KeyError
        If no known metadata asset or no processing baseline property is found.
    """
    # create default values per call instead of using shared mutable defaults
    if metadata_assets is None:
        metadata_assets = ["metadata", "granule_metadata"]
    if boa_offset_fields is None:
        boa_offset_fields = [
            "sentinel:boa_offset_applied",
            "sentinel2:boa_offset_applied",
            "earthsearch:boa_offset_applied",
        ]
    if processing_baseline_fields is None:
        processing_baseline_fields = [
            "s2:processing_baseline",
            "sentinel:processing_baseline",
            "sentinel2:processing_baseline",
            "processing:version",
        ]

    # find the first known asset pointing to the metadata XML
    for metadata_asset in metadata_assets:
        if metadata_asset in item.assets:
            metadata_path = MPath(item.assets[metadata_asset].href)
            break
    else:  # pragma: no cover
        raise KeyError(
            f"could not find path to metadata XML file in assets: {', '.join(item.assets.keys())}"
        )

    # any truthy BOA offset property means the offset was already applied
    boa_offset_applied = any(item.properties.get(field) for field in boa_offset_fields)

    if metadata_path.is_remote() or metadata_path.is_absolute():
        metadata_xml = metadata_path
    else:
        # resolve relative asset paths against the item location
        metadata_xml = MPath(item.self_href).parent / metadata_path

    # pick the first available processing baseline property
    for processing_baseline_field in processing_baseline_fields:
        try:
            processing_baseline = item.properties[processing_baseline_field]
            break
        except KeyError:
            pass
    else:  # pragma: no cover
        raise KeyError(
            f"could not find processing baseline version in item properties: {item.properties}"
        )

    return S2Metadata.from_metadata_xml(
        metadata_xml=metadata_xml,
        processing_baseline=processing_baseline,
        boa_offset_applied=boa_offset_applied,
        **kwargs,
    )
123
+
124
+
125
class S2Metadata:
    """Read additional Sentinel-2 granule metadata such as sun angles and QI masks."""

    # path to the granule metadata XML file
    metadata_xml: MPath
    # resolves logical asset names (QI masks etc.) to concrete storage paths
    path_mapper: S2PathMapper
    processing_baseline: ProcessingBaseline
    # whether the BOA offset was already applied to the product data
    boa_offset_applied: bool = False
    # lazily populated parsed XML tree (see xml_root property)
    _cached_xml_root: Optional[Element] = None
    # overridable hooks used by the alternate constructors
    path_mapper_guesser: Callable = default_path_mapper_guesser
    from_stac_item_constructor: Callable = s2metadata_from_stac_item
    crs: CRS
    bounds: Bounds
    footprint: Union[Polygon, MultiPolygon]
    # per-instance cache for viewing incidence angles and detector footprints
    _cache: dict

    def __init__(
        self,
        metadata_xml: MPath,
        path_mapper: S2PathMapper,
        xml_root: Optional[Element] = None,
        boa_offset_applied: bool = False,
        **kwargs,
    ):
        """Initialize from a metadata XML path and a path mapper.

        xml_root may be passed on to reuse an already parsed XML tree.
        """
        self.metadata_xml = metadata_xml
        self._cached_xml_root = xml_root
        self._cache = dict(viewing_incidence_angles=dict(), detector_footprints=dict())
        self.path_mapper = path_mapper
        self.processing_baseline = path_mapper.processing_baseline
        self.boa_offset_applied = boa_offset_applied
        self._metadata_dir = metadata_xml.parent

        # get geoinformation per resolution and bounds
        self.crs = self._crs
        self._grids = _get_grids(self.xml_root, self.crs)
        self.bounds = self._grids[Resolution["10m"]].bounds
        # NOTE(review): shape() is applied to a Bounds object — presumably
        # mapchete.types.Bounds implements __geo_interface__; confirm.
        self.footprint = shape(self.bounds)
159
+
160
    def __repr__(self):
        """Return debug representation with product ID and processing baseline."""
        return f"<S2Metadata id={self.product_id}, processing_baseline={self.processing_baseline}>"
162
+
163
    def clear_cached_data(self):
        """Release all cached metadata (angle grids, footprints, XML tree)."""
        logger.debug("clear S2Metadata internal caches")
        self._cache = dict(viewing_incidence_angles=dict(), detector_footprints=dict())
        if self._cached_xml_root is not None:
            logger.debug("clear S2Metadata xml cache")
            # Element.clear() drops child elements before the reference is released
            self._cached_xml_root.clear()
            self._cached_xml_root = None
        self.path_mapper.clear_cached_data()
171
+
172
    @property
    def __geo_interface__(self) -> dict:
        """GeoJSON-style mapping of the product footprint (in the product CRS)."""
        return mapping(self.footprint)
175
+
176
    @property
    def footprint_latlon(self) -> BaseGeometry:
        """Product footprint reprojected to EPSG:4326, cut at the antimeridian."""
        return shape(
            transform_geom(
                src_crs=self.crs,
                dst_crs="EPSG:4326",
                geom=self.__geo_interface__,
                antimeridian_cutting=True,
            )
        )
186
+
187
+ @classmethod
188
+ def from_metadata_xml(
189
+ cls,
190
+ metadata_xml: Union[str, MPath],
191
+ processing_baseline: Optional[str] = None,
192
+ path_mapper: Optional[S2PathMapper] = None,
193
+ **kwargs,
194
+ ) -> S2Metadata:
195
+ metadata_xml = MPath.from_inp(metadata_xml, **kwargs)
196
+ xml_root = open_granule_metadata_xml(metadata_xml)
197
+ if path_mapper is None:
198
+ # guess correct path mapper
199
+ path_mapper = cls.path_mapper_guesser(
200
+ metadata_xml,
201
+ xml_root=xml_root,
202
+ **kwargs,
203
+ )
204
+
205
+ # use processing baseline version from argument if available
206
+ if processing_baseline:
207
+ path_mapper.processing_baseline = ProcessingBaseline.from_version(
208
+ processing_baseline
209
+ )
210
+ # use the information about processing baseline gained when initializing the default mapper to
211
+ # let the path mapper generate the right paths
212
+ else:
213
+ _default_path_mapper = XMLMapper(
214
+ xml_root=xml_root, metadata_xml=metadata_xml, **kwargs
215
+ )
216
+ path_mapper.processing_baseline = _default_path_mapper.processing_baseline
217
+
218
+ return S2Metadata(
219
+ metadata_xml, path_mapper=path_mapper, xml_root=xml_root, **kwargs
220
+ )
221
+
222
+ @classmethod
223
+ def from_stac_item(cls, item: pystac.Item, **kwargs) -> S2Metadata:
224
+ return cls.from_stac_item_constructor(item, **kwargs)
225
+
226
    @property
    def xml_root(self):
        """Parsed metadata XML tree, opened lazily and cached on the instance."""
        if self._cached_xml_root is None:  # pragma: no cover
            self._cached_xml_root = open_granule_metadata_xml(self.metadata_xml)
        return self._cached_xml_root
231
+
232
    @cached_property
    def product_id(self) -> str:
        """Tile ID as given by the TILE_ID element of the metadata XML."""
        return next(self.xml_root.iter("TILE_ID")).text
235
+
236
    @cached_property
    def datastrip_id(self) -> str:
        """Datastrip ID as given by the DATASTRIP_ID element of the metadata XML."""
        return next(self.xml_root.iter("DATASTRIP_ID")).text
239
+
240
    @cached_property
    def _crs(self) -> CRS:
        """CRS parsed from the HORIZONTAL_CS_CODE element of the metadata XML."""
        crs_str = next(self.xml_root.iter("HORIZONTAL_CS_CODE")).text
        return CRS.from_string(crs_str)
244
+
245
    @property
    def sun_angles(self) -> SunAnglesData:
        """
        Return sun angle grids (zenith and azimuth) plus their mean values.
        """
        sun_angles: dict = {angle.value.lower(): dict() for angle in SunAngle}
        for angle in SunAngle:
            # grid of angle values from the Sun_Angles_Grid element
            raster = _get_grid_data(
                group=next(self.xml_root.iter("Sun_Angles_Grid")),
                tag=angle,
                bounds=self.bounds,
                crs=self.crs,
            )
            # mean angle from the Mean_Sun_Angle element
            mean = float(
                next(self.xml_root.iter("Mean_Sun_Angle"))
                .findall(f"{angle.value.upper()}_ANGLE")[0]
                .text
            )
            sun_angles[angle.value.lower()] = SunAngleData(raster=raster, mean=mean)
        return SunAnglesData(**sun_angles)
265
+
266
    @property
    def assets(self) -> Dict[str, MPath]:
        """
        Mapping of all available metadata assets such as QI bands.

        Product QI masks (except classification) are keyed per resolution
        ("<mask>-<resolution>"); band QI masks are keyed per band
        ("<mask>-<band>").
        """
        out = dict()
        for product_qi_mask in ProductQI:
            if product_qi_mask == ProductQI.classification:
                # classification mask exists only once, without resolution variants
                out[product_qi_mask.name] = self.path_mapper.product_qi_mask(
                    product_qi_mask
                )
            else:
                for resolution in ProductQIMaskResolution:
                    out[f"{product_qi_mask.name}-{resolution.name}"] = (
                        self.path_mapper.product_qi_mask(
                            product_qi_mask, resolution=resolution
                        )
                    )

        for band_qi_mask in BandQI:
            for band in L2ABand:
                out[f"{band_qi_mask.name}-{band.name}"] = self.path_mapper.band_qi_mask(
                    qi_mask=band_qi_mask, band=band
                )

        return out
292
+
293
+ def grid(self, resolution: Resolution) -> Grid:
294
+ """
295
+ Return grid for resolution.
296
+ """
297
+ return self._grids[resolution]
298
+
299
+ def shape(self, resolution: Resolution) -> Shape:
300
+ """
301
+ Return grid shape for resolution.
302
+ """
303
+ return self._grids[resolution].shape
304
+
305
+ def transform(self, resolution: Resolution) -> Affine:
306
+ """
307
+ Return Affine object for resolution.
308
+ """
309
+ return self._grids[resolution].transform
310
+
311
+ #####################
312
+ # product QI layers #
313
+ #####################
314
    def l1c_cloud_mask(
        self,
        cloud_type: CloudType = CloudType.all,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """
        Return L1C classification cloud mask as a boolean raster.

        cloud_type selects cirrus, opaque or both ("all"); dst_grid defaults
        to the product's 20m grid.
        """
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        if cloud_type == CloudType.all:
            # combine cirrus and opaque cloud bands
            indexes = [
                ClassificationBandIndex[CloudType.cirrus.name].value,
                ClassificationBandIndex[CloudType.opaque.name].value,
            ]
            cloud_types = [CloudType.cirrus.name, CloudType.opaque.name]
        else:
            indexes = [ClassificationBandIndex[cloud_type.name].value]
            cloud_types = [cloud_type.name]
        return read_mask_as_raster(
            self.path_mapper.classification_mask(),
            indexes=indexes,
            dst_grid=dst_grid,
            # only rasterize vector features whose maskType matches the requested clouds
            rasterize_feature_filter=lambda feature: feature["properties"][
                "maskType"
            ].lower()
            in cloud_types,
            rasterize_value_func=lambda feature: True,
            dtype=bool,
            masked=False,
            cached_read=cached_read,
        )
348
+
349
    def snow_ice_mask(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """Return snow/ice mask from the classification band as a boolean raster."""
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        return read_mask_as_raster(
            self.path_mapper.classification_mask(),
            indexes=[ClassificationBandIndex.snow_ice.value],
            dst_grid=dst_grid,
            # NOTE(review): this filter rejects every vector feature, so only the
            # raster band contributes here — confirm this is intentional
            # (l1c_cloud_mask filters by maskType instead).
            rasterize_feature_filter=lambda feature: False,
            rasterize_value_func=lambda feature: True,
            dtype=bool,
            masked=False,
            cached_read=cached_read,
        )
367
+
368
    def cloud_probability(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        resampling: Resampling = Resampling.bilinear,
        from_resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """Return cloud probability mask resampled to dst_grid (default: 20m grid)."""
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        # TODO: determine whether to read the 20m or the 60m file
        return read_mask_as_raster(
            self.path_mapper.cloud_probability_mask(resolution=from_resolution),
            dst_grid=dst_grid,
            resampling=resampling,
            rasterize_value_func=lambda feature: True,
            masked=False,
            cached_read=cached_read,
        )
388
+
389
    def snow_probability(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        resampling: Resampling = Resampling.bilinear,
        from_resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """Return snow probability mask resampled to dst_grid (default: 20m grid)."""
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        # TODO: determine whether to read the 20m or the 60m file
        return read_mask_as_raster(
            self.path_mapper.snow_probability_mask(resolution=from_resolution),
            dst_grid=dst_grid,
            resampling=resampling,
            rasterize_value_func=lambda feature: True,
            masked=False,
            cached_read=cached_read,
        )
409
+
410
+ ##############
411
+ # band masks #
412
+ ##############
413
    def detector_footprints(
        self,
        band: L2ABand,
        dst_grid: Union[GridProtocol, Resolution] = Resolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """
        Return detector footprints as a raster of detector IDs (0 = uncovered).

        Results are cached per band and grid. Raises AssetMissing if the mask
        file cannot be found and AssetEmpty if it contains no footprints.
        """

        def _get_detector_id(feature) -> int:
            # detector ID is the second-to-last dash-separated token of gml_id
            return int(feature["properties"]["gml_id"].split("-")[-2])

        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)

        cache_item_id = f"{band}-{str(dst_grid)}"
        if cache_item_id not in self._cache["detector_footprints"]:
            try:
                path = self.path_mapper.band_qi_mask(
                    qi_mask=BandQI.detector_footprints, band=band
                )
                logger.debug("reading footprints from %s ...", path)
                footprints = read_mask_as_raster(
                    path,
                    dst_grid=dst_grid,
                    rasterize_value_func=_get_detector_id,
                    cached_read=cached_read,
                    dtype=np.uint8,
                )
            except FileNotFoundError as exc:
                raise AssetMissing(exc)

            # an all-zero array means no detector covered the tile — treat as broken
            if not footprints.data.any():
                raise AssetEmpty(
                    f"No detector footprints found for band {band} in {self}"
                )
            self._cache["detector_footprints"][cache_item_id] = footprints
        return self._cache["detector_footprints"][cache_item_id]
452
+
453
    def technical_quality_mask(
        self,
        band: L2ABand,
        dst_grid: Union[GridProtocol, Resolution] = Resolution["60m"],
    ) -> ReferencedRaster:
        """
        Return technical quality mask for a band.

        Raises AssetMissing if the mask file cannot be found.
        """
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        try:
            return read_mask_as_raster(
                self.path_mapper.band_qi_mask(
                    qi_mask=BandQI.technical_quality, band=band
                ),
                dst_grid=dst_grid,
            )
        except FileNotFoundError as exc:
            raise AssetMissing(exc)
472
+
473
    def viewing_incidence_angles(self, band: L2ABand) -> ViewingIncidenceAngles:
        """
        Return viewing incidence angles, cached per band.

        Parameters
        ----------
        band : L2ABand
            Band to collect per-detector angle grids and mean angles for.
        """
        if self._cache["viewing_incidence_angles"].get(band) is None:
            angles: Dict[str, Any] = {
                "zenith": {"raster": None, "detectors": dict(), "mean": None},
                "azimuth": {"raster": None, "detectors": dict(), "mean": None},
            }
            # collect per-detector angle grids matching this band's bandId
            for grids in self.xml_root.iter("Viewing_Incidence_Angles_Grids"):
                band_idx = int(grids.get("bandId"))
                if band_idx == band.value:
                    detector_id = int(grids.get("detectorId"))
                    for angle in ViewAngle:
                        raster = _get_grid_data(
                            group=grids,
                            tag=angle.value,
                            bounds=self.bounds,
                            crs=self.crs,
                        )
                        angles[angle.value.lower()]["detectors"][detector_id] = raster
            # collect band-wide mean angles
            for band_angles in self.xml_root.iter("Mean_Viewing_Incidence_Angle_List"):
                for band_angle in band_angles:
                    band_idx = int(band_angle.get("bandId"))
                    if band_idx == band.value:
                        for angle in ViewAngle:
                            angles[angle.value.lower()].update(
                                mean=float(
                                    band_angle.findall(f"{angle.value.upper()}_ANGLE")[
                                        0
                                    ].text
                                )
                            )
            self._cache["viewing_incidence_angles"][band] = ViewingIncidenceAngles(
                **angles
            )
        return self._cache["viewing_incidence_angles"][band]
516
+
517
+ def viewing_incidence_angle(
518
+ self, band: L2ABand, detector_id: int, angle: ViewAngle = ViewAngle.zenith
519
+ ) -> ReferencedRaster:
520
+ return (
521
+ self.viewing_incidence_angles(band).get_angle(angle).detectors[detector_id]
522
+ )
523
+
524
    def mean_viewing_incidence_angles(
        self,
        bands: Union[List[L2ABand], L2ABand, None] = None,
        angle: ViewAngle = ViewAngle.zenith,
        resolution: Resolution = Resolution["120m"],
        resampling: Resampling = Resampling.nearest,
        smoothing_iterations: int = 10,
        cached_read: bool = False,
    ) -> ma.MaskedArray:
        """Return the mean viewing incidence angle array over the given bands.

        Defaults to all L2A bands at the synthetic 120m resolution grid.
        """
        # normalize to a list of bands; default is all L2A bands
        bands = list(L2ABand) if bands is None else bands
        bands = [bands] if isinstance(bands, L2ABand) else bands

        def _band_angles(band: L2ABand) -> ma.MaskedArray:
            # merge the per-detector angle grids of one band into a single array
            detector_angles = (
                self.viewing_incidence_angles(band).get_angle(angle).detectors
            )
            band_angles = ma.masked_equal(
                np.zeros(self.shape(resolution), dtype=np.float32), 0
            )
            detector_footprints = self.detector_footprints(
                band, dst_grid=resolution, cached_read=cached_read
            )
            # 0 marks pixels not covered by any detector
            detector_ids = [x for x in np.unique(detector_footprints.data) if x != 0]

            for detector_id in detector_ids:
                # handle rare cases where detector geometries are available but no respective
                # angle arrays:
                if detector_id not in detector_angles:  # pragma: no cover
                    logger.debug(
                        f"no {angle} angles grid found for detector {detector_id}"
                    )
                    continue
                detector_angles_raster = detector_angles[detector_id]
                # interpolate missing nodata edges and return BRDF difference model
                detector_angles_raster.data = ma.masked_invalid(
                    fillnodata(
                        detector_angles_raster.data,
                        smoothing_iterations=smoothing_iterations,
                    )
                )
                # resample detector angles to output resolution
                detector_angle = resample_from_array(
                    detector_angles_raster,
                    nodata=0,
                    out_grid=self.grid(resolution),
                    resampling=resampling,
                    keep_2d=True,
                )
                # select pixels which are covered by detector
                detector_mask = np.where(
                    detector_footprints.data == detector_id, True, False
                )
                if len(detector_footprints.data.shape) == 3:
                    detector_mask = detector_mask[0]
                # merge detector stripes
                band_angles[detector_mask] = detector_angle[detector_mask]
                band_angles.mask[detector_mask] = detector_angle.mask[detector_mask]

            return band_angles

        with Timer() as tt:
            mean = ma.mean(ma.stack([_band_angles(band) for band in bands]), axis=0)
        logger.debug(
            "mean viewing incidence angles for %s bands generated in %s", len(bands), tt
        )
        return mean
590
+
591
+
592
class SunAngleData(BaseModel):
    """Sun angle grid plus the mean angle reported in the metadata XML."""

    # ReferencedRaster is not a pydantic-native type
    model_config = dict(arbitrary_types_allowed=True)
    raster: ReferencedRaster
    mean: float
596
+
597
+
598
class SunAnglesData(BaseModel):
    """Container for the azimuth and zenith sun angle data of a product."""

    azimuth: SunAngleData
    zenith: SunAngleData

    def get_angle(self, angle: SunAngle) -> SunAngleData:
        """Return the sun angle data matching the given angle type."""
        if angle == SunAngle.azimuth:
            return self.azimuth
        if angle == SunAngle.zenith:
            return self.zenith
        raise KeyError(f"unknown angle: {angle}")
609
+
610
+
611
class ViewingIncidenceAngle(BaseModel):
    """Per-detector viewing incidence angle grids of one band and angle type."""

    # ReferencedRaster is not a pydantic-native type
    model_config = dict(arbitrary_types_allowed=True)
    # angle grids keyed by detector ID
    detectors: Dict[int, ReferencedRaster]
    mean: float

    def merge_detectors(
        self, fill_edges: bool = True, smoothing_iterations: int = 3
    ) -> ReferencedRaster:
        """Average all detector grids into one raster.

        Detector pixels are combined with a NaN-ignoring mean; remaining nodata
        edges are optionally interpolated. Raises CorruptedProductMetadata when
        no detector grids are available.
        """
        if not self.detectors:
            raise CorruptedProductMetadata("no viewing incidence angles available")
        # any detector raster provides the common georeference
        sample = next(iter(self.detectors.values()))
        with warnings.catch_warnings():
            # np.nanmean emits a RuntimeWarning on all-NaN slices, which are expected
            warnings.simplefilter("ignore", category=RuntimeWarning)
            merged = np.nanmean(
                np.stack([raster.data for raster in self.detectors.values()]), axis=0
            )
        if fill_edges:
            merged = fillnodata(
                ma.masked_invalid(merged), smoothing_iterations=smoothing_iterations
            )
        return ReferencedRaster.from_array_like(
            array_like=ma.masked_invalid(merged),
            transform=sample.transform,
            crs=sample.crs,
        )
636
+
637
+
638
class ViewingIncidenceAngles(BaseModel):
    """Container for the azimuth and zenith viewing incidence angles of a band."""

    azimuth: ViewingIncidenceAngle
    zenith: ViewingIncidenceAngle

    def get_angle(self, angle: ViewAngle) -> ViewingIncidenceAngle:
        """Return the viewing incidence angle data matching the given angle type."""
        if angle == ViewAngle.azimuth:
            return self.azimuth
        if angle == ViewAngle.zenith:
            return self.zenith
        raise KeyError(f"unknown angle: {angle}")
649
+
650
+
651
def _get_grids(root: Element, crs: CRS) -> Dict[Resolution, Grid]:
    """Derive one Grid per resolution from the Size/Geoposition XML elements.

    A synthetic 120m grid is added by downscaling the 10m grid.
    """
    geoinfo = {
        Resolution["10m"]: dict(crs=crs),
        Resolution["20m"]: dict(crs=crs),
        Resolution["60m"]: dict(crs=crs),
    }
    # pixel dimensions per resolution
    for size in root.iter("Size"):
        resolution = Resolution[f"{size.get('resolution')}m"]
        for item in size:
            if item.text is None:
                raise TypeError(f"cannot derive height or width from: {item.text}")
            if item.tag == "NROWS":
                height = int(item.text)
            elif item.tag == "NCOLS":
                width = int(item.text)
        geoinfo[resolution].update(height=height, width=width)

    # georeference per resolution
    for geoposition in root.iter("Geoposition"):
        resolution = Resolution[f"{geoposition.get('resolution')}m"]
        for item in geoposition:
            if item.text is None:
                raise TypeError(f"cannot derive float values from: {item.text}")
            if item.tag == "ULX":
                left = float(item.text)
            elif item.tag == "ULY":
                top = float(item.text)
            elif item.tag == "XDIM":
                x_size = float(item.text)
            elif item.tag == "YDIM":
                y_size = float(item.text)
        # YDIM is negative in north-up grids, so bottom ends up south of top
        right = left + width * x_size
        bottom = top + height * y_size
        geoinfo[resolution].update(
            transform=from_bounds(left, bottom, right, top, width, height),
        )
    out_grids = {k: Grid(**v) for k, v in geoinfo.items()}
    for additional_resolution in [120]:
        resolution = Resolution[f"{additional_resolution}m"]
        grid_10m = out_grids[Resolution["10m"]]
        relation = additional_resolution // 10
        width = grid_10m.width // relation
        height = grid_10m.height // relation
        # NOTE(review): left/bottom/right/top leak out of the Geoposition loop
        # here; this relies on all resolutions sharing the same tile extent and
        # the XML containing at least one Geoposition element — confirm.
        out_grids[resolution] = Grid(
            from_bounds(left, bottom, right, top, width, height), height, width, crs
        )
    return out_grids
697
+
698
+
699
def _get_grid_data(group, tag, bounds, crs) -> ReferencedRaster:
    """Read an angle grid (sun or viewing angles) from an XML group element.

    The grid is georeferenced so that cell centers align with the product
    bounds, i.e. the angle raster extends half a step beyond the upper left
    corner of the tile.
    """

    def _get_grid(values_list):
        # parse whitespace-separated rows of floats; "NaN" cells become masked
        return ma.masked_invalid(
            np.array(
                [
                    [
                        np.nan if cell == "NaN" else float(cell)
                        for cell in row.text.split()
                    ]
                    for row in values_list
                ],
                dtype=np.float32,
            )
        )

    def _get_affine(bounds=None, row_step=None, col_step=None, shape=None):
        # shift the grid by half a cell so cell centers register to the tile corner
        left, _, _, top = bounds
        height, width = shape

        angles_left = left - col_step / 2
        angles_right = angles_left + col_step * width
        angles_top = top + row_step / 2
        angles_bottom = angles_top - row_step * height

        return from_bounds(
            angles_left, angles_bottom, angles_right, angles_top, width, height
        )

    items = group.findall(tag)[0]
    col_step = int(items.findall("COL_STEP")[0].text)
    row_step = int(items.findall("ROW_STEP")[0].text)
    grid = _get_grid(items.findall("Values_List")[0])
    affine = _get_affine(
        bounds=bounds, row_step=row_step, col_step=col_step, shape=grid.shape
    )
    return ReferencedRaster(data=grid, transform=affine, bounds=bounds, crs=crs)