mapchete_eo-2026.2.0-py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -0
- mapchete_eo/array/__init__.py +0 -0
- mapchete_eo/array/buffer.py +16 -0
- mapchete_eo/array/color.py +29 -0
- mapchete_eo/array/convert.py +163 -0
- mapchete_eo/base.py +653 -0
- mapchete_eo/blacklist.txt +175 -0
- mapchete_eo/cli/__init__.py +30 -0
- mapchete_eo/cli/bounds.py +22 -0
- mapchete_eo/cli/options_arguments.py +227 -0
- mapchete_eo/cli/s2_brdf.py +77 -0
- mapchete_eo/cli/s2_cat_results.py +130 -0
- mapchete_eo/cli/s2_find_broken_products.py +77 -0
- mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
- mapchete_eo/cli/s2_mask.py +71 -0
- mapchete_eo/cli/s2_mgrs.py +45 -0
- mapchete_eo/cli/s2_rgb.py +114 -0
- mapchete_eo/cli/s2_verify.py +129 -0
- mapchete_eo/cli/static_catalog.py +82 -0
- mapchete_eo/eostac.py +30 -0
- mapchete_eo/exceptions.py +87 -0
- mapchete_eo/image_operations/__init__.py +12 -0
- mapchete_eo/image_operations/blend_functions.py +579 -0
- mapchete_eo/image_operations/color_correction.py +136 -0
- mapchete_eo/image_operations/compositing.py +266 -0
- mapchete_eo/image_operations/dtype_scale.py +43 -0
- mapchete_eo/image_operations/fillnodata.py +130 -0
- mapchete_eo/image_operations/filters.py +319 -0
- mapchete_eo/image_operations/linear_normalization.py +81 -0
- mapchete_eo/image_operations/sigmoidal.py +114 -0
- mapchete_eo/io/__init__.py +37 -0
- mapchete_eo/io/assets.py +496 -0
- mapchete_eo/io/items.py +162 -0
- mapchete_eo/io/levelled_cubes.py +259 -0
- mapchete_eo/io/path.py +155 -0
- mapchete_eo/io/products.py +423 -0
- mapchete_eo/io/profiles.py +45 -0
- mapchete_eo/platforms/sentinel2/__init__.py +17 -0
- mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
- mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
- mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
- mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
- mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
- mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
- mapchete_eo/platforms/sentinel2/config.py +241 -0
- mapchete_eo/platforms/sentinel2/driver.py +43 -0
- mapchete_eo/platforms/sentinel2/masks.py +329 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/base.py +56 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py +135 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py +639 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +50 -0
- mapchete_eo/platforms/sentinel2/processing_baseline.py +163 -0
- mapchete_eo/platforms/sentinel2/product.py +747 -0
- mapchete_eo/platforms/sentinel2/source.py +114 -0
- mapchete_eo/platforms/sentinel2/types.py +114 -0
- mapchete_eo/processes/__init__.py +0 -0
- mapchete_eo/processes/config.py +51 -0
- mapchete_eo/processes/dtype_scale.py +112 -0
- mapchete_eo/processes/eo_to_xarray.py +19 -0
- mapchete_eo/processes/merge_rasters.py +239 -0
- mapchete_eo/product.py +323 -0
- mapchete_eo/protocols.py +61 -0
- mapchete_eo/search/__init__.py +14 -0
- mapchete_eo/search/base.py +285 -0
- mapchete_eo/search/config.py +113 -0
- mapchete_eo/search/s2_mgrs.py +313 -0
- mapchete_eo/search/stac_search.py +278 -0
- mapchete_eo/search/stac_static.py +197 -0
- mapchete_eo/search/utm_search.py +251 -0
- mapchete_eo/settings.py +25 -0
- mapchete_eo/sort.py +60 -0
- mapchete_eo/source.py +109 -0
- mapchete_eo/time.py +62 -0
- mapchete_eo/types.py +76 -0
- mapchete_eo-2026.2.0.dist-info/METADATA +91 -0
- mapchete_eo-2026.2.0.dist-info/RECORD +89 -0
- mapchete_eo-2026.2.0.dist-info/WHEEL +4 -0
- mapchete_eo-2026.2.0.dist-info/entry_points.txt +11 -0
- mapchete_eo-2026.2.0.dist-info/licenses/LICENSE +21 -0
mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py (new file, +639 lines)

@@ -0,0 +1,639 @@
"""
A metadata parser helps to read additional Sentinel-2 metadata such as
sun angles, quality masks, etc.
"""

from __future__ import annotations

import logging
from functools import cached_property
from typing import Any, Dict, List, Optional, Tuple, Union
from xml.etree.ElementTree import Element, ParseError

import numpy as np
import numpy.ma as ma
import pystac
from affine import Affine
from fiona.transform import transform_geom
from mapchete import Timer
from mapchete.io.raster import ReferencedRaster, resample_from_array
from mapchete.path import MPath
from mapchete.protocols import GridProtocol
from mapchete.types import Bounds, Grid
from rasterio.crs import CRS
from rasterio.enums import Resampling
from rasterio.fill import fillnodata
from rasterio.transform import from_bounds
from shapely import MultiPolygon, Polygon
from shapely.geometry import mapping, shape
from shapely.geometry.base import BaseGeometry
from tilematrix import Shape

from mapchete_eo.exceptions import AssetEmpty, AssetMissing, CorruptedProductMetadata
from mapchete_eo.io import open_xml, read_mask_as_raster
from mapchete_eo.io.items import get_item_property
from mapchete_eo.io.path import asset_mpath
from mapchete_eo.platforms.sentinel2.metadata_parser.models import (
    ViewingIncidenceAngles,
    SunAngleData,
    SunAnglesData,
)
from mapchete_eo.platforms.sentinel2.metadata_parser.base import S2MetadataPathMapper
from mapchete_eo.platforms.sentinel2.metadata_parser.default_path_mapper import (
    XMLMapper,
)
from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
from mapchete_eo.platforms.sentinel2.types import (
    BandQI,
    ClassificationBandIndex,
    CloudType,
    L2ABand,
    ProductQI,
    ProductQIMaskResolution,
    Resolution,
    SunAngle,
    ViewAngle,
)

logger = logging.getLogger(__name__)


def open_granule_metadata_xml(metadata_xml: MPath) -> Element:
    try:
        return open_xml(metadata_xml)
    except ParseError as exc:
        raise CorruptedProductMetadata(exc)


class S2Metadata:
    metadata_xml: MPath
    path_mapper: S2MetadataPathMapper
    processing_baseline: ProcessingBaseline
    boa_offset_applied: bool = False
    _cached_xml_root: Optional[Element] = None
    crs: CRS
    bounds: Bounds
    footprint: Union[Polygon, MultiPolygon]
    _cache: dict

    def __init__(
        self,
        metadata_xml: MPath,
        path_mapper: S2MetadataPathMapper,
        xml_root: Optional[Element] = None,
        boa_offset_applied: bool = False,
        **kwargs,
    ):
        self.metadata_xml = metadata_xml
        self._cached_xml_root = xml_root
        self._cache = dict(viewing_incidence_angles=dict(), detector_footprints=dict())
        self.path_mapper = path_mapper
        self.processing_baseline = path_mapper.processing_baseline
        self.boa_offset_applied = boa_offset_applied
        self._metadata_dir = metadata_xml.parent

        # get geoinformation per resolution and bounds
        self.crs = self._crs
        self._grids = _get_grids(self.xml_root, self.crs)
        self.bounds = self._grids[Resolution["10m"]].bounds
        self.footprint = shape(self.bounds)

    def __repr__(self):
        return f"<S2Metadata id={self.product_id}, processing_baseline={self.processing_baseline}>"

    def clear_cached_data(self):
        self._cache = dict(viewing_incidence_angles=dict(), detector_footprints=dict())
        if self._cached_xml_root is not None:
            self._cached_xml_root.clear()
            self._cached_xml_root = None
        self.path_mapper.clear_cached_data()

    @property
    def __geo_interface__(self) -> dict:
        return mapping(self.footprint)

    @property
    def footprint_latlon(self) -> BaseGeometry:
        return shape(
            transform_geom(
                src_crs=self.crs,
                dst_crs="EPSG:4326",
                geom=self.__geo_interface__,
                antimeridian_cutting=True,
            )
        )

    @classmethod
    def from_metadata_xml(
        cls,
        metadata_xml: Union[str, MPath],
        path_mapper: Optional[S2MetadataPathMapper] = None,
        processing_baseline: Optional[str] = None,
        **kwargs,
    ) -> S2Metadata:
        metadata_xml = MPath.from_inp(metadata_xml, **kwargs)
        xml_root = open_granule_metadata_xml(metadata_xml)

        if path_mapper is None:
            path_mapper = XMLMapper(metadata_xml=metadata_xml, xml_root=xml_root)

        # use processing baseline version from argument if available
        if processing_baseline:
            path_mapper.processing_baseline = ProcessingBaseline.from_version(
                processing_baseline
            )
        # use the information about processing baseline gained when initializing the default mapper to
        # let the path mapper generate the right paths
        else:
            _default_path_mapper = XMLMapper(
                xml_root=xml_root, metadata_xml=metadata_xml, **kwargs
            )
            path_mapper.processing_baseline = _default_path_mapper.processing_baseline

        return S2Metadata(
            metadata_xml, path_mapper=path_mapper, xml_root=xml_root, **kwargs
        )

    @staticmethod
    def from_stac_item(
        item: pystac.Item,
        metadata_xml_asset_name: Tuple[str, ...] = ("metadata", "granule_metadata"),
        boa_offset_field: Union[str, Tuple[str, ...]] = (
            "earthsearch:boa_offset_applied"
        ),
        processing_baseline_field: Union[str, Tuple[str, ...]] = (
            "s2:processing_baseline",
            "sentinel2:processing_baseline",
            "processing:version",
        ),
        **kwargs,
    ) -> S2Metadata:
        # try to find path to metadata.xml
        metadata_xml_path = asset_mpath(item, metadata_xml_asset_name)
        # make path absolute
        if not (metadata_xml_path.is_remote() or metadata_xml_path.is_absolute()):
            metadata_xml_path = MPath(item.self_href).parent / metadata_xml_path

        # try to find information on processing baseline version
        processing_baseline = get_item_property(item, processing_baseline_field)

        # see if boa_offset_applied flag is available
        boa_offset_applied = get_item_property(item, boa_offset_field, default=False)

        return S2Metadata.from_metadata_xml(
            metadata_xml=metadata_xml_path,
            processing_baseline=processing_baseline,
            boa_offset_applied=boa_offset_applied,
            **kwargs,
        )

    @property
    def xml_root(self):
        if self._cached_xml_root is None:  # pragma: no cover
            self._cached_xml_root = open_granule_metadata_xml(self.metadata_xml)
        return self._cached_xml_root

    @cached_property
    def product_id(self) -> str:
        return next(self.xml_root.iter("TILE_ID")).text

    @cached_property
    def datastrip_id(self) -> str:
        return next(self.xml_root.iter("DATASTRIP_ID")).text

    @cached_property
    def _crs(self) -> CRS:
        crs_str = next(self.xml_root.iter("HORIZONTAL_CS_CODE")).text
        return CRS.from_string(crs_str)

    @property
    def sun_angles(self) -> SunAnglesData:
        """
        Return sun angle grids.
        """
        sun_angles: dict = {angle.value.lower(): dict() for angle in SunAngle}
        for angle in SunAngle:
            raster = _get_grid_data(
                group=next(self.xml_root.iter("Sun_Angles_Grid")),
                tag=angle,
                bounds=self.bounds,
                crs=self.crs,
            )
            mean = float(
                next(self.xml_root.iter("Mean_Sun_Angle"))
                .findall(f"{angle.value.upper()}_ANGLE")[0]
                .text
            )
            sun_angles[angle.value.lower()] = SunAngleData(raster=raster, mean=mean)
        return SunAnglesData(**sun_angles)

    @property
    def assets(self) -> Dict[str, MPath]:
        """
        Mapping of all available metadata assets such as QI bands.
        """
        out = dict()
        for product_qi_mask in ProductQI:
            if product_qi_mask == ProductQI.classification:
                out[product_qi_mask.name] = self.path_mapper.product_qi_mask(
                    qi_mask=product_qi_mask
                )
            else:
                for resolution in ProductQIMaskResolution:
                    out[f"{product_qi_mask.name}-{resolution.name}"] = (
                        self.path_mapper.product_qi_mask(
                            qi_mask=product_qi_mask, resolution=resolution
                        )
                    )

        for band_qi_mask in BandQI:
            for band in L2ABand:
                out[f"{band_qi_mask.name}-{band.name}"] = self.path_mapper.band_qi_mask(
                    qi_mask=band_qi_mask, band=band
                )

        return out

    def grid(self, resolution: Resolution) -> Grid:
        """
        Return grid for resolution.
        """
        return self._grids[resolution]

    def shape(self, resolution: Resolution) -> Shape:
        """
        Return grid shape for resolution.
        """
        return self._grids[resolution].shape

    def transform(self, resolution: Resolution) -> Affine:
        """
        Return Affine object for resolution.
        """
        return self._grids[resolution].transform

    #####################
    # product QI layers #
    #####################
    def l1c_cloud_mask(
        self,
        cloud_type: CloudType = CloudType.all,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """
        Return L1C classification cloud mask.
        """
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        if cloud_type == CloudType.all:
            indexes = [
                ClassificationBandIndex[CloudType.cirrus.name].value,
                ClassificationBandIndex[CloudType.opaque.name].value,
            ]
            cloud_types = [CloudType.cirrus.name, CloudType.opaque.name]
        else:
            indexes = [ClassificationBandIndex[cloud_type.name].value]
            cloud_types = [cloud_type.name]
        return read_mask_as_raster(
            self.path_mapper.classification_mask(),
            indexes=indexes,
            dst_grid=dst_grid,
            rasterize_feature_filter=lambda feature: feature["properties"][
                "maskType"
            ].lower()
            in cloud_types,
            rasterize_value_func=lambda feature: True,
            dtype=bool,
            masked=False,
            cached_read=cached_read,
        )

    def snow_ice_mask(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        cached_read: bool = False,
    ) -> ReferencedRaster:
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        return read_mask_as_raster(
            self.path_mapper.classification_mask(),
            indexes=[ClassificationBandIndex.snow_ice.value],
            dst_grid=dst_grid,
            rasterize_feature_filter=lambda feature: False,
            rasterize_value_func=lambda feature: True,
            dtype=bool,
            masked=False,
            cached_read=cached_read,
        )

    def cloud_probability(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        resampling: Resampling = Resampling.bilinear,
        from_resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """Return cloud probability mask."""
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        # TODO: determine whether to read the 20m or the 60m file
        return read_mask_as_raster(
            self.path_mapper.cloud_probability_mask(resolution=from_resolution),
            dst_grid=dst_grid,
            resampling=resampling,
            rasterize_value_func=lambda feature: True,
            masked=False,
            cached_read=cached_read,
        )

    def snow_probability(
        self,
        dst_grid: Union[GridProtocol, Resolution, None] = None,
        resampling: Resampling = Resampling.bilinear,
        from_resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """Return snow probability mask."""
        dst_grid = dst_grid or Resolution["20m"]
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        # TODO: determine whether to read the 20m or the 60m file
        return read_mask_as_raster(
            self.path_mapper.snow_probability_mask(resolution=from_resolution),
            dst_grid=dst_grid,
            resampling=resampling,
            rasterize_value_func=lambda feature: True,
            masked=False,
            cached_read=cached_read,
        )

    ##############
    # band masks #
    ##############
    def detector_footprints(
        self,
        band: L2ABand,
        dst_grid: Union[GridProtocol, Resolution] = Resolution["60m"],
        cached_read: bool = False,
    ) -> ReferencedRaster:
        """
        Return detector footprints.
        """

        def _get_detector_id(feature) -> int:
            return int(feature["properties"]["gml_id"].split("-")[-2])

        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)

        cache_item_id = f"{band}-{str(dst_grid)}"
        if cache_item_id not in self._cache["detector_footprints"]:
            try:
                path = self.path_mapper.band_qi_mask(
                    qi_mask=BandQI.detector_footprints, band=band
                )
                logger.debug("reading footprints from %s ...", path)
                footprints = read_mask_as_raster(
                    path,
                    dst_grid=dst_grid,
                    rasterize_value_func=_get_detector_id,
                    cached_read=cached_read,
                    dtype=np.uint8,
                )
            except FileNotFoundError as exc:
                raise AssetMissing(exc)

            if not footprints.data.any():
                raise AssetEmpty(
                    f"No detector footprints found for band {band} in {self}"
                )
            self._cache["detector_footprints"][cache_item_id] = footprints
        return self._cache["detector_footprints"][cache_item_id]

    def technical_quality_mask(
        self,
        band: L2ABand,
        dst_grid: Union[GridProtocol, Resolution] = Resolution["60m"],
    ) -> ReferencedRaster:
        """
        Return technical quality mask.
        """
        if isinstance(dst_grid, Resolution):
            dst_grid = self.grid(dst_grid)
        try:
            return read_mask_as_raster(
                self.path_mapper.band_qi_mask(
                    qi_mask=BandQI.technical_quality, band=band
                ),
                dst_grid=dst_grid,
            )
        except FileNotFoundError as exc:
            raise AssetMissing(exc)

    def viewing_incidence_angles(self, band: L2ABand) -> ViewingIncidenceAngles:
        """
        Return viewing incidence angles.

        Parameters
        ----------
        band : L2ABand
            Band for which the angles are returned.

        """
        if self._cache["viewing_incidence_angles"].get(band) is None:
            angles: Dict[str, Any] = {
                "zenith": {"raster": None, "detectors": dict(), "mean": None},
                "azimuth": {"raster": None, "detectors": dict(), "mean": None},
            }
            for grids in self.xml_root.iter("Viewing_Incidence_Angles_Grids"):
                band_idx = int(grids.get("bandId"))
                if band_idx == band.value:
                    detector_id = int(grids.get("detectorId"))
                    for angle in ViewAngle:
                        raster = _get_grid_data(
                            group=grids,
                            tag=angle.value,
                            bounds=self.bounds,
                            crs=self.crs,
                        )
                        angles[angle.value.lower()]["detectors"][detector_id] = raster
            for band_angles in self.xml_root.iter("Mean_Viewing_Incidence_Angle_List"):
                for band_angle in band_angles:
                    band_idx = int(band_angle.get("bandId"))
                    if band_idx == band.value:
                        for angle in ViewAngle:
                            angles[angle.value.lower()].update(
                                mean=float(
                                    band_angle.findall(f"{angle.value.upper()}_ANGLE")[
                                        0
                                    ].text
                                )
                            )
            self._cache["viewing_incidence_angles"][band] = ViewingIncidenceAngles(
                **angles
            )
        return self._cache["viewing_incidence_angles"][band]

    def viewing_incidence_angle(
        self, band: L2ABand, detector_id: int, angle: ViewAngle = ViewAngle.zenith
    ) -> ReferencedRaster:
        return (
            self.viewing_incidence_angles(band).get_angle(angle).detectors[detector_id]
        )

    def mean_viewing_incidence_angles(
        self,
        bands: Union[List[L2ABand], L2ABand, None] = None,
        angle: ViewAngle = ViewAngle.zenith,
        resolution: Resolution = Resolution["120m"],
        resampling: Resampling = Resampling.nearest,
        smoothing_iterations: int = 10,
        cached_read: bool = False,
    ) -> ma.MaskedArray:
        bands = list(L2ABand) if bands is None else bands
        bands = [bands] if isinstance(bands, L2ABand) else bands

        def _band_angles(band: L2ABand) -> ma.MaskedArray:
            detector_angles = (
                self.viewing_incidence_angles(band).get_angle(angle).detectors
            )
            band_angles = ma.masked_equal(
                np.zeros(self.shape(resolution), dtype=np.float32), 0
            )
            detector_footprints = self.detector_footprints(
                band, dst_grid=resolution, cached_read=cached_read
            )
            detector_ids = [x for x in np.unique(detector_footprints.data) if x != 0]

            for detector_id in detector_ids:
                # handle rare cases where detector geometries are available but no respective
                # angle arrays:
                if detector_id not in detector_angles:  # pragma: no cover
                    logger.debug(
                        f"no {angle} angles grid found for detector {detector_id}"
                    )
                    continue
                detector_angles_raster = detector_angles[detector_id]
                # interpolate missing nodata edges and return BRDF difference model
                detector_angles_raster.data = ma.masked_invalid(
                    fillnodata(
                        detector_angles_raster.data,
                        smoothing_iterations=smoothing_iterations,
                    )
                )
                # resample detector angles to output resolution
                detector_angle = resample_from_array(
                    detector_angles_raster,
                    nodata=0,
                    out_grid=self.grid(resolution),
                    resampling=resampling,
                    keep_2d=True,
                )
                # select pixels which are covered by detector
                detector_mask = np.where(
                    detector_footprints.data == detector_id, True, False
                )
                if len(detector_footprints.data.shape) == 3:
                    detector_mask = detector_mask[0]
                # merge detector stripes
                band_angles[detector_mask] = detector_angle[detector_mask]
                band_angles.mask[detector_mask] = detector_angle.mask[detector_mask]

            return band_angles

        with Timer() as tt:
            mean = ma.mean(ma.stack([_band_angles(band) for band in bands]), axis=0)
        logger.debug(
            "mean viewing incidence angles for %s bands generated in %s", len(bands), tt
        )
        return mean


def _get_grids(root: Element, crs: CRS) -> Dict[Resolution, Grid]:
    geoinfo = {
        Resolution["10m"]: dict(crs=crs),
        Resolution["20m"]: dict(crs=crs),
        Resolution["60m"]: dict(crs=crs),
    }
    for size in root.iter("Size"):
        resolution = Resolution[f"{size.get('resolution')}m"]
        for item in size:
            if item.text is None:
                raise TypeError(f"cannot derive height or width from: {item.text}")
            if item.tag == "NROWS":
                height = int(item.text)
            elif item.tag == "NCOLS":
                width = int(item.text)
        geoinfo[resolution].update(height=height, width=width)

    for geoposition in root.iter("Geoposition"):
        resolution = Resolution[f"{geoposition.get('resolution')}m"]
        for item in geoposition:
            if item.text is None:
                raise TypeError(f"cannot derive float values from: {item.text}")
            if item.tag == "ULX":
                left = float(item.text)
            elif item.tag == "ULY":
                top = float(item.text)
            elif item.tag == "XDIM":
                x_size = float(item.text)
            elif item.tag == "YDIM":
                y_size = float(item.text)
        right = left + width * x_size
        bottom = top + height * y_size
        geoinfo[resolution].update(
            transform=from_bounds(left, bottom, right, top, width, height),
        )
    out_grids = {k: Grid(**v) for k, v in geoinfo.items()}
    for additional_resolution in [120]:
        resolution = Resolution[f"{additional_resolution}m"]
        grid_10m = out_grids[Resolution["10m"]]
        relation = additional_resolution // 10
        width = grid_10m.width // relation
        height = grid_10m.height // relation
        out_grids[resolution] = Grid(
            from_bounds(left, bottom, right, top, width, height), height, width, crs
        )
    return out_grids


def _get_grid_data(group, tag, bounds, crs) -> ReferencedRaster:
    def _get_grid(values_list):
        return ma.masked_invalid(
            np.array(
                [
                    [
                        np.nan if cell == "NaN" else float(cell)
                        for cell in row.text.split()
                    ]
                    for row in values_list
                ],
                dtype=np.float32,
            )
        )

    def _get_affine(bounds=None, row_step=None, col_step=None, shape=None):
        left, _, _, top = bounds
        height, width = shape

        angles_left = left - col_step / 2
        angles_right = angles_left + col_step * width
        angles_top = top + row_step / 2
        angles_bottom = angles_top - row_step * height

        return from_bounds(
            angles_left, angles_bottom, angles_right, angles_top, width, height
        )

    items = group.findall(tag)[0]
    col_step = int(items.findall("COL_STEP")[0].text)
    row_step = int(items.findall("ROW_STEP")[0].text)
    grid = _get_grid(items.findall("Values_List")[0])
    affine = _get_affine(
        bounds=bounds, row_step=row_step, col_step=col_step, shape=grid.shape
    )
    return ReferencedRaster(data=grid, transform=affine, bounds=bounds, crs=crs)
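For orientation, below is a minimal usage sketch of the S2Metadata class added above. It is not part of the package: the granule metadata path is a hypothetical placeholder, and only constructors, properties and defaults that appear in this diff (from_metadata_xml, sun_angles, grid, l1c_cloud_mask, mean_viewing_incidence_angles) are used.

# Sketch only: the S3 path below is a placeholder, not a real product.
from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
from mapchete_eo.platforms.sentinel2.types import CloudType, Resolution

# parse granule metadata (MTD_TL.xml) straight from a path or URL
metadata = S2Metadata.from_metadata_xml(
    "s3://some-bucket/S2A_MSIL2A_.../GRANULE/L2A_T33UWP_.../MTD_TL.xml"
)
print(metadata.product_id, metadata.crs, metadata.bounds)

# per-resolution grid information parsed from the XML
grid_20m = metadata.grid(Resolution["20m"])

# sun angle grids and their mean values
sun = metadata.sun_angles

# L1C classification cloud mask rasterized onto the 20m grid
clouds = metadata.l1c_cloud_mask(cloud_type=CloudType.all, dst_grid=Resolution["20m"])

# mean viewing incidence zenith angles over all bands (120m grid by default)
mean_zenith = metadata.mean_viewing_incidence_angles()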
mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py (new file, +57 lines)

@@ -0,0 +1,57 @@
from typing import Dict, Any

# importing this is crucial so the mapping functions get registered before registry is accessed
from mapchete_eo.platforms.sentinel2.preconfigured_sources.item_mappers import (
    earthsearch_assets_paths_mapper,
    earthsearch_id_mapper,
    earthsearch_to_s2metadata,
    cdse_asset_names,
    cdse_s2metadata,
)
from mapchete_eo.platforms.sentinel2.preconfigured_sources.guessers import (
    guess_metadata_path_mapper,
    guess_s2metadata_from_item,
    guess_s2metadata_from_metadata_xml,
)


__all__ = [
    "guess_metadata_path_mapper",
    "guess_s2metadata_from_item",
    "guess_s2metadata_from_metadata_xml",
    "earthsearch_assets_paths_mapper",
    "earthsearch_id_mapper",
    "earthsearch_to_s2metadata",
    "cdse_asset_names",
    "cdse_s2metadata",
]

KNOWN_SOURCES: Dict[str, Any] = {
    "EarthSearch": {
        "collection": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-c1-l2a",
    },
    "EarthSearch_legacy": {
        "collection": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-l2a",
    },
    "CDSE": {
        "collection": "https://stac.dataspace.copernicus.eu/v1/collections/sentinel-2-l2a",
        "metadata_archive": "CDSE",
    },
}

DEPRECATED_ARCHIVES = {
    "S2AWS_COG": {
        "collection": "https://earth-search.aws.element84.com/v1/collections/sentinel-2-c1-l2a",
    },
    "S2AWS_JP2": {
        "collection": "https://stac.dataspace.copernicus.eu/v1/collections/sentinel-2-l2a",
        "data_archive": "AWSJP2",
    },
    "S2CDSE_AWSJP2": {
        "collection": "https://stac.dataspace.copernicus.eu/v1/collections/sentinel-2-l2a",
        "data_archive": "AWSJP2",
    },
    "S2CDSE_JP2": {
        "collection": "https://stac.dataspace.copernicus.eu/v1/collections/sentinel-2-l2a",
    },
}
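The KNOWN_SOURCES mapping above stores a STAC collection URL per named source (plus, for CDSE, a metadata archive hint). A hedged sketch of how such an entry could be consumed: split the collection URL into API root and collection id, search it with pystac-client, and hand the resulting items to S2Metadata.from_stac_item from this diff. pystac-client, the bbox and the time range are illustrative assumptions, not part of this package.

# Sketch only: pystac-client is an assumed external dependency; bbox/datetime
# are placeholder search parameters.
from pystac_client import Client

from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
from mapchete_eo.platforms.sentinel2.preconfigured_sources import KNOWN_SOURCES

collection_url = KNOWN_SOURCES["EarthSearch"]["collection"]
# the STAC API root is everything before "/collections/<collection id>"
api_root, _, collection_id = collection_url.partition("/collections/")

client = Client.open(api_root)
search = client.search(
    collections=[collection_id],
    bbox=[16.2, 47.9, 16.6, 48.3],
    datetime="2024-06-01/2024-06-30",
    max_items=5,
)
for item in search.items():
    granule = S2Metadata.from_stac_item(item)
    print(item.id, granule.processing_baseline)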