mapchete-eo 2026.2.0__py3-none-any.whl
This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
- mapchete_eo/__init__.py +1 -0
- mapchete_eo/array/__init__.py +0 -0
- mapchete_eo/array/buffer.py +16 -0
- mapchete_eo/array/color.py +29 -0
- mapchete_eo/array/convert.py +163 -0
- mapchete_eo/base.py +653 -0
- mapchete_eo/blacklist.txt +175 -0
- mapchete_eo/cli/__init__.py +30 -0
- mapchete_eo/cli/bounds.py +22 -0
- mapchete_eo/cli/options_arguments.py +227 -0
- mapchete_eo/cli/s2_brdf.py +77 -0
- mapchete_eo/cli/s2_cat_results.py +130 -0
- mapchete_eo/cli/s2_find_broken_products.py +77 -0
- mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
- mapchete_eo/cli/s2_mask.py +71 -0
- mapchete_eo/cli/s2_mgrs.py +45 -0
- mapchete_eo/cli/s2_rgb.py +114 -0
- mapchete_eo/cli/s2_verify.py +129 -0
- mapchete_eo/cli/static_catalog.py +82 -0
- mapchete_eo/eostac.py +30 -0
- mapchete_eo/exceptions.py +87 -0
- mapchete_eo/image_operations/__init__.py +12 -0
- mapchete_eo/image_operations/blend_functions.py +579 -0
- mapchete_eo/image_operations/color_correction.py +136 -0
- mapchete_eo/image_operations/compositing.py +266 -0
- mapchete_eo/image_operations/dtype_scale.py +43 -0
- mapchete_eo/image_operations/fillnodata.py +130 -0
- mapchete_eo/image_operations/filters.py +319 -0
- mapchete_eo/image_operations/linear_normalization.py +81 -0
- mapchete_eo/image_operations/sigmoidal.py +114 -0
- mapchete_eo/io/__init__.py +37 -0
- mapchete_eo/io/assets.py +496 -0
- mapchete_eo/io/items.py +162 -0
- mapchete_eo/io/levelled_cubes.py +259 -0
- mapchete_eo/io/path.py +155 -0
- mapchete_eo/io/products.py +423 -0
- mapchete_eo/io/profiles.py +45 -0
- mapchete_eo/platforms/sentinel2/__init__.py +17 -0
- mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
- mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
- mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
- mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
- mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
- mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
- mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
- mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
- mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
- mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
- mapchete_eo/platforms/sentinel2/config.py +241 -0
- mapchete_eo/platforms/sentinel2/driver.py +43 -0
- mapchete_eo/platforms/sentinel2/masks.py +329 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/base.py +56 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py +135 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
- mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py +639 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
- mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
- mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +50 -0
- mapchete_eo/platforms/sentinel2/processing_baseline.py +163 -0
- mapchete_eo/platforms/sentinel2/product.py +747 -0
- mapchete_eo/platforms/sentinel2/source.py +114 -0
- mapchete_eo/platforms/sentinel2/types.py +114 -0
- mapchete_eo/processes/__init__.py +0 -0
- mapchete_eo/processes/config.py +51 -0
- mapchete_eo/processes/dtype_scale.py +112 -0
- mapchete_eo/processes/eo_to_xarray.py +19 -0
- mapchete_eo/processes/merge_rasters.py +239 -0
- mapchete_eo/product.py +323 -0
- mapchete_eo/protocols.py +61 -0
- mapchete_eo/search/__init__.py +14 -0
- mapchete_eo/search/base.py +285 -0
- mapchete_eo/search/config.py +113 -0
- mapchete_eo/search/s2_mgrs.py +313 -0
- mapchete_eo/search/stac_search.py +278 -0
- mapchete_eo/search/stac_static.py +197 -0
- mapchete_eo/search/utm_search.py +251 -0
- mapchete_eo/settings.py +25 -0
- mapchete_eo/sort.py +60 -0
- mapchete_eo/source.py +109 -0
- mapchete_eo/time.py +62 -0
- mapchete_eo/types.py +76 -0
- mapchete_eo-2026.2.0.dist-info/METADATA +91 -0
- mapchete_eo-2026.2.0.dist-info/RECORD +89 -0
- mapchete_eo-2026.2.0.dist-info/WHEEL +4 -0
- mapchete_eo-2026.2.0.dist-info/entry_points.txt +11 -0
- mapchete_eo-2026.2.0.dist-info/licenses/LICENSE +21 -0
mapchete_eo/platforms/sentinel2/masks.py
@@ -0,0 +1,329 @@
import logging
from collections import defaultdict
from typing import Generator, Iterator, List, Optional, Sequence, Union

import numpy as np
import numpy.ma as ma
import xarray as xr
from mapchete.protocols import GridProtocol
from mapchete.types import NodataVals

from mapchete_eo.array.convert import to_dataarray, to_masked_array
from mapchete_eo.exceptions import (
    AssetKeyError,
    CorruptedProduct,
    CorruptedSlice,
    EmptySliceException,
    EmptyStackException,
    NoSourceProducts,
)
from mapchete_eo.io.products import Slice
from mapchete_eo.platforms.sentinel2.product import S2Product
from mapchete_eo.platforms.sentinel2.types import Resolution
from mapchete_eo.protocols import EOProductProtocol
from mapchete_eo.sort import SortMethodConfig
from mapchete_eo.types import MergeMethod

logger = logging.getLogger(__name__)


def read_masks(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    nodatavals: NodataVals = None,
    product_read_kwargs: dict = {},
) -> ma.MaskedArray:
    """Read a grid window of product masks and stack them into one masked array."""
    return ma.stack(
        [
            to_masked_array(m)
            for m in generate_masks(
                products=products,
                grid=grid,
                nodatavals=nodatavals,
                product_read_kwargs=product_read_kwargs,
            )
        ]
    )
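
A minimal usage sketch (not part of the diff); the import path follows the file
listing above, while products and mask_config are placeholders for a list of
constructed S2Product instances and a mask configuration accepted by
S2Product.get_mask:

    from mapchete_eo.platforms.sentinel2.masks import read_masks

    # stack every product's mask into one (products, 1, height, width) masked array
    stacked = read_masks(
        products=products,
        product_read_kwargs={"mask_config": mask_config},
    )
    print(stacked.shape, stacked.dtype)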


def masks_to_xarray(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    slice_axis_name: str = "time",
    band_axis_name: str = "bands",
    x_axis_name: str = "x",
    y_axis_name: str = "y",
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    raise_empty: bool = True,
    product_read_kwargs: dict = {},
) -> xr.Dataset:
    """Read masks of products and merge them into an xarray.Dataset."""
    data_vars = list(
        generate_slice_masks_dataarrays(
            products=products,
            grid=grid,
            merge_products_by=merge_products_by,
            merge_method=merge_method,
            sort=sort,
            product_read_kwargs=product_read_kwargs,
            raise_empty=raise_empty,
        )
    )
    if merge_products_by and merge_products_by not in ["date", "datetime"]:
        coords = {merge_products_by: [s.name for s in data_vars]}
        slice_axis_name = merge_products_by
    else:
        coords = {
            slice_axis_name: list(
                np.array(
                    [product.item.datetime for product in products], dtype=np.datetime64
                )
            )
        }

    return xr.Dataset(
        data_vars={s.name: s for s in data_vars},
        coords=coords,
    ).transpose(slice_axis_name, band_axis_name, x_axis_name, y_axis_name)
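
A sketch of reading merged masks into a Dataset (not part of the diff); the
STAC property name used for grouping is an assumption, and products and
mask_config are placeholders:

    from mapchete_eo.platforms.sentinel2.masks import masks_to_xarray
    from mapchete_eo.types import MergeMethod

    ds = masks_to_xarray(
        products=products,
        merge_products_by="sat:relative_orbit",
        merge_method=MergeMethod.all,
        product_read_kwargs={"mask_config": mask_config},
    )
    # one data variable per slice, axes transposed to (slice, bands, x, y)
    print(ds)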


def generate_masks(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    nodatavals: NodataVals = None,
    product_read_kwargs: dict = {},
) -> Iterator[xr.DataArray]:
    """
    Yield product masks as DataArrays so they can be stacked into a cube.

    This is analogous to the other read functions and can be used as input for
    read functions and read_levelled_cube if needed.

    TODO: apply these masks to existing cubes (NumPy arrays, xarrays)
    """
    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    logger.debug(f"reading {len(products)} product masks")

    # NodataVals can be a list of per-band values or a single value
    nodataval = nodatavals[0] if isinstance(nodatavals, list) else nodatavals

    # work on a copy so the caller's dict is not mutated
    product_read_kwargs = dict(product_read_kwargs)
    for product_ in products:
        if grid is None:
            grid = product_.metadata.grid(Resolution["10m"])
        elif isinstance(grid, Resolution):
            grid = product_.metadata.grid(grid)
        mask_grid = product_.get_mask(
            grid=grid,
            mask_config=product_read_kwargs["mask_config"],
        )
        yield to_dataarray(
            ma.masked_where(mask_grid == 0, ma.expand_dims(mask_grid.data, axis=0)),
            nodataval=nodataval,
            name=product_.id,
            attrs=product_.item.properties,
        )
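
A self-contained illustration (not from the package) of the masking step above:
pixels where the mask value is 0 end up hidden behind the numpy mask:

    import numpy as np
    import numpy.ma as ma

    data = np.array([[[0, 1], [1, 0]]], dtype=np.uint8)  # (band, y, x)
    masked = ma.masked_where(data == 0, data)
    print(masked.mask)  # True exactly where data == 0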


def merge_products_masks(
    products: Sequence[Union[S2Product, EOProductProtocol]],
    merge_method: MergeMethod = MergeMethod.first,
    product_read_kwargs: dict = {},
    raise_empty: bool = False,
) -> ma.MaskedArray:
    """
    Merge the given products' masks into one array.

    This is analogous to the merging of product slices: the default 'first'
    method keeps the first readable mask and fills its gaps, while 'all' uses
    every available mask in the datastrip/slice.
    """
    if merge_method == "average":
        raise ValueError(
            "merge method 'average' is not supported by 'merge_products_masks'; use 'first' or 'all'"
        )

    def read_remaining_valid_products_masks(
        products_iter: Iterator[Union[S2Product, EOProductProtocol]],
        product_read_kwargs: dict,
    ) -> Generator[ma.MaskedArray, None, None]:
        """Read and yield the remaining products' masks, discarding corrupted products."""
        try:
            for product in products_iter:
                try:
                    new = np.expand_dims(
                        product.get_mask(**product_read_kwargs).data, axis=0
                    )
                    yield ma.masked_array(data=new.astype(bool, copy=False))
                except (AssetKeyError, CorruptedProduct) as exc:
                    logger.debug("skip product %s because of %s", product.item.id, exc)
        except StopIteration:
            return

    if len(products) == 0:  # pragma: no cover
        raise NoSourceProducts("no products to merge")

    products_iter = iter(products)

    # read first valid product
    for product in products_iter:
        try:
            out: ma.MaskedArray = ma.masked_array(
                data=np.expand_dims(
                    product.get_mask(**product_read_kwargs).data, axis=0
                ).astype(bool, copy=False)
            )
            break
        except (AssetKeyError, CorruptedProduct) as exc:
            logger.debug("skip product mask %s because of %s", product.item.id, exc)
    else:
        # we cannot do anything here, as all products are broken
        raise CorruptedSlice("all products (masks) are broken here")

    # fill in gaps sequentially, product by product
    if merge_method == MergeMethod.first:
        for new in read_remaining_valid_products_masks(
            products_iter, product_read_kwargs
        ):
            masked = out.mask
            # update values at masked locations
            out[masked] = new[masked].astype(bool, copy=False)
            # if the whole output array is filled, there is no point in reading more data
            if out.all():
                return out

    # read all masks and combine them
    elif merge_method == MergeMethod.all:

        def _generate_arrays(
            first_product_array: ma.MaskedArray,
            remaining_product_arrays: Generator[ma.MaskedArray, None, None],
        ) -> Generator[ma.MaskedArray, None, None]:
            """Yield all available product arrays."""
            yield first_product_array
            yield from remaining_product_arrays

        # explicitly specify dtype to avoid upcasting the boolean arrays when stacking
        arrays = list(
            _generate_arrays(
                out,
                read_remaining_valid_products_masks(products_iter, product_read_kwargs),
            )
        )

        # filter out arrays that are entirely masked
        valid_arrays = [a for a in arrays if not ma.getmaskarray(a).all()]

        if valid_arrays:
            stacked = ma.stack(valid_arrays, dtype=out.dtype)
            out = stacked.min(axis=0)
        else:
            # all arrays were fully masked, so return a fully masked output
            out = ma.masked_all(out.shape, dtype=out.dtype)

    else:  # pragma: no cover
        raise NotImplementedError(f"unknown merge method: {merge_method}")

    if raise_empty and out.all():
        raise EmptySliceException(
            f"slice is empty after combining {len(products)} products"
        )

    return out
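
A toy comparison (not from the package) of the two merge strategies: 'first'
keeps the first readable mask and only fills its masked gaps, while 'all'
stacks every readable mask and reduces with a per-pixel minimum:

    import numpy.ma as ma

    a = ma.masked_array([True, True, False], mask=[False, True, False])
    b = ma.masked_array([False, True, True], mask=[False, False, False])

    # 'first': fill a's masked positions from b
    first = a.copy()
    first[a.mask] = b[a.mask]

    # 'all': stack and reduce with min
    merged = ma.stack([a, b]).min(axis=0)
    print(first, merged)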


def generate_slice_masks_dataarrays(
    products: List[S2Product],
    grid: Optional[GridProtocol] = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    mask_name: List = ["EOxCloudless_masks"],
    product_read_kwargs: dict = {},
    raise_empty: bool = True,
) -> Iterator[xr.DataArray]:
    """Yield each slice's (optionally merged) product masks as DataArrays."""
    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    stack_empty = True

    # group products into slices and sort slices if configured
    slices = product_masks_to_slices(
        products, group_by_property=merge_products_by, sort=sort
    )

    logger.debug(
        "reading %s products in %s groups...",
        len(products),
        len(slices),
    )
    for slice_ in slices:
        try:
            # if merge_products_by is None, each slice contains just one product,
            # so nothing has to be merged anyway
            with slice_.cached():
                yield to_dataarray(
                    merge_products_masks(
                        products=slice_.products,
                        merge_method=merge_method,
                        product_read_kwargs=dict(
                            product_read_kwargs,
                            grid=grid,
                        ),
                        raise_empty=raise_empty,
                    ),
                    # nodataval=1.0,
                    name=slice_.name,
                    band_names=mask_name,
                    attrs=slice_.properties,
                )
            # if at least one slice can be yielded, the stack is not empty
            stack_empty = False
        except (EmptySliceException, CorruptedSlice):
            pass

    if stack_empty:
        raise EmptyStackException("all slices are empty")


def product_masks_to_slices(
    products: List[S2Product],
    group_by_property: Optional[str] = None,
    sort: Optional[SortMethodConfig] = None,
) -> List[Slice]:
    """Group products by a property into Slices and optionally sort them."""
    if group_by_property:
        grouped = defaultdict(list)
        for product in products:
            grouped[product.get_property(group_by_property)].append(product)
        slices = [Slice(key, group) for key, group in grouped.items()]
    else:
        slices = [Slice(product.item.id, [product]) for product in products]

    # only sort when there are slices to sort, otherwise the sort function would fail
    if sort and slices:
        sort_dict = sort.model_dump()
        func = sort_dict.pop("func")
        slices = func(slices, **sort_dict)

    return slices
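
The grouping step in a nutshell (a standalone sketch, not from the package,
with plain dicts standing in for S2Product.get_property):

    from collections import defaultdict

    products = [
        {"id": "A", "orbit": 65},
        {"id": "B", "orbit": 65},
        {"id": "C", "orbit": 108},
    ]
    grouped = defaultdict(list)
    for product in products:
        grouped[product["orbit"]].append(product)
    # one slice per orbit: {65: ['A', 'B'], 108: ['C']}
    print({key: [p["id"] for p in group] for key, group in grouped.items()})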
mapchete_eo/platforms/sentinel2/metadata_parser/base.py
@@ -0,0 +1,56 @@
from abc import ABC, abstractmethod

from mapchete.path import MPath

from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
from mapchete_eo.platforms.sentinel2.types import (
    BandQI,
    L2ABand,
    ProductQI,
    ProductQIMaskResolution,
)


class S2MetadataPathMapper(ABC):
    """
    Abstract class that helps map asset paths from metadata.xml to their
    locations in various data archives.

    This is mainly used for additional data like QI masks.
    """

    # all available bands for Sentinel-2 Level 2A
    _bands = [band.name for band in L2ABand]

    processing_baseline: ProcessingBaseline

    @abstractmethod
    def product_qi_mask(
        self,
        qi_mask: ProductQI,
        resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
    ) -> MPath: ...

    @abstractmethod
    def classification_mask(self) -> MPath: ...

    @abstractmethod
    def cloud_probability_mask(
        self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
    ) -> MPath: ...

    @abstractmethod
    def snow_probability_mask(
        self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
    ) -> MPath: ...

    @abstractmethod
    def band_qi_mask(self, qi_mask: BandQI, band: L2ABand) -> MPath: ...

    @abstractmethod
    def technical_quality_mask(self, band: L2ABand) -> MPath: ...

    @abstractmethod
    def detector_footprints(self, band: L2ABand) -> MPath: ...

    def clear_cached_data(self) -> None: ...
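
A hypothetical subclass sketch (not part of the diff) showing how the abstract
interface might be filled in; the flat archive layout and file names below are
invented for illustration:

    from mapchete.path import MPath

    from mapchete_eo.platforms.sentinel2.metadata_parser.base import S2MetadataPathMapper
    from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
    from mapchete_eo.platforms.sentinel2.types import (
        BandQI,
        L2ABand,
        ProductQI,
        ProductQIMaskResolution,
    )

    class FlatArchiveMapper(S2MetadataPathMapper):
        """Map QI assets of an invented flat archive layout."""

        def __init__(self, product_root: MPath, processing_baseline: ProcessingBaseline):
            self.product_root = product_root
            self.processing_baseline = processing_baseline

        def product_qi_mask(
            self,
            qi_mask: ProductQI,
            resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
        ) -> MPath:
            return self.product_root / f"{qi_mask.name}_{resolution.name}.jp2"

        def classification_mask(self) -> MPath:
            return self.product_qi_mask(ProductQI.classification)

        def cloud_probability_mask(
            self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
        ) -> MPath:
            return self.product_qi_mask(ProductQI.cloud_probability, resolution=resolution)

        def snow_probability_mask(
            self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
        ) -> MPath:
            return self.product_qi_mask(ProductQI.snow_probability, resolution=resolution)

        def band_qi_mask(self, qi_mask: BandQI, band: L2ABand) -> MPath:
            return self.product_root / f"{qi_mask.name}_{band.name}.gml"

        def technical_quality_mask(self, band: L2ABand) -> MPath:
            return self.band_qi_mask(BandQI.technical_quality, band)

        def detector_footprints(self, band: L2ABand) -> MPath:
            return self.band_qi_mask(BandQI.detector_footprints, band)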
mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py
@@ -0,0 +1,135 @@
"""
A path mapper maps a product's metadata XML file to additional metadata
in a given archive or local SAFE file.
"""

import logging
from functools import cached_property
from typing import Optional
from xml.etree.ElementTree import Element

from mapchete.path import MPath

from mapchete_eo.io import open_xml
from mapchete_eo.platforms.sentinel2.metadata_parser.base import S2MetadataPathMapper
from mapchete_eo.platforms.sentinel2.processing_baseline import ProcessingBaseline
from mapchete_eo.platforms.sentinel2.types import (
    BandQI,
    L2ABand,
    ProductQI,
    ProductQIMaskResolution,
)

logger = logging.getLogger(__name__)


class XMLMapper(S2MetadataPathMapper):
    def __init__(
        self, metadata_xml: MPath, xml_root: Optional[Element] = None, **kwargs
    ):
        self.metadata_xml = metadata_xml
        self._cached_xml_root = xml_root
        self._metadata_dir = metadata_xml.parent

    def clear_cached_data(self):
        if self._cached_xml_root is not None:
            logger.debug("clear XMLMapper xml cache")
            self._cached_xml_root.clear()
            self._cached_xml_root = None

    @property
    def xml_root(self) -> Element:
        if self._cached_xml_root is None:
            self._cached_xml_root = open_xml(self.metadata_xml)
        return self._cached_xml_root

    @cached_property
    def processing_baseline(self):
        # try to guess the processing baseline from the product ID
        def _get_version(tag="TILE_ID"):
            product_id = next(self.xml_root.iter(tag)).text
            appendix = product_id.split("_")[-1]
            if appendix.startswith("N"):
                return appendix.lstrip("N")

        version = _get_version()
        try:
            return ProcessingBaseline.from_version(version)
        except Exception:  # pragma: no cover
            # fall back to the L1C product version; we don't test this because
            # hopefully we won't be confronted with such data
            try:
                l1c_version = _get_version("L1C_TILE_ID")
            except StopIteration:
                l1c_version = "02.06"
            if l1c_version is not None:
                return ProcessingBaseline.from_version(l1c_version)

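The version-guessing rule above, illustrated standalone (the tile ID is an
invented example; the baseline rides in the trailing "N"-prefixed token):

    tile_id = "S2A_OPER_MSI_L2A_TL_20260201T101031_A012345_T33UWP_N05.11"
    appendix = tile_id.split("_")[-1]
    version = appendix.lstrip("N") if appendix.startswith("N") else None
    print(version)  # 05.11
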
    def product_qi_mask(
        self,
        qi_mask: ProductQI,
        resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"],
    ) -> MPath:
        """Determine the product QI mask path from metadata.xml."""
        qi_mask_type = dict(self.processing_baseline.product_mask_types)[qi_mask]
        for i in self.xml_root.iter():
            if i.tag == "MASK_FILENAME" and i.get("type") == qi_mask_type:
                path = self._metadata_dir / i.text
                if qi_mask == ProductQI.classification:
                    return path
                elif resolution.name in path.name:
                    return path
        else:
            raise KeyError(f"no {qi_mask_type} entry found in metadata")

    def classification_mask(self) -> MPath:
        return self.product_qi_mask(ProductQI.classification)

    def cloud_probability_mask(
        self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
    ) -> MPath:
        return self.product_qi_mask(ProductQI.cloud_probability, resolution=resolution)

    def snow_probability_mask(
        self, resolution: ProductQIMaskResolution = ProductQIMaskResolution["60m"]
    ) -> MPath:
        return self.product_qi_mask(ProductQI.snow_probability, resolution=resolution)

    def band_qi_mask(self, qi_mask: BandQI, band: L2ABand) -> MPath:
        """Determine the band QI mask path from metadata.xml."""
        if qi_mask.name not in dict(self.processing_baseline.band_mask_types):
            raise DeprecationWarning(
                f"QI mask '{qi_mask}' not available for this product"
            )
        qi_mask_type = dict(self.processing_baseline.band_mask_types)[qi_mask]
        mask_types = set()
        for masks in self.xml_root.iter("Pixel_Level_QI"):
            if masks.get("geometry") == "FULL_RESOLUTION":
                for mask_path in masks:
                    mask_type = mask_path.get("type")
                    if mask_type:
                        mask_types.add(mask_type)
                    if mask_type == qi_mask_type:
                        band_id = mask_path.get("bandId")
                        if band_id is not None:
                            band_idx = int(band_id)
                            if band_idx == band.value:
                                return self._metadata_dir / mask_path.text
                else:  # pragma: no cover
                    raise KeyError(
                        f"no {qi_mask_type} for band {band.name} found in metadata: {', '.join(mask_types)}"
                    )
        else:  # pragma: no cover
            raise KeyError(
                f"no {qi_mask_type} found in metadata: {', '.join(mask_types)}"
            )

    def technical_quality_mask(self, band: L2ABand) -> MPath:
        return self.band_qi_mask(BandQI.technical_quality, band)

    def detector_footprints(self, band: L2ABand) -> MPath:
        return self.band_qi_mask(BandQI.detector_footprints, band)
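
A usage sketch (not part of the diff); the module path is taken from the file
listing above, the metadata path is a placeholder, and the band member is
assumed to exist in L2ABand:

    from mapchete.path import MPath

    from mapchete_eo.platforms.sentinel2.metadata_parser.default_path_mapper import XMLMapper
    from mapchete_eo.platforms.sentinel2.types import L2ABand

    mapper = XMLMapper(MPath("path/to/GRANULE/.../MTD_TL.xml"))
    cloud_mask = mapper.cloud_probability_mask()          # defaults to 60m
    footprints = mapper.detector_footprints(L2ABand.B04)  # per-band GML/JP2 path
    mapper.clear_cached_data()  # drop the parsed XML tree when done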
mapchete_eo/platforms/sentinel2/metadata_parser/models.py
@@ -0,0 +1,78 @@
from __future__ import annotations

import logging
import warnings
from typing import Dict

import numpy as np
import numpy.ma as ma
from mapchete.io.raster import ReferencedRaster
from pydantic import BaseModel
from rasterio.fill import fillnodata

from mapchete_eo.exceptions import CorruptedProductMetadata
from mapchete_eo.platforms.sentinel2.types import (
    SunAngle,
    ViewAngle,
)

logger = logging.getLogger(__name__)


class SunAngleData(BaseModel):
    model_config = dict(arbitrary_types_allowed=True)
    raster: ReferencedRaster
    mean: float


class SunAnglesData(BaseModel):
    azimuth: SunAngleData
    zenith: SunAngleData

    def get_angle(self, angle: SunAngle) -> SunAngleData:
        if angle == SunAngle.azimuth:
            return self.azimuth
        elif angle == SunAngle.zenith:
            return self.zenith
        else:
            raise KeyError(f"unknown angle: {angle}")


class ViewingIncidenceAngle(BaseModel):
    model_config = dict(arbitrary_types_allowed=True)
    detectors: Dict[int, ReferencedRaster]
    mean: float

    def merge_detectors(
        self, fill_edges: bool = True, smoothing_iterations: int = 3
    ) -> ReferencedRaster:
        if not self.detectors:
            raise CorruptedProductMetadata("no viewing incidence angles available")
        sample = next(iter(self.detectors.values()))
        with warnings.catch_warnings():
            warnings.simplefilter("ignore", category=RuntimeWarning)
            merged = np.nanmean(
                np.stack([raster.data for raster in self.detectors.values()]), axis=0
            )
        if fill_edges:
            merged = fillnodata(
                ma.masked_invalid(merged), smoothing_iterations=smoothing_iterations
            )
        return ReferencedRaster.from_array_like(
            array_like=ma.masked_invalid(merged),
            transform=sample.transform,
            crs=sample.crs,
        )


class ViewingIncidenceAngles(BaseModel):
    azimuth: ViewingIncidenceAngle
    zenith: ViewingIncidenceAngle

    def get_angle(self, angle: ViewAngle) -> ViewingIncidenceAngle:
        if angle == ViewAngle.azimuth:
            return self.azimuth
        elif angle == ViewAngle.zenith:
            return self.zenith
        else:
            raise KeyError(f"unknown angle: {angle}")
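
A toy version (not from the package) of the detector-merging idea: average the
per-detector angle grids while ignoring NaNs, then mask whatever stays undefined:

    import warnings

    import numpy as np
    import numpy.ma as ma

    det_a = np.array([[10.0, np.nan], [11.0, np.nan]])
    det_b = np.array([[np.nan, 12.0], [11.5, np.nan]])

    with warnings.catch_warnings():
        # nanmean warns where a pixel is NaN in every detector
        warnings.simplefilter("ignore", category=RuntimeWarning)
        merged = np.nanmean(np.stack([det_a, det_b]), axis=0)

    merged = ma.masked_invalid(merged)
    print(merged)  # [[10.0 12.0] [11.25 --]]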