mapchete-eo 2026.2.0__py3-none-any.whl

This diff represents the content of publicly available package versions that have been released to one of the supported registries. The information contained in this diff is provided for informational purposes only and reflects changes between package versions as they appear in their respective public registries.
Files changed (89) hide show
  1. mapchete_eo/__init__.py +1 -0
  2. mapchete_eo/array/__init__.py +0 -0
  3. mapchete_eo/array/buffer.py +16 -0
  4. mapchete_eo/array/color.py +29 -0
  5. mapchete_eo/array/convert.py +163 -0
  6. mapchete_eo/base.py +653 -0
  7. mapchete_eo/blacklist.txt +175 -0
  8. mapchete_eo/cli/__init__.py +30 -0
  9. mapchete_eo/cli/bounds.py +22 -0
  10. mapchete_eo/cli/options_arguments.py +227 -0
  11. mapchete_eo/cli/s2_brdf.py +77 -0
  12. mapchete_eo/cli/s2_cat_results.py +130 -0
  13. mapchete_eo/cli/s2_find_broken_products.py +77 -0
  14. mapchete_eo/cli/s2_jp2_static_catalog.py +166 -0
  15. mapchete_eo/cli/s2_mask.py +71 -0
  16. mapchete_eo/cli/s2_mgrs.py +45 -0
  17. mapchete_eo/cli/s2_rgb.py +114 -0
  18. mapchete_eo/cli/s2_verify.py +129 -0
  19. mapchete_eo/cli/static_catalog.py +82 -0
  20. mapchete_eo/eostac.py +30 -0
  21. mapchete_eo/exceptions.py +87 -0
  22. mapchete_eo/image_operations/__init__.py +12 -0
  23. mapchete_eo/image_operations/blend_functions.py +579 -0
  24. mapchete_eo/image_operations/color_correction.py +136 -0
  25. mapchete_eo/image_operations/compositing.py +266 -0
  26. mapchete_eo/image_operations/dtype_scale.py +43 -0
  27. mapchete_eo/image_operations/fillnodata.py +130 -0
  28. mapchete_eo/image_operations/filters.py +319 -0
  29. mapchete_eo/image_operations/linear_normalization.py +81 -0
  30. mapchete_eo/image_operations/sigmoidal.py +114 -0
  31. mapchete_eo/io/__init__.py +37 -0
  32. mapchete_eo/io/assets.py +496 -0
  33. mapchete_eo/io/items.py +162 -0
  34. mapchete_eo/io/levelled_cubes.py +259 -0
  35. mapchete_eo/io/path.py +155 -0
  36. mapchete_eo/io/products.py +423 -0
  37. mapchete_eo/io/profiles.py +45 -0
  38. mapchete_eo/platforms/sentinel2/__init__.py +17 -0
  39. mapchete_eo/platforms/sentinel2/_mapper_registry.py +89 -0
  40. mapchete_eo/platforms/sentinel2/bandpass_adjustment.py +104 -0
  41. mapchete_eo/platforms/sentinel2/brdf/__init__.py +8 -0
  42. mapchete_eo/platforms/sentinel2/brdf/config.py +32 -0
  43. mapchete_eo/platforms/sentinel2/brdf/correction.py +260 -0
  44. mapchete_eo/platforms/sentinel2/brdf/hls.py +251 -0
  45. mapchete_eo/platforms/sentinel2/brdf/models.py +44 -0
  46. mapchete_eo/platforms/sentinel2/brdf/protocols.py +27 -0
  47. mapchete_eo/platforms/sentinel2/brdf/ross_thick.py +136 -0
  48. mapchete_eo/platforms/sentinel2/brdf/sun_angle_arrays.py +76 -0
  49. mapchete_eo/platforms/sentinel2/config.py +241 -0
  50. mapchete_eo/platforms/sentinel2/driver.py +43 -0
  51. mapchete_eo/platforms/sentinel2/masks.py +329 -0
  52. mapchete_eo/platforms/sentinel2/metadata_parser/__init__.py +6 -0
  53. mapchete_eo/platforms/sentinel2/metadata_parser/base.py +56 -0
  54. mapchete_eo/platforms/sentinel2/metadata_parser/default_path_mapper.py +135 -0
  55. mapchete_eo/platforms/sentinel2/metadata_parser/models.py +78 -0
  56. mapchete_eo/platforms/sentinel2/metadata_parser/s2metadata.py +639 -0
  57. mapchete_eo/platforms/sentinel2/preconfigured_sources/__init__.py +57 -0
  58. mapchete_eo/platforms/sentinel2/preconfigured_sources/guessers.py +108 -0
  59. mapchete_eo/platforms/sentinel2/preconfigured_sources/item_mappers.py +171 -0
  60. mapchete_eo/platforms/sentinel2/preconfigured_sources/metadata_xml_mappers.py +217 -0
  61. mapchete_eo/platforms/sentinel2/preprocessing_tasks.py +50 -0
  62. mapchete_eo/platforms/sentinel2/processing_baseline.py +163 -0
  63. mapchete_eo/platforms/sentinel2/product.py +747 -0
  64. mapchete_eo/platforms/sentinel2/source.py +114 -0
  65. mapchete_eo/platforms/sentinel2/types.py +114 -0
  66. mapchete_eo/processes/__init__.py +0 -0
  67. mapchete_eo/processes/config.py +51 -0
  68. mapchete_eo/processes/dtype_scale.py +112 -0
  69. mapchete_eo/processes/eo_to_xarray.py +19 -0
  70. mapchete_eo/processes/merge_rasters.py +239 -0
  71. mapchete_eo/product.py +323 -0
  72. mapchete_eo/protocols.py +61 -0
  73. mapchete_eo/search/__init__.py +14 -0
  74. mapchete_eo/search/base.py +285 -0
  75. mapchete_eo/search/config.py +113 -0
  76. mapchete_eo/search/s2_mgrs.py +313 -0
  77. mapchete_eo/search/stac_search.py +278 -0
  78. mapchete_eo/search/stac_static.py +197 -0
  79. mapchete_eo/search/utm_search.py +251 -0
  80. mapchete_eo/settings.py +25 -0
  81. mapchete_eo/sort.py +60 -0
  82. mapchete_eo/source.py +109 -0
  83. mapchete_eo/time.py +62 -0
  84. mapchete_eo/types.py +76 -0
  85. mapchete_eo-2026.2.0.dist-info/METADATA +91 -0
  86. mapchete_eo-2026.2.0.dist-info/RECORD +89 -0
  87. mapchete_eo-2026.2.0.dist-info/WHEEL +4 -0
  88. mapchete_eo-2026.2.0.dist-info/entry_points.txt +11 -0
  89. mapchete_eo-2026.2.0.dist-info/licenses/LICENSE +21 -0
@@ -0,0 +1,423 @@
1
+ from __future__ import annotations
2
+
3
+ from contextlib import contextmanager
4
+ import logging
5
+ from collections import defaultdict
6
+ from datetime import datetime
7
+ import gc
8
+ from typing import Any, Dict, Generator, Iterator, List, Optional, Sequence
9
+
10
+ from mapchete import Timer
11
+ import numpy as np
12
+ import numpy.ma as ma
13
+ from numpy.typing import DTypeLike
14
+ import xarray as xr
15
+ from mapchete.config import get_hash
16
+ from mapchete.geometry import to_shape
17
+ from mapchete.protocols import GridProtocol
18
+ from mapchete.types import NodataVals
19
+ from rasterio.enums import Resampling
20
+ from shapely.geometry import mapping
21
+ from shapely.ops import unary_union
22
+
23
+ from mapchete_eo.array.convert import to_dataarray, to_masked_array
24
+ from mapchete_eo.exceptions import (
25
+ AssetKeyError,
26
+ CorruptedProduct,
27
+ CorruptedSlice,
28
+ EmptySliceException,
29
+ EmptyStackException,
30
+ NoSourceProducts,
31
+ )
32
+ from mapchete_eo.protocols import EOProductProtocol
33
+ from mapchete_eo.sort import SortMethodConfig
34
+ from mapchete_eo.types import MergeMethod
35
+
36
+
37
+ logger = logging.getLogger(__name__)
38
+
39
+
40
def products_to_np_array(
    products: List[EOProductProtocol],
    assets: Optional[List[str]] = None,
    eo_bands: Optional[List[str]] = None,
    grid: Optional[GridProtocol] = None,
    resampling: Resampling = Resampling.nearest,
    nodatavals: NodataVals = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    product_read_kwargs: Optional[dict] = None,
    raise_empty: bool = True,
    out_dtype: Optional[DTypeLike] = None,
    read_mask: Optional[np.ndarray] = None,
) -> ma.MaskedArray:
    """
    Read a grid window of EOProducts and stack the (merged) slices into one
    masked array.

    Parameters
    ----------
    products : source products to read.
    assets : asset names to read.
    eo_bands : EO band names to read.
    grid : target grid for the read window.
    resampling : rasterio resampling method.
    nodatavals : nodata value(s) passed on to the product readers.
    merge_products_by : product property used to group products into slices.
    merge_method : how products within one slice are merged.
    sort : optional sort configuration applied to the slices.
    product_read_kwargs : additional keyword arguments passed to product read.
    raise_empty : raise an exception when output would be empty.
    out_dtype : optional output dtype for the masked arrays.
    read_mask : optional mask applied while reading.

    Raises
    ------
    NoSourceProducts : if products is empty.
    EmptyStackException : if no slice could be read and raise_empty is set.
    """
    # default changed from a shared mutable {} to None to avoid the
    # mutable-default-argument pitfall; behavior for callers is unchanged
    product_read_kwargs = product_read_kwargs or {}
    return ma.stack(
        [
            to_masked_array(slice_dataarray, out_dtype=out_dtype)
            for slice_dataarray in generate_slice_dataarrays(
                products=products,
                assets=assets,
                eo_bands=eo_bands,
                grid=grid,
                resampling=resampling,
                nodatavals=nodatavals,
                merge_products_by=merge_products_by,
                merge_method=merge_method,
                sort=sort,
                product_read_kwargs=product_read_kwargs,
                raise_empty=raise_empty,
                read_mask=read_mask,
            )
        ]
    )
75
+
76
+
77
def products_to_xarray(
    products: List[EOProductProtocol],
    assets: Optional[List[str]] = None,
    eo_bands: Optional[List[str]] = None,
    grid: Optional[GridProtocol] = None,
    resampling: Resampling = Resampling.nearest,
    nodatavals: NodataVals = None,
    slice_axis_name: str = "time",
    band_axis_name: str = "bands",
    x_axis_name: str = "x",
    y_axis_name: str = "y",
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    raise_empty: bool = True,
    product_read_kwargs: Optional[dict] = None,
    read_mask: Optional[np.ndarray] = None,
) -> xr.Dataset:
    """
    Read a grid window of EOProducts and merge them into a 4D xarray Dataset.

    The Dataset holds one DataArray per slice and is transposed to
    (slice, band, x, y) axis order using the given axis names.

    Raises
    ------
    NoSourceProducts : if products is empty.
    EmptyStackException : if no slice could be read and raise_empty is set.
    """
    # avoid the mutable-default-argument pitfall of a shared {} default
    product_read_kwargs = product_read_kwargs or {}
    data_vars = list(
        generate_slice_dataarrays(
            products=products,
            assets=assets,
            eo_bands=eo_bands,
            grid=grid,
            resampling=resampling,
            nodatavals=nodatavals,
            merge_products_by=merge_products_by,
            merge_method=merge_method,
            sort=sort,
            product_read_kwargs=product_read_kwargs,
            raise_empty=raise_empty,
            read_mask=read_mask,
        )
    )
    if merge_products_by and merge_products_by not in ["date", "datetime"]:
        # slices were grouped by a custom property: use slice names as coords
        coords = {merge_products_by: [s.name for s in data_vars]}
        slice_axis_name = merge_products_by
    else:
        # NOTE(review): coordinates are built per *product* while data_vars are
        # built per *slice*; if merge_products_by is "date"/"datetime" and
        # several products were merged into one slice, lengths may diverge —
        # verify against callers.
        coords = {
            slice_axis_name: list(
                np.array(
                    [product.get_property("datetime") for product in products],
                    dtype=np.datetime64,
                )
            )
        }
    return xr.Dataset(
        data_vars={s.name: s for s in data_vars},
        coords=coords,
    ).transpose(slice_axis_name, band_axis_name, x_axis_name, y_axis_name)
129
+
130
+
131
class Slice:
    """
    Combine multiple products into one slice.

    A slice bundles one or more products which are read and merged together,
    e.g. all products sharing the same value of a grouping property.
    """

    # slice identifier (e.g. product id or the shared grouping property value)
    name: Any
    # products contained in this slice; guaranteed non-empty
    products: Sequence[EOProductProtocol]
    # mean datetime over all products providing a "datetime" property
    datetime: datetime

    def __init__(
        self,
        name: Any,
        products: Sequence[EOProductProtocol],
    ):
        """
        Initialize Slice.

        Raises
        ------
        ValueError : if products is empty or no product provides a datetime.
        """
        self.name = name

        # a Slice can only be valid if it contains one or more products
        if products:
            self.products = products
        else:  # pragma: no cover
            raise ValueError("at least one product must be provided.")

        # calculate mean datetime over all products which carry one
        timestamps = [
            product.get_property("datetime").timestamp()
            for product in self.products
            if product.get_property("datetime")
        ]
        if not timestamps:  # pragma: no cover
            # previously this crashed with an opaque ZeroDivisionError
            raise ValueError("no product provides a 'datetime' property")
        mean_timestamp = sum(timestamps) / len(timestamps)
        self.datetime = datetime.fromtimestamp(mean_timestamp)

    def __repr__(self) -> str:
        return f"<Slice {self.name} ({len(self.products)} products)>"

    @property
    def __geom_interface__(self) -> Dict:
        """Footprint of the slice as union of all product geometries."""
        if self.products:
            return mapping(
                unary_union([to_shape(product) for product in self.products])
            )

        raise EmptySliceException

    @property
    def properties(self) -> Dict[str, Any]:
        """
        Combined properties over all products.

        Values which are not equal across all products are set to None.
        """
        properties: Dict[str, Any] = {}
        for key in self.products[0].item.properties.keys():
            try:
                properties[key] = self.get_property(key)
            except ValueError:
                properties[key] = None
        return properties

    @contextmanager
    def cached(self) -> Generator["Slice", None, None]:
        """Clear caches and run garbage collector when context manager is closed."""
        yield self
        with Timer() as tt:
            self.clear_cached_data()
            gc.collect()
        logger.debug("Slice cache cleared and garbage collected in %s", tt)

    def clear_cached_data(self):
        """Clear cached data of all contained products."""
        logger.debug("clear caches of all products in slice")
        for product in self.products:
            product.clear_cached_data()

    def get_property(self, property: str) -> Any:
        """
        Return merged property over all products.

        If property values are the same over all products, it will be returned.
        Otherwise a ValueError is raised.
        """
        # if set of value hashes has a length of 1, all values are the same
        values = [get_hash(product.get_property(property)) for product in self.products]
        if len(set(values)) == 1:
            return self.products[0].get_property(property)

        raise ValueError(
            f"cannot get unique property {property} from products {self.products}"
        )

    def read(
        self,
        merge_method: MergeMethod = MergeMethod.first,
        product_read_kwargs: Optional[dict] = None,
        raise_empty: bool = True,
    ) -> ma.MaskedArray:
        """Read and merge all products of this slice into one array."""
        logger.debug("Slice: read from %s products", len(self.products))
        return merge_products(
            products=self.products,
            merge_method=merge_method,
            # pass a fresh dict so a shared default is never mutated downstream
            product_read_kwargs=product_read_kwargs or {},
            raise_empty=raise_empty,
        )
226
+
227
+
228
def products_to_slices(
    products: List[EOProductProtocol],
    group_by_property: Optional[str] = None,
    sort: Optional[SortMethodConfig] = None,
) -> List[Slice]:
    """
    Bundle products into Slice objects and optionally sort them.

    Without a grouping property every product becomes its own slice;
    otherwise products sharing the same property value share one slice.
    """
    if not group_by_property:
        slices = [Slice(product.id, [product]) for product in products]
    else:
        # bucket products by the value of the grouping property
        buckets = defaultdict(list)
        for product in products:
            buckets[product.get_property(group_by_property)].append(product)
        slices = [Slice(value, members) for value, members in buckets.items()]

    # sorting only makes sense when there is at least one slice
    if sort and slices:
        sort_params = sort.model_dump()
        sort_func = sort_params.pop("func")
        slices = sort_func(slices, **sort_params)

    return slices
249
+
250
+
251
def merge_products(
    products: Sequence[EOProductProtocol],
    merge_method: MergeMethod = MergeMethod.first,
    product_read_kwargs: Optional[dict] = None,
    raise_empty: bool = True,
) -> ma.MaskedArray:
    """
    Merge given products into one array.

    Parameters
    ----------
    products : products to read and combine.
    merge_method : MergeMethod.first fills nodata gaps product by product,
        MergeMethod.average averages all valid product arrays.
    product_read_kwargs : keyword arguments passed to product.read_np_array().
    raise_empty : raise EmptySliceException if the merged array is fully masked.

    Raises
    ------
    NoSourceProducts : if no products are given.
    CorruptedSlice : if all products fail to read.
    EmptySliceException : see raise_empty.
    """

    def read_remaining_valid_products(
        products_iter: Iterator[EOProductProtocol], product_read_kwargs: dict
    ) -> Generator[ma.MaskedArray, None, None]:
        """Read remaining products from iterator while discarding corrupt ones."""
        for product in products_iter:
            try:
                yield product.read_np_array(**product_read_kwargs)
            except (AssetKeyError, CorruptedProduct) as exc:
                logger.warning("skip product %s because of %s", product.id, exc)

    if len(products) == 0:  # pragma: no cover
        raise NoSourceProducts("no products to merge")

    # we need to deactivate raising the EmptyProductException; use a copy so
    # the caller's dict (or a shared default) is never mutated in place
    product_read_kwargs = dict(product_read_kwargs or {}, raise_empty=False)

    products_iter = iter(products)

    # read first valid product
    for product in products_iter:
        try:
            out = product.read_np_array(**product_read_kwargs)
            break
        except (AssetKeyError, CorruptedProduct) as exc:
            logger.warning("skip product %s because of %s", product.id, exc)
    else:
        # we cannot do anything here, as all products are broken
        raise CorruptedSlice("all products are broken here")

    # fill in gaps sequentially, product by product
    if merge_method == MergeMethod.first:
        for new in read_remaining_valid_products(products_iter, product_read_kwargs):
            # NOTE(review): this assumes out.mask is a full boolean array, not
            # ma.nomask — verify read_np_array() guarantees that.
            masked = out.mask
            # Update values at masked locations
            out[masked] = new[masked]
            # Update mask at masked locations (e.g., unmask them)
            out.mask[masked] = new.mask[masked]
            # if whole output array is filled, there is no point in reading more data
            if not out.mask.any():
                return out

    # read all and average
    elif merge_method == MergeMethod.average:

        def _generate_arrays(
            first_product_array: ma.MaskedArray,
            remaining_product_arrays: Generator[ma.MaskedArray, None, None],
        ) -> Generator[ma.MaskedArray, None, None]:
            """Yield all available product arrays."""
            yield first_product_array
            yield from remaining_product_arrays

        # explicitly specify dtype to avoid casting of integer arrays to floats
        # during mean conversion:
        # https://numpy.org/doc/stable/reference/generated/numpy.mean.html#numpy.mean
        arrays = list(
            _generate_arrays(
                out,
                read_remaining_valid_products(products_iter, product_read_kwargs),
            )
        )

        # Filter out arrays that are entirely masked
        valid_arrays = [a for a in arrays if not ma.getmaskarray(a).all()]

        if valid_arrays:
            out_dtype = out.dtype
            out_fill_value = out.fill_value
            stacked = ma.stack(valid_arrays, dtype=out_dtype)
            out = stacked.mean(axis=0, dtype=out_dtype).astype(out_dtype, copy=False)
            out.set_fill_value(out_fill_value)
        else:
            # All arrays were fully masked — return fully masked output
            out = ma.masked_all(out.shape, dtype=out.dtype)

    else:  # pragma: no cover
        raise NotImplementedError(f"unknown merge method: {merge_method}")

    if raise_empty and out.mask.all():
        raise EmptySliceException(
            f"slice is empty after combining {len(products)} products"
        )

    return out
348
+
349
+
350
def generate_slice_dataarrays(
    products: List[EOProductProtocol],
    assets: Optional[List[str]] = None,
    eo_bands: Optional[List[str]] = None,
    grid: Optional[GridProtocol] = None,
    resampling: Resampling = Resampling.nearest,
    nodatavals: NodataVals = None,
    merge_products_by: Optional[str] = None,
    merge_method: MergeMethod = MergeMethod.first,
    sort: Optional[SortMethodConfig] = None,
    product_read_kwargs: Optional[dict] = None,
    raise_empty: bool = True,
    read_mask: Optional[np.ndarray] = None,
) -> Iterator[xr.DataArray]:
    """
    Yield products or merged products into slices as DataArrays.

    Raises
    ------
    NoSourceProducts : if products is empty.
    EmptyStackException : if not a single slice could be read.
    """

    if len(products) == 0:
        raise NoSourceProducts("no products to read")

    # avoid the mutable-default-argument pitfall of a shared {} default
    product_read_kwargs = product_read_kwargs or {}

    stack_empty = True
    assets = assets or []
    eo_bands = eo_bands or []
    variables = assets or eo_bands

    # group products into slices and sort slices if configured
    slices = products_to_slices(
        products, group_by_property=merge_products_by, sort=sort
    )
    logger.debug(
        "reading %s products in %s groups...",
        len(products),
        len(slices),
    )
    # a list of nodata values is reduced to its first entry; scalars (or None)
    # are passed through unchanged
    nodataval = nodatavals[0] if isinstance(nodatavals, list) else nodatavals
    # loop variable renamed from "slice" to avoid shadowing the builtin
    for product_slice in slices:
        try:
            # if merge_products_by is none, each slice contains just one product
            # so nothing will have to be merged anyways
            with product_slice.cached():
                yield to_dataarray(
                    merge_products(
                        products=product_slice.products,
                        merge_method=merge_method,
                        product_read_kwargs=dict(
                            product_read_kwargs,
                            assets=assets,
                            eo_bands=eo_bands,
                            grid=grid,
                            resampling=resampling,
                            nodatavals=nodatavals,
                            raise_empty=raise_empty,
                            read_mask=read_mask,
                        ),
                        raise_empty=raise_empty,
                    ),
                    nodataval=nodataval,
                    name=product_slice.name,
                    band_names=variables,
                    attrs=product_slice.properties,
                )
            # if at least one slice can be yielded, the stack is not empty
            stack_empty = False
        except (EmptySliceException, CorruptedSlice) as exception:
            logger.warning(exception)

    if stack_empty:
        raise EmptyStackException("all slices are empty")
@@ -0,0 +1,45 @@
1
+ from rasterio.profiles import Profile
2
+
3
+
4
class COGDeflateProfile(Profile):
    """Standard COG profile."""

    # Creation options for DEFLATE-compressed Cloud Optimized GeoTIFFs.
    # NOTE(review): rasterio's "COG" driver controls tiling via a single
    # "BLOCKSIZE" option; "tiled"/"blockxsize"/"blockysize" look like GTiff
    # driver options — confirm they are honored by the COG driver.
    defaults = {
        "driver": "COG",
        "tiled": True,
        "blockxsize": 512,
        "blockysize": 512,
        "compress": "DEFLATE",
    }
14
+
15
+
16
class JP2LossyProfile(Profile):
    """Very lossy JP2 profile used for low size test data."""

    # JP2OpenJPEG creation options; quality 50 trades fidelity for file size
    defaults = {
        "driver": "JP2OpenJPEG",
        "tiled": True,
        "blockxsize": 512,
        "blockysize": 512,
        "quality": 50,
    }
26
+
27
+
28
class JP2LosslessProfile(Profile):
    """Lossless JP2 profile used for lower size data."""

    # quality 100 combined with the reversible wavelet transform yields
    # lossless JP2OpenJPEG output
    defaults = {
        "driver": "JP2OpenJPEG",
        "tiled": True,
        "blockxsize": 512,
        "blockysize": 512,
        "quality": 100,
        "reversible": True,
    }
39
+
40
+
41
# registry of predefined rasterio creation profiles, keyed by short name
rio_profiles = {
    "cog_deflate": COGDeflateProfile(),
    "jp2_lossy": JP2LossyProfile(),
    "jp2_lossless": JP2LosslessProfile(),
}
@@ -0,0 +1,17 @@
1
+ from mapchete_eo.platforms.sentinel2.driver import (
2
+ METADATA,
3
+ InputData,
4
+ Sentinel2Cube,
5
+ Sentinel2CubeGroup,
6
+ )
7
+ from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
8
+ from mapchete_eo.platforms.sentinel2.product import S2Product
9
+
10
+ __all__ = [
11
+ "S2Metadata",
12
+ "METADATA",
13
+ "InputData",
14
+ "Sentinel2Cube",
15
+ "Sentinel2CubeGroup",
16
+ "S2Product",
17
+ ]
@@ -0,0 +1,89 @@
1
+ from typing import List, Callable, Dict, Any, Optional
2
+
3
+ from pystac import Item
4
+
5
+ from mapchete_eo.platforms.sentinel2.metadata_parser.s2metadata import S2Metadata
6
+ from mapchete_eo.platforms.sentinel2.types import DataArchive, MetadataArchive
7
+
8
+
9
# decorators for mapper functions using the registry pattern #
##############################################################

# collection name -> function mapping an Item to a common product ID
# NOTE(review): the value hint Callable[[Item], Item] suggests Item -> Item;
# an ID mapper would be expected to return a string — verify the hint.
ID_MAPPER_REGISTRY: Dict[Any, Callable[[Item], Item]] = {}
# collection name or (collection, DataArchive) -> STAC metadata mapper
STAC_METADATA_MAPPER_REGISTRY: Dict[Any, Callable[[Item], Item]] = {}
# (collection, MetadataArchive) -> S2Metadata factory
S2METADATA_MAPPER_REGISTRY: Dict[Any, Callable[[Item], S2Metadata]] = {}

# all registries bundled under a human-readable name
MAPPER_REGISTRIES: Dict[str, Any] = {
    "ID": ID_MAPPER_REGISTRY,
    "STAC metadata": STAC_METADATA_MAPPER_REGISTRY,
    "S2Metadata": S2METADATA_MAPPER_REGISTRY,
}
20
+
21
+
22
+ def _register_func(registry: Dict[str, Callable], key: Any, func: Callable):
23
+ if key in registry:
24
+ raise ValueError(f"{key} already registered in {registry}")
25
+ registry[key] = func
26
+
27
+
28
def maps_item_id(from_collections: List[str]):
    """
    Decorator registering a mapper function to a common ID.

    The decorated function is registered once per given source collection.
    """

    def decorator(func):
        # one registry entry per source collection
        for source_collection in from_collections:
            _register_func(
                registry=ID_MAPPER_REGISTRY, key=source_collection, func=func
            )
        return func

    return decorator
41
+
42
+
43
def maps_stac_metadata(
    from_collections: List[str], to_data_archives: Optional[List[DataArchive]] = None
):
    """
    Decorator registering a STAC metadata mapper.

    Registered per collection, or per (collection, data archive) pair
    whenever data archives are given.
    """

    def decorator(func):
        for source_collection in from_collections:
            # key on (collection, archive) tuples when archives are given,
            # otherwise on the bare collection name
            if to_data_archives:
                registry_keys = [
                    (source_collection, archive) for archive in to_data_archives
                ]
            else:
                registry_keys = [source_collection]
            for registry_key in registry_keys:
                _register_func(
                    registry=STAC_METADATA_MAPPER_REGISTRY,
                    key=registry_key,
                    func=func,
                )
        return func

    return decorator
69
+
70
+
71
def creates_s2metadata(
    from_collections: List[str], to_metadata_archives: List[MetadataArchive]
):
    """
    Decorator registering an S2Metadata creator.

    The function is registered under every (collection, metadata archive)
    combination.
    """

    def decorator(func):
        for source_collection in from_collections:
            for archive in to_metadata_archives:
                _register_func(
                    registry=S2METADATA_MAPPER_REGISTRY,
                    key=(source_collection, archive),
                    func=func,
                )
        return func

    return decorator
@@ -0,0 +1,104 @@
1
+ from enum import Enum
2
+ from typing import NamedTuple
3
+ import numpy as np
4
+ import numpy.ma as ma
5
+ from numpy.typing import DTypeLike
6
+
7
+ from pystac import Item
8
+
9
+ from mapchete_eo.platforms.sentinel2.types import L2ABand
10
+
11
+
12
class BandpassAdjustment(NamedTuple):
    """Linear bandpass adjustment coefficients: out_band = band * slope + intercept."""

    # multiplicative coefficient
    slope: float
    # additive offset
    intercept: float
15
+
16
+
17
+ # Bandpass Adjustment for Sentinel-2
18
+ # Try using HLS bandpass adjustments
19
+ # https://hls.gsfc.nasa.gov/algorithms/bandpass-adjustment/
20
+ # https://lpdaac.usgs.gov/documents/1698/HLS_User_Guide_V2.pdf
21
+ # These are for Sentinel-2B bandpass adjustment; first is slope, second is intercept
22
+ # out_band = band * slope + intercept
23
+ # B1 0.996 0.002
24
+ # B2 1.001 -0.002
25
+ # B3 0.999 0.001
26
+ # B4 1.001 -0.003
27
+ # B5 0.998 0.004
28
+ # B6 0.997 0.005
29
+ # B7 1.000 0.000
30
+ # B8 0.999 0.001
31
+ # B8A 0.998 0.004
32
+ # B9 0.996 0.006
33
+ # B10 1.001 -0.001 B10 is not present in Sentinel-2 L2A products, omitted in params below
34
+ # B11 0.997 0.002
35
+ # B12 0.998 0.003
36
+
37
+
38
class L2AS2ABandpassAdjustmentParams(Enum):
    """Per-band linear adjustment coefficients for Sentinel-2A L2A bands."""

    # values are BandpassAdjustment(slope, intercept); bands with (1.0, 0.0)
    # are passed through unchanged
    B01 = BandpassAdjustment(0.9959, -0.0002)
    B02 = BandpassAdjustment(0.9778, -0.004)
    B03 = BandpassAdjustment(1.0053, -0.0009)
    B04 = BandpassAdjustment(0.9765, 0.0009)
    B05 = BandpassAdjustment(1.0, 0.0)
    B06 = BandpassAdjustment(1.0, 0.0)
    B07 = BandpassAdjustment(1.0, 0.0)
    B08 = BandpassAdjustment(0.9983, -0.0001)
    B8A = BandpassAdjustment(0.9983, -0.0001)
    B09 = BandpassAdjustment(1.0, 0.0)
    B11 = BandpassAdjustment(0.9987, -0.0011)
    B12 = BandpassAdjustment(1.003, -0.0012)
51
+
52
+
53
class L2AS2BBandpassAdjustmentParams(Enum):
    """Per-band linear adjustment coefficients for Sentinel-2B L2A bands."""

    # values are BandpassAdjustment(slope, intercept); see the HLS coefficient
    # table in the module-level comment above
    B01 = BandpassAdjustment(0.9959, -0.0002)
    B02 = BandpassAdjustment(0.9778, -0.004)
    B03 = BandpassAdjustment(1.0075, -0.0008)
    B04 = BandpassAdjustment(0.9761, 0.001)
    B05 = BandpassAdjustment(0.998, 0.004)
    B06 = BandpassAdjustment(0.997, 0.005)
    B07 = BandpassAdjustment(1.000, 0.000)
    B08 = BandpassAdjustment(0.9966, 0.000)
    B8A = BandpassAdjustment(0.9966, 0.000)
    B09 = BandpassAdjustment(0.996, 0.006)
    B11 = BandpassAdjustment(1.000, -0.0003)
    B12 = BandpassAdjustment(0.9867, 0.0004)
66
+
67
+
68
def item_to_params(
    sentinel2_item: Item,
    l2a_band: L2ABand,
) -> BandpassAdjustment:
    """Return the adjustment coefficients matching the item's platform and band."""
    # look the platform up once instead of per comparison
    platform = sentinel2_item.properties["platform"].lower()
    if platform == "sentinel-2a":
        params_enum = L2AS2ABandpassAdjustmentParams
    elif platform == "sentinel-2b":
        params_enum = L2AS2BBandpassAdjustmentParams
    else:
        raise TypeError(
            f"cannot determine Sentinel-2 platform from pystac.Item: {sentinel2_item}"
        )
    return params_enum[l2a_band.name].value
80
+
81
+
82
def apply_bandpass_adjustment(
    band_arr: ma.MaskedArray,
    item: Item,
    l2a_band: L2ABand,
    computing_dtype: DTypeLike = np.float32,
    out_dtype: DTypeLike = np.uint16,
) -> ma.MaskedArray:
    """
    Apply the platform-specific linear bandpass adjustment to a band array.

    Mask and fill value of the input array are carried over unchanged.
    """
    params = item_to_params(item, l2a_band)
    # scale to reflectance, apply the linear adjustment, clip to [0, 1]
    reflectance = band_arr.astype(computing_dtype, copy=False) / 10000
    adjusted = np.clip(reflectance * params.slope + params.intercept, 0, 1)
    # scale back and cast to the output dtype
    scaled = (adjusted * 10000).astype(out_dtype, copy=False)
    return ma.MaskedArray(
        data=scaled.data,
        mask=band_arr.mask,
        fill_value=band_arr.fill_value,
    )
@@ -0,0 +1,8 @@
1
+ from mapchete_eo.platforms.sentinel2.brdf.correction import (
2
+ correction_values,
3
+ apply_correction,
4
+ )
5
+
6
+ from mapchete_eo.platforms.sentinel2.brdf.models import get_model
7
+
8
+ __all__ = ["correction_values", "apply_correction", "get_model"]